def getCriteriaItems( self ):
    """ Return a sequence of items to be used to build the catalog query.

    Returns () when no value is set.  Otherwise returns a one-element
    tuple of (field_name, query_spec) pairs, where query_spec is a
    mapping with 'query' and 'range' keys suitable for a date index.
    """
    if self.value is not None:
        field = self.Field()
        value = self.value
        operation = self.operation

        # Negate the value for 'old' days
        if self.daterange == 'old' and value != 0:
            value = -value

            # Also reverse the operator to match what a user would expect.
            # Queries such as "More than 2 days ago" should match dates
            # *earlier* than "today minus 2", and "Less than 2 days ago"
            # would be expected to return dates *later* then "today minus
            # two".
            if operation == 'max':
                operation = 'min'
            elif operation == 'min':
                operation = 'max'

        # The reference date: now shifted by the (possibly negated) value.
        date = DateTime() + value

        if operation == 'within_day':
            # When items within a day are requested, the range is between
            # the earliest and latest time of that particular day
            range = ( date.earliestTime(), date.latestTime() )
            return ( ( field, {'query': range, 'range': 'min:max'} ), )

        elif operation == 'min':
            if value != 0:
                if self.daterange == 'old':
                    # "More than N days ago": between then and now.
                    date_range = (date, DateTime())
                    return ( ( field, { 'query': date_range
                                      , 'range': 'min:max'
                                      } ), )
                else:
                    return ( ( field, { 'query': date.earliestTime()
                                      , 'range': operation
                                      } ), )
            else:
                # Value 0 means "Now", so get everything from now on
                return ( ( field, {'query': date, 'range': operation } ), )

        elif operation == 'max':
            if value != 0:
                if self.daterange == 'old':
                    return ( ( field, {'query': date, 'range': operation} ), )
                else:
                    # "Less than N days ahead": between now and then.
                    date_range = (DateTime(), date.latestTime())
                    return ( ( field, { 'query': date_range
                                      , 'range': 'min:max'
                                      } ), )
            else:
                # Value is 0, meaning "Now", get everything before "Now"
                return ( ( field, {'query': date, 'range': operation} ), )
    else:
        return ()
def _convert( self, value, default=None ):
    """Convert Date/Time value to our internal representation.

    Accepts a DateTime, a numeric timestamp, or a date string; any
    other type yields *default*.  The internal representation packs
    (year, month, day, hour, minute) into a single minute-resolution
    integer.  Raises OverflowError when the result does not fit a
    Python 2 plain int.  (Python 2 only: uses the old raise syntax.)
    """
    if isinstance( value, DateTime ):
        t_tup = value.parts()
    elif type( value ) in (FloatType, IntType):
        # Numeric timestamps are interpreted as seconds since the epoch.
        t_tup = time.gmtime( value )
    elif type( value ) is StringType:
        t_obj = DateTime( value )
        t_tup = t_obj.parts()
    else:
        return default

    yr = t_tup[0]
    mo = t_tup[1]
    dy = t_tup[2]
    hr = t_tup[3]
    mn = t_tup[4]

    # Pack the broken-down time into one ordered integer (months are
    # padded to 31 days, so ordering -- not exact arithmetic -- holds).
    t_val = ( ( ( ( yr * 12 + mo ) * 31 + dy ) * 24 + hr ) * 60 + mn )

    try:
        # t_val must be IntType, not LongType
        return int(t_val)
    except OverflowError:
        raise OverflowError, (
            "%s is not within the range of indexable dates (index: %s)"
            % (value, self.id))
def _FSCacheHeaders(obj):
    # Old-style setting of modified headers for FS-based objects
    #
    # Returns True when a 304 Not-Modified response was set, False when
    # there is no REQUEST on the object, and None otherwise (after
    # setting the Last-Modified header).
    REQUEST = getattr(obj, 'REQUEST', None)
    if REQUEST is None:
        return False
    RESPONSE = REQUEST.RESPONSE
    header = REQUEST.get_header('If-Modified-Since', None)
    last_mod = obj._file_mod_time
    if header is not None:
        # Strip optional parameters such as ";length=...".
        header = header.split(';')[0]
        # Some proxies seem to send invalid date strings for this
        # header. If the date string is not valid, we ignore it
        # rather than raise an error to be generally consistent
        # with common servers such as Apache (which can usually
        # understand the screwy date string as a lucky side effect
        # of the way they parse it).
        try:
            mod_since=DateTime(header)
            # NOTE(review): only TypeError is caught here; DateTime may
            # raise its own parse errors for bad strings -- confirm.
            mod_since=long(mod_since.timeTime())
        except TypeError:
            mod_since=None
        if mod_since is not None:
            if last_mod > 0 and last_mod <= mod_since:
                RESPONSE.setStatus(304)
                return True

    #Last-Modified will get stomped on by a cache policy if there is
    #one set....
    RESPONSE.setHeader('Last-Modified', rfc1123_date(last_mod))
def getMonthAndYear(self):
    """ Retrieve month/year tuple """
    caltool = getUtility(ICalendarTool)
    current = DateTime()
    session = None

    # Highest priority: explicit values carried in the request.
    year = self.request.get('year', None)
    month = self.request.get('month', None)

    # Next, fall back to values remembered in the session (if enabled).
    if caltool.getUseSession():
        session = self.request.get('SESSION', None)
        if session:
            year = year or session.get('calendar_year', None)
            month = month or session.get('calendar_month', None)

    # Last resort: today's date.
    year = year or current.year()
    month = month or current.month()

    # Remember the chosen month/year for the next request.
    if session:
        session.set('calendar_year', year)
        session.set('calendar_month', month)

    return (year, month)
def _convert(self, value, default=None):
    """Convert Date/Time value to our internal representation.

    Accepts DateTime, numeric timestamps, non-empty strings, datetime
    and date objects; any other input yields *default*.  The result is
    a minute-resolution integer; OverflowError is raised when it does
    not fit the 32-bit range.
    """
    if isinstance(value, DateTime):
        parts = value.toZone('UTC').parts()
    elif isinstance(value, (float, int)):
        parts = time.gmtime(value)
    elif isinstance(value, str) and value:
        parts = DateTime(value).toZone('UTC').parts()
    elif isinstance(value, datetime):
        if self.index_naive_time_as_local and value.tzinfo is None:
            value = value.replace(tzinfo=Local)
        # else if tzinfo is None, naive time interpreted as UTC
        parts = value.utctimetuple()
    elif isinstance(value, date):
        parts = value.timetuple()
    else:
        return default

    year, month, day, hour, minute = parts[0], parts[1], parts[2], \
        parts[3], parts[4]
    # Pack (y, m, d, h, min) into one ordered minute-resolution integer.
    t_val = ((((year * 12 + month) * 31 + day) * 24 + hour) * 60 + minute)
    if t_val > MAX32:
        # t_val must be integer fitting in the 32bit range
        raise OverflowError(
            "%s is not within the range of indexable dates (index: %s)"
            % (value, self.id))
    return t_val
def test_timezone_metadata(self):
    # http://www.zope.org/Collectors/CMF/325
    # If an item's timestamp(s) are stored in another timezone,
    # e.g. 4 hours further away from UTC, the DC date methods
    # should still return it in the local timezone so that all
    # user-visible dates can be compared to each other by eye.
    site = DummySite('site').__of__(self.root)
    item = self._makeDummyContent('item').__of__(site)
    dates_and_methods = (
        ('modification_date', 'ModificationDate'),
        ('effective_date', 'EffectiveDate'),
        ('effective_date', 'Date'),
        ('expiration_date', 'ExpirationDate'),
        ('creation_date', 'CreationDate'))
    offset = 4  # arbitrary, any value should work.
    for datename, dc_methodname in dates_and_methods:
        orig = getattr(item, datename)
        # Some default to None, fix that.
        if orig is None:
            orig = DateTime()
            setattr(item, datename, orig)
        orig_DC = getattr(item, dc_methodname)()
        # Change the timezone of the date.
        # NOTE(review): tzoffset() is in seconds while 'offset' reads as
        # hours; the mixed-unit arithmetic still yields a valid GMT+N
        # zone name, but the shift may not be exactly 4 hours -- confirm.
        local_offset = orig.tzoffset() % (3600*24)
        other_offset = (local_offset + offset) % 24
        otherzone = 'GMT+%d' % other_offset
        setattr(item, datename, orig.toZone(otherzone))
        # Finally, verify that display has not changed.
        new_DC = getattr(item, dc_methodname)()
        self.assertEqual(orig_DC, new_DC)
def DateTime_to_datetime(Zope_DateTime):
    """ Convert from Zope DateTime to Python datetime and strip timezone """
    from DateTime.DateTime import DateTime
    # Drop the trailing timezone token from the string form, then
    # reparse so the resulting DateTime is timezone-naive.
    text = str(Zope_DateTime)
    without_zone = text.rsplit(' ', 1)[0]
    return DateTime(without_zone).asdatetime()
def tick(self):
    """ Perform tick event firing when needed. """
    current = DateTime()

    # Treat a missing or invalid last-tick value as the epoch.
    last = self.getLastTick()
    if not isinstance(last, DateTime):
        last = DateTime(0)

    # A negative interval makes no sense; clamp it to zero.
    interval = max(self.getInterval(), 0)

    # Fire once at least 90% of the interval has elapsed since the
    # previous tick; otherwise do nothing.
    elapsed = current.timeTime() - last.timeTime()
    if elapsed >= 0.9 * interval:
        self.setLastTick(current)
        notify(TickEvent(current,
                         self.getNextTickEstimation(last_tick=current,
                                                    interval=interval)))
def _convertDateTime( self, value ):
    """Flatten a DateTime (or date string) to whole minutes since the
    epoch; None is passed through unchanged."""
    if value is None:
        return value
    if type( value ) == type( '' ):
        # Strings are parsed into a DateTime first.
        dt_obj = DateTime( value )
        value = dt_obj.millis() / 1000 / 60 # flatten to minutes
    if isinstance( value, DateTime ):
        value = value.millis() / 1000 / 60 # flatten to minutes
    return int( value )
def report(self, current, *args, **kw):
    """Emit a progress line (with an ETA) every self._steps items."""
    if current <= 0:
        return
    if current % self._steps != 0:
        return
    # Extrapolate the remaining time from the average pace so far.
    elapsed = time.time() - self._start
    remaining = elapsed / current * (self._max - current)
    eta = DateTime(time.time() + remaining)
    percent = 100.0 * current / self._max
    self.output('%d/%d (%.2f%%) Estimated termination: %s' % \
        (current, self._max, percent,
         eta.strftime('%Y/%m/%d %H:%M:%Sh')))
def testValidationRequiredSplitDate(self):
    """Submitting a required split-date question through the form
    controller should validate cleanly and store the composed
    'Y/M/D H:M:00 GMT' answer for the test user."""
    s1 = getattr(self, 's1')
    sdq1 = getattr(s1, 'sdq1')
    sdq1.setRequired(True)
    now = DateTime()
    # Expected stored answer, rebuilt from the same components that
    # are posted in the form below.
    # NOTE(review): assumes answers are normalised to GMT -- confirm.
    now_value = str(now.year()) + '/' + str(now.month()) + '/' + str(now.day()) + ' ' + str(now.hour()) + ':' + str(now.minute()) + ':00 GMT'
    self.layer['request'].form['sdq1_ampm'] = ''
    self.layer['request'].form['sdq1_day'] = str(now.day())
    self.layer['request'].form['sdq1_hour'] = str(now.hour())
    self.layer['request'].form['sdq1_minute'] = str(now.minute())
    self.layer['request'].form['sdq1_month'] = str(now.month())
    self.layer['request'].form['sdq1_year'] = str(now.year())
    dummy_controller_state = ControllerState(
        id='survey_view',
        context=s1,
        button='submit',
        status='success',
        errors={},
        next_action=None,)
    controller = self.portal.portal_form_controller
    controller_state = controller.validate(dummy_controller_state,
                                           self.layer['request'],
                                           ['validate_survey',])
    assert controller_state.getErrors() == {}, \
        "Validation error raised: %s" % controller_state.getErrors()
    userid = s1.getSurveyId()
    assert userid == "test_user_1_", "Not default test user"
    questions = s1.getQuestions()
    for question in questions:
        if question.portal_type == 'Survey Date Question':
            assert question.getAnswerFor(userid) == now_value, \
                "Answer not saved correctly: %s" % question.getAnswerFor(userid)
def getUpdateBase(self, obj=None):
    """ Return the base date formatted as RFC 822 to be used with
    the update frequency and the update period to calculate a
    publishing schedule.
    """
    if obj is None:
        base = self.base
    else:
        base = self.getSyndicationInfo(obj).base
    # Round-trip through ISO text to obtain a Zope DateTime.
    return DateTime(base.isoformat()).rfc822()
def test_Today(self):
    """An 'ahead' criterion applied to today queries the whole day."""
    criterion = self._makeOne("foo", "foofield")
    criterion.apply(self.today)
    self.assertEqual(criterion.daterange, "ahead")

    now = DateTime()
    items = criterion.getCriteriaItems()
    self.assertEqual(len(items), 1)
    field_name, spec = items[0]
    self.assertEqual(field_name, "foofield")
    self.assertEqual(spec["query"],
                     (now.earliestTime(), now.latestTime()))
    self.assertEqual(spec["range"], "min:max")
def _convertDateTime( self, value ):
    """Flatten a DateTime (or date string) to whole minutes since the
    epoch; None is passed through unchanged.

    Raises OverflowError when the flattened value only fits a Python 2
    long (i.e. it exceeds the platform int range).
    """
    if value is None:
        return value
    if type( value ) == type( '' ):
        # Strings are parsed into a DateTime first.
        dt_obj = DateTime( value )
        value = dt_obj.millis() / 1000 / 60 # flatten to minutes
    if isinstance( value, DateTime ):
        value = value.millis() / 1000 / 60 # flatten to minutes
    result = int( value )
    # On Python 2, int() of an oversized value silently returns a long.
    if isinstance(result, long): # this won't work (Python 2.3)
        raise OverflowError( '%s is not within the range of dates allowed'
                             'by a DateRangeIndex' % value)
    return result
def test_Today( self ):
    """Applying 'today' yields an 'ahead' range covering the whole day."""
    criterion = self._makeOne('foo', 'foofield')
    criterion.apply( self.today )
    self.assertEqual( criterion.daterange, 'ahead' )

    now = DateTime()
    items = criterion.getCriteriaItems()
    self.assertEqual( len(items), 1 )

    field_name, spec = items[0]
    self.assertEqual( field_name, 'foofield' )
    self.assertEqual( spec['query'],
                      ( now.earliestTime(), now.latestTime() ) )
    self.assertEqual( spec['range'], 'min:max' )
def getMonth(self, data):
    """Return the translated month name for the given date value."""
    context = aq_inner(self.context)
    self._ts = getToolByName(context, 'translation_service')
    month = DateTime.month(data)
    # Translate via the month msgid, falling back to the English name.
    return PLMF(self._ts.month_msgid(month),
                default=self._ts.month_english(month))
def is_import_allowed(self):
    """Return True when an automatic import may run now.

    Auto-import is disabled when the configured interval is below 10
    minutes.  The time of the last run is persisted in a 'BikaCache'
    object with key 'LastAutoImport'; a new run is allowed only when at
    least the configured interval (in minutes) has elapsed since then.
    Allowing a run also updates (or creates) the cache entry.
    """
    # Checking if auto-import enabled in bika setup. Return False if not.
    interval = self.portal.bika_setup.getAutoImportInterval()
    if interval < 10:
        return False
    caches = self.portal.listFolderContents(contentFilter={
        "portal_type": 'BikaCache'})
    cache = None
    for c in caches:
        if c and c.getKey() == 'LastAutoImport':
            cache = c
    now = DateTime.strftime(DateTime(), '%Y-%m-%d %H:%M:%S')
    if not cache:
        # First ever run: create the cache entry and allow the import.
        _id = self.portal.invokeFactory("BikaCache", id=tmpID(),
                                        Key='LastAutoImport', Value=now)
        item = self.portal[_id]
        item.markCreationFlag()
        return True
    last_import = cache.getValue()
    diff = datetime.now() - datetime.strptime(last_import,
                                              '%Y-%m-%d %H:%M:%S')
    # BUGFIX: use total_seconds() instead of timedelta.seconds -- the
    # latter wraps at one day, so an import overdue by e.g. 24h05m
    # would wrongly be blocked again.
    if diff.total_seconds() < interval * 60:
        return False
    cache.edit(Value=now)
    return True
def getHeaders( self, expr_context ):
    """ Does this request match our predicate?

    If so, return a sequence of caching headers as ( key, value )
    tuples ('Last-modified', 'Expires', 'Cache-control', 'Vary',
    'ETag' -- each only when configured).  Otherwise, return an
    empty sequence.
    """
    headers = []

    if self._predicate( expr_context ):
        mtime = self._mtime_func( expr_context )
        if type( mtime ) is type( '' ):
            # The mtime expression may yield a date string; parse it.
            mtime = DateTime( mtime )

        if mtime is not None:
            mtime_flt = mtime.timeTime()
            mtime_str = rfc1123_date(mtime_flt)
            headers.append( ( 'Last-modified', mtime_str ) )

        # Accumulate Cache-control directives before joining them.
        control = []

        if self._max_age_secs is not None:
            # 'time' is provided by the expression context's variables.
            now = expr_context.vars[ 'time' ]
            exp_time_str = rfc1123_date(now.timeTime() + self._max_age_secs)
            headers.append( ( 'Expires', exp_time_str ) )
            control.append( 'max-age=%d' % self._max_age_secs )

        if self._no_cache:
            control.append( 'no-cache' )

        if self._no_store:
            control.append( 'no-store' )

        if self._must_revalidate:
            control.append( 'must-revalidate' )

        if control:
            headers.append( ( 'Cache-control', ', '.join( control ) ) )

        if self.getVary():
            headers.append( ( 'Vary', self._vary ) )

        if self.getETagFunc():
            headers.append( ( 'ETag', self._etag_func( expr_context ) ) )

    return headers
def format_log_data(self, instrument, interface, result, filename):
    """Compose one timestamped ' - '-separated log line for an import."""
    timestamp = DateTime.strftime(DateTime(), '%Y-%m-%d %H:%M:%S')
    joined_result = ''.join(result)
    parts = [timestamp, instrument, interface, filename, joined_result]
    return ' - '.join(parts)
def render(self):
    """Send a notification mail announcing the event represented by the
    context, mark the context as notified via an annotation, and
    redirect back to the context with a status message."""
    context = aq_inner(self.context)
    annotations = IAnnotations(context)
    # Collect the event data interpolated into the mail template.
    event_title = context.Title()
    event_start = context.startDate
    event_day = DateTime.day(event_start)
    event_month = DateTime.month(event_start)
    event_year = DateTime.year(event_start)
    event_hour = DateTime.Time(event_start)
    event_link = context.absolute_url()
    mailhost = getToolByName(context, 'MailHost')
    urltool = getToolByName(context, 'portal_url')
    portal = urltool.getPortalObject()
    email_charset = portal.getProperty('email_charset')
    to_address = '*****@*****.**'
    from_name = portal.getProperty('email_from_name')
    from_address = portal.getProperty('email_from')
    titulo_web = portal.getProperty('title')
    mtool = self.context.portal_membership
    userid = mtool.getAuthenticatedMember().id
    source = "%s <%s>" % (from_name, from_address)
    subject = "[Nou esdeveniment] %s" % (titulo_web)
    message = MESSAGE_TEMPLATE % dict(titolGW=titulo_web,
                                      titleEvent=event_title,
                                      dayEvent=event_day,
                                      monthEvent=event_month,
                                      yearEvent=event_year,
                                      hourEvent=event_hour,
                                      linkEvent=event_link,
                                      from_address=from_address,
                                      from_name=from_name,
                                      user_name=userid)
    mailhost.secureSend(message, to_address, source,
                        subject=subject, subtype='plain',
                        charset=email_charset, debug=False,
                        )
    # Remember that a notification was sent for this event.
    if 'eventsent' not in annotations:
        annotations['eventsent'] = True
    # confirm = _(u"Mail sent.")
    confirm = _(u"Gràcies per la vostra col·laboració. Les dades de l\'activitat s\'han enviat correctament i seran publicades com més aviat millor.")
    IStatusMessage(self.request).addStatusMessage(confirm, type='info')
    self.request.response.redirect(self.context.absolute_url())
def getDaysClass(self, day, month, year, event=None):
    """ Determine the CSS class to use for the given day """
    today = DateTime()
    is_today = (today.year() == year and
                today.month() == month and
                today.day() == int(day))
    if is_today:
        if event:
            return 'todayevent'
        return 'todaynoevent'
    if event:
        return 'event'
    return ''
def test_Today( self ):
    """Applying 'today' yields a full-day range plus a usage item."""
    from Products.CMFTopic.DateCriteria import FriendlyDateCriterion
    criterion = FriendlyDateCriterion( 'foo', 'foofield' )
    criterion.apply( self.today )
    self.assertEqual( criterion.daterange, 'ahead' )

    now = DateTime()
    items = criterion.getCriteriaItems()
    self.assertEqual( len( items ), 2 )

    field_name, query = items[0]
    self.assertEqual( field_name, 'foofield' )
    self.assertEqual( query, ( now.earliestTime(), now.latestTime() ) )

    usage_name, usage = items[1]
    self.assertEqual( usage_name, 'foofield_usage' )
    self.assertEqual( usage, 'range:min:max' )
def strptime(context, value):
    """given a string, this function tries to return a DateTime.DateTime
    object with the date formats from i18n translations

    Tries 'date_format_long' then 'date_format_short'; when neither
    matches, falls back to parsing *value* as an rfc822-style string.
    Returns "" when every attempt fails.
    """
    val = ""
    for fmt in ['date_format_long', 'date_format_short']:
        # Translate the i18n format and turn ${X} placeholders into %X.
        fmtstr = context.translate(fmt, domain='bika', mapping={})
        fmtstr = fmtstr.replace(r"${", '%').replace('}', '')
        try:
            val = _strptime(value, fmtstr)
        except ValueError:
            continue
        try:
            val = DateTime(*list(val)[:-6])
        except DateTimeError:
            val = ""
        # BUGFIX: guard against val == "" (set when DateTime() failed
        # above) -- a plain string has no timezoneNaive() and the old
        # code raised AttributeError here.
        if val and val.timezoneNaive():
            # Use local timezone for tz naive strings
            # see http://dev.plone.org/plone/ticket/10141
            zone = val.localZone(safelocaltime(val.timeTime()))
            parts = val.parts()[:-1] + (zone,)
            val = DateTime(*parts)
        break
    else:
        try:
            # The following will handle an rfc822 string.
            value = value.split(" +", 1)[0]
            val = DateTime(value)
        except Exception:
            # Narrowed from a bare except; still best-effort.
            logger.warning("DateTimeField failed to format date "
                           "string '%s' with '%s'" % (value, fmtstr))
    return val
def get_birthdaysToday(self, type_filter):
    """Return employees with birthdays in the requested window.

    type_filter: 1 = today, 7 = this week, 30 = this month,
    'prox' = upcoming.  Returns [] when nothing matches.
    """
    results = []
    if type_filter == 1:
        today_str = date.today().strftime('%Y-%m-%d')
        results = ModelsDadosFuncdetails().get_FuncBirthdays(today_str,
                                                             today_str)
    elif type_filter == 7:
        now = DateTime()
        # Anchor the window at the start of the current week.
        week_start = now - now.dow()
        results = ModelsDadosFuncdetails().get_FuncBirthdays(
            week_start.strftime('%Y-%m-%d'),
            (week_start + 6).strftime('%Y-%m-%d'))
    elif type_filter == 30:
        now = DateTime()
        last_day = calendar.monthrange(now.year(), now.month())[1]
        results = ModelsDadosFuncdetails().get_FuncBirthdays(
            now.strftime('%Y-%m-1'),
            now.strftime('%Y-%m-' + str(last_day)))
    elif type_filter == 'prox':
        results = ModelsDadosFuncdetails().get_FuncBirthdays('', '',
                                                             'proximo')
    if results:
        return results  # results[:int(quant)]
    return []
def _convertDateTime(self, value):
    """Normalise *value* to whole minutes since the epoch.

    None passes through; strings/datetimes are parsed via DateTime.
    Raises OverflowError outside the 32-bit range and returns None for
    values outside the index's floor/ceiling window.
    """
    if value is None:
        return value
    if isinstance(value, (str, datetime)):
        value = DateTime(value).millis() / 1000 / 60  # flatten to minutes
    elif isinstance(value, DateTime):
        value = value.millis() / 1000 / 60  # flatten to minutes
    if not (-MAX32 <= value <= MAX32):
        # t_val must be integer fitting in the 32bit range
        raise OverflowError("%s is not within the range of dates allowed"
                            "by a DateRangeIndex" % value)
    value = int(value)
    # handle values outside our specified range
    if value > self.ceiling_value or value < self.floor_value:
        return None
    return value
def convert_one(self, value):
    """Convert a single date-like value to a UTC ISO-8601 string
    ('%Y-%m-%dT%H:%M:%S.mmmZ').

    Accepts Zope DateTime, numeric timestamps, strings parseable by
    DateTime, and datetime/date objects.  Raises TypeError for
    anything else.  (Python 2 code: uses long/basestring.)
    """
    if isinstance(value, DateTime):
        t_tup = value.toZone('UTC').parts()
    elif isinstance(value, (float, int, long)):
        t_tup = time.gmtime(value)
    elif isinstance(value, basestring):
        t_obj = DateTime(value).toZone('UTC')
        t_tup = t_obj.parts()
    elif isinstance(value, datetime):
        # BUGFIX: test datetime BEFORE date -- datetime subclasses
        # date, so with the old order this branch was unreachable and
        # datetime values lost their time zone (timetuple() was used
        # instead of utctimetuple()).
        t_tup = value.utctimetuple()
    elif isinstance(value, date):
        t_tup = value.timetuple()
    else:
        # can't interpret
        raise TypeError("Not a date value: %s" % repr(value))
    converted = '%04d-%02d-%02dT%02d:%02d:%06.3fZ' % t_tup[:6]
    return converted
def __set__(self, inst, value):
    """Validate *value* against the bound schema field, coerce it to a
    storable form, and write it to the adapted context.

    Coercions: unicode -> encoded str (when the instance declares an
    encoding), datetime -> Zope DateTime, set/tuple/list -> encoded
    tuple (or list, when the field type is list).  (Python 2 code.)
    """
    field = self._field.bind(inst)
    field.validate(value)
    if field.readonly:
        raise ValueError(self._field.__name__, "field is readonly")
    if isinstance(value, unicode) and inst.encoding:
        value = value.encode(inst.encoding)
    elif isinstance(value, datetime):
        value = DateTime(value.isoformat())
    elif isinstance(value, (set, tuple, list)):
        if inst.encoding:
            # Encode any unicode members; leave other members alone.
            value = [isinstance(v, unicode) and v.encode(inst.encoding) or v
                     for v in value]
        if not self._field._type == list:
            value = tuple(value)
    # Prefer an explicit setter; fall back to property update, then to
    # plain attribute assignment.
    if self._set_name:
        getattr(inst.context, self._set_name)(value)
    elif inst.context.hasProperty(self._get_name):
        inst.context._updateProperty(self._get_name, value)
    else:
        setattr(inst.context, self._get_name, value)
def set(self, instance, value, **kwargs):
    """
    Check if value is an actual date/time value. If not, attempt
    to convert it to one; otherwise, set to None. Assign all
    properties passed as kwargs to object.
    """
    val = value
    if not value:
        val = None
    elif not isinstance(value, DateTime):
        # Try the i18n long/short formats in turn.
        for fmt in ['date_format_long', 'date_format_short']:
            fmtstr = instance.translate(fmt, domain='bika', mapping={})
            fmtstr = fmtstr.replace(r"${", '%').replace('}', '')
            try:
                val = strptime(value, fmtstr)
            except ValueError:
                continue
            try:
                val = DateTime(*list(val)[:-6])
            except DateTimeError:
                val = None
            # BUGFIX: guard against val being None (set when DateTime()
            # failed above) -- None has no timezoneNaive() and the old
            # code raised AttributeError here.
            if val and val.timezoneNaive():
                # Use local timezone for tz naive strings
                # see http://dev.plone.org/plone/ticket/10141
                zone = val.localZone(safelocaltime(val.timeTime()))
                parts = val.parts()[:-1] + (zone,)
                val = DateTime(*parts)
            break
        else:
            logger.warning("DateTimeField failed to format date "
                           "string '%s' with '%s'" % (value, fmtstr))
    super(DateTimeField, self).set(instance, val, **kwargs)
def getCriteriaItems( self ):
    """ Return a sequence of items to be used to build the catalog query.
    """
    if self.value is None:
        return ()

    field = self.Field()
    value = self.value
    # Negate the value for 'old' days
    if self.daterange == 'old':
        value = -value
    date = DateTime() + value

    operation = self.operation
    if operation == 'within_day':
        # Query between the earliest and latest times of that day.
        bounds = ( date.earliestTime(), date.latestTime() )
        return ( ( field, {'query': bounds, 'range': 'min:max'} ), )
    return ( ( field, {'query': date, 'range': operation} ), )
def test_unicode_zuite_result(self):
    """index_html on a ZuiteResults built from latin-text fixtures
    must return the non-ASCII characters intact as unicode."""
    from DateTime.DateTime import DateTime
    from Products.Zelenium.zuite import ZuiteResults
    from OFS.Image import File
    zuite = self._makeOne().__of__( self.root )
    # simulating zuite.postResults to create ZuiteResults object
    completed = DateTime('2010-05-30')
    result_id = 'result_%s' % completed.strftime( '%Y%m%d_%H%M%S' )
    zresults = ZuiteResults(result_id).__of__(zuite)
    zresults.manage_changeProperties(completed=completed,
                                     passed=False,
                                     time_secs=0,
                                     tests_passed=0,
                                     tests_failed=0,
                                     commands_passed=0,
                                     commands_failed=0,
                                     commands_with_errors=0,
                                     user_agent='unknown',
                                     remote_addr='unknown',
                                     http_host='unknown',
                                     server_software='unknown',
                                     product_info=zuite._listProductInfo())
    # Attach the fixture HTML documents the report renders from.
    zresults._setObject( 'suite.html'
                       , File( 'suite.html'
                             , 'Test Suite'
                             , CONTENT_suite_html
                             , 'text/html'
                             ) )
    zresults._setObject( 'testTable.1'
                       , File( 'testTable.1'
                             , 'Test case: testTable.1'
                             , CONTENT_testTable_1
                             , 'text/html'
                             ) )
    self.failUnless( u'azértï' in zresults.index_html() )
def getCreationTime(self):
    """Return the creation timestamp as a Zope DateTime."""
    created = self._created
    return DateTime(created)
class DefaultDublinCoreImpl(PropertyManager):
    """ Mix-in class which provides Dublin Core methods """
    __implements__ = DublinCore, CatalogableDublinCore, MutableDublinCore

    security = ClassSecurityInfo()

    def __init__(self, title='', subject=(), description='', contributors=(),
                 effective_date=None, expiration_date=None,
                 format='text/html', language='', rights=''):
        # Record creation time, then delegate the rest to _editMetadata.
        self.creation_date = DateTime()
        self._editMetadata(title, subject, description, contributors,
                           effective_date, expiration_date, format,
                           language, rights)

    #
    # DublinCore interface query methods
    #
    security.declarePublic('Title')
    def Title(self):
        "Dublin Core element - resource name"
        return self.title

    security.declarePublic('Creator')
    def Creator(self):
        # XXX: fixme using 'portal_membership' -- should iterate over
        # *all* owners
        "Dublin Core element - resource creator"
        owner = self.getOwner()
        if hasattr(owner, 'getUserName'):
            return owner.getUserName()
        return 'No owner'

    security.declarePublic('Subject')
    def Subject(self):
        "Dublin Core element - resource keywords"
        return self.subject

    security.declarePublic('Publisher')
    def Publisher(self):
        "Dublin Core element - resource publisher"
        # XXX: fixme using 'portal_metadata'
        return 'No publisher'

    security.declarePublic('Description')
    def Description(self):
        "Dublin Core element - resource summary"
        return self.description

    security.declarePublic('Contributors')
    def Contributors(self):
        "Dublin Core element - additional contributors to resource"
        # XXX: fixme
        return self.contributors

    security.declarePublic('Date')
    def Date(self):
        "Dublin Core element - default date"
        # Return effective_date if set, modification date otherwise
        date = getattr(self, 'effective_date', None)
        if date is None:
            date = self.bobobase_modification_time()
        return date.ISO()

    security.declarePublic('CreationDate')
    def CreationDate(self):
        """ Dublin Core element - date resource created.
        """
        return self.creation_date.ISO()

    security.declarePublic('EffectiveDate')
    def EffectiveDate(self):
        """ Dublin Core element - date resource becomes effective.
        """
        return self.effective_date and self.effective_date.ISO() or 'None'

    security.declarePublic('ExpirationDate')
    def ExpirationDate(self):
        """ Dublin Core element - date resource expires.
        """
        return self.expiration_date and self.expiration_date.ISO() or 'None'

    security.declarePublic('ModificationDate')
    def ModificationDate(self):
        """ Dublin Core element - date resource last modified.
        """
        return self.bobobase_modification_time().ISO()

    security.declarePublic('Type')
    def Type(self):
        "Dublin Core element - Object type"
        # Prefer the type info object's title; fall back to meta_type.
        if hasattr(aq_base(self), 'getTypeInfo'):
            ti = self.getTypeInfo()
            if ti is not None:
                return ti.Type()
        return self.meta_type

    security.declarePublic('Format')
    def Format(self):
        """ Dublin Core element - resource format
        """
        return self.format

    security.declarePublic('Identifier')
    def Identifier(self):
        "Dublin Core element - Object ID"
        # XXX: fixme using 'portal_metadata' (we need to prepend the
        # right prefix to self.getPhysicalPath().
        return self.absolute_url()

    security.declarePublic('Language')
    def Language(self):
        """ Dublin Core element - resource language
        """
        return self.language

    security.declarePublic('Rights')
    def Rights(self):
        """ Dublin Core element - resource copyright
        """
        return self.rights

    #
    # DublinCore utility methods
    #
    def content_type(self):
        """ WebDAV needs this to do the Right Thing (TM).
        """
        return self.Format()

    security.declarePublic('isEffective')
    def isEffective(self, date):
        """ Is the date within the resource's effective range?
        """
        pastEffective = (self.effective_date is None
                         or self.effective_date <= date)
        beforeExpiration = (self.expiration_date is None
                            or self.expiration_date >= date)
        return pastEffective and beforeExpiration

    #
    # CatalogableDublinCore methods
    #
    security.declarePublic('created')
    def created(self):
        """ Dublin Core element - date resource created,
        returned as DateTime.
        """
        return self.creation_date

    __FLOOR_DATE = DateTime(1000, 0)  # always effective

    security.declarePublic('effective')
    def effective(self):
        """ Dublin Core element - date resource becomes effective,
        returned as DateTime.
        """
        # Fall back to the creation date, then to the floor date,
        # when no explicit effective date was set.
        marker = []
        date = getattr(self, 'effective_date', marker)
        if date is marker:
            date = getattr(self, 'creation_date', None)
        return date is None and self.__FLOOR_DATE or date

    __CEILING_DATE = DateTime(9999, 0)  # never expires

    security.declarePublic('expires')
    def expires(self):
        """ Dublin Core element - date resource expires,
        returned as DateTime.
        """
        date = getattr(self, 'expiration_date', None)
        return date is None and self.__CEILING_DATE or date

    security.declarePublic('modified')
    def modified(self):
        """ Dublin Core element - date resource last modified,
        returned as DateTime.
        """
        return self.bobobase_modification_time()

    security.declarePublic('getMetadataHeaders')
    def getMetadataHeaders(self):
        """ Return RFC-822-style headers.
        """
        hdrlist = []
        hdrlist.append(('Title', self.Title()))
        hdrlist.append(('Subject', string.join(self.Subject(), ', ')))
        hdrlist.append(('Publisher', self.Publisher()))
        hdrlist.append(('Description', self.Description()))
        hdrlist.append(('Contributors',
                        string.join(self.Contributors(), '; ')))
        hdrlist.append(('Effective_date', self.EffectiveDate()))
        hdrlist.append(('Expiration_date', self.ExpirationDate()))
        hdrlist.append(('Type', self.Type()))
        hdrlist.append(('Format', self.Format()))
        hdrlist.append(('Language', self.Language()))
        hdrlist.append(('Rights', self.Rights()))
        return hdrlist

    #
    # MutableDublinCore methods
    #
    security.declarePrivate('_datify')
    def _datify(self, attrib):
        # Normalise a date-ish input: the string 'None' and None both
        # become None; anything else is coerced into a DateTime.
        if attrib == 'None':
            attrib = None
        elif not isinstance(attrib, DateTime):
            if attrib is not None:
                attrib = DateTime(attrib)
        return attrib

    security.declareProtected(CMFCorePermissions.ModifyPortalContent,
                              'setTitle')
    def setTitle(self, title):
        "Dublin Core element - resource name"
        self.title = title

    security.declareProtected(CMFCorePermissions.ModifyPortalContent,
                              'setSubject')
    def setSubject(self, subject):
        "Dublin Core element - resource keywords"
        self.subject = tuplize('subject', subject)

    security.declareProtected(CMFCorePermissions.ModifyPortalContent,
                              'setDescription')
    def setDescription(self, description):
        "Dublin Core element - resource summary"
        self.description = description

    security.declareProtected(CMFCorePermissions.ModifyPortalContent,
                              'setContributors')
    def setContributors(self, contributors):
        "Dublin Core element - additional contributors to resource"
        # XXX: fixme
        self.contributors = tuplize('contributors', contributors,
                                    semi_split)

    security.declareProtected(CMFCorePermissions.ModifyPortalContent,
                              'setEffectiveDate')
    def setEffectiveDate(self, effective_date):
        """ Dublin Core element - date resource becomes effective.
        """
        self.effective_date = self._datify(effective_date)

    security.declareProtected(CMFCorePermissions.ModifyPortalContent,
                              'setExpirationDate')
    def setExpirationDate(self, expiration_date):
        """ Dublin Core element - date resource expires.
        """
        self.expiration_date = self._datify(expiration_date)

    security.declareProtected(CMFCorePermissions.ModifyPortalContent,
                              'setFormat')
    def setFormat(self, format):
        """ Dublin Core element - resource format
        """
        self.format = format

    security.declareProtected(CMFCorePermissions.ModifyPortalContent,
                              'setLanguage')
    def setLanguage(self, language):
        """ Dublin Core element - resource language
        """
        self.language = language

    security.declareProtected(CMFCorePermissions.ModifyPortalContent,
                              'setRights')
    def setRights(self, rights):
        """ Dublin Core element - resource copyright
        """
        self.rights = rights

    #
    # Management tab methods
    #
    security.declarePrivate('_editMetadata')
    def _editMetadata(self, title='', subject=(), description='',
                      contributors=(), effective_date=None,
                      expiration_date=None, format='text/html',
                      language='en-US', rights=''):
        """ Update the editable metadata for this resource.
        """
        self.setTitle(title)
        self.setSubject(subject)
        self.setDescription(description)
        self.setContributors(contributors)
        self.setEffectiveDate(effective_date)
        self.setExpirationDate(expiration_date)
        self.setFormat(format)
        self.setLanguage(language)
        self.setRights(rights)
        self.reindexObject()

    security.declareProtected(CMFCorePermissions.ModifyPortalContent,
                              'manage_metadata')
    manage_metadata = DTMLFile('zmi_metadata', _dtmldir)

    security.declareProtected(CMFCorePermissions.ModifyPortalContent,
                              'manage_editMetadata')
    def manage_editMetadata(self, title, subject, description, contributors,
                            effective_date, expiration_date, format,
                            language, rights, REQUEST):
        """ Update metadata from the ZMI.
        """
        self._editMetadata(title, subject, description, contributors,
                           effective_date, expiration_date, format,
                           language, rights)
        REQUEST['RESPONSE'].redirect(self.absolute_url()
                                     + '/manage_metadata'
                                     + '?manage_tabs_message=Metadata+updated.')

    security.declareProtected(CMFCorePermissions.ModifyPortalContent,
                              'editMetadata')
    editMetadata = WorkflowAction(_editMetadata)
import Zope
from App.Common import rfc1123_date
import unittest
from DateTime.DateTime import DateTime

# Fixed reference date used by the caching-policy tests.
ACCLARK = DateTime('2001/01/01')

class DummyContent:
    """Minimal stand-in for a content object with a modification time."""

    __allow_access_to_unprotected_subobjects__ = 1

    def __init__(self, modified):
        # NOTE(review): this instance attribute shadows the modified()
        # method below, so the method is unreachable on instances --
        # callers read the attribute directly.  Confirm intended.
        self.modified = modified

    def Type(self):
        return 'Dummy'

    def modified(self):
        return self.modified

class CachingPolicyTests(unittest.TestCase):

    def setUp(self):
        # Epoch reference used as the default 'time' in tests.
        self._epoch = DateTime('1970/01/01')

    def _makePolicy(self, policy_id, **kw):
        # Deferred import keeps module import independent of CMFCore.
        from Products.CMFCore.CachingPolicyManager import CachingPolicy
        return CachingPolicy(policy_id, **kw)
def getActivityChartData(self, segment_height, REQUEST=None):
    """Returns information for generating an activity chart.

    Queries the activity monitor for load/store/connection counts per
    time division and converts them into bar lengths (in pixels, scaled
    to ``segment_height``) plus totals, suitable for rendering a chart.
    Returns None when no activity monitor is available.
    """
    am = self._getActivityMonitor()
    if am is None:
        return None
    if REQUEST is not None:
        # Chart window and resolution come from the request when present.
        start = float(REQUEST.get('chart_start', 0))
        end = float(REQUEST.get('chart_end', 0))
        divisions = int(REQUEST.get('chart_divisions', 10))
        analysis = am.getActivityAnalysis(start, end, divisions)
    else:
        analysis = am.getActivityAnalysis()
    total_load_count = 0
    total_store_count = 0
    total_connections = 0
    # 'limit' is the busiest division (stores + loads); it is the scale
    # ceiling for the bar lengths computed below.
    limit = 0
    divs = []
    for div in analysis:
        total_store_count = total_store_count + div['stores']
        total_load_count = total_load_count + div['loads']
        total_connections = total_connections + div['connections']
        # NOTE(review): 'sum' shadows the builtin; harmless here but
        # worth renaming in a future cleanup.
        sum = div['stores'] + div['loads']
        if sum > limit:
            limit = sum
    if analysis:
        segment_time = analysis[0]['end'] - analysis[0]['start']
    else:
        segment_time = 0
    for div in analysis:
        stores = div['stores']
        if stores > 0:
            # Non-zero activity always gets at least a 1-pixel bar.
            # (limit is > 0 here, since this division contributes to it.)
            store_len = max(int(segment_height * stores / limit), 1)
        else:
            store_len = 0
        loads = div['loads']
        if loads > 0:
            load_len = max(int(segment_height * loads / limit), 1)
        else:
            load_len = 0
        # Offset of this division relative to the newest division's end.
        t = div['end'] - analysis[-1]['end']
        # Show negative numbers.
        if segment_time >= 3600:
            # Show hours.
            time_offset = '%dh' % (t / 3600)
        elif segment_time >= 60:
            # Show minutes.
            time_offset = '%dm' % (t / 60)
        elif segment_time >= 1:
            # Show seconds.
            time_offset = '%ds' % t
        else:
            # Show fractions.
            time_offset = '%.2fs' % t
        divs.append({'store_len': store_len,
                     'load_len': load_len,
                     'trans_len': max(segment_height - store_len - load_len, 0),
                     'store_count': div['stores'],
                     'load_count': div['loads'],
                     'connections': div['connections'],
                     'start': div['start'],
                     'end': div['end'],
                     'time_offset': time_offset,
                     })
    if analysis:
        start_time = DateTime(divs[0]['start']).aCommonZ()
        end_time = DateTime(divs[-1]['end']).aCommonZ()
    else:
        start_time = ''
        end_time = ''
    res = {'start_time': start_time,
           'end_time': end_time,
           'divs': divs,
           'total_store_count': total_store_count,
           'total_load_count': total_load_count,
           'total_connections': total_connections,
           }
    return res
def ZopeFindAndApply(self, obj, obj_ids=None, obj_metatypes=None,
                     obj_searchterm=None, obj_expr=None,
                     obj_mtime=None, obj_mspec=None,
                     obj_permission=None, obj_roles=None,
                     search_sub=0,
                     REQUEST=None, result=None, pre='',
                     apply_func=None, apply_path=''):
    """Zope Find interface and apply

    This is a *great* hack. Zope find just doesn't do what we
    need here; the ability to apply a method to all the objects
    *as they're found* and the need to pass the object's path into
    that method.

    Walks the children of ``obj`` (recursively when ``search_sub`` is
    true), and for each object matching all the given criteria either
    calls ``apply_func(ob, path)`` or appends ``(path, ob)`` to
    ``result``, which is returned.
    """
    if result is None:
        # Top-level call only: normalize the filter arguments once; the
        # recursive calls below reuse the already-normalized values.
        result = []
        if obj_metatypes and 'all' in obj_metatypes:
            obj_metatypes = None
        if obj_mtime and isinstance(obj_mtime, str):
            obj_mtime = DateTime(obj_mtime).timeTime()
        if obj_permission:
            obj_permission = getPermissionIdentifier(obj_permission)
        if obj_roles and isinstance(obj_roles, str):
            obj_roles = [obj_roles]
        if obj_expr:
            # Setup expr machinations
            md = td()
            obj_expr = (Eval(obj_expr), md, md._push, md._pop)
    base = aq_base(obj)
    if not hasattr(base, 'objectItems'):
        # Not a container; nothing to traverse.
        return result
    try:
        items = obj.objectItems()
    except Exception:
        return result
    try:
        add_result = result.append
    except Exception:
        raise AttributeError(repr(result))
    for id, ob in items:
        if pre:
            p = "%s/%s" % (pre, id)
        else:
            p = id
        # dflag: the object was a ZODB ghost before we touched it; if we
        # don't end up keeping a reference, deactivate it again below to
        # keep memory usage flat during big traversals.
        dflag = 0
        if hasattr(ob, '_p_changed') and (ob._p_changed is None):
            dflag = 1
        bs = aq_base(ob)
        if ((not obj_ids or absattr(bs.id) in obj_ids)
                and (not obj_metatypes
                     or (hasattr(bs, 'meta_type')
                         and bs.meta_type in obj_metatypes))
                and (not obj_searchterm
                     or (hasattr(ob, 'PrincipiaSearchSource')
                         and ob.PrincipiaSearchSource().find(obj_searchterm) >= 0))  # noqa: E501
                and (not obj_expr or expr_match(ob, obj_expr))
                and (not obj_mtime
                     or mtime_match(ob, obj_mtime, obj_mspec))
                and ((not obj_permission or not obj_roles)
                     or role_match(ob, obj_permission, obj_roles))):
            if apply_func:
                # Apply as-we-find: hand over the object and its path.
                apply_func(ob, (apply_path + '/' + p))
            else:
                add_result((p, ob))
                dflag = 0
        if search_sub and hasattr(bs, 'objectItems'):
            self.ZopeFindAndApply(ob, obj_ids, obj_metatypes,
                                  obj_searchterm, obj_expr,
                                  obj_mtime, obj_mspec,
                                  obj_permission, obj_roles,
                                  search_sub,
                                  REQUEST, result, p,
                                  apply_func, apply_path)
        if dflag:
            ob._p_deactivate()
    return result
def english_month_names():
    """Return a dict mapping month numbers 1-12 to English month names."""
    # Any non-leap-sensitive year works; only the month matters here.
    return dict((month, DateTime(2004, month, 1).Month())
                for month in range(1, 13))
def notifyModified(self):
    """React to a modification: stamp the modification date and, when
    the resource supports it, refresh its HTTP ETag."""
    now = DateTime()
    self.setModificationDate(now)
    can_refresh_etag = shasattr(self, 'http__refreshEtag')
    if can_refresh_etag:
        self.http__refreshEtag()
<element value="Foo"/> <element value="Tokens"/></property> <property name="foo_selection" type="selection" select_variable="foobarbaz">Foo</property> <property name="foo_mselection"> <element value="Foo"/> <element value="Baz"/></property> """ _NORMAL_PROPERTY_INFO = ( { 'id': 'foo_boolean', 'value': True, 'elements': (), 'type': 'boolean', 'select_variable': None }, { 'id': 'foo_date', 'value': DateTime('2000/01/01'), 'elements': (), 'type': 'date', 'select_variable': None }, { 'id': 'foo_float', 'value': 1.1, 'elements': (), 'type': 'float', 'select_variable': None }, { 'id': 'foo_int', 'value': 1, 'elements': (), 'type': 'int', 'select_variable': None }, { 'id': 'foo_lines', 'value': '',
def setUp(self):
    # Capture "now" once so every assertion in a test sees the same time.
    epoch = DateTime()
    self._epoch = epoch
class CachingPolicyTests(unittest.TestCase):
    """Unit tests for CachingPolicy.getHeaders().

    Each test builds a policy with particular options, evaluates it
    against a DummyContent/'foo_view' expression context pinned to a
    fixed epoch, and checks the exact caching headers produced.
    """

    def setUp(self):
        # A fixed epoch keeps Last-modified values reproducible.
        self._epoch = DateTime('1970/01/01')

    def _makePolicy(self, policy_id, **kw):
        """Create a CachingPolicy with the given id and options."""
        from Products.CMFCore.CachingPolicyManager import CachingPolicy
        return CachingPolicy(policy_id, **kw)

    def _makeContext(self, **kw):
        """Create an expression context for DummyContent / 'foo_view'.

        Keyword arguments become the 'keywords' mapping the policy
        predicates can inspect.
        """
        # Fixed: the import statement was duplicated here.
        from Products.CMFCore.CachingPolicyManager import createCPContext
        return createCPContext(DummyContent(self._epoch),
                               'foo_view', kw, self._epoch)

    def test_empty(self):
        policy = self._makePolicy('empty')
        context = self._makeContext()
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 1)
        self.assertEqual(headers[0][0], 'Last-modified')
        self.assertEqual(headers[0][1], rfc1123_date(self._epoch.timeTime()))

    def test_noPassPredicate(self):
        policy = self._makePolicy('noPassPredicate', predicate='nothing')
        context = self._makeContext()
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 0)

    def test_typePredicate(self):
        policy = self._makePolicy('typePredicate',
                                  predicate='python:content.Type() == "Dummy"')
        context = self._makeContext()
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 1)
        self.assertEqual(headers[0][0], 'Last-modified')
        self.assertEqual(headers[0][1], rfc1123_date(self._epoch.timeTime()))

    def test_typePredicateMiss(self):
        policy = self._makePolicy(
            'typePredicate',
            predicate='python:content.Type() == "Foolish"')
        context = self._makeContext()
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 0)

    def test_viewPredicate(self):
        policy = self._makePolicy('viewPredicate',
                                  predicate='python:view == "foo_view"')
        context = self._makeContext()
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 1)
        self.assertEqual(headers[0][0], 'Last-modified')
        self.assertEqual(headers[0][1], rfc1123_date(self._epoch.timeTime()))

    def test_viewPredicateMiss(self):
        policy = self._makePolicy('viewPredicateMiss',
                                  predicate='python:view == "bar_view"')
        context = self._makeContext()
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 0)

    def test_kwPredicate(self):
        policy = self._makePolicy('kwPredicate',
                                  predicate='python:"foo" in keywords.keys()')
        context = self._makeContext(foo=1)
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 1)
        self.assertEqual(headers[0][0], 'Last-modified')
        self.assertEqual(headers[0][1], rfc1123_date(self._epoch.timeTime()))

    def test_kwPredicateMiss(self):
        policy = self._makePolicy('kwPredicateMiss',
                                  predicate='python:"foo" in keywords.keys()')
        context = self._makeContext(bar=1)
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 0)
        context = self._makeContext()
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 0)

    def test_mtimeFunc(self):
        # A custom mtime_func overrides the content's own mtime.
        policy = self._makePolicy('mtimeFunc',
                                  mtime_func='string:2001/01/01')
        context = self._makeContext()
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 1)
        self.assertEqual(headers[0][0], 'Last-modified')
        self.assertEqual(headers[0][1], rfc1123_date(ACCLARK.timeTime()))

    def test_mtimeFuncNone(self):
        # An mtime_func returning None suppresses Last-modified entirely.
        policy = self._makePolicy('mtimeFuncNone', mtime_func='nothing')
        context = self._makeContext()
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 0)

    def test_maxAge(self):
        policy = self._makePolicy('aged', max_age_secs=86400)
        context = self._makeContext()
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 3)
        self.assertEqual(headers[0][0].lower(), 'last-modified')
        self.assertEqual(headers[0][1], rfc1123_date(self._epoch.timeTime()))
        self.assertEqual(headers[1][0].lower(), 'expires')
        # 86400 seconds == one DateTime day.
        self.assertEqual(headers[1][1],
                         rfc1123_date((self._epoch + 1).timeTime()))
        self.assertEqual(headers[2][0].lower(), 'cache-control')
        self.assertEqual(headers[2][1], 'max-age=86400')

    def test_noCache(self):
        policy = self._makePolicy('noCache', no_cache=1)
        context = self._makeContext()
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 2)
        self.assertEqual(headers[0][0].lower(), 'last-modified')
        self.assertEqual(headers[0][1], rfc1123_date(self._epoch.timeTime()))
        self.assertEqual(headers[1][0].lower(), 'cache-control')
        self.assertEqual(headers[1][1], 'no-cache')

    def test_noStore(self):
        policy = self._makePolicy('noStore', no_store=1)
        context = self._makeContext()
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 2)
        self.assertEqual(headers[0][0].lower(), 'last-modified')
        self.assertEqual(headers[0][1], rfc1123_date(self._epoch.timeTime()))
        self.assertEqual(headers[1][0].lower(), 'cache-control')
        self.assertEqual(headers[1][1], 'no-store')

    def test_mustRevalidate(self):
        policy = self._makePolicy('mustRevalidate', must_revalidate=1)
        context = self._makeContext()
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 2)
        self.assertEqual(headers[0][0].lower(), 'last-modified')
        self.assertEqual(headers[0][1], rfc1123_date(self._epoch.timeTime()))
        self.assertEqual(headers[1][0].lower(), 'cache-control')
        self.assertEqual(headers[1][1], 'must-revalidate')

    def test_combined(self):
        # Both flags combine into a single Cache-control header.
        policy = self._makePolicy('noStore', no_cache=1, no_store=1)
        context = self._makeContext()
        headers = policy.getHeaders(context)
        self.assertEqual(len(headers), 2)
        self.assertEqual(headers[0][0].lower(), 'last-modified')
        self.assertEqual(headers[0][1], rfc1123_date(self._epoch.timeTime()))
        self.assertEqual(headers[1][0].lower(), 'cache-control')
        self.assertEqual(headers[1][1], 'no-cache, no-store')
def setUp(self):
    # Pin the reference time to the Unix epoch for reproducible headers.
    fixed_epoch = DateTime('1970/01/01')
    self._epoch = fixed_epoch
def modified(self):
    """Return the fixed module-level modification timestamp."""
    timestamp = _FILE_MOD_TIME
    return DateTime(timestamp)
def _range_request_handler(self, REQUEST, RESPONSE):
    # HTTP Range header handling: return True if we've served a range
    # chunk out of our data.
    #
    # Returns a falsy value (implicit None) when no Range header is
    # present or the If-Range precondition fails, in which case the
    # caller should produce a normal 200 response.
    range = REQUEST.get_header('Range', None)
    request_range = REQUEST.get_header('Request-Range', None)
    if request_range is not None:
        # Netscape 2 through 4 and MSIE 3 implement a draft version
        # Later on, we need to serve a different mime-type as well.
        range = request_range
    if_range = REQUEST.get_header('If-Range', None)
    if range is not None:
        ranges = HTTPRangeSupport.parseRange(range)
        if if_range is not None:
            # Only send ranges if the data isn't modified, otherwise send
            # the whole object. Support both ETags and Last-Modified dates!
            if len(if_range) > 1 and if_range[:2] == 'ts':
                # ETag:
                # (Zope's default timestamp ETags start with 'ts'.)
                if if_range != self.http__etag():
                    # Modified, so send a normal response. We delete
                    # the ranges, which causes us to skip to the 200
                    # response.
                    ranges = None
            else:
                # Date
                date = if_range.split(';')[0]
                try:
                    mod_since = int(DateTime(date).timeTime())
                except Exception:
                    mod_since = None
                if mod_since is not None:
                    if self._p_mtime:
                        last_mod = int(self._p_mtime)
                    else:
                        last_mod = 0
                    if last_mod > mod_since:
                        # Modified, so send a normal response. We delete
                        # the ranges, which causes us to skip to the 200
                        # response.
                        ranges = None
        if ranges:
            # Search for satisfiable ranges.
            satisfiable = 0
            for start, end in ranges:
                if start < self.size:
                    satisfiable = 1
                    break
            if not satisfiable:
                # No range overlaps the data: 416 Requested Range Not
                # Satisfiable, advertising the actual size.
                RESPONSE.setHeader('Content-Range',
                                   'bytes */%d' % self.size)
                RESPONSE.setHeader('Accept-Ranges', 'bytes')
                RESPONSE.setHeader('Last-Modified',
                                   rfc1123_date(self._p_mtime))
                RESPONSE.setHeader('Content-Type', self.content_type)
                RESPONSE.setHeader('Content-Length', self.size)
                RESPONSE.setStatus(416)
                return True
            ranges = HTTPRangeSupport.expandRanges(ranges, self.size)
            if len(ranges) == 1:
                # Easy case, set extra header and return partial set.
                start, end = ranges[0]
                size = end - start
                RESPONSE.setHeader('Last-Modified',
                                   rfc1123_date(self._p_mtime))
                RESPONSE.setHeader('Content-Type', self.content_type)
                RESPONSE.setHeader('Content-Length', size)
                RESPONSE.setHeader('Accept-Ranges', 'bytes')
                RESPONSE.setHeader(
                    'Content-Range',
                    'bytes %d-%d/%d' % (start, end - 1, self.size))
                RESPONSE.setStatus(206)  # Partial content
                data = self.data
                if isinstance(data, binary_type):
                    RESPONSE.write(data[start:end])
                    return True
                # Linked Pdata objects. Urgh.
                # Walk the chain, tracking the absolute offset 'pos' of
                # the end of each chunk, and emit only the overlap with
                # [start, end).
                pos = 0
                while data is not None:
                    l = len(data.data)
                    pos = pos + l
                    if pos > start:
                        # We are within the range
                        lstart = l - (pos - start)
                        if lstart < 0:
                            lstart = 0
                        # find the endpoint
                        if end <= pos:
                            lend = l - (pos - end)
                            # Send and end transmission
                            # NOTE(review): this slices the Pdata link
                            # itself rather than data.data — confirm
                            # Pdata supports slicing here.
                            RESPONSE.write(data[lstart:lend])
                            break
                        # Not yet at the end, transmit what we have.
                        RESPONSE.write(data[lstart:])
                    data = data.next
                return True
            else:
                boundary = _make_boundary()
                # Calculate the content length
                size = (8 + len(boundary) +  # End marker length
                        len(ranges) * (      # Constant lenght per set
                            49 + len(boundary) + len(self.content_type) +
                            len('%d' % self.size)))
                for start, end in ranges:
                    # Variable length per set
                    size = (size + len('%d%d' % (start, end - 1)) +
                            end - start)
                # Some clients implement an earlier draft of the spec, they
                # will only accept x-byteranges.
                draftprefix = (request_range is not None) and 'x-' or ''
                RESPONSE.setHeader('Content-Length', size)
                RESPONSE.setHeader('Accept-Ranges', 'bytes')
                RESPONSE.setHeader('Last-Modified',
                                   rfc1123_date(self._p_mtime))
                RESPONSE.setHeader(
                    'Content-Type',
                    'multipart/%sbyteranges; boundary=%s' % (
                        draftprefix, boundary))
                RESPONSE.setStatus(206)  # Partial content
                data = self.data
                # The Pdata map allows us to jump into the Pdata chain
                # arbitrarily during out-of-order range searching.
                pdata_map = {}
                pdata_map[0] = data
                for start, end in ranges:
                    RESPONSE.write(b'\r\n--' +
                                   boundary.encode('ascii') + b'\r\n')
                    RESPONSE.write(b'Content-Type: ' +
                                   self.content_type.encode('ascii') +
                                   b'\r\n')
                    RESPONSE.write(
                        b'Content-Range: bytes ' +
                        str(start).encode('ascii') + b'-' +
                        str(end - 1).encode('ascii') + b'/' +
                        str(self.size).encode('ascii') + b'\r\n\r\n')
                    if isinstance(data, binary_type):
                        RESPONSE.write(data[start:end])
                    else:
                        # Yippee. Linked Pdata objects. The following
                        # calculations allow us to fast-forward through the
                        # Pdata chain without a lot of dereferencing if we
                        # did the work already.
                        first_size = len(pdata_map[0].data)
                        if start < first_size:
                            closest_pos = 0
                        else:
                            closest_pos = (
                                ((start - first_size) >> 16 << 16) +
                                first_size)
                        pos = min(closest_pos, max(pdata_map.keys()))
                        data = pdata_map[pos]
                        while data is not None:
                            l = len(data.data)
                            pos = pos + l
                            if pos > start:
                                # We are within the range
                                lstart = l - (pos - start)
                                if lstart < 0:
                                    lstart = 0
                                # find the endpoint
                                if end <= pos:
                                    lend = l - (pos - end)
                                    # Send and loop to next range
                                    RESPONSE.write(data[lstart:lend])
                                    break
                                # Not yet at the end,
                                # transmit what we have.
                                RESPONSE.write(data[lstart:])
                            data = data.next
                        # Store a reference to a Pdata chain link
                        # so we don't have to deref during
                        # this request again.
                        pdata_map[pos] = data
                # Do not keep the link references around.
                del pdata_map
                RESPONSE.write(b'\r\n--' + boundary.encode('ascii') +
                               b'--\r\n')
                return True
def bobobase_modification_time(self):
    """Return the modification time recorded when the file was last read."""
    recorded_mtime = self._file_mod_time
    return DateTime(recorded_mtime)
class MemberDataTool(UniqueObject, SimpleItem, PropertyManager):
    """ This tool wraps user objects, making them act as Member objects.

    Member data is stored in an OOBTree keyed by member id; wrapping
    and lookups are delegated to the membership tool and adapters.
    """

    id = 'portal_memberdata'
    meta_type = 'CMF Member Data Tool'
    zmi_icon = 'fas fa-database'
    # Property schema for per-member data; defaults follow below.
    _properties = ({'id': 'email', 'type': 'string', 'mode': 'w'},
                   {'id': 'portal_skin', 'type': 'string', 'mode': 'w'},
                   {'id': 'listed', 'type': 'boolean', 'mode': 'w'},
                   {'id': 'login_time', 'type': 'date', 'mode': 'w'},
                   {'id': 'last_login_time', 'type': 'date', 'mode': 'w'},
                   {'id': 'fullname', 'type': 'string', 'mode': 'w'})
    email = ''
    fullname = ''
    last_login_time = DateTime('1970/01/01 00:00:00 UTC')  # epoch
    listed = False
    login_time = DateTime('1970/01/01 00:00:00 UTC')  # epoch
    portal_skin = ''

    security = ClassSecurityInfo()

    manage_options = (({'label': 'Overview',
                        'action': 'manage_overview'},
                       {'label': 'Contents',
                        'action': 'manage_showContents'})
                      + PropertyManager.manage_options
                      + SimpleItem.manage_options)

    #
    #   ZMI methods
    #
    security.declareProtected(ManagePortal, 'manage_overview')
    manage_overview = DTMLFile('explainMemberDataTool', _dtmldir)

    security.declareProtected(ViewManagementScreens, 'manage_showContents')
    manage_showContents = DTMLFile('memberdataContents', _dtmldir)

    def __init__(self):
        # Persistent storage for member data, keyed by member id.
        self._members = OOBTree()

    #
    #   'portal_memberdata' interface methods
    #
    @security.private
    def getMemberDataContents(self):
        ''' Return the number of members stored in the _members
            BTree and some other useful info (the count of entries
            with no matching user in acl_users).
        '''
        mtool = getUtility(IMembershipTool)
        members = self._members
        user_list = mtool.listMemberIds()
        member_list = members.keys()
        member_count = len(members)
        orphan_count = 0
        for member in member_list:
            if member not in user_list:
                orphan_count = orphan_count + 1
        return [{'member_count': member_count,
                 'orphan_count': orphan_count}]

    @security.private
    def searchMemberData(self, search_param, search_term, attributes=()):
        """ Search members.

        Substring-matches ``search_term`` against the ``search_param``
        property of each member and returns a list of dicts holding the
        requested ``attributes`` (default: id and email).
        """
        res = []
        if not search_param:
            return res
        mtool = getUtility(IMembershipTool)
        if len(attributes) == 0:
            attributes = ('id', 'email')
        if search_param == 'username':
            # 'username' is an alias for the 'id' property.
            search_param = 'id'
        for user_id in self._members.keys():
            u = mtool.getMemberById(user_id)
            if u is not None:
                memberProperty = u.getProperty
                searched = memberProperty(search_param, None)
                if searched is not None and \
                        searched.find(search_term) != -1:
                    user_data = {}
                    for desired in attributes:
                        if desired == 'id':
                            user_data['username'] = memberProperty(desired,
                                                                   '')
                        else:
                            user_data[desired] = memberProperty(desired, '')
                    res.append(user_data)
        return res

    @security.private
    def searchMemberDataContents(self, search_param, search_term):
        """ Search members. This method will be deprecated soon. """
        res = []
        if search_param == 'username':
            search_param = 'id'
        mtool = getUtility(IMembershipTool)
        for member_id in self._members.keys():
            user_wrapper = mtool.getMemberById(member_id)
            if user_wrapper is not None:
                memberProperty = user_wrapper.getProperty
                searched = memberProperty(search_param, None)
                if searched is not None and \
                        searched.find(search_term) != -1:
                    res.append({'username': memberProperty('id'),
                                'email': memberProperty('email', '')})
        return res

    @security.private
    def pruneMemberDataContents(self):
        """ Delete data contents of all members not listed in acl_users.
        """
        mtool = getUtility(IMembershipTool)
        members = self._members
        user_list = mtool.listMemberIds()
        # Iterate over a list copy: we mutate the BTree while looping.
        for member_id in list(members.keys()):
            if member_id not in user_list:
                del members[member_id]

    @security.private
    def wrapUser(self, u):
        ''' If possible, returns the Member object that corresponds
        to the given User object.
        '''
        return getMultiAdapter((u, self), IMember)

    @security.private
    def registerMemberData(self, m, id):
        """ Add the given member data to the _members btree.
        """
        # aq_base strips acquisition wrappers before persisting.
        self._members[id] = aq_base(m)

    @security.private
    def deleteMemberData(self, member_id):
        """ Delete member data of specified member.

        Returns 1 when an entry was removed, 0 otherwise.
        """
        members = self._members
        if member_id in members:
            del members[member_id]
            return 1
        else:
            return 0
class CachingPolicyManagerTests(unittest.TestCase):
    """Unit tests for the CachingPolicyManager tool."""

    def setUp(self):
        # The manager tests use "now" as the reference time.
        self._epoch = DateTime()

    def _makeOne(self):
        from Products.CMFCore.CachingPolicyManager \
            import CachingPolicyManager
        return CachingPolicyManager()

    def assertEqualDelta(self, lhs, rhs, delta):
        # Approximate equality helper for DateTime comparisons.
        self.failUnless(abs(lhs - rhs) <= delta)

    def test_interface(self):
        from Products.CMFCore.CachingPolicyManager \
            import CachingPolicyManager
        from Products.CMFCore.interfaces.CachingPolicyManager \
            import CachingPolicyManager as ICachingPolicyManager
        # Support both old- and new-style Interface packages.
        try:
            from Interface import verify_class_implementation as verifyClass
        except ImportError:
            from Interface.Verify import verifyClass
        verifyClass(ICachingPolicyManager, CachingPolicyManager)

    def test_empty(self):
        # An empty manager yields no headers and raises KeyError for
        # operations on unknown policy ids.
        mgr = self._makeOne()
        self.assertEqual(len(mgr.listPolicies()), 0)
        headers = mgr.getHTTPCachingHeaders(
            content=DummyContent(self._epoch),
            view_method='foo_view', keywords={}, time=self._epoch)
        self.assertEqual(len(headers), 0)
        self.assertRaises(KeyError, mgr._updatePolicy,
                          'xyzzy', None, None, None, None, None, None)
        self.assertRaises(KeyError, mgr._removePolicy, 'xyzzy')
        self.assertRaises(KeyError, mgr._reorderPolicy, 'xyzzy', -1)

    def test_addPolicy(self):
        mgr = self._makeOne()
        mgr._addPolicy('first', 'python:1', None, 0, 0, 0, 0)
        headers = mgr.getHTTPCachingHeaders(
            content=DummyContent(self._epoch),
            view_method='foo_view', keywords={}, time=self._epoch)
        self.assertEqual(len(headers), 3)
        self.assertEqual(headers[0][0].lower(), 'last-modified')
        self.assertEqual(headers[0][1],
                         rfc1123_date(self._epoch.timeTime()))
        self.assertEqual(headers[1][0].lower(), 'expires')
        self.assertEqual(headers[1][1],
                         rfc1123_date(self._epoch.timeTime()))
        self.assertEqual(headers[2][0].lower(), 'cache-control')
        self.assertEqual(headers[2][1], 'max-age=0')

    def test_reorder(self):
        mgr = self._makeOne()
        policy_ids = ('foo', 'bar', 'baz', 'qux')
        for policy_id in policy_ids:
            mgr._addPolicy(policy_id,
                           'python:"%s" in keywords.keys()' % policy_id,
                           None, 0, 0, 0, 0)
        ids = tuple(map(lambda x: x[0], mgr.listPolicies()))
        self.assertEqual(ids, policy_ids)
        mgr._reorderPolicy('bar', 3)
        ids = tuple(map(lambda x: x[0], mgr.listPolicies()))
        self.assertEqual(ids, ('foo', 'baz', 'qux', 'bar'))

    def _makeOneWithPolicies(self):
        # Policies keyed on keyword presence, with varying max-age.
        mgr = self._makeOne()
        policy_tuples = (('foo', None),
                         ('bar', 0),
                         ('baz', 3600),
                         ('qux', 86400))
        for policy_id, max_age_secs in policy_tuples:
            mgr._addPolicy(policy_id,
                           'python:"%s" in keywords.keys()' % policy_id,
                           None, max_age_secs, 0, 0, 0)
        return mgr

    def test_lookupNoMatch(self):
        mgr = self._makeOneWithPolicies()
        headers = mgr.getHTTPCachingHeaders(
            content=DummyContent(self._epoch),
            view_method='foo_view', keywords={}, time=self._epoch)
        self.assertEqual(len(headers), 0)

    def test_lookupMatchFoo(self):
        # max_age_secs=None: only Last-modified is emitted.
        mgr = self._makeOneWithPolicies()
        headers = mgr.getHTTPCachingHeaders(
            content=DummyContent(self._epoch),
            view_method='foo_view', keywords={'foo': 1},
            time=self._epoch)
        self.assertEqual(len(headers), 1)
        self.assertEqual(headers[0][0].lower(), 'last-modified')
        self.assertEqual(headers[0][1],
                         rfc1123_date(self._epoch.timeTime()))

    def test_lookupMatchBar(self):
        # max_age_secs=0: expires immediately.
        mgr = self._makeOneWithPolicies()
        headers = mgr.getHTTPCachingHeaders(
            content=DummyContent(self._epoch),
            view_method='foo_view', keywords={'bar': 1},
            time=self._epoch)
        self.assertEqual(len(headers), 3)
        self.assertEqual(headers[0][0].lower(), 'last-modified')
        self.assertEqual(headers[0][1],
                         rfc1123_date(self._epoch.timeTime()))
        self.assertEqual(headers[1][0].lower(), 'expires')
        self.assertEqual(headers[1][1],
                         rfc1123_date(self._epoch.timeTime()))
        self.assertEqual(headers[2][0].lower(), 'cache-control')
        self.assertEqual(headers[2][1], 'max-age=0')

    def test_lookupMatchBaz(self):
        # max_age_secs=3600: expires one hour after the epoch.
        mgr = self._makeOneWithPolicies()
        headers = mgr.getHTTPCachingHeaders(
            content=DummyContent(self._epoch),
            view_method='foo_view', keywords={'baz': 1},
            time=self._epoch)
        self.assertEqual(len(headers), 3)
        self.assertEqual(headers[0][0].lower(), 'last-modified')
        self.assertEqual(headers[0][1],
                         rfc1123_date(self._epoch.timeTime()))
        self.assertEqual(headers[1][0].lower(), 'expires')
        exp_time = DateTime(headers[1][1])
        target = self._epoch + (1.0 / 24.0)
        self.assertEqualDelta(exp_time, target, 0.01)
        self.assertEqual(headers[2][0].lower(), 'cache-control')
        self.assertEqual(headers[2][1], 'max-age=3600')

    def test_lookupMatchQux(self):
        # max_age_secs=86400: expires one day after the epoch.
        mgr = self._makeOneWithPolicies()
        headers = mgr.getHTTPCachingHeaders(
            content=DummyContent(self._epoch),
            view_method='foo_view', keywords={'qux': 1},
            time=self._epoch)
        self.assertEqual(len(headers), 3)
        self.assertEqual(headers[0][0].lower(), 'last-modified')
        self.assertEqual(headers[0][1],
                         rfc1123_date(self._epoch.timeTime()))
        self.assertEqual(headers[1][0].lower(), 'expires')
        exp_time = DateTime(headers[1][1])
        target = self._epoch + 1.0
        self.assertEqualDelta(exp_time, target, 0.01)
        self.assertEqual(headers[2][0].lower(), 'cache-control')
        self.assertEqual(headers[2][1], 'max-age=86400')
from Acquisition import aq_base from AccessControl import ClassSecurityInfo from AccessControl import Unauthorized from DateTime.DateTime import DateTime from App.class_init import InitializeClass from App.special_dtml import DTMLFile from Products.CMFCore import permissions from Products.CMFCore.utils import getToolByName from Products.CMFPlone.log import log_deprecated from ComputedAttribute import ComputedAttribute _marker = [] # http://www.zope.org/Collectors/CMF/325 # http://www.zope.org/Collectors/CMF/476 _zone = DateTime().timezone() FLOOR_DATE = DateTime(1000, 1) # always effective CEILING_DATE = DateTime(2500, 0) # never expires # We import this conditionally, in order not to introduce a hard dependency try: from plone.i18n.locales.interfaces import IMetadataLanguageAvailability HAS_PLONE_I18N = True except ImportError: HAS_PLONE_I18N = False # MIXIN @implementer(IExtensibleMetadata) class ExtensibleMetadata(Persistence.Persistent):
def getHeaders(self, expr_context):
    """ Does this request match our predicate?  If so, return a
        sequence of caching headers as ( key, value ) tuples.
        Otherwise, return an empty sequence.

    Header order is fixed: Last-modified, Expires, Pragma,
    Cache-control, Vary, ETag — the unit tests rely on it.
    """
    headers = []
    if self.testPredicate(expr_context):
        if self.getLastModified():
            mtime = self._mtime_func(expr_context)
            # The mtime expression may yield a string date; coerce it.
            if type(mtime) is type(''):
                mtime = DateTime(mtime)
            # A None mtime suppresses the Last-modified header entirely.
            if mtime is not None:
                mtime_str = rfc1123_date(mtime.timeTime())
                headers.append(('Last-modified', mtime_str))
        # Collected Cache-control directives, joined at the end.
        control = []
        if self.getMaxAgeSecs() is not None:
            now = expr_context.vars['time']
            exp_time_str = rfc1123_date(now.timeTime()
                                        + self._max_age_secs)
            headers.append(('Expires', exp_time_str))
            control.append('max-age=%d' % self._max_age_secs)
        if self.getSMaxAgeSecs() is not None:
            control.append('s-maxage=%d' % self._s_max_age_secs)
        if self.getNoCache():
            control.append('no-cache')
            # The following is for HTTP 1.0 clients
            headers.append(('Pragma', 'no-cache'))
        if self.getNoStore():
            control.append('no-store')
        if self.getPublic():
            control.append('public')
        if self.getPrivate():
            control.append('private')
        if self.getMustRevalidate():
            control.append('must-revalidate')
        if self.getProxyRevalidate():
            control.append('proxy-revalidate')
        if self.getNoTransform():
            control.append('no-transform')
        pre_check = self.getPreCheck()
        if pre_check is not None:
            control.append('pre-check=%d' % pre_check)
        post_check = self.getPostCheck()
        if post_check is not None:
            control.append('post-check=%d' % post_check)
        if control:
            headers.append(('Cache-control', ', '.join(control)))
        if self.getVary():
            headers.append(('Vary', self._vary))
        if self.getETagFunc():
            headers.append(('ETag', self._etag_func(expr_context)))
    return headers
def result(self, date=None, use_ampm=False, starting_year=None,
           ending_year=None, future_years=None, minute_step=5):
    """Returns a dict with date information.

    Builds option lists (years, months, days, hours, minutes, am/pm)
    for date-selection widgets.  Each option is a dict with 'id',
    'value' and 'selected' keys; the entry matching ``date`` is marked
    selected unless no valid date was supplied, in which case the '--'
    placeholder is selected instead.
    """
    ptool = getToolByName(self.context, 'portal_properties')
    site_props = ptool.site_properties
    # Get the date format from the locale
    context = aq_inner(self.context)
    portal_state = getMultiAdapter((context, self.request),
                                   name=u'plone_portal_state')
    dates = portal_state.locale().dates
    timepattern = dates.getFormatter('time').getPattern()
    # A locale time pattern containing 'a' means 12-hour am/pm display.
    if 'a' in timepattern:
        use_ampm = True
    month_names = dates.getFormatter('date').calendar.months
    # 'id' is what shows up. December for month 12.
    # 'value' is the value for the form.
    # 'selected' is whether or not it is selected.
    default = 0
    years = []
    days = []
    months = []
    hours = []
    minutes = []
    ampm = []
    now = DateTime()
    if isinstance(date, basestring):
        date = date.strip()
        if not date:
            date = None
        else:
            # Please see datecomponents.txt for an explanation of
            # the next few lines. Also see #11423
            dateParts = date.split(" ")
            dateParts[0] = dateParts[0].replace("-", "/")
            date = ' '.join(dateParts)
    if date is None:
        # No usable date: fall back to "now" and select placeholders.
        date = now
        default = 1
    elif not isinstance(date, DateTime):
        try:
            date = DateTime(date)
        except (TypeError, DateTimeError):
            date = now
            default = 1
    # Anything above PLONE_CEILING should be PLONE_CEILING
    if date.greaterThan(PLONE_CEILING):
        date = PLONE_CEILING
    # Represent the date in the local timezone
    try:
        local_zone = date.localZone(localtime(date.timeTime()))
    except ValueError:
        # Dates before 1970 use a negative timeTime() value, which on
        # on some platforms are not handled well and lead to a
        # ValueError. In those cases, calculate the local timezone
        # (which is DST based) from the same date in the *current year*
        # instead. This is better than failing altogether!
        timeZoneDate = DateTime(localtime().tm_year, *date.parts()[1:])
        local_zone = date.localZone(localtime(timeZoneDate.timeTime()))
    date = date.toZone(local_zone)
    # Get portal year range
    if starting_year is None:
        min_year = site_props.getProperty('calendar_starting_year',
                                          1999)
    else:
        min_year = int(starting_year)
    if ending_year is None:
        if future_years is None:
            max_year = site_props.getProperty(
                'calendar_future_years_available', 5) + now.year()
        else:
            max_year = int(future_years) + now.year()
    else:
        max_year = int(ending_year)
    # keeps the existing date if it's out of range
    if not default:
        if min_year > date.year():
            min_year = date.year()
        if max_year < date.year():
            max_year = date.year()
    year = date.year()
    if default:
        years.append({'id': '--', 'value': '0000', 'selected': 1})
    else:
        years.append({'id': '--', 'value': '0000', 'selected': None})
    for x in range(min_year, max_year + 1):
        d = {'id': x, 'value': x, 'selected': None}
        if x == year and not default:
            d['selected'] = 1
        years.append(d)
    month = date.month()
    if default:
        months.append({'id': '--', 'value': '00', 'selected': 1,
                       'title': '--'})
    else:
        months.append({'id': '--', 'value': '00', 'selected': None,
                       'title': '--'})
    for x in range(1, 13):
        # 'id' is the English name; 'title' the localized month name.
        d = {'id': ENGLISH_MONTH_NAMES[x],
             'value': '%02d' % x,
             'selected': None}
        if x == month and not default:
            d['selected'] = 1
        d['title'] = month_names[x][0]
        months.append(d)
    day = date.day()
    if default:
        days.append({'id': '--', 'value': '00', 'selected': 1})
    else:
        days.append({'id': '--', 'value': '00', 'selected': None})
    for x in range(1, 32):
        d = {'id': x, 'value': '%02d' % x, 'selected': None}
        if x == day and not default:
            d['selected'] = 1
        days.append(d)
    if use_ampm:
        # 12-hour clock: noon/midnight show as 12, then 1..11.
        hours_range = [12] + range(1, 12)
        hour_default = '12'
        hour = int(date.h_12())
    else:
        hours_range = range(0, 24)
        hour_default = '00'
        hour = int(date.h_24())
    if default:
        hours.append({'id': '--', 'value': hour_default,
                      'selected': 1})
    else:
        hours.append({'id': '--', 'value': hour_default,
                      'selected': None})
    for x in hours_range:
        d = {'id': '%02d' % x, 'value': '%02d' % x, 'selected': None}
        if x == hour and not default:
            d['selected'] = 1
        hours.append(d)
    if default:
        minutes.append({'id': '--', 'value': '00', 'selected': 1})
    else:
        minutes.append({'id': '--', 'value': '00', 'selected': None})
    minute = date.minute()
    if minute_step is None:
        minute_step = 5
    if minute + minute_step >= 60:
        # edge case. see doctest for explanation
        minute = 60 - minute_step
    for x in range(0, 60, minute_step):
        d = {'id': '%02d' % x, 'value': '%02d' % x, 'selected': None}
        # Select the step bucket that contains the actual minute.
        if (x == minute or minute < x < minute + minute_step) \
                and not default:
            d['selected'] = 1
        minutes.append(d)
    if use_ampm:
        p = date.strftime('%p')
        if default:
            ampm.append({'id': '--', 'value': 'AM', 'selected': 1})
        else:
            ampm.append({'id': '--', 'value': 'AM', 'selected': None})
        for x in ('AM', 'PM'):
            d = {'id': x, 'value': x, 'selected': None}
            if x == p and not default:
                d['selected'] = 1
            ampm.append(d)
    return {'years': years,
            'months': months,
            'days': days,
            'hours': hours,
            'minutes': minutes,
            'ampm': ampm}
def setUp(self):
    """Freeze DublinCore's notion of 'now' for the duration of the test."""
    # Capture a single timestamp, then patch the _as_of hook so it always
    # reports that same instant; keep the original hook for tearDown.
    self._now = DateTime()
    frozen_now = lambda: self._now
    self._old_as_of = _replace_DC__as_of(frozen_now)
from time import localtime from zope.interface import implements from zope.component import getMultiAdapter from Acquisition import aq_inner from DateTime.DateTime import DateTime from DateTime.DateTime import DateTimeError from Products.CMFCore.utils import getToolByName from Products.Five.browser import BrowserView from interfaces import IDateComponents CEILING = DateTime(9999, 0) FLOOR = DateTime(1970, 0) PLONE_CEILING = DateTime(2500, 0) # 2499-12-31 def english_month_names(): names = {} for x in range(1, 13): faux = DateTime(2004, x, 1) names[x] = faux.Month() return names ENGLISH_MONTH_NAMES = english_month_names() class DateComponents(BrowserView): """A view that provides some helper methods useful in date widgets.
def _DateTime(*args, **kw):
    """Forward all positional and keyword arguments to the DateTime constructor.

    NOTE(review): presumably a module-level indirection point so date
    construction can be patched in tests -- confirm against callers.
    """
    value = DateTime(*args, **kw)
    return value
def viewDay(self):
    """Return a DateTime for the request's 'date' parameter, or today.

    When no (or an empty) 'date' value is present in the request, fall
    back to today's date rendered via aCommon() truncated to its date
    portion (first 12 characters).
    """
    requested = self.request.get('date', None)
    if not requested:
        requested = DateTime().aCommon()[:12]
    return DateTime(requested)
def _renderValueAsSearchText(self, value, operator):
    """Render *value* as a double-quoted ISO date string.

    The *operator* argument is accepted for interface compatibility but
    not used here.
    """
    iso = DateTime(value).ISO()
    return '"%s"' % iso
class UserPropertySheetTests(unittest.TestCase, IPropertySheet_conformance):
    """Exercise UserPropertySheet construction, schema guessing and lookup."""

    # Python 2 reports big integers as 'long'; Python 3 unified them as 'int'.
    _LONG_TYPE = 'long' if six.PY2 else 'int'

    # (property id, property type) pairs used both as an explicit schema
    # and as the expected result of schema guessing.
    _SCHEMA = (('s', 'string'), ('i', 'int'), ('f', 'float'),
               ('n', _LONG_TYPE), ('d', 'date'), ('l', 'lines'),
               ('t', 'lines'), ('b', 'boolean'), ('img', 'image'))

    # One representative value per schema entry.
    _STRING_VALUE = 'string'
    _INT_VALUE = 42
    _FLOAT_VALUE = 9.8
    # sys.maxsize + 1 forces a value beyond the native int range.
    _LONG_VALUE = sys.maxsize + 1
    _DATE_VALUE = DateTime()
    _LIST_VALUE = ['a', 'b', 'c']
    _TUPLE_VALUE = ('d', 'e', 'f')
    _BOOL_VALUE = True
    # NOTE(review): img_path is defined elsewhere in this module -- the image
    # fixture is loaded once at class-definition time.
    with open(img_path, 'rb') as img_file:
        _IMG_VALUE = Image('image', 'Test Image', img_file)

    def _getTargetClass(self):
        """Return the class under test (imported lazily)."""
        from ..UserPropertySheet import UserPropertySheet
        return UserPropertySheet

    def _makeOne(self, *args, **kw):
        """Instantiate the class under test with the given arguments."""
        return self._getTargetClass()(*args, **kw)

    def test_ctor_id_noschema_novalues(self):
        """An empty sheet keeps its id but exposes no properties."""
        ups = self._makeOne('empty')
        self.assertEqual(ups.getId(), 'empty')
        self.assertFalse(ups.hasProperty('empty'))
        self.assertFalse(ups.hasProperty('foo'))
        self.assertFalse(ups.hasProperty('bar'))
        self.assertEqual(ups.getProperty('foo'), None)
        self.assertEqual(ups.getPropertyType('foo'), None)
        self.assertEqual(len(ups.propertyMap()), 0)
        self.assertEqual(len(ups.propertyIds()), 0)
        self.assertEqual(len(ups.propertyValues()), 0)
        self.assertEqual(len(ups.propertyItems()), 0)
        self.assertEqual(len(ups.propertyIds()), 0)

    def _checkStockSchema(self, ups, values_are_none=False):
        """Assert *ups* matches _SCHEMA in types and (optionally) values.

        With values_are_none=True every property is expected to exist with
        the right type but a None value (sheet built from schema only).
        """
        self.assertFalse(ups.hasProperty('x'))
        self.assertTrue(ups.hasProperty('s'))
        self.assertTrue(ups.hasProperty('i'))
        self.assertTrue(ups.hasProperty('f'))
        self.assertTrue(ups.hasProperty('n'))
        self.assertTrue(ups.hasProperty('d'))
        self.assertTrue(ups.hasProperty('l'))
        self.assertTrue(ups.hasProperty('t'))
        self.assertTrue(ups.hasProperty('b'))
        self.assertTrue(ups.hasProperty('img'))

        # -- string property -------------------------------------------------
        self.assertEqual(ups.getPropertyType('s'), 'string')
        self.assertEqual(ups.propertyInfo('s')['type'], 'string')
        if values_are_none:
            self.assertEqual(ups.getProperty('s'), None)
        else:
            self.assertEqual(ups.getProperty('s'), self._STRING_VALUE)

        # -- int property ----------------------------------------------------
        self.assertEqual(ups.getPropertyType('i'), 'int')
        self.assertEqual(ups.propertyInfo('i')['type'], 'int')
        if values_are_none:
            self.assertEqual(ups.getProperty('i'), None)
        else:
            self.assertEqual(ups.getProperty('i'), self._INT_VALUE)

        # -- float property --------------------------------------------------
        self.assertEqual(ups.getPropertyType('f'), 'float')
        self.assertEqual(ups.propertyInfo('f')['type'], 'float')
        if values_are_none:
            self.assertEqual(ups.getProperty('f'), None)
        else:
            self.assertEqual(ups.getProperty('f'), self._FLOAT_VALUE)

        # -- long/int property (version-dependent type name) -----------------
        self.assertEqual(ups.getPropertyType('n'), self._LONG_TYPE)
        self.assertEqual(ups.propertyInfo('n')['type'], self._LONG_TYPE)
        if values_are_none:
            self.assertEqual(ups.getProperty('n'), None)
        else:
            self.assertEqual(ups.getProperty('n'), self._LONG_VALUE)

        # -- date property ---------------------------------------------------
        self.assertEqual(ups.getPropertyType('d'), 'date')
        self.assertEqual(ups.propertyInfo('d')['type'], 'date')
        if values_are_none:
            self.assertEqual(ups.getProperty('d'), None)
        else:
            self.assertEqual(ups.getProperty('d'), self._DATE_VALUE)

        # -- boolean property ------------------------------------------------
        self.assertEqual(ups.getPropertyType('b'), 'boolean')
        self.assertEqual(ups.propertyInfo('b')['type'], 'boolean')
        if values_are_none:
            self.assertEqual(ups.getProperty('b'), None)
        else:
            self.assertEqual(ups.getProperty('b'), self._BOOL_VALUE)

        # -- 'lines' property supplied as a list: returned as a tuple --------
        self.assertEqual(ups.getPropertyType('l'), 'lines')
        self.assertEqual(ups.propertyInfo('l')['type'], 'lines')
        if values_are_none:
            self.assertEqual(ups.getProperty('l'), None)
        else:
            got = ups.getProperty('l')
            self.assertEqual(type(got), type(()))
            self.assertEqual(len(got), len(self._LIST_VALUE))
            for i in range(len(self._LIST_VALUE)):
                self.assertEqual(got[i], self._LIST_VALUE[i])

        # -- 'lines' property supplied as a tuple ----------------------------
        self.assertEqual(ups.getPropertyType('t'), 'lines')
        self.assertEqual(ups.propertyInfo('t')['type'], 'lines')
        if values_are_none:
            self.assertEqual(ups.getProperty('t'), None)
        else:
            got = ups.getProperty('t')
            self.assertEqual(type(got), type(()))
            self.assertEqual(len(got), len(self._TUPLE_VALUE))
            for i in range(len(self._TUPLE_VALUE)):
                self.assertEqual(got[i], self._TUPLE_VALUE[i])

        # -- image property --------------------------------------------------
        self.assertEqual(ups.getPropertyType('img'), 'image')
        self.assertEqual(ups.propertyInfo('img')['type'], 'image')
        if values_are_none:
            self.assertEqual(ups.getProperty('img'), None)
        else:
            got = ups.getProperty('img')
            self.assertEqual(type(got), Image)
            self.assertEqual(got.size, self._IMG_VALUE.size)
            self.assertEqual(got, self._IMG_VALUE)

        # The property map must mirror _SCHEMA exactly; mode '' means the
        # sheet is read-only with no delete permission.
        pmap = ups.propertyMap()
        self.assertEqual(len(pmap), len(self._SCHEMA))
        for i in range(len(pmap)):
            info = pmap[i]
            spec = [x for x in self._SCHEMA if x[0] == info['id']][0]
            self.assertEqual(info['id'], spec[0])
            self.assertEqual(info['type'], spec[1])
            self.assertEqual(info['mode'], '')  # readonly, no delete

    def test_ctor__guessSchema(self):
        """Without an explicit schema, types are guessed from the values."""
        ups = self._makeOne('guessed', s=self._STRING_VALUE,
                            i=self._INT_VALUE, f=self._FLOAT_VALUE,
                            n=self._LONG_VALUE, d=self._DATE_VALUE,
                            l=self._LIST_VALUE, t=self._TUPLE_VALUE,  # noqa
                            b=self._BOOL_VALUE, img=self._IMG_VALUE)
        self._checkStockSchema(ups)

    def test_ctor_w_schema(self):
        """An explicit schema plus matching values yields the same sheet."""
        ups = self._makeOne('w_schema', self._SCHEMA, s=self._STRING_VALUE,
                            i=self._INT_VALUE, f=self._FLOAT_VALUE,
                            n=self._LONG_VALUE, d=self._DATE_VALUE,
                            l=self._LIST_VALUE, t=self._TUPLE_VALUE,  # noqa
                            b=self._BOOL_VALUE, img=self._IMG_VALUE)
        self._checkStockSchema(ups)

    def test_ctor_w_schema_no_values(self):
        """A schema with no values produces typed properties that are None."""
        ups = self._makeOne('w_schema', self._SCHEMA)
        self._checkStockSchema(ups, values_are_none=True)
def undoable_transactions(self, first_transaction=None,
                          last_transaction=None,
                          PrincipiaUndoBatchSize=None):
    """Return a batch of undoable transaction records for this object.

    Any argument left as None is resolved from the request (or an
    attribute) via _get_request_var_or_attr, with defaults of 0 for the
    first transaction and a batch size of 20.  Each returned record's
    'time' is converted to a DateTime and its 'id' is rewritten into a
    display string combining the base64-encoded transaction id, the
    timestamp and a (possibly truncated) description.
    """
    if first_transaction is None:
        first_transaction = self._get_request_var_or_attr(
            'first_transaction', 0)
    if PrincipiaUndoBatchSize is None:
        PrincipiaUndoBatchSize = self._get_request_var_or_attr(
            'PrincipiaUndoBatchSize', 20)
    if last_transaction is None:
        last_transaction = self._get_request_var_or_attr(
            'last_transaction',
            first_transaction + PrincipiaUndoBatchSize)
    spec = {}
    # A user is allowed to undo transactions that were initiated
    # by any member of a user folder in the place where the user
    # is defined.
    user = getSecurityManager().getUser()
    user_parent = aq_parent(user)
    if user_parent is not None:
        # Path of the folder containing the user folder, without the
        # root segment -- used as a prefix filter on 'user_name'.
        path = '/'.join(user_parent.getPhysicalPath()[1:-1])
    else:
        path = ''
    if path:
        spec['user_name'] = Prefix(path)
    if getattr(aq_parent(aq_inner(self)), '_p_jar', None) == self._p_jar:
        # We only want to undo things done here (and not in mounted
        # databases)
        opath = '/'.join(self.getPhysicalPath())
    else:
        # Special case: at the root of a database,
        # allow undo of any path.
        opath = None
    if opath:
        spec['description'] = Prefix(opath)
    r = self._p_jar.db().undoInfo(first_transaction, last_transaction,
                                  spec)
    for d in r:
        d['time'] = t = DateTime(d['time'])
        desc = d['description']
        tid = d['id']
        if desc:
            # First word (typically the path) is shown separately; the
            # remainder is truncated to keep the display id short.
            desc = desc.split()
            d1 = desc[0]
            desc = ' '.join(desc[1:])
            if len(desc) > 60:
                desc = desc[:56] + ' ...'
            tid = "%s %s %s %s" % (encode64(tid), t, d1, desc)
        else:
            tid = "%s %s" % (encode64(tid), t)
        d['id'] = tid
    return r
def modifyRequest(self, req, resp):
    """Copies cookie-supplied credentials to the basic auth fields.

    Returns a flag indicating what the user is trying to do with
    cookies: ATTEMPT_NONE, ATTEMPT_LOGIN, or ATTEMPT_RESUME.  If
    cookie login is disabled for this request, raises
    CookieCrumblerDisabled.
    """
    # Cookie auth only applies to ordinary browser requests; anything
    # else (non-HTTPRequest, unusual verbs, WEBDAV source port) opts out.
    if (req.__class__ is not HTTPRequest
            or not req['REQUEST_METHOD'] in ('HEAD', 'GET', 'PUT', 'POST')
            or req.environ.has_key('WEBDAV_SOURCE_PORT')):
        raise CookieCrumblerDisabled

    # attempt may contain information about an earlier attempt to
    # authenticate using a higher-up cookie crumbler within the
    # same request.
    attempt = getattr(req, '_cookie_auth', ATTEMPT_NONE)

    if attempt == ATTEMPT_NONE:
        if req._auth:
            # An auth header was provided and no cookie crumbler
            # created it.  The user must be using basic auth.
            raise CookieCrumblerDisabled

        if req.has_key(self.pw_cookie) and req.has_key(self.name_cookie):
            # Attempt to log in and set cookies.
            attempt = ATTEMPT_LOGIN
            name = req[self.name_cookie]
            pw = req[self.pw_cookie]
            # Build a basic-auth token from the submitted credentials.
            ac = encodestring('%s:%s' % (name, pw)).rstrip()
            self._setAuthHeader(ac, req, resp)
            if req.get(self.persist_cookie, 0):
                # Persist the user name (but not the pw or session)
                expires = (DateTime() + 365).toZone('GMT').rfc822()
                resp.setCookie(self.name_cookie, name,
                               path=self.getCookiePath(),
                               expires=expires)
            else:
                # Expire the user name
                resp.expireCookie(self.name_cookie,
                                  path=self.getCookiePath())
            method = self.getCookieMethod('setAuthCookie',
                                          self.defaultSetAuthCookie)
            method(resp, self.auth_cookie, quote(ac))
            # Scrub the raw credentials from the request.
            self.delRequestVar(req, self.name_cookie)
            self.delRequestVar(req, self.pw_cookie)

        elif req.has_key(self.auth_cookie):
            # Attempt to resume a session if the cookie is valid.
            # Copy __ac to the auth header.
            ac = unquote(req[self.auth_cookie])
            if ac and ac != 'deleted':
                try:
                    # Validation only: a cookie that does not base64-decode
                    # is ignored rather than treated as an error.
                    decodestring(ac)
                except:
                    # Not a valid auth header.
                    pass
                else:
                    attempt = ATTEMPT_RESUME
                    self._setAuthHeader(ac, req, resp)
                    self.delRequestVar(req, self.auth_cookie)
                    method = self.getCookieMethod('twiddleAuthCookie', None)
                    if method is not None:
                        method(resp, self.auth_cookie, quote(ac))

    # Remember the outcome so nested cookie crumblers skip re-processing.
    req._cookie_auth = attempt
    return attempt
class SyndicationTool(UniqueObject, SimpleItem):
    """ The syndication tool manages the site-wide policy for
        syndication of folder content as RSS.
    """
    # NOTE(review): several methods below raise plain strings (e.g.
    # raise 'Syndication is Disabled').  String exceptions are not
    # supported on Python >= 2.6 -- confirm the target runtime before
    # relying on these error paths.

    implements(ISyndicationTool)

    id = 'portal_syndication'
    meta_type = 'Default Syndication Tool'
    security = ClassSecurityInfo()

    #Default Sitewide Values
    isAllowed = 0
    syUpdatePeriod = 'daily'
    syUpdateFrequency = 1
    # Evaluated once at class-definition (import) time.
    syUpdateBase = DateTime()
    max_items = 15

    #ZMI Methods
    manage_options = (({'label': 'Overview', 'action': 'overview',
                        'help': ('CMFDefault',
                                 'Syndication-Tool_Overview.stx')},
                       {'label': 'Properties', 'action': 'propertiesForm',
                        'help': ('CMFDefault',
                                 'Syndication-Tool_Properties.stx')},
                       {'label': 'Policies', 'action': 'policiesForm',
                        'help': ('CMFDefault',
                                 'Syndication-Tool_Policies.stx')},
                       {'label': 'Reports', 'action': 'reportForm',
                        'help': ('CMFDefault',
                                 'Syndication-Tool_Reporting.stx')}))

    security.declareProtected(ManagePortal, 'overview')
    overview = HTMLFile('synOverview', _dtmldir)

    security.declareProtected(ManagePortal, 'propertiesForm')
    propertiesForm = HTMLFile('synProps', _dtmldir)

    security.declareProtected(ManagePortal, 'policiesForm')
    policiesForm = HTMLFile('synPolicies', _dtmldir)

    security.declareProtected(ManagePortal, 'reportForm')
    reportForm = HTMLFile('synReports', _dtmldir)

    security.declareProtected(ManagePortal, 'editProperties')
    def editProperties(self, updatePeriod=None, updateFrequency=None,
                       updateBase=None, isAllowed=None, max_items=None,
                       REQUEST=None):
        """ Edit the properties for the SystemWide defaults on the
            SyndicationTool.
        """
        # For each property, a None argument removes any instance-level
        # override so the class-level default shows through again.
        if isAllowed is not None:
            self.isAllowed = isAllowed
        if updatePeriod is not None:
            self.syUpdatePeriod = updatePeriod
        else:
            try:
                del self.syUpdatePeriod
            except (AttributeError, KeyError):
                pass
        if updateFrequency is not None:
            self.syUpdateFrequency = int(updateFrequency)
        else:
            try:
                del self.syUpdateFrequency
            except (AttributeError, KeyError):
                pass
        if updateBase is not None:
            # Accept a date string and coerce it to a DateTime.
            if type(updateBase) is type(''):
                updateBase = DateTime(updateBase)
            self.syUpdateBase = updateBase
        else:
            try:
                del self.syUpdateBase
            except (AttributeError, KeyError):
                pass
        if max_items is not None:
            self.max_items = int(max_items)
        else:
            try:
                del self.max_items
            except (AttributeError, KeyError):
                pass
        if REQUEST is not None:
            REQUEST['RESPONSE'].redirect(
                self.absolute_url() + '/propertiesForm'
                + '?manage_tabs_message=Tool+Updated.')

    security.declarePublic('editSyInformationProperties')
    def editSyInformationProperties(self, obj, updatePeriod=None,
                                    updateFrequency=None, updateBase=None,
                                    max_items=None, REQUEST=None):
        """ Edit syndication properties for the obj being passed in.
            These are held on the syndication_information object.
            Not Sitewide Properties.
        """
        if not _checkPermission(ManageProperties, obj):
            raise AccessControl_Unauthorized
        syInfo = getattr(obj, 'syndication_information', None)
        if syInfo is None:
            raise 'Syndication is Disabled'
        # Unspecified values fall back to the site-wide defaults.
        if updatePeriod is not None:
            syInfo.syUpdatePeriod = updatePeriod
        else:
            syInfo.syUpdatePeriod = self.syUpdatePeriod
        if updateFrequency is not None:
            syInfo.syUpdateFrequency = int(updateFrequency)
        else:
            syInfo.syUpdateFrequency = self.syUpdateFrequency
        if updateBase is not None:
            if type(updateBase) is type(''):
                updateBase = DateTime(updateBase)
            syInfo.syUpdateBase = updateBase
        else:
            syInfo.syUpdateBase = self.syUpdateBase
        if max_items is not None:
            syInfo.max_items = int(max_items)
        else:
            syInfo.max_items = self.max_items

    security.declarePublic('enableSyndication')
    def enableSyndication(self, obj):
        """ Enable syndication for the obj """
        if not self.isSiteSyndicationAllowed():
            raise 'Syndication is Disabled'
        if hasattr(aq_base(obj), 'syndication_information'):
            raise 'Syndication Information Exists'
        # Seed the new info object from the site-wide defaults.
        syInfo = SyndicationInformation()
        obj._setObject('syndication_information', syInfo)
        syInfo = obj._getOb('syndication_information')
        syInfo.syUpdatePeriod = self.syUpdatePeriod
        syInfo.syUpdateFrequency = self.syUpdateFrequency
        syInfo.syUpdateBase = self.syUpdateBase
        syInfo.max_items = self.max_items
        syInfo.description = "Channel Description"

    security.declarePublic('disableSyndication')
    def disableSyndication(self, obj):
        """ Disable syndication for the obj; and remove it.
        """
        syInfo = getattr(obj, 'syndication_information', None)
        if syInfo is None:
            raise 'This object does not have Syndication Information'
        obj._delObject('syndication_information')

    security.declarePublic('getSyndicatableContent')
    def getSyndicatableContent(self, obj):
        """ An interface for allowing folderish items to implement an
            equivalent of PortalFolderBase.contentValues()
        """
        # Objects may supply their own feed contents via synContentValues.
        if hasattr(obj, 'synContentValues'):
            values = obj.synContentValues()
        else:
            values = PortalFolderBase.contentValues(obj)
        return values

    security.declarePublic('buildUpdatePeriods')
    def buildUpdatePeriods(self):
        """ Return a list of possible update periods for the xmlns: sy
        """
        updatePeriods = (('hourly', 'Hourly'),
                         ('daily', 'Daily'),
                         ('weekly', 'Weekly'),
                         ('monthly', 'Monthly'),
                         ('yearly', 'Yearly'))
        return updatePeriods

    security.declarePublic('isSiteSyndicationAllowed')
    def isSiteSyndicationAllowed(self):
        """ Return sitewide syndication policy """
        return self.isAllowed

    security.declarePublic('isSyndicationAllowed')
    def isSyndicationAllowed(self, obj=None):
        """ Check whether syndication is enabled for the site.  This
            provides for extending the method to check for whether a
            particular obj is enabled, allowing for turning on only
            specific folders for syndication.
        """
        syInfo = getattr(aq_base(obj), 'syndication_information', None)
        if syInfo is None:
            return 0
        else:
            return self.isSiteSyndicationAllowed()

    security.declarePublic('getUpdatePeriod')
    def getUpdatePeriod(self, obj=None):
        """ Return the update period for the RSS syn namespace.
            This is either on the object being passed or the
            portal_syndication tool (if a sitewide value or default
            is set)

            NOTE:  Need to add checks for sitewide policies!!!
        """
        if not self.isSiteSyndicationAllowed():
            raise 'Syndication is Not Allowed'
        if obj is None:
            return self.syUpdatePeriod
        syInfo = getattr(obj, 'syndication_information', None)
        if syInfo is not None:
            return syInfo.syUpdatePeriod
        else:
            # NOTE(review): returns an error *string* here rather than
            # raising, unlike the site-disabled case above.
            return 'Syndication is Not Allowed'

    security.declarePublic('getUpdateFrequency')
    def getUpdateFrequency(self, obj=None):
        """ Return the update frequency (as a positive integer) for
            the syn namespace.  This is either on the object being
            pass or the portal_syndication tool (if a sitewide value
            or default is set).

            Note:  Need to add checks for sitewide policies!!!
        """
        if not self.isSiteSyndicationAllowed():
            raise 'Syndication is not Allowed'
        if obj is None:
            return self.syUpdateFrequency
        syInfo = getattr(obj, 'syndication_information', None)
        if syInfo is not None:
            return syInfo.syUpdateFrequency
        else:
            return 'Syndication is not Allowed'

    security.declarePublic('getUpdateBase')
    def getUpdateBase(self, obj=None):
        """ Return the base date to be used with the update frequency
            and the update period to calculate a publishing schedule.

            Note:  I'm not sure what's best here, creation date, last
            modified date (of the folder being syndicated) or some
            arbitrary date.  For now, I'm going to build a updateBase
            time from zopetime and reformat it to meet the W3CDTF.
            Additionally, sitewide policy checks might have a place
            here...
        """
        if not self.isSiteSyndicationAllowed():
            raise 'Syndication is not Allowed'
        if obj is None:
            when = self.syUpdateBase
            return when.ISO()
        syInfo = getattr(obj, 'syndication_information', None)
        if syInfo is not None:
            when = syInfo.syUpdateBase
            return when.ISO()
        else:
            return 'Syndication is not Allowed'

    security.declarePublic('getHTML4UpdateBase')
    def getHTML4UpdateBase(self, obj=None):
        """ Return HTML4 formated UpdateBase DateTime """
        if not self.isSiteSyndicationAllowed():
            raise 'Syndication is not Allowed'
        if obj is None:
            when = self.syUpdateBase
            return when.HTML4()
        syInfo = getattr(obj, 'syndication_information', None)
        if syInfo is not None:
            when = syInfo.syUpdateBase
            return when.HTML4()
        else:
            return 'Syndication is not Allowed'

    def getMaxItems(self, obj=None):
        """ Return the max_items to be displayed in the syndication
        """
        if not self.isSiteSyndicationAllowed():
            raise 'Syndication is not Allowed'
        if obj is None:
            return self.max_items
        syInfo = getattr(obj, 'syndication_information', None)
        if syInfo is not None:
            return syInfo.max_items
        else:
            return 'Syndication is not Allowed'
def test15_storageStatistics(self):
    """Populate the histories storage and check zmi_getStorageStatistics.

    Builds four histories: one with four saved versions, two single-
    version objects (one effective tomorrow, one expired yesterday) and
    one public object, then compares the full statistics dictionary.
    """
    self.maxDiff = None
    portal_storage = self.portal.portal_historiesstorage

    # History 1: register an initial version, then save three more.
    cmf_uid = 1
    obj1 = CMFDummy('obj', cmf_uid)
    obj1.text = 'v1 of text'
    portal_storage.register(cmf_uid, ObjectData(obj1),
                            metadata=self.buildMetadata('saved v1'))
    obj2 = CMFDummy('obj', cmf_uid)
    obj2.text = 'v2 of text'
    portal_storage.save(cmf_uid, ObjectData(obj2),
                        metadata=self.buildMetadata('saved v2'))
    obj3 = CMFDummy('obj', cmf_uid)
    obj3.text = 'v3 of text'
    portal_storage.save(cmf_uid, ObjectData(obj3),
                        metadata=self.buildMetadata('saved v3'))
    obj4 = CMFDummy('obj', cmf_uid)
    obj4.text = 'v4 of text'
    # Only the last version is also added to the portal and cataloged.
    self.portal._setObject('obj', obj4)
    self.portal.portal_catalog.indexObject(self.portal.obj)
    portal_storage.save(cmf_uid, ObjectData(obj4),
                        metadata=self.buildMetadata('saved v4'))

    # History 2: single version, not yet effective.
    cmf_uid = 2
    tomorrow = DateTime() + 1
    obj5 = CMFDummy('tomorrow', cmf_uid, effective=tomorrow)
    obj5.allowedRolesAndUsers = ['Anonymous']
    self.portal._setObject('tomorrow', obj5)
    self.portal.portal_catalog.indexObject(self.portal.tomorrow)
    portal_storage.register(
        cmf_uid, ObjectData(obj5),
        metadata=self.buildMetadata('effective tomorrow'))

    # History 3: single version, already expired.
    cmf_uid = 3
    yesterday = DateTime() - 1
    obj6 = CMFDummy('yesterday', cmf_uid, expires=yesterday)
    obj6.allowedRolesAndUsers = ['Anonymous']
    self.portal._setObject('yesterday', obj6)
    self.portal.portal_catalog.indexObject(self.portal.yesterday)
    portal_storage.register(
        cmf_uid, ObjectData(obj6),
        metadata=self.buildMetadata('expired yesterday'))

    # History 4: single public version.
    cmf_uid = 4
    obj7 = CMFDummy('public', cmf_uid)
    obj7.text = 'visible for everyone'
    obj7.allowedRolesAndUsers = ['Anonymous']
    self.portal._setObject('public', obj7)
    self.portal.portal_catalog.indexObject(self.portal.public)
    portal_storage.register(cmf_uid, ObjectData(obj7),
                            metadata=self.buildMetadata('saved public'))

    got = portal_storage.zmi_getStorageStatistics()
    # NOTE(review): the 'size' values assert an approximate pickle size
    # and may be sensitive to serialization changes.
    expected = {
        'deleted': [],
        'summaries': {
            'totalHistories': 4,
            'deletedVersions': 0,
            'existingVersions': 7,
            'deletedHistories': 0,
            'time': '0.00',
            'totalVersions': 7,
            'existingAverage': '1.8',
            'existingHistories': 4,
            'deletedAverage': 'n/a',
            'totalAverage': '1.8'
        },
        'existing': [{
            'url': 'http://nohost/plone/obj',
            'history_id': 1,
            'length': 4,
            'path': '/obj',
            'sizeState': 'approximate',
            'portal_type': 'Dummy',
            'size': 1718
        }, {
            'url': 'http://nohost/plone/tomorrow',
            'history_id': 2,
            'length': 1,
            'path': '/tomorrow',
            'sizeState': 'approximate',
            'portal_type': 'Dummy',
            'size': 555
        }, {
            'url': 'http://nohost/plone/yesterday',
            'history_id': 3,
            'length': 1,
            'path': '/yesterday',
            'sizeState': 'approximate',
            'portal_type': 'Dummy',
            'size': 557
        }, {
            'url': 'http://nohost/plone/public',
            'history_id': 4,
            'length': 1,
            'path': '/public',
            'sizeState': 'approximate',
            'portal_type': 'Dummy',
            'size': 557
        }]
    }
    self.assertEqual(expected, got)
def ZopeFindAndApply(self, obj, obj_ids=None, obj_metatypes=None,
                     obj_searchterm=None, obj_expr=None, obj_mtime=None,
                     obj_mspec=None, obj_permission=None, obj_roles=None,
                     search_sub=0, REQUEST=None, result=None, pre='',
                     apply_func=None, apply_path=''):
    """Zope Find interface and apply.

    Walks the children of ``obj`` (recursively when ``search_sub`` is
    true), collecting ``(relative_path, object)`` pairs that satisfy all
    of the supplied filters into ``result``; when ``apply_func`` is
    given, each match is handed to ``apply_func(ob, apply_path + '/' +
    path)`` instead of being collected.

    Filters: id membership (``obj_ids``), meta_type membership
    (``obj_metatypes``, 'all' disables the filter), full-text search
    term (``obj_searchterm``, matched against PrincipiaSearchSource
    and/or SearchableText), a TALES-style expression (``obj_expr``),
    modification time (``obj_mtime``/``obj_mspec``) and permission/role
    matching (``obj_permission``/``obj_roles``).

    Returns the ``result`` list (the one passed in, if any).
    """
    if result is None:
        result = []
    if obj_metatypes and 'all' in obj_metatypes:
        obj_metatypes = None
    if obj_mtime and isinstance(obj_mtime, str):
        obj_mtime = DateTime(obj_mtime).timeTime()
    if obj_permission:
        obj_permission = getPermissionIdentifier(obj_permission)
    if obj_roles and isinstance(obj_roles, str):
        obj_roles = [obj_roles]
    if obj_expr:
        # Setup expr machinations
        md = td()
        obj_expr = (Eval(obj_expr), md, md._push, md._pop)
    base = aq_base(obj)
    if not hasattr(base, 'objectItems'):
        return result
    try:
        items = obj.objectItems()
    except Exception:
        return result
    try:
        add_result = result.append
    except Exception:
        raise AttributeError(repr(result))
    for id, ob in items:
        if pre:
            p = "%s/%s" % (pre, id)
        else:
            p = id
        # dflag: the object was a ZODB ghost before we touched it, so
        # deactivate it again afterwards unless it became a result.
        dflag = 0
        if hasattr(ob, '_p_changed') and (ob._p_changed is None):
            dflag = 1
        bs = aq_base(ob)
        if obj_searchterm:
            if isinstance(obj_searchterm, TaintedString):
                obj_searchterm = str(obj_searchterm)
            if not isinstance(obj_searchterm, str):
                obj_searchterm = obj_searchterm.decode(
                    default_encoding)
            # BUGFIX: initialize both search sources up front.  The old
            # code attached an 'else: pss = st = ""' to the
            # SearchableText check only, which clobbered an
            # already-computed PrincipiaSearchSource for objects that
            # provide PrincipiaSearchSource but not SearchableText --
            # such objects could never match the search term.
            pss = st = ''
            if hasattr(ob, 'PrincipiaSearchSource'):
                pss = ob.PrincipiaSearchSource()
                if not isinstance(pss, str):
                    try:
                        pss = pss.decode(default_encoding)
                    except UnicodeDecodeError:
                        # Undecodable search source: treat as no text.
                        pss = ''
            if hasattr(ob, 'SearchableText'):
                st = ob.SearchableText()
                if not isinstance(st, str):
                    try:
                        st = st.decode(default_encoding)
                    except UnicodeDecodeError:
                        st = ''
        if ((not obj_ids or absattr(bs.getId()) in obj_ids) and
                (not obj_metatypes or (hasattr(bs, 'meta_type') and
                                       bs.meta_type in obj_metatypes)) and
                (not obj_searchterm or
                 (hasattr(ob, 'PrincipiaSearchSource') and
                  obj_searchterm in pss) or
                 (hasattr(ob, 'SearchableText') and
                  obj_searchterm in st)) and
                (not obj_expr or expr_match(ob, obj_expr)) and
                (not obj_mtime or
                 mtime_match(ob, obj_mtime, obj_mspec)) and
                ((not obj_permission or not obj_roles) or
                 role_match(ob, obj_permission, obj_roles))):
            if apply_func:
                apply_func(ob, (apply_path + '/' + p))
            else:
                add_result((p, ob))
                # Keep matched objects loaded for the caller.
                dflag = 0
        if search_sub and hasattr(bs, 'objectItems'):
            self.ZopeFindAndApply(ob, obj_ids, obj_metatypes,
                                  obj_searchterm, obj_expr, obj_mtime,
                                  obj_mspec, obj_permission, obj_roles,
                                  search_sub, REQUEST, result, p,
                                  apply_func, apply_path)
        if dflag:
            ob._p_deactivate()
    return result