def set_cookie(self, key, value, expires=None, path='/', across_domain_cookie_=False, RESPONSE=None, **kw):
    """ set a cookie in REQUEST

        'across_domain_cookie_' sets the cookie across all subdomains eg.
        www.mobilexpenses.com and mobile.mobilexpenses.com etc. This rule
        will only apply if the current domain name plus sub domain
        contains at least two dots.
    """
    # Normalise 'expires' into an RFC822 date string.
    if expires is None:
        # Default lifetime: one year from now.
        then = (DateTime() + 365).rfc822()
    elif isinstance(expires, int):
        # Integer means "days from now".
        then = (DateTime() + expires).rfc822()
    elif type(expires) == DateTimeType:
        # convert it to RFC822()
        then = expires.rfc822()
    else:
        # Assume the caller already passed a formatted date string.
        then = expires
    # Widen the cookie to '.domainname.com' when requested and the
    # caller did not pin an explicit domain.
    if across_domain_cookie_ and not kw.get('domain'):
        cookie_domain = self._getCookieDomain()
        if cookie_domain:
            kw['domain'] = cookie_domain
    if RESPONSE is None:
        RESPONSE = self.REQUEST.RESPONSE
    RESPONSE.setCookie(key, value, expires=then, path=path, **kw)
def test_mixed( self ):
    # A ContentFilter combining a 'created' upper bound (range:max) with
    # a Title substring match must only accept objects satisfying both.
    from Products.CMFCore.PortalFolder import ContentFilter
    creation_date = DateTime('2001/01/01')
    tz = creation_date.timezone()
    cfilter = ContentFilter( created=DateTime( '2001/01/01' )
                           , created_usage='range:max'
                           , Title='foo'
                           )
    dummy = self.dummy
    # Neither criterion satisfied yet.
    self.failIf(cfilter(dummy))
    # Date satisfied, but the title still does not contain 'foo'.
    dummy.created_date = DateTime( '2000/12/31' )
    self.failIf(cfilter(dummy))
    dummy.created_date = DateTime( '2001/12/31' )
    self.failIf(cfilter(dummy))
    dummy.created_date = DateTime( '2001/01/01' )
    self.failIf(cfilter(dummy))
    # Title now matches ('foo' is a substring of 'ohsofoolish'), but the
    # creation date is gone again.
    dummy.title = 'ohsofoolish'
    del dummy.created_date
    self.failIf(cfilter(dummy))
    # Title matches and the date is within the max bound -> accepted.
    dummy.created_date = DateTime( '2000/12/31' )
    self.failUnless(cfilter(dummy))
    # Date after the max bound -> rejected despite the matching title.
    dummy.created_date = DateTime( '2001/12/31' )
    self.failIf(cfilter(dummy))
    # Boundary date counts as "on or before" -> accepted.
    dummy.created_date = DateTime( '2001/01/01' )
    self.failUnless(cfilter(dummy))
    # The filter's string form must describe both criteria.
    desc = str( cfilter )
    lines = desc.split('; ')
    self.assertEquals(len(lines), 2)
    self.failUnless('Created before: 2001/01/01 00:00:00 %s' % tz in lines)
    self.failUnless('Title: foo' in lines)
def test_04_Every3Hours(self, quiet=0, run=run_all_test):
    """Check that an alarm with a 3-hour periodicity advances its alarm
    date in exact 3-hour steps from the periodicity start date.
    """
    if not run: return
    # Bind 'message' unconditionally: the LOG() calls below use it even
    # in quiet mode (the original bound it only when quiet was false,
    # which raised NameError for quiet=1).
    message = 'Test Every 3 Hours'
    if not quiet:
        ZopeTestCase._print('\n%s ' % message)
        LOG('Testing... ',0,message)
    alarm = self.newAlarm(enabled=True)
    # Align 'now' (UTC) on a 3-hour boundary so it sits on the
    # periodicity grid.
    now = DateTime().toZone('UTC')
    hour_to_remove = now.hour() % 3
    now = addToDate(now, hour=-hour_to_remove)
    date = addToDate(now, day=2)
    alarm.setPeriodicityStartDate(date)
    alarm.setPeriodicityHourFrequency(3)
    self.tic()
    # Before the start date, the alarm fires at the start date itself.
    alarm.setNextAlarmDate(current_date=now)
    self.assertEqual(alarm.getAlarmDate(), date)
    LOG(message + ' now :', 0, now)
    now = addToDate(now, day=2)
    LOG(message + ' now :', 0, now)
    # At the start date, the next occurrence is 3 hours later.
    alarm.setNextAlarmDate(current_date=now)
    next_date = addToDate(date, hour=3)
    self.assertEqual(alarm.getAlarmDate(), next_date)
    # An off-grid 'now' (extra minutes/seconds) must not shift the grid.
    now = addToDate(now, hour=3, minute=7, second=4)
    alarm.setNextAlarmDate(current_date=now)
    next_date = addToDate(next_date, hour=3)
    self.assertEqual(alarm.getAlarmDate(), next_date)
def _calculate_two_weeks():
    """Build a 2x7 grid (this week's row and next week's row) of day
    cells; each cell holds the day number and, under the key 'its', any
    Milestone brains whose 'end' date falls on that day.
    """
    two_weeks = [[None] * 7, [None] * 7]
    brains = portal_catalog.searchResults(
        {'path': path,
         'portal_type': 'Milestone',
         'end': {"query": [today, today + 14], "range": "minmax"},
         },
    )
    # Group brains by their end date truncated to midnight, so lookup by
    # day works regardless of the stored time-of-day.
    _upcoming = {}
    for b in brains:
        end = b.end
        end = DateTime(end.year(), end.month(), end.day())
        _upcoming.setdefault(end, []).append(b)
    day = today
    for i in range(2):
        for j in range(7):
            two_weeks[i][j] = {}
            two_weeks[i][j]["day"] = day.day()
            # 'in' instead of the deprecated dict.has_key() (removed in
            # Python 3); behaviour is identical.
            if day in _upcoming:
                two_weeks[i][j]["its"] = _upcoming[day]
            day += 1
    # Label today and tomorrow specially.
    nextday = today + 1
    two_weeks[0][0]["day"] = "TODAY"
    two_weeks[0][0]["is_today"] = True
    two_weeks[0][1]["day"] = "%s %d" % (nextday.aMonth(), nextday.day())
    return two_weeks
def test_12_Every5Minutes(self, quiet=0, run=run_all_test):
    """Check that an alarm with a 5-minute periodicity advances its
    alarm date in exact 5-minute steps from the periodicity start date.
    """
    if not run: return
    # Bind 'message' unconditionally: the LOG() calls below use it even
    # in quiet mode (the original bound it only when quiet was false,
    # which raised NameError for quiet=1).
    message = 'Test Every 5 Minutes'
    if not quiet:
        ZopeTestCase._print('\n%s ' % message)
        LOG('Testing... ',0,message)
    alarm = self.newAlarm(enabled=True)
    # Align 'now' on a 5-minute boundary so it sits on the grid.
    now = DateTime()
    minute_to_remove = now.minute() % 5
    now = addToDate(now, minute=-minute_to_remove)
    date = addToDate(now, day=2)
    alarm.setPeriodicityStartDate(date)
    alarm.setPeriodicityMinuteFrequency(5)
    self.tic()
    # Before the start date, the alarm fires at the start date itself.
    alarm.setNextAlarmDate(current_date=now)
    self.assertEqual(alarm.getAlarmDate(), date)
    LOG(message + ' now :', 0, now)
    now = addToDate(now, day=2)
    LOG(message + ' now :', 0, now)
    # At the start date, the next occurrence is 5 minutes later.
    alarm.setNextAlarmDate(current_date=now)
    next_date = addToDate(date, minute=5)
    self.assertEqual(alarm.getAlarmDate(), next_date)
    # An off-grid 'now' (extra seconds) must not shift the grid.
    now = addToDate(now, minute=5, second=14)
    alarm.setNextAlarmDate(current_date=now)
    next_date = addToDate(next_date, minute=5)
    self.assertEqual(alarm.getAlarmDate(), next_date)
def testTZ2(self):
    # Time zone manipulation test 2: converting a moment to GMT must not
    # change its seconds component.
    original = DateTime()
    in_gmt = original.toZone('GMT')
    sec_original = original.second()
    sec_gmt = in_gmt.second()
    self.assertEqual(sec_original, sec_gmt,
                     (original, in_gmt, sec_original, sec_gmt))
def testEDTTimezone(self):
    # should be able to parse EDT timezones: see lp:599856.
    parsed = DateTime("Mon, 28 Jun 2010 10:12:25 EDT")
    self.assertEqual(parsed.Day(), 'Monday')
    self.assertEqual(parsed.day(), 28)
    self.assertEqual(parsed.Month(), 'June')
    # EDT is normalised to its fixed GMT offset.
    self.assertEqual(parsed.timezone(), 'GMT-4')
def testConstructor6(self):
    # Constructor from year and julian date.
    # This test must normalize the time zone, or it *will* break when
    # DST changes!
    from_julian = DateTime(2000, 5.500000578705)
    from_string = DateTime('2000/1/5 12:00:00.050 pm %s'
                           % from_julian.localZone())
    self._compare(from_string, from_julian)
def testDayOfWeek(self):
    # Compare to the datetime.date value to make it locale independent.
    # strftime() used to always be passed a day of week of 0.
    expected = date(2000, 6, 16).strftime('%A')
    subject = DateTime('2000/6/16')
    actual = subject.strftime('%A')
    self.assertEqual(actual, expected, (subject, actual))
def __set__(self, inst, value):
    # Data-descriptor write: validate the value against the bound
    # zope.schema field, coerce it for storage, then persist it via the
    # most specific channel available on the context.
    field = self._field.bind(inst)
    field.validate(value)
    if field.readonly:
        raise ValueError(self._field.__name__, 'field is readonly')
    if isinstance(value, datetime):
        # This ensures that the converted DateTime value is in the
        # server's local timezone rather than GMT.  Seconds and finer
        # resolution are dropped.
        value = DateTime(value.year, value.month, value.day,
                         value.hour, value.minute)
    elif value is not None:
        # Text values are stored utf-8 encoded (Python 2 convention);
        # for sequences of text, each item is encoded while preserving
        # the original sequence type.
        if IText.providedBy(self._field):
            value = value.encode('utf-8')
        elif ISequence.providedBy(self._field):
            if IText.providedBy(self._field.value_type):
                value = type(value)(
                    item.encode('utf-8') for item in value
                )
    # Storage preference: explicit setter method, then the CMF property
    # machinery, then a plain attribute assignment.
    if self._set_name:
        getattr(inst.context, self._set_name)(value)
    elif inst.context.hasProperty(self._get_name):
        inst.context._updateProperty(self._get_name, value)
    else:
        setattr(inst.context, self._get_name, value)
def testConstructor5(self):
    # Constructor from a time float plus a timezone name; both str and
    # unicode zone names must reproduce the same moment.
    reference = DateTime()
    rebuilt = DateTime(float(reference), reference.timezone())
    self.assertEqual(str(reference), str(rebuilt), (reference, rebuilt))
    rebuilt = DateTime(float(reference), unicode(reference.timezone()))
    self.assertEqual(str(reference), str(rebuilt), (reference, rebuilt))
def _FSCacheHeaders(obj):
    """Handle If-Modified-Since / Last-Modified for a filesystem-backed
    object.

    Returns True (after setting a 304 status) when the client's cached
    copy is still current; otherwise falls through to setting the
    Last-Modified header.  Returns False when the object has no REQUEST.
    """
    REQUEST = getattr(obj, 'REQUEST', None)
    if REQUEST is None:
        return False
    RESPONSE = REQUEST.RESPONSE
    header = REQUEST.get_header('If-Modified-Since', None)
    last_mod = obj._file_mod_time
    if header is not None:
        header = header.split(';')[0]
        # Some proxies seem to send invalid date strings for this
        # header. If the date string is not valid, we ignore it
        # rather than raise an error to be generally consistent
        # with common servers such as Apache (which can usually
        # understand the screwy date string as a lucky side effect
        # of the way they parse it).
        try:
            mod_since = DateTime(header)
            mod_since = long(mod_since.timeTime())
        except Exception:
            # Fix: DateTime raises its own SyntaxError/DateTimeError (not
            # TypeError) on unparseable input, so the previous narrow
            # 'except TypeError' let a bad header escalate to a 500.
            # Catch broadly here -- any failure just means "no valid
            # If-Modified-Since".
            mod_since = None
        if mod_since is not None:
            if last_mod > 0 and last_mod <= mod_since:
                RESPONSE.setStatus(304)
                return True
    # Last-Modified will get stomped on by a cache policy if there is
    # one set....
    RESPONSE.setHeader('Last-Modified', rfc1123_date(last_mod))
def isExpired(content):
    """ Find out if the object is expired (copied from skin script) """
    # NOTE: We also accept catalog brains as 'content' so that the
    # catalog-based folder_contents will work. It's a little magic, but
    # it works.
    # ExpirationDate should have an ISO date string, which we need to
    # convert to a DateTime.
    expiry = None
    # Try the DC accessor first, then fall back to the raw attribute.
    if base_hasattr(content, 'ExpirationDate'):
        expiry = content.ExpirationDate
    if not expiry and base_hasattr(content, 'expires'):
        expiry = content.expires
    # Call it if it turned out to be a method rather than a value.
    if safe_callable(expiry):
        expiry = expiry()
    # ExpirationDate may return the literal string 'None'; anything else
    # string-like is parsed into a DateTime.
    if expiry and expiry != 'None' and isinstance(expiry, basestring):
        expiry = DateTime(expiry)
    if isinstance(expiry, DateTime) and expiry.isPast():
        return 1
    return 0
def expired(self, datetime, now=None):
    """Tells whether a DateTime or timestamp 'datetime' is expired with
    regards to either 'now', if provided, or the current time.

    Fix: test 'now is None' rather than truthiness, so that a
    caller-supplied falsy reference time is honoured instead of being
    silently replaced by the current time.
    """
    if now is None:
        now = DateTime()
    return now.greaterThanEqualTo(datetime)
def test_04_migrateEmptyFolder(self): """ migrate empty folder from btree to hbtree """ # Create some objects self.assertEquals(self.folder.getIdGenerator(), '') self.assertEquals(len(self.folder), 0) self.assertEqual(len(self.folder.objectIds()), 0) # call migration script self.folder.migrateToHBTree(migration_generate_id_method=None, new_generate_id_method="_generatePerDayId") self.tic() # check we now have a hbtree self.assertEqual(self.folder.isBTree(), False) self.assertEqual(self.folder.isHBTree(), True) self.assertEqual(len(self.folder.objectIds()), 0) # check new object ids obj1 = self.newContent() from DateTime import DateTime date = DateTime().Date() date = date.replace("/", "") self.failUnless(date in obj1.getId()) # check we still have a hbtree self.assertEqual(self.folder.isBTree(), False) self.assertEqual(self.folder.isHBTree(), True) self.assertEqual(len(self.folder.objectIds()), 1)
def date_vocab(context):
    """Provides a list of dates for searching with"""
    mtool = getUtility(IMembershipTool)
    # 'Ever' sentinel: the Unix epoch.
    epoch = date(1970, 1, 1)
    dates = [SimpleTerm(epoch, epoch, _(u'Ever'))]
    if not mtool.isAnonymousUser():
        member = mtool.getAuthenticatedMember()
        login_time = member.getProperty('last_login_time')
        # The stored value may be a string instead of a DateTime.
        if not hasattr(login_time, 'parts'):
            login_time = DateTime(login_time)
        login = date(*login_time.parts()[:3])
        dates.append(SimpleTerm(login, login, _(u'Last login')))
    # Fixed relative cut-offs from today.
    today = date.today()
    for days_back, label in ((1, _(u'Yesterday')),
                             (7, _(u'Last week')),
                             (31, _(u'Last month'))):
        cutoff = today - timedelta(days=days_back)
        dates.append(SimpleTerm(cutoff, cutoff, label))
    return SimpleVocabulary(dates)
def handle_event(self, instance, event):
    # Record a scheduled EC2 maintenance event for 'instance' and queue
    # a formatted notification, split into urgent vs. regular depending
    # on how soon the event starts.
    # Note this event in the stats counters.
    self._stats.incr(self.STAT_FORMAT.format(region=instance.region.name,
                                             event_code=event.code))
    # GOTCHA: DateTime arithmetic is done in days. Convert hours into
    # days before subtracting the urgency threshold.
    threshold_time = DateTime(event.not_before) - (float(self.urgent_threshold) / 24)
    msg = self.MESSAGE_FORMAT.format(
        az=instance.placement,
        name=instance.tags['Name'],
        code=event.code,
        id=instance.id,
        description=event.description,
        start_time=event.not_before,
        end_time=event.not_after,
    )
    if threshold_time.isFuture():
        # The event will happen after the threshold time.
        # Just regular notification will suffice.
        self._logger.debug('This event is not urgent yet: %s', msg)
        self._regular_events.append(msg)
    else:
        # The event will happen within the next threshold time.
        # Highlight this event.
        self._logger.debug('This event is urgent: %s', msg)
        self._urgent_events.append(msg)
def testEDTTimezone(self):
    """Should be able to parse EDT timezones"""
    # RFC-1123 style date carrying the US Eastern daylight zone.
    parsed = DateTime("Mon, 28 Jun 2010 10:12:25 EDT")
    self.assertEqual(parsed.Day(), 'Monday')
    self.assertEqual(parsed.day(), 28)
    self.assertEqual(parsed.Month(), 'June')
    # EDT is normalised to its fixed GMT offset.
    self.assertEqual(parsed.timezone(), 'GMT-0400')
def test_02_checkLineIsReindexedOnSupplyChange(self, quiet=0, run=run_all_test):
    """
      Check that Supply Line is properly reindexed (in predicate table)
      when date is change on Supply.
    """
    if not run: return
    original_date = DateTime().earliestTime() # lower precision of date
    new_date = DateTime(original_date + 10)
    self.assertNotEquals(original_date, new_date)
    supply = self._makeSupply(start_date_range_min=original_date)
    supply_line = self._makeSupplyLine(supply)
    # Query the predicate table directly for this line's indexed
    # start_date_range_min.
    kw = {}
    kw['predicate.uid'] = supply_line.getUid()
    kw['select_expression'] = 'predicate.start_date_range_min'
    # check supply line in predicate table
    result = self.catalog_tool(**kw)
    self.assertEquals(1, len(result) )
    result = result[0]
    # Dates are stored in UTC in the catalog.
    self.assertEquals(result.start_date_range_min, original_date.toZone('UTC'))
    # set new date on supply...
    supply.edit(start_date_range_min=new_date)
    self.tic()
    # ...and check supply line picked up the new date after reindexing
    kw['predicate.uid'] = supply_line.getUid()
    result = self.catalog_tool(**kw)
    self.assertEquals(1, len(result) )
    result = result[0]
    self.assertEquals(result.start_date_range_min, new_date.toZone('UTC'))
def results(self, start, until=None):
    """Return catalog brains for published, scheduled chimpfeed items
    from 'start' (inclusive) up to the optional 'until', newest first,
    followed by any extra items contributed by IGroupExtras utilities.
    """
    # Truncate 'start' to midnight so same-day items are included.
    start = DateTime(start)
    start = DateTime(start.year(), start.month(), start.day())
    # (Removed dead code: a midnight-truncated 'today' was computed here
    # but never used.)
    query = Indexed('chimpfeeds') & \
        In('review_state', ('published', )) & \
        Ge('feedSchedule', start)
    if until:
        # A malformed 'until' is silently ignored (no upper bound).
        try:
            until = DateTime(until)
        except DateTime.SyntaxError:
            pass
        else:
            query = query & Le('feedSchedule', until)
    site = getToolByName(self.context, "portal_url").getPortalObject()
    settings = IFeedSettings(site)
    # Honour moderation: only explicitly approved items when enabled.
    if settings.use_moderation:
        query = query & Eq('feedModerate', True)
    catalog = getToolByName(self.context, "portal_catalog")
    # Collect group-specific extras for every interest group.
    extras = []
    utilities = getUtilitiesFor(IGroupExtras)
    groups = InterestGroupVocabulary()(self.context)
    for name, util in utilities:
        for group in groups:
            extras.extend(util.items(group.title, start, until))
    return list(catalog.evalAdvancedQuery(
        query, (('feedSchedule', 'desc'), ))) + extras
def testDateFieldConvertedToSalesforceFormat(self):
    """ Prove that DateField values get converted to the format
        expected by Salesforce (mm/dd/yyyy).
    """
    # Build a form with a date field wired up to a Salesforce adapter.
    self.ff1.invokeFactory('FormDateField', 'date')
    self.ff1.date.setTitle('date')
    self.ff1.invokeFactory('SalesforcePFGAdapter', 'salesforce')
    self.ff1.setActionAdapter( ('salesforce',) )
    sf = self.ff1.salesforce
    # Point the last field-map entry at the Salesforce 'date' field.
    fieldmap = sf.getFieldMap()
    fieldmap[-1]['sf_field'] = 'date'
    sf.setFieldMap(fieldmap)
    from DateTime import DateTime
    now = DateTime()
    # Submit the date the way Plone renders it.
    now_plone = now.strftime('%m-%d-%Y %H:%M')
    request = FakeRequest(topic = 'test subject',
                          replyto='*****@*****.**',
                          date = now_plone)
    # Only real input fields (not labels/fieldsets) get passed along.
    from Products.Archetypes.interfaces.field import IField
    fields = [fo for fo in self.ff1._getFieldObjects()
              if not IField.isImplementedBy(fo)]
    sObject = self.ff1.salesforce._buildSObjectFromForm(fields, REQUEST=request)
    # The adapter must emit an ISO 8601 'Z' timestamp; strptime serves
    # as the format assertion.
    from time import strptime
    try:
        res = strptime(sObject['date'], '%Y-%m-%dT%H:%M:%SZ')
    except ValueError:
        self.fail("Doesn't look like the date was converted to Salesforce format properly.")
def __call__(self):
    """ Return csv content as http response or return info IStatusMessage """
    csv_content = self.logged_requests_csv()
    if csv_content is not None:
        now = DateTime()
        # Fix: the previous '%s_%s' % ('requestlog_', ...) produced a
        # double underscore ('requestlog__YYYYMMDD').
        nice_filename = 'requestlog_%s' % now.strftime('%Y%m%d')
        self.request.response.setHeader(
            "Content-Disposition",
            "attachment; filename=%s.csv" % nice_filename)
        self.request.response.setHeader("Content-Type", "text/csv")
        self.request.response.setHeader("Content-Length", len(csv_content))
        # Reuse 'now' instead of the awkward unbound-method call
        # DateTime.rfc822(DateTime()) on a second timestamp.
        self.request.response.setHeader('Last-Modified', now.rfc822())
        # Log downloads must never be served from a cache.
        self.request.response.setHeader("Cache-Control", "no-store")
        self.request.response.setHeader("Pragma", "no-cache")
        self.request.response.write(csv_content)
    else:
        msg = _('No log entries exist')
        IStatusMessage(self.request).addStatusMessage(msg, "info")
    return csv_content
def adapter_enforce_gmt(dt=None):
    """Coerce 'dt' (anything DateTime() accepts) into a DateTime carrying
    an explicit GMT offset; with no argument, return the current time.
    """
    # Idiom fix: compare against None with 'is not', not '!='.
    if dt is not None:
        dt = DateTime(dt)
        t = dt.timeTime()
        # Rebuild from the epoch float with the computed GMT offset zone.
        gmt_date = DateTime(t, gmt_offset(dt))
        return gmt_date
    return DateTime()
def purge_now(self):
    # Purge when we have never purged before, when the last purge is
    # more than 30 days old, or when the calendar month has rolled over
    # since the last purge.
    last_purge = self.last_purge()
    now = DateTime()
    if not last_purge:
        return True
    if now - last_purge > 30:
        return True
    return now.month() != last_purge.month()
def datehandler(value):
    """Normalise a date-ish value to Solr's ISO-8601 'Z' string form.

    Accepts DateTime, datetime, date, or a parseable date string; raises
    AttributeError for values that cannot be handled so the indexer
    simply skips the attribute.
    """
    # TODO: we might want to handle datetime and time as well;
    # check the enfold.solr implementation
    # Fix: use equality for the empty-string test; 'value is ""' relied
    # on string interning and is not a reliable comparison.
    if value is None or value == '':
        raise AttributeError
    if isinstance(value, str) and not value.endswith('Z'):
        try:
            value = DateTime(value)
        except SyntaxError:
            raise AttributeError
    if isinstance(value, DateTime):
        v = value.toZone('UTC')
        value = '%04d-%02d-%02dT%02d:%02d:%06.3fZ' % (
            v.year(), v.month(), v.day(),
            v.hour(), v.minute(), v.second()
        )
    elif isinstance(value, datetime):
        # Convert a timezone aware timetuple to a non timezone aware
        # time tuple representing utc time. Does nothing if object is
        # not timezone aware.
        # Fix: take only the first six fields -- utctimetuple()[6] is
        # tm_wday, which the old code passed as the microsecond argument
        # and then leaked into the millisecond digits via '% 1000'.
        # utctimetuple() has no sub-second resolution, so milliseconds
        # are always zero here.
        value = datetime(*value.utctimetuple()[:6])
        value = '%s.%03dZ' % (
            value.strftime('%Y-%m-%dT%H:%M:%S'),
            value.microsecond // 1000
        )
    elif isinstance(value, date):
        value = '%s.000Z' % value.strftime('%Y-%m-%dT%H:%M:%S')
    return value
def lectureTakesPlace(self, datetime=None):
    """ Return True if the lecture takes place on the given date. If no
        date is specified, the current date will be used.
        TODO: Currently not implemented for monthly and yearly
        recurrence.
    """
    if not datetime:
        datetime = DateTime()
    # Outside the lecture's overall span nothing takes place.
    if datetime < self.startDate.earliestTime() \
            or datetime > self.endDate.latestTime():
        return False
    if self.recurrence == NO_RECURRENCE:
        return self.startDate.isCurrentDay()
    if self.recurrence == DAILY:
        return True
    if self.recurrence == WEEKLY:
        # Same weekday as the first occurrence.
        return datetime.dow() == self.startDate.dow()
    # MONTHLY and YEARLY are not implemented yet (TODO), and unknown
    # recurrence values also fall through to False.
    return False
def newTrashBin(self, bt_title='trash', bt=None):
    """ Create a new trash bin at upgrade of bt """
    # The bin's start date, used both as metadata and in the id.
    start_date = DateTime().strftime('%Y-%m-%d')

    def base_trash_id():
        # Build an id without a leading underscore: '<title>_<date>',
        # or just the date when the title is empty.
        if bt_title in ('', None):
            return '%s' % start_date
        return '%s_%s' % (bt_title, start_date)

    # Append an increasing numeric suffix until the id is free here.
    existing_ids = self.objectIds()
    new_trash_id = base_trash_id()
    suffix = 0
    while new_trash_id in existing_ids:
        suffix += 1
        new_trash_id = '%s_%s' % (base_trash_id(), suffix)
    # create trash bin
    return self.newContent(portal_type='Trash Bin',
                           id=new_trash_id,
                           title=bt_title,
                           start_date=start_date,
                           causality_value=bt)
def modified(self):
    """Provide a callable to return the modification time of content
    items, so stored image scales can be invalidated.
    """
    # _p_mtime is the ZODB persistence timestamp; expose it as
    # milliseconds since the epoch.
    return DateTime(aq_base(self.context)._p_mtime).millis()
def _createOrderId(self):
    """Creates a new unique order id.
    """
    from DateTime import DateTime
    # A millisecond timestamp keeps ids unique and roughly sortable.
    return str(DateTime().millis())
def getMonthGrid(self, month): '''Creates a list of lists of DateTime objects representing the calendar grid to render for a given p_month.''' # Month is a string "YYYY/mm". currentDay = DateTime('%s/01 12:00' % month) currentMonth = currentDay.month() res = [[]] dayOneNb = currentDay.dow() or 7 # This way, Sunday is 7 and not 0. if dayOneNb != 1: previousDate = DateTime(currentDay) # If the 1st day of the month is not a Monday, start the row with # the last days of the previous month. for i in range(1, dayOneNb): previousDate = previousDate - 1 res[0].insert(0, previousDate) finished = False while not finished: # Insert currentDay in the grid if len(res[-1]) == 7: # Create a new row res.append([currentDay]) else: res[-1].append(currentDay) currentDay = currentDay + 1 if currentDay.month() != currentMonth: finished = True # Complete, if needed, the last row with the first days of the next # month. if len(res[-1]) != 7: while len(res[-1]) != 7: res[-1].append(currentDay) currentDay = currentDay + 1 return res
def afterSetUp(self):
    # Shortcuts to the tools under test.
    self.calendar = self.portal.portal_calendar
    self.workflow = self.portal.portal_workflow
    # Weeks start on Monday (0).
    self.calendar.firstweekday = 0
    # Fixed reference date used by the events created in populateSite().
    self.event_date = DateTime('2008-02-08 0:00:00')
    self.populateSite()
def addToDate(date, to_add=None, **kw):
    """
    Return a new DateTime object with the corresponding added values.
    Values can be negative.
    """
    # Merge the 'to_add' mapping and keyword arguments into a single
    # {unit: delta} dict; keyword arguments win on conflicts.
    return_value = {}
    if to_add is not None:
        kw.update(to_add)
    to_add = kw
    key_list = ('second', 'minute', 'hour', 'day', 'month', 'year')
    reverse_key_list = key_list[::-1]
    # Snapshot the current value of every unit from the source date
    # (DateTime exposes them as same-named accessor methods).
    for key in reverse_key_list:
        method = getattr(date, key)
        return_value[key] = method()
    # Unit that a given unit carries into on overflow.  Days are absent:
    # they are handled at the end via DateTime arithmetic, because month
    # lengths vary.
    larger_key_dict = {
        'second': 'minute',
        'minute': 'hour',
        'hour': 'day',
        'month': 'year'
    }
    # How many of <unit> fit into the next larger unit.
    number_of_in_dict = {
        'second': number_of_seconds_in_minute,
        'minute': number_of_minutes_in_hour,
        'hour': number_of_hours_in_day,
        'day': getNumberOfDayInMonth(date),
        'month': number_of_months_in_year
    }
    # Inverse direction: the next smaller unit, and how many of it make
    # one <unit> -- used to push fractional amounts downwards.
    lesser_key_dict = {
        'minute': 'second',
        'hour': 'minute',
        'day': 'hour',
        'month': 'day',
        'year': 'month'
    }
    number_less_of_in_dict = {
        'minute': number_of_seconds_in_minute,
        'hour': number_of_minutes_in_hour,
        'day': number_of_hours_in_day,
        'month': getNumberOfDayInMonth(date),
        'year': number_of_months_in_year
    }

    def treatNegativeValues(return_value, key):
        # Borrow from the larger unit while this unit underflows.
        # Months are 1-based while the time units are 0-based, hence the
        # two comparison forms; days are excluded entirely.
        while key != 'day' and (key == 'month' and return_value[key] <= 0) or \
            (key != 'month' and return_value[key] < 0):
            return_value[key] = return_value[key] + number_of_in_dict[key]
            return_value[
                larger_key_dict[key]] = return_value[larger_key_dict[key]] - 1

    def treatPositiveValues(return_value, key):
        # Carry into the larger unit while this unit overflows
        # (mirror image of treatNegativeValues).
        while key != 'day' and (key == 'month' and return_value[key] >\
            number_of_in_dict[key]) or (key != 'month' and return_value[key] >= \
            number_of_in_dict[key]):
            return_value[key] = return_value[key] - number_of_in_dict[key]
            return_value[
                larger_key_dict[key]] = return_value[larger_key_dict[key]] + 1

    # Apply the requested deltas unit by unit (smallest first),
    # normalising under-/overflow as we go.  Day and year never carry.
    for key in key_list:
        if to_add.get(key, None) is not None:
            return_value[key] = return_value[key] + to_add[key]
            del to_add[key]
            if key not in ('day', 'year'):
                treatNegativeValues(return_value, key)
                treatPositiveValues(return_value, key)
    # Push any fractional part of each unit down into the next smaller
    # unit, then truncate every unit (except the year) to an integer.
    for key in reverse_key_list[:-1]:
        if 1 > return_value[key] % 1 > 0:
            return_value[lesser_key_dict[
                key]] += return_value[key] % 1 * number_less_of_in_dict[key]
        return_value[key] = int(return_value[key])
    # Re-normalise after the fractional redistribution above.
    for local_key in return_value.keys():
        if local_key not in ('day', 'year'):
            treatPositiveValues(return_value, local_key)
    # Days are added last, through DateTime arithmetic on the first of
    # the month, so that variable month lengths are handled by DateTime
    # itself rather than by the tables above.
    day_to_add = return_value['day'] - 1
    if to_add.get('day', None) is not None:
        day_to_add += to_add['day']
    return_value['day'] = 1
    return_date = DateTime(
        '%i/%i/%i %i:%i:%d %s' % (return_value['year'],
        return_value['month'], return_value['day'], return_value['hour'],
        return_value['minute'], return_value['second'], date.timezone()))
    return_date += day_to_add
    return return_date
def getIntervalListBetweenDates(from_date=None, to_date=None, keys=None):
    """
    Return the list of years, months and days (if each is equal to 1 in
    keys) between the both given dates including the current one.
    If one of the given dates is None, the date used is the current time.

    'keys' defaults to {'year': 1, 'month': 1, 'week': 1, 'day': 1};
    the mutable default argument of the original signature is replaced
    by a None sentinel (behaviour for callers is unchanged).
    """
    if keys is None:
        keys = {'year': 1, 'month': 1, 'week': 1, 'day': 1}
    # key -> strftime format used to label each bucket
    format_dict = {
        'year': '%Y',
        'month': '%Y-%m',
        'week': '%Y-%V',
        'day': '%Y-%m-%d',
    }
    if from_date is None:
        from_date = DateTime()
    if to_date is None:
        to_date = DateTime()
    # Always iterate forward in time; remember whether the caller gave
    # the bounds in descending order so the result can be reversed.
    if from_date - to_date > 0:
        from_date, to_date = to_date, from_date
        to_inverse = 1
    else:
        to_inverse = 0
    diff_value_dict = {}
    for current_key in ('year', 'month', 'week', 'day'):
        if keys.get(current_key, None):
            new_date = from_date
            while new_date <= to_date:
                if current_key == 'day':
                    # Day labels come from the date part of ISO() rather
                    # than strftime (historical behaviour, kept as-is).
                    new_strftime = new_date.ISO()
                    new_strftime = new_strftime[:new_strftime.index(' ')]
                    diff_value_dict.setdefault(current_key, []).append(
                        new_strftime)
                else:
                    diff_value_dict.setdefault(current_key, []).append(
                        new_date.strftime(format_dict[current_key]))
                # A week advances by 7 days; any other unit by one of
                # itself.
                if current_key == "week":
                    new_date = addToDate(new_date, to_add={'day': 7})
                else:
                    new_date = addToDate(new_date, to_add={current_key: 1})
            # Make sure the bucket containing 'to_date' is present even
            # when the stride stepped over it.
            if to_date.strftime(format_dict[current_key]) not in\
                diff_value_dict[current_key]:
                diff_value_dict.setdefault(current_key, []).append(
                    to_date.strftime(format_dict[current_key]))
    # Reverse each list when the original arguments were descending.
    returned_value_dict = {}
    for key, value in six.iteritems(diff_value_dict):
        if to_inverse:
            value.reverse()
        returned_value_dict[key] = value
    return returned_value_dict
from AccessControl import ModuleSecurityInfo from DateTime import DateTime from datetime import datetime import six security = ModuleSecurityInfo(__name__) security.declarePublic( 'addToDate', 'getClosestDate', 'getIntervalBetweenDates', 'getMonthAndDaysBetween', 'getCompletedMonthBetween', 'getRoundedMonthBetween', 'getMonthFraction', 'getYearFraction', 'getAccountableYearFraction', 'getBissextilCompliantYearFraction', 'getIntervalListBetweenDates', 'getDecimalNumberOfYearsBetween', 'roundMonthToGreaterEntireYear', 'roundDate', 'convertDateToHour', 'getNumberOfDayInMonth', 'atTheEndOfPeriod', 'copyDate') millis = DateTime('2000/01/01 12:00:00.001') - DateTime('2000/01/01 12:00:00') centis = millis * 10 number_of_months_in_year = 12. number_of_hours_in_day = 24. number_of_minutes_in_hour = 60. number_of_seconds_in_minute = 60. number_of_days_in_year = 365. hour = 1 / 24. same_movement_interval = hour accountable_days_in_month = 30. accountable_months_in_year = 12. number_of_hours_in_year = 8760 def addToDate(date, to_add=None, **kw):
def test_migrate_atevent_to_dxevent(self):
    """Tests the custom migration by migrating a default type.

    It is not meant to be used this way but is a nice way to test the
    migrations.  During this migration the event fti is already replaced
    by the dx one.
    """
    from DateTime import DateTime
    from plone.app.contenttypes.migration.migration import migrateCustomAT
    from plone.app.contenttypes.interfaces import IEvent
    # create an ATEvent
    self.portal.invokeFactory('Event', 'event')
    at_event = self.portal['event']
    # Date
    FORMAT = '%Y-%m-%d %H:%M'
    start = '2013-02-03 12:15'
    end = '2013-04-05 13:45'
    at_event.getField('startDate').set(at_event, DateTime(start))
    at_event.getField('endDate').set(at_event, DateTime(end))
    # Contact
    at_event.getField('contactPhone').set(at_event, '123456789')
    at_event.getField('contactEmail').set(at_event, '*****@*****.**')
    at_event.getField('contactName').set(at_event, u'Näme')
    # URL
    at_event.getField('eventUrl').set(at_event, 'http://www.plone.org')
    # Attendees
    at_event.getField('attendees').set(at_event, ('Yöu', 'Me'))
    # Text
    at_event.setText('Tütensuppe')
    at_event.setContentType('text/plain')
    # Pin the process timezone so the tz-aware datetime comparisons at
    # the end are deterministic regardless of the host machine's zone.
    oldTZ = os.environ.get('TZ', None)
    TZ = 'Asia/Tbilisi'
    os.environ['TZ'] = TZ
    timezone = pytz.timezone(TZ)
    qi = self.portal.portal_quickinstaller
    # install pac but only install Event
    qi.installProduct(
        'plone.app.contenttypes',
        profile='plone.app.contenttypes:default',
        blacklistedSteps=['typeinfo'])
    installTypeIfNeeded("Event")
    # AT -> DX field mapping driving the custom migration.
    fields_mapping = (
        {'AT_field_name': 'startDate',
         'AT_field_type': 'Products.Archetypes.Field.DateTimeField',
         'DX_field_name': 'start',
         'DX_field_type': 'Datetime',
         },
        {'AT_field_name': 'endDate',
         'AT_field_type': 'Products.Archetypes.Field.DateTimeField',
         'DX_field_name': 'end',
         'DX_field_type': 'Datetime',
         },
        {'AT_field_name': 'text',
         'AT_field_type': 'Products.Archetypes.Field.TextField',
         'DX_field_name': 'text',
         'DX_field_type': 'RichText',
         },
        {'AT_field_name': 'eventUrl',
         'AT_field_type': 'Products.Archetypes.Field.StringField',
         'DX_field_name': 'event_url',
         'DX_field_type': 'StringField',
         },
        {'AT_field_name': 'contactEmail',
         'AT_field_type': 'Products.Archetypes.Field.StringField',
         'DX_field_name': 'contact_email',
         'DX_field_type': 'StringField',
         },
        {'AT_field_name': 'contactName',
         'AT_field_type': 'Products.Archetypes.Field.StringField',
         'DX_field_name': 'contact_name',
         'DX_field_type': 'StringField',
         },
        {'AT_field_name': 'contactPhone',
         'AT_field_type': 'Products.Archetypes.Field.StringField',
         'DX_field_name': 'contact_phone',
         'DX_field_type': 'StringField',
         },
        {'AT_field_name': 'attendees',
         'AT_field_type': 'Products.Archetypes.Field.LinesField',
         'DX_field_name': 'attendees',
         'DX_field_type': 'Tuple',
         },
    )
    # migrate ATEvent to new default Event
    migrateCustomAT(fields_mapping, src_type='Event', dst_type='Event')
    dx_event = self.portal['event']
    # The migrated object is a new DX object providing IEvent and
    # carrying every mapped field value.
    self.assertTrue(IEvent.providedBy(dx_event))
    self.assertTrue(dx_event is not at_event)
    self.assertEquals(safe_unicode(
        at_event.getText()), dx_event.text.output)
    self.assertEquals(at_event.eventUrl, dx_event.event_url)
    self.assertEquals(at_event.contactEmail, dx_event.contact_email)
    self.assertEquals(at_event.contactName, dx_event.contact_name)
    self.assertEquals(at_event.contactPhone, dx_event.contact_phone)
    self.assertEquals(at_event.attendees, dx_event.attendees)
    # Start/end become timezone-aware datetimes in the pinned zone.
    self.assertEquals(
        dx_event.start,
        timezone.localize(datetime.strptime(start, FORMAT)))
    self.assertEquals(
        dx_event.end,
        timezone.localize(datetime.strptime(end, FORMAT)))
    # Restore the original TZ environment.
    if oldTZ:
        os.environ['TZ'] = oldTZ
    else:
        del os.environ['TZ']
def test_migrate_atevent_to_dxnewsitem(self):
    """Tests the custom migration by migrating a default type.

    It is not meant to be used this way but is a nice way to test the
    migrations.  During this migration the old event fti is still
    present.
    """
    from DateTime import DateTime
    from plone.app.contenttypes.migration.migration import migrateCustomAT
    from plone.app.contenttypes.interfaces import INewsItem
    # create an ATEvent
    self.portal.invokeFactory('Event', 'event')
    at_event = self.portal['event']
    # Date
    at_event.getField('startDate') \
            .set(at_event, DateTime('2013-02-03 12:00'))
    at_event.getField('endDate') \
            .set(at_event, DateTime('2013-04-05 13:00'))
    # Contact
    at_event.getField('contactPhone').set(at_event, '123456789')
    at_event.getField('contactEmail').set(at_event, '*****@*****.**')
    at_event.getField('contactName').set(at_event, u'Näme')
    # URL
    at_event.getField('eventUrl').set(at_event, 'http://www.plone.org')
    # Attendees
    at_event.getField('attendees').set(at_event, ('You', 'Me'))
    # Text
    at_event.setText('Tütensuppe')
    at_event.setContentType('text/plain')
    # Pin the process timezone so date handling during the migration is
    # deterministic regardless of the host machine's zone.
    oldTZ = os.environ.get('TZ', None)
    os.environ['TZ'] = 'Asia/Tbilisi'
    qi = self.portal.portal_quickinstaller
    # install pac but only install News Items
    qi.installProduct(
        'plone.app.contenttypes',
        profile='plone.app.contenttypes:default',
        blacklistedSteps=['typeinfo'])
    installTypeIfNeeded("News Item")
    # Cross-type mapping: the event body text becomes the news item body
    # and the contact name is repurposed as the image caption.
    fields_mapping = (
        {'AT_field_name': 'text',
         'AT_field_type': 'Products.Archetypes.Field.TextField',
         'DX_field_name': 'text',
         'DX_field_type': 'RichText',
         },
        {'AT_field_name': 'contactName',
         'AT_field_type': 'StringField',
         'DX_field_name': 'image_caption',
         'DX_field_type': 'StringField',
         },
    )
    # migrate ATCTEvent to default DX News Item
    migrateCustomAT(fields_mapping, src_type='Event', dst_type='News Item')
    # Restore the original TZ environment.
    if oldTZ:
        os.environ['TZ'] = oldTZ
    else:
        del os.environ['TZ']
    dx_newsitem = self.portal['event']
    # The migrated object is a new DX News Item carrying the two mapped
    # field values.
    self.assertTrue(INewsItem.providedBy(dx_newsitem))
    self.assertTrue(dx_newsitem is not at_event)
    self.assertEquals(
        safe_unicode(at_event.getText()), dx_newsitem.text.output)
    self.assertEquals(
        at_event.contactName, dx_newsitem.image_caption)
import json from DateTime import DateTime now = DateTime() now_minus_6 = now - 1.0 / 24 / 60 * 6 now_minus_1 = now - 1.0 / 24 / 60 * 1 catalog_kw = { 'creation_date': { 'query': (now_minus_6, now_minus_1), 'range': 'minmax' }, 'validation_state': 'validated' } portal = context.getPortalObject() portal_catalog = portal.portal_catalog person_count = len(portal_catalog(portal_type="Person", **catalog_kw)) catalog_kw = { 'creation_date': { 'query': (now_minus_6, now_minus_1), 'range': 'minmax' }, 'simulation_state': 'planned' } sale_order_count = len(portal_catalog(portal_type="Sale Order", **catalog_kw)) person_per_hour = 60 * person_count / 5 sale_order_per_hour = 60 * sale_order_count / 5 output_string = """Person: %r doc/hour SaleOrder: %r doc/hour""" % ( person_per_hour, sale_order_per_hour) return json.dumps({ "person_per_hour": person_per_hour, "sale_order_per_hour": sale_order_per_hour })
def addCours(self, value, utilisateur):
    """Import one user's courses from a JSON export directory.

    ``value`` is the export directory path; ``utilisateur`` is the export
    file name, from which the login is derived.  Creates one JalonCours
    object per exported course, then rebuilds its plan: headings/free
    text, files, external resources, web conferences, recorded
    presentations, forums and drop boxes (with subjects/corrections).
    Returns None when the expected export file is missing.
    """
    #print "------- addCours -------"
    # Derive the user login from the export file name: strip the ".json"
    # suffix and an 11-character prefix.
    utilisateur = utilisateur.split(".json")[0][11:]
    contenuDossier = os.listdir("%s" % value)
    # Check that the course export file "exportCours<user>.json" exists.
    idExport = u"exportCours%s.json" % utilisateur
    if not idExport in contenuDossier:
        return None
    # Load the export file; any failure is surfaced as a validation error.
    try:
        dicoCours = json.load(open('%s/%s' % (value, idExport), 'r'))
    except:
        raise Invalid(_(u"addCours : %s " % utilisateur))
    # id -> value maps of the user's external resources and files.
    dicoExternes = self.genererExternes(
        "/home/zope/sites/jalon/jalonv2fait", utilisateur, {})
    dicoFichiers = self.genererFichiers(
        "/home/zope/sites/jalon/jalonv2fait", utilisateur, {})
    portal = getSite()
    Cours = getattr(portal.cours, utilisateur)
    num = 1
    for infosCours in dicoCours:
        Cours.invokeFactory(type_name='JalonCours', id=infosCours["idcours"])
        #print "%s : addCours : %s " % (str(num), infosCours["idcours"])
        num = num + 1
        cours = getattr(Cours, infosCours["idcours"])
        # Update permissions: importing user becomes owner.
        cours.manage_setLocalRoles(utilisateur, ["Owner"])
        # Update course metadata.
        cours.setTitle(infosCours["titre"].encode("utf-8"))
        cours.setAuteurs({"auteurs": "auteur", "username": utilisateur})
        if infosCours["coAuteurs"]:
            cours.setAuteurs({
                "auteurs": "coAuteurs",
                "username": infosCours["coAuteurs"]
            })
            # Co-authors may own referenced resources: merge theirs in.
            for coAuteur in infosCours["coAuteurs"].split(","):
                dicoExternes = self.genererExternes(
                    "/home/zope/sites/jalon/jalonv2fait", coAuteur,
                    dicoExternes)
                dicoFichiers = self.genererFichiers(
                    "/home/zope/sites/jalon/jalonv2fait", coAuteur,
                    dicoFichiers)
        if infosCours["coLecteurs"]:
            cours.setAuteurs({
                "auteurs": "coLecteurs",
                "username": infosCours["coLecteurs"]
            })
        # Seed for generated element ids (digits of the clock reading).
        heure = str(time.clock()).replace(".", "")
        for composant in infosCours["plan"]:
            if composant["type"] in ["Titre", "TexteLibre"]:
                # Headings/free text get a generated id of the form
                # <type>-<user>-YYYYMMDD<seed>; the seed is bumped below.
                now = DateTime()
                idElement = "%s-%s-%s" % (
                    composant["type"], utilisateur, ''.join(
                        [now.strftime('%Y%m%d'), str(heure)]))
                cours.ajouterInfosElement(
                    idElement.encode("utf-8"),
                    composant["type"].encode("utf-8"),
                    composant["titre"].encode("utf-8"),
                    utilisateur.encode("utf-8"))
                cours.ajouterElementPlan(idElement.encode("utf-8"))
                #print "addCours Element : %s , %s" % (idElement.encode("utf-8"), composant["titre"].encode("utf-8"))
                heure = int(heure) + 1
            else:
                # Imports below are best-effort: a failure (e.g. missing
                # entry in the resource maps) is only logged, not fatal.
                if composant["type"] == "Fichier":
                    try:
                        cours.ajouterElement(
                            composant["id"].encode("utf-8"),
                            dicoFichiers[composant["id"]].encode("utf-8"),
                            composant["titre"].encode("utf-8"),
                            composant["createur"].encode("utf-8"))
                    except:
                        print "addCours Element Fichier : %s , %s" % (
                            composant["id"].encode("utf-8"),
                            composant["titre"].encode("utf-8"))
                if composant["type"] == "Ressources Externes":
                    try:
                        cours.ajouterElement(
                            composant["id"].encode("utf-8"),
                            dicoExternes[composant["id"]].encode("utf-8"),
                            composant["titre"].encode("utf-8"),
                            composant["createur"].encode("utf-8"))
                    except:
                        print "addCours Element Ressources Externes : %s , %s" % (
                            composant["id"].encode("utf-8"),
                            composant["titre"].encode("utf-8"))
                if composant["type"] == "Webconference":
                    try:
                        cours.ajouterElement(
                            composant["id"].encode("utf-8"),
                            "Webconference".encode("utf-8"),
                            composant["titre"].encode("utf-8"),
                            composant["createur"].encode("utf-8"))
                    except:
                        print "addCours Element Webconference : %s , %s %s" % (
                            composant["id"].encode("utf-8"),
                            composant["titre"].encode("utf-8"),
                            composant["createur"].encode("utf-8"))
                if composant["type"] == "Presentation sonirisee":
                    try:
                        cours.ajouterElement(
                            composant["id"].encode("utf-8"),
                            "Presentations sonorisees".encode("utf-8"),
                            composant["titre"].encode("utf-8"),
                            composant["createur"].encode("utf-8"))
                    except:
                        print "addCours Element Presentation sonirisee : %s , %s %s" % (
                            composant["id"].encode("utf-8"),
                            composant["titre"].encode("utf-8"),
                            composant["createur"].encode("utf-8"))
                if composant["type"] == "Sujet de dicussion":
                    # Discussion topic: create a Forum sub-object first.
                    idactivite = cours.creerSousObjet(
                        "Forum".encode("utf-8"),
                        composant["titre"].encode("utf-8"),
                        composant["description"].encode("utf-8"),
                        composant["createur"].encode("utf-8"), None, None)
                    cours.ajouterElement(
                        idactivite.encode("utf-8"),
                        "Forum".encode("utf-8"),
                        composant["titre"].encode("utf-8"),
                        composant["createur"].encode("utf-8"))
                    #print "addCours Element : %s , %s" % (composant["id"].encode("utf-8"), composant["titre"].encode("utf-8"))
                if composant["type"] == "Boîte de dépôts":
                    # Drop box: create the BoiteDepot sub-object, then copy
                    # its subjects and corrections into it.
                    idactivite = cours.creerSousObjet(
                        "BoiteDepot".encode("utf-8"),
                        composant["titre"].encode("utf-8"),
                        composant["description"].encode("utf-8"),
                        composant["createur"].encode("utf-8"), None, None)
                    cours.ajouterElement(
                        idactivite,
                        "BoiteDepot".encode("utf-8"),
                        composant["titre"].encode("utf-8"),
                        composant["createur"].encode("utf-8"))
                    if composant["sujets"] != [] or composant[
                            "corrections"] != []:
                        boite = getattr(cours, idactivite)
                        for sujet in composant["sujets"]:
                            if sujet["typeElement"] == "Fichier":
                                try:
                                    boite.ajouterElement(
                                        "sujets".encode("utf-8"),
                                        sujet["id"].encode("utf-8"),
                                        dicoFichiers[sujet["id"]],
                                        sujet["titreElement"].encode("utf-8"),
                                        sujet["createurElement"].encode("utf-8"))
                                except:
                                    print "add Element Boite Fichier : %s , %s %s" % (
                                        sujet["id"].encode("utf-8"),
                                        sujet["titreElement"].encode("utf-8"),
                                        sujet["createurElement"].encode("utf-8"))
                            if sujet["typeElement"] == "Ressources Externes":
                                try:
                                    boite.ajouterElement(
                                        "sujets".encode("utf-8"),
                                        sujet["id"].encode("utf-8"),
                                        dicoExternes[sujet["id"]],
                                        sujet["titreElement"].encode("utf-8"),
                                        sujet["createurElement"].encode("utf-8"))
                                except:
                                    print "add Element Boite Ressources Externes : %s , %s %s" % (
                                        sujet["id"].encode("utf-8"),
                                        sujet["titreElement"].encode("utf-8"),
                                        sujet["createurElement"].encode("utf-8"))
                            if sujet["typeElement"] == "Webconference":
                                try:
                                    boite.ajouterElement(
                                        "sujets".encode("utf-8"),
                                        sujet["id"].encode("utf-8"),
                                        "Webconference".encode("utf-8"),
                                        sujet["titreElement"].encode("utf-8"),
                                        sujet["createurElement"].encode("utf-8"))
                                except:
                                    print "add Element Boite Webconference : %s , %s %s" % (
                                        sujet["id"].encode("utf-8"),
                                        sujet["titreElement"].encode("utf-8"),
                                        sujet["createurElement"].encode("utf-8"))
                            if sujet["typeElement"] == "Presentation sonirisee":
                                try:
                                    boite.ajouterElement(
                                        "sujets".encode("utf-8"),
                                        sujet["id"].encode("utf-8"),
                                        "Presentations sonorisees".encode("utf-8"),
                                        sujet["titreElement"].encode("utf-8"),
                                        sujet["createurElement"].encode("utf-8"))
                                except:
                                    print "add Element Boite Presentation sonirisee : %s , %s %s" % (
                                        sujet["id"].encode("utf-8"),
                                        sujet["titreElement"].encode("utf-8"),
                                        sujet["createurElement"].encode("utf-8"))
                        for correction in composant["corrections"]:
                            if correction["typeElement"] == "Fichier":
                                try:
                                    boite.ajouterElement(
                                        "corrections".encode("utf-8"),
                                        correction["id"].encode("utf-8"),
                                        dicoFichiers[correction["id"]].encode("utf-8"),
                                        correction["titreElement"].encode("utf-8"),
                                        correction["createurElement"].encode("utf-8"))
                                except:
                                    print "add Element Boite Fichier : %s , %s %s" % (
                                        correction["id"],
                                        correction["titreElement"].encode("utf-8"),
                                        correction["createurElement"].encode("utf-8"))
                            if correction["typeElement"] == "Ressources Externes":
                                try:
                                    boite.ajouterElement(
                                        "corrections".encode("utf-8"),
                                        correction["id"].encode("utf-8"),
                                        dicoExternes[correction["id"]].encode("utf-8"),
                                        correction["titreElement"].encode("utf-8"),
                                        correction["createurElement"].encode("utf-8"))
                                except:
                                    print "add Element Boite Ressources Externes : %s , %s %s" % (
                                        correction["id"],
                                        correction["titreElement"].encode("utf-8"),
                                        correction["createurElement"].encode("utf-8"))
                            if correction["typeElement"] == "Webconference":
                                try:
                                    boite.ajouterElement(
                                        "corrections".encode("utf-8"),
                                        correction["id"].encode("utf-8"),
                                        "Webconference".encode("utf-8"),
                                        correction["titreElement"].encode("utf-8"),
                                        correction["createurElement"].encode("utf-8"))
                                except:
                                    print "add Element Boite Webconference : %s , %s %s" % (
                                        correction["id"].encode("utf-8"),
                                        correction["titreElement"].encode("utf-8"),
                                        correction["createurElement"].encode("utf-8"))
                            if correction["typeElement"] == "Presentation sonirisee":
                                try:
                                    boite.ajouterElement(
                                        "corrections".encode("utf-8"),
                                        correction["id"].encode("utf-8"),
                                        "Presentations sonorisees".encode("utf-8"),
                                        correction["titreElement"].encode("utf-8"),
                                        correction["createurElement"].encode("utf-8"))
                                except:
                                    print "add Element Boite Presentation sonirisee : %s , %s %s" % (
                                        correction["id"].encode("utf-8"),
                                        correction["titreElement"].encode("utf-8"),
                                        correction["createurElement"].encode("utf-8"))
            #print "addCours Element : %s , %s" % (composant["id"], composant["titre"].encode("utf-8"))
        # Each course also gets an announcements folder.
        cours.invokeFactory(type_name='Folder', id="annonce")
        cours.reindexObject()
    Cours.reindexObject()
def workflow_script_expire(self):
    """Workflow script: stamp the sample as expired at the current time
    and refresh its catalog entry."""
    expiry_time = DateTime()
    self.setDateExpired(expiry_time)
    self.reindexObject()
class TestCalendarTool(PloneTestCase.PloneTestCase):
    """Integration tests for the portal_calendar event queries.

    The site is populated with one published event at the portal root
    (event1) and one inside a folder (folder1/event11), both on
    2008-02-08, so queries with and without a path restriction can be
    distinguished.  A calendar cell with ``day`` 0 is a padding cell
    outside the month.
    """

    def afterSetUp(self):
        self.calendar = self.portal.portal_calendar
        # weeks start on Monday (firstweekday=0)
        self.calendar.firstweekday = 0
        self.workflow = self.portal.portal_workflow
        self.event_date = DateTime('2008-02-08 0:00:00')
        self.populateSite()

    def populateSite(self):
        """Create and publish event1 (root) and folder1/event11."""
        self.setRoles(['Manager'])
        self.portal.invokeFactory('Event', 'event1')
        event1 = getattr(self.portal, 'event1')
        event1.edit(startDate=self.event_date, endDate=self.event_date)
        self.workflow.doActionFor(event1, 'publish', comment='testing')
        self.portal.invokeFactory('Folder', 'folder1')
        folder1 = getattr(self.portal, 'folder1')
        folder1.invokeFactory('Event', 'event11')
        event11 = getattr(self.portal.folder1, 'event11')
        event11.edit(startDate=self.event_date, endDate=self.event_date)
        self.workflow.doActionFor(event11, 'publish', comment='testing')
        self.setRoles(['Member'])

    def testGetEventsForCalendar(self):
        """Without a path restriction both events appear on Feb 8."""
        events = self.calendar.getEventsForCalendar(
            month=self.event_date.month(), year=self.event_date.year())
        # Expected: the five week rows of February 2008 (Mon-first).
        data = [
            [{'eventslist': [], 'day': 0, 'event': 0},
             {'eventslist': [], 'day': 0, 'event': 0},
             {'eventslist': [], 'day': 0, 'event': 0},
             {'eventslist': [], 'day': 0, 'event': 0},
             {'eventslist': [], 'day': 1, 'event': 0},
             {'eventslist': [], 'day': 2, 'event': 0},
             {'eventslist': [], 'day': 3, 'event': 0}],
            [{'eventslist': [], 'day': 4, 'event': 0},
             {'eventslist': [], 'day': 5, 'event': 0},
             {'eventslist': [], 'day': 6, 'event': 0},
             {'eventslist': [], 'day': 7, 'event': 0},
             {'day': 8, 'event': 1,
              'eventslist': [
                  {'end': '00:00:00', 'start': '00:00:00', 'title': 'event1'},
                  {'end': '00:00:00', 'start': '00:00:00', 'title': 'event11'}]},
             {'eventslist': [], 'day': 9, 'event': 0},
             {'eventslist': [], 'day': 10, 'event': 0}],
            [{'eventslist': [], 'day': 11, 'event': 0},
             {'eventslist': [], 'day': 12, 'event': 0},
             {'eventslist': [], 'day': 13, 'event': 0},
             {'eventslist': [], 'day': 14, 'event': 0},
             {'eventslist': [], 'day': 15, 'event': 0},
             {'eventslist': [], 'day': 16, 'event': 0},
             {'eventslist': [], 'day': 17, 'event': 0}],
            [{'eventslist': [], 'day': 18, 'event': 0},
             {'eventslist': [], 'day': 19, 'event': 0},
             {'eventslist': [], 'day': 20, 'event': 0},
             {'eventslist': [], 'day': 21, 'event': 0},
             {'eventslist': [], 'day': 22, 'event': 0},
             {'eventslist': [], 'day': 23, 'event': 0},
             {'eventslist': [], 'day': 24, 'event': 0}],
            [{'eventslist': [], 'day': 25, 'event': 0},
             {'eventslist': [], 'day': 26, 'event': 0},
             {'eventslist': [], 'day': 27, 'event': 0},
             {'eventslist': [], 'day': 28, 'event': 0},
             {'eventslist': [], 'day': 29, 'event': 0},
             {'eventslist': [], 'day': 0, 'event': 0},
             {'eventslist': [], 'day': 0, 'event': 0}]]
        self.assertEqual(events, data)

    def testGetEventsForCalendarInPath(self):
        """With a path restriction only the folder's event11 appears."""
        path = "/".join(self.portal.folder1.getPhysicalPath())
        events = self.calendar.getEventsForCalendar(
            month=self.event_date.month(), year=self.event_date.year(),
            path=path)
        data = [
            [{'eventslist': [], 'day': 0, 'event': 0},
             {'eventslist': [], 'day': 0, 'event': 0},
             {'eventslist': [], 'day': 0, 'event': 0},
             {'eventslist': [], 'day': 0, 'event': 0},
             {'eventslist': [], 'day': 1, 'event': 0},
             {'eventslist': [], 'day': 2, 'event': 0},
             {'eventslist': [], 'day': 3, 'event': 0}],
            [{'eventslist': [], 'day': 4, 'event': 0},
             {'eventslist': [], 'day': 5, 'event': 0},
             {'eventslist': [], 'day': 6, 'event': 0},
             {'eventslist': [], 'day': 7, 'event': 0},
             {'day': 8, 'event': 1,
              'eventslist': [
                  {'end': '00:00:00', 'start': '00:00:00', 'title': 'event11'}]},
             {'eventslist': [], 'day': 9, 'event': 0},
             {'eventslist': [], 'day': 10, 'event': 0}],
            [{'eventslist': [], 'day': 11, 'event': 0},
             {'eventslist': [], 'day': 12, 'event': 0},
             {'eventslist': [], 'day': 13, 'event': 0},
             {'eventslist': [], 'day': 14, 'event': 0},
             {'eventslist': [], 'day': 15, 'event': 0},
             {'eventslist': [], 'day': 16, 'event': 0},
             {'eventslist': [], 'day': 17, 'event': 0}],
            [{'eventslist': [], 'day': 18, 'event': 0},
             {'eventslist': [], 'day': 19, 'event': 0},
             {'eventslist': [], 'day': 20, 'event': 0},
             {'eventslist': [], 'day': 21, 'event': 0},
             {'eventslist': [], 'day': 22, 'event': 0},
             {'eventslist': [], 'day': 23, 'event': 0},
             {'eventslist': [], 'day': 24, 'event': 0}],
            [{'eventslist': [], 'day': 25, 'event': 0},
             {'eventslist': [], 'day': 26, 'event': 0},
             {'eventslist': [], 'day': 27, 'event': 0},
             {'eventslist': [], 'day': 28, 'event': 0},
             {'eventslist': [], 'day': 29, 'event': 0},
             {'eventslist': [], 'day': 0, 'event': 0},
             {'eventslist': [], 'day': 0, 'event': 0}]]
        self.assertEqual(events, data)

    def testCatalogGetEvents(self):
        """catalog_getevents maps day number -> cell; both events on day 8."""
        events = self.calendar.catalog_getevents(month=self.event_date.month(),
                                                 year=self.event_date.year())
        data = [{'eventslist': [], 'day': 2, 'event': 0},
                {'eventslist': [], 'day': 3, 'event': 0},
                {'eventslist': [], 'day': 4, 'event': 0},
                {'eventslist': [], 'day': 5, 'event': 0},
                {'eventslist': [], 'day': 6, 'event': 0},
                {'eventslist': [], 'day': 7, 'event': 0},
                {'day': 8, 'event': 1,
                 'eventslist': [
                     {'start': '00:00:00', 'end': '00:00:00', 'title': 'event1'},
                     {'end': '00:00:00', 'start': '00:00:00', 'title': 'event11'}]}]
        # only days 2..8 are compared
        self.assertEqual([events[e] for e in range(2, 9)], data)

    def testCatalogGetEventsInPath(self):
        """catalog_getevents honors the path restriction (event11 only)."""
        path = "/".join(self.portal.folder1.getPhysicalPath())
        events = self.calendar.catalog_getevents(month=self.event_date.month(),
                                                 year=self.event_date.year(),
                                                 path=path)
        data = [{'eventslist': [], 'day': 2, 'event': 0},
                {'eventslist': [], 'day': 3, 'event': 0},
                {'eventslist': [], 'day': 4, 'event': 0},
                {'eventslist': [], 'day': 5, 'event': 0},
                {'eventslist': [], 'day': 6, 'event': 0},
                {'eventslist': [], 'day': 7, 'event': 0},
                {'day': 8, 'event': 1,
                 'eventslist': [
                     {'end': '00:00:00', 'start': '00:00:00', 'title': 'event11'}]}]
        self.assertEqual([events[e] for e in range(2, 9)], data)
def edit(self, title=None, description=None, eventType=None,
         effectiveDay=None, effectiveMo=None, effectiveYear=None,
         expirationDay=None, expirationMo=None, expirationYear=None,
         start_time=None, startAMPM=None, stop_time=None, stopAMPM=None,
         location=None, contact_name=None, contact_email=None,
         contact_phone=None, event_url=None):
    """Update event fields; every argument is optional.

    Only non-None arguments are applied.  Start/end dates are assembled
    from the split day/month/year + time + AM/PM form fields and are only
    stored when BOTH dates are complete; an end date earlier than the
    start is clamped to the start.  Reindexes the object at the end.
    """
    if title is not None:
        self.setTitle(title)
    if description is not None:
        self.setDescription(description)
    if eventType is not None:
        self.setSubject(eventType)
    start_date = end_date = None
    # Build 'YYYY/MM/DD HH:MM AM' strings for DateTime parsing.
    if effectiveDay and effectiveMo and effectiveYear and start_time:
        efdate = '%s/%s/%s %s %s' % (effectiveYear, effectiveMo,
                                     effectiveDay, start_time, startAMPM)
        start_date = DateTime(efdate)
    if expirationDay and expirationMo and expirationYear and stop_time:
        exdate = '%s/%s/%s %s %s' % (expirationYear, expirationMo,
                                     expirationDay, stop_time, stopAMPM)
        end_date = DateTime(exdate)
    if start_date and end_date:
        # an inverted range collapses to a zero-length event
        if end_date < start_date:
            end_date = start_date
        self.setStartDate(start_date)
        self.setEndDate(end_date)
    if location is not None:
        self.location = location
    if contact_name is not None:
        self.contact_name = contact_name
    if contact_email is not None:
        self.contact_email = contact_email
    if contact_phone is not None:
        self.contact_phone = contact_phone
    if event_url is not None:
        self.event_url = event_url
    self.reindexObject()
def workflow_script_dispose(self):
    """Workflow script: record the disposal timestamp on the sample and
    refresh its catalog entry."""
    disposal_time = DateTime()
    self.setDateDisposed(disposal_time)
    self.reindexObject()
def test_search_restrict_manager(self):
    """A Manager sees an already-expired document, and expires-based
    range queries behave as for unrestricted searches."""
    catalog = self._makeOne()
    catalog.addIndex('allowedRolesAndUsers', 'KeywordIndex')
    catalog.addIndex('effective', 'DateIndex')
    catalog.addIndex('expires', 'DateIndex')
    now = DateTime()
    dummy = DummyContent(catalog=1)
    self.loginManager()
    # already expired: effective four days ago, expired two days ago
    dummy.effective = now - 4
    dummy.expires = now - 2
    catalog.catalog_object(dummy, '/dummy')
    self.assertEqual(1, len(catalog._catalog.searchResults()))
    self.assertEqual(1, len(catalog.searchResults()))
    # (expected hit count, expires query) — same order as before
    cases = (
        (1, {'query': now - 3, 'range': 'min'}),
        (0, {'query': now - 1, 'range': 'min'}),
        (0, {'query': now - 3, 'range': 'max'}),
        (1, {'query': now - 1, 'range': 'max'}),
        (1, {'query': (now - 3, now - 1), 'range': 'min:max'}),
        (1, {'query': (now - 3, now - 1), 'range': 'minmax'}),
        (1, {'query': now - 2}),
        (1, {'query': now - 2, 'range': None}),
    )
    for expected, expires_query in cases:
        found = len(catalog.searchResults(expires=expires_query))
        self.assertEqual(expected, found)
def current_date(self):
    """Return the current date and time as a Zope DateTime."""
    return DateTime()
def workflow_script_dispatch(self, state_info):
    """Workflow script: record the dispatch timestamp on the order and
    refresh its catalog entry.

    ``state_info`` is supplied by the workflow machinery but not used.
    """
    dispatch_time = DateTime()
    self.setDateDispatched(dispatch_time)
    self.reindexObject()
def test_search_restrict_visible(self):
    """A currently-visible document (effective in the past, expires in
    the future) is searchable by a non-manager, and date queries are
    clipped to the visibility window."""
    catalog = self._makeOne()
    catalog.addIndex('allowedRolesAndUsers', 'KeywordIndex')
    catalog.addIndex('effective', 'DateIndex')
    catalog.addIndex('expires', 'DateIndex')
    now = DateTime()
    dummy = DummyContent(catalog=1)
    dummy._View_Permission = ('Blob', )
    self.loginWithRoles('Blob')
    # visible: effective two days ago, expires two days from now
    dummy.effective = now - 2
    dummy.expires = now + 2
    catalog.catalog_object(dummy, '/dummy')
    self.assertEqual(1, len(catalog._catalog.searchResults()))
    self.assertEqual(1, len(catalog.searchResults()))
    # (expected hit count, index name, query) — same order as before
    cases = (
        (0, 'effective', {'query': now - 1, 'range': 'min'}),
        (1, 'effective', {'query': now - 1, 'range': 'max'}),
        (0, 'effective', {'query': now + 1, 'range': 'min'}),
        (1, 'effective', {'query': now + 1, 'range': 'max'}),
        (0, 'effective', {'query': (now - 1, now + 1), 'range': 'min:max'}),
        (0, 'effective', {'query': (now - 1, now + 1), 'range': 'minmax'}),
        (1, 'expires', {'query': now - 2, 'range': None}),
        (1, 'effective', {'query': now - 3, 'range': 'min'}),
        (0, 'effective', {'query': now - 3, 'range': 'max'}),
        (0, 'effective', {'query': now + 3, 'range': 'min'}),
        (1, 'effective', {'query': now + 3, 'range': 'max'}),
        (1, 'effective', {'query': (now - 3, now + 3), 'range': 'min:max'}),
        (1, 'effective', {'query': (now - 3, now + 3), 'range': 'minmax'}),
        (1, 'expires', {'query': now - 1, 'range': 'min'}),
        (0, 'expires', {'query': now - 1, 'range': 'max'}),
        (1, 'expires', {'query': now + 1, 'range': 'min'}),
        (0, 'expires', {'query': now + 1, 'range': 'max'}),
        (0, 'expires', {'query': (now - 1, now + 1), 'range': 'min:max'}),
        (0, 'expires', {'query': (now - 1, now + 1), 'range': 'minmax'}),
        (1, 'expires', {'query': now - 3, 'range': 'min'}),
        (0, 'expires', {'query': now - 3, 'range': 'max'}),
        (0, 'expires', {'query': now + 3, 'range': 'min'}),
        (1, 'expires', {'query': now + 3, 'range': 'max'}),
        (1, 'expires', {'query': (now - 3, now + 3), 'range': 'min:max'}),
        (1, 'expires', {'query': (now - 3, now + 3), 'range': 'minmax'}),
    )
    for expected, index_name, date_query in cases:
        found = len(catalog.searchResults(**{index_name: date_query}))
        self.assertEqual(expected, found)
    # combined effective/expires window queries
    self.assertEqual(1, len(catalog.searchResults(
        effective={'query': now - 1, 'range': 'max'},
        expires={'query': now + 1, 'range': 'min'})))
    self.assertEqual(0, len(catalog.searchResults(
        effective={'query': now + 1, 'range': 'max'},
        expires={'query': now + 3, 'range': 'min'})))
def workflow_script_dispatch(self):
    """Workflow script: record the dispatch timestamp on the order.

    Also reindexes the object so the updated date is reflected in the
    catalog — the sibling workflow scripts in this file (expire,
    dispose, dispatch-with-state_info) all reindex after mutating the
    date; this one previously did not, leaving stale catalog data.
    """
    self.setDateDispatched(DateTime())
    self.reindexObject()
def current_date(self):
    """Return the current date and time as a Zope DateTime."""
    return DateTime()
def __call__(self):
    """Build the monthly viral-load statistics Excel workbook.

    Reads year/month and a category UID from the request form, collects
    verified/published analyses published within that month, renders one
    statistics row per client (grouped by province and district), and
    returns the filled workbook.  Returns None when no CategoryUID is
    given.
    """
    # year defaults to the current year, month to January
    year = int(
        self.request.form.get('year_viralloadstatistics',
                              DateTime().year()))
    month = int(self.request.form.get('month_viralloadstatistics', "1"))
    category_uid = self.request.form.get('CategoryUID', None)
    if not category_uid:
        return
    logger.warn("year:{}".format(year))
    logger.warn("month:{}".format(month))
    # Full-month window: first to last day of the requested month.
    last_day = calendar.monthrange(year, month)[1]
    date_from = "{}-{}-01".format(year, month)
    date_to = "{}-{}-{}".format(year, month, last_day)
    date_from = api.to_date(date_from, DateTime())
    date_to = api.to_date(date_to, DateTime())
    # Load the Excel template shipped next to this module.
    this_dir = os.path.dirname(os.path.abspath(__file__))
    templates_dir = os.path.join(this_dir, 'excel_files')
    wb_path = '/'.join([templates_dir, XLS_TEMPLATE])
    self.workbook = load_workbook(wb_path)
    # Fill statistics' header sheet
    ws_stats = self.workbook.get_sheet_by_name(SHEET_STATISTICS)
    month_name = datetime.date(year, month, 1).strftime("%B")
    month_name = month_name.upper()
    ws_stats["C2"] = "REPORTING MONTH/YEAR: {}/{}".format(month_name, year)
    query = {
        'getCategoryUID': category_uid,
        'getDatePublished': {
            'query': [date_from, date_to],
            'range': 'min:max'
        },
        'review_state': ['verified', 'published'],
        'cancellation_state': 'active',
        'sort_on': 'getClientTitle',
        'sort_order': 'ascending'
    }
    catalog = api.get_tool(CATALOG_ANALYSIS_LISTING)
    for analysis_brain in catalog(query):
        # analyses without an associated patient are skipped
        patient_brain = self.get_patient_brain(analysis_brain)
        if not patient_brain:
            continue
        # accumulates cell values into self.cells (rendered below)
        self.render_statistics_row(analysis_brain)
    # Fill statistics sheet
    row_num_start = 6
    row_num = row_num_start  # start on row number 6 (headers before)
    # self.cells layout: {sheet: {province: {district: {client_uid: {column: value}}}}}
    provinces_dict = self.cells.get(SHEET_STATISTICS, dict())
    provinces = provinces_dict.keys()
    provinces.sort()
    for province in provinces:
        logger.warn("Province: {}".format(province))
        districts_dict = provinces_dict.get(province)
        districts = districts_dict.keys()
        districts.sort()
        for district in districts:
            logger.warn(" District: {}".format(district))
            clients = districts_dict.get(district)
            for client_uid, row in clients.items():
                logger.warn(" Client: {}".format(client_uid))
                for column, cell_value in row.items():
                    cell_id = '{}{}'.format(column, row_num)
                    logger.warn(" {}: {}".format(cell_id, cell_value))
                    ws_stats[cell_id] = cell_value
                # column A carries a 1-based sequential row number
                cell_id = 'A{}'.format(row_num)
                ws_stats[cell_id] = row_num - row_num_start + 1
                row_num += 1
    # Save the file in memory
    return save_in_memory_and_return(self.workbook)
def _deserialize(kls, data):
    """Rebuild a DateTime instance from its serialized representation."""
    return DateTime(data)
def _range_request_handler(self, REQUEST, RESPONSE):
    """Serve partial content for array data over HTTP.

    Handles two mechanisms and returns True when a (partial) body was
    written to RESPONSE:

    * an explicit ``slice_index`` (plus optional ``list_index``) form
      parameter, applied directly to the underlying array; or
    * standard HTTP ``Range`` / ``Request-Range`` headers (honoring
      ``If-Range`` with both ETag and date forms), serving either a
      single 206 chunk or a multipart/byteranges response.

    Returns None (falls through to a normal 200 response elsewhere)
    when no range mechanism applies.
    """
    RESPONSE.setHeader("Content-Type", "application/octet-stream")
    # HTTP Range header handling: return True if we've served a range
    # chunk out of our data.
    # convert ranges from bytes to array indices
    slice_index = REQUEST.get('slice_index', None)
    if slice_index is not None:
        # Build one slice() per requested dimension.
        slice_index_list = []
        for index in slice_index:
            slice_index_list.append(slice(index.get('start'),
                                          index.get('stop'),
                                          index.get('step')))
        list_index = REQUEST.get('list_index', None)
        if list_index is not None:
            RESPONSE.write(
                self.getArray()[tuple(slice_index_list)][list_index].tobytes())
        else:
            RESPONSE.write(self.getArray()[tuple(slice_index_list)].tobytes())
        return True
    # NOTE: shadows the `range` builtin (kept for byte-compatibility).
    range = REQUEST.get_header('Range', None)
    request_range = REQUEST.get_header('Request-Range', None)
    if request_range is not None:
        # Netscape 2 through 4 and MSIE 3 implement a draft version
        # Later on, we need to serve a different mime-type as well.
        range = request_range
    if_range = REQUEST.get_header('If-Range', None)
    if range is not None:
        ranges = HTTPRangeSupport.parseRange(range)
        array = self.getArray()
        # bytes per row along the first axis; used to map byte offsets
        # to array indices (Python 2 integer division)
        factor = array.nbytes / array.shape[0]
        if if_range is not None:
            # Only send ranges if the data isn't modified, otherwise send
            # the whole object. Support both ETags and Last-Modified dates!
            if len(if_range) > 1 and if_range[:2] == 'ts':
                # ETag:
                if if_range != self.http__etag():
                    # Modified, so send a normal response. We delete
                    # the ranges, which causes us to skip to the 200
                    # response.
                    ranges = None
            else:
                # Date
                date = if_range.split(';')[0]
                try:
                    mod_since = long(DateTime(date).timeTime())
                except:
                    # unparsable If-Range date: treat as absent
                    mod_since = None
                if mod_since is not None:
                    last_mod = self._data_mtime()
                    if last_mod is None:
                        last_mod = 0
                    last_mod = long(last_mod)
                    if last_mod > mod_since:
                        # Modified, so send a normal response. We delete
                        # the ranges, which causes us to skip to the 200
                        # response.
                        ranges = None
        if ranges:
            # Search for satisfiable ranges.
            satisfiable = 0
            for start, end in ranges:
                if start < self.getSize():
                    satisfiable = 1
                    break
            if not satisfiable:
                # 416: none of the requested ranges fits in the data
                RESPONSE.setHeader('Content-Range',
                                   'bytes */%d' % self.getSize())
                RESPONSE.setHeader('Accept-Ranges', 'bytes')
                RESPONSE.setHeader('Last-Modified',
                                   rfc1123_date(self._data_mtime()))
                RESPONSE.setHeader('Content-Type', self.content_type)
                RESPONSE.setHeader('Content-Length', self.getSize())
                RESPONSE.setStatus(416)
                return True
            ranges = HTTPRangeSupport.expandRanges(ranges, self.getSize())
            if len(ranges) == 1:
                # Easy case, set extra header and return partial set.
                start, end = ranges[0]
                size = end - start
                RESPONSE.setHeader('Last-Modified',
                                   rfc1123_date(self._data_mtime()))
                RESPONSE.setHeader('Content-Type', self.content_type)
                RESPONSE.setHeader('Content-Length', size)
                RESPONSE.setHeader('Accept-Ranges', 'bytes')
                RESPONSE.setHeader('Content-Range',
                                   'bytes %d-%d/%d' % (start, end - 1,
                                                       self.getSize()))
                RESPONSE.setStatus(206)  # Partial content
                # convert ranges from bytes to array indices
                RESPONSE.write(array[start / factor:end / factor].tobytes())
            else:
                boundary = choose_boundary()
                # Calculate the content length
                size = (8 + len(boundary) +  # End marker length
                        len(ranges) * (      # Constant lenght per set
                            49 + len(boundary) + len(self.content_type) +
                            len('%d' % self.getSize())))
                for start, end in ranges:
                    # Variable length per set
                    size = (size + len('%d%d' % (start, end - 1)) +
                            end - start)
                # Some clients implement an earlier draft of the spec, they
                # will only accept x-byteranges.
                draftprefix = (request_range is not None) and 'x-' or ''
                RESPONSE.setHeader('Content-Length', size)
                RESPONSE.setHeader('Accept-Ranges', 'bytes')
                RESPONSE.setHeader('Last-Modified',
                                   rfc1123_date(self._data_mtime()))
                RESPONSE.setHeader('Content-Type',
                                   'multipart/%sbyteranges; boundary=%s' % (
                                       draftprefix, boundary))
                RESPONSE.setStatus(206)  # Partial content
                for start, end in ranges:
                    RESPONSE.write('\r\n--%s\r\n' % boundary)
                    RESPONSE.write('Content-Type: %s\r\n' %
                                   self.content_type)
                    RESPONSE.write(
                        'Content-Range: bytes %d-%d/%d\r\n\r\n' % (
                            start, end - 1, self.getSize()))
                    # convert ranges from bytes to array indices
                    RESPONSE.write(
                        array[start / factor:end / factor].tobytes())
                RESPONSE.write('\r\n--%s--\r\n' % boundary)
            return True
from DateTime import DateTime
from Products.ERP5Type.DateUtils import addToDate
# NOTE(review): SimpleQuery is imported but unused in the visible code.
from Products.ZSQLCatalog.SQLCatalog import Query, SimpleQuery

portal_catalog = context.getPortalObject().portal_catalog

# search for dates older than one minute ago
# Only stop data ingestion of related resource is configured for batch
# ingestion
old_start_date = addToDate(DateTime(), {'minute': -1})
# 'ngt' = not greater than: start_date <= one minute ago
start_date_query = Query(**{
    'delivery.start_date': old_start_date,
    'range': 'ngt'
})
kw_dict = {
    "query": start_date_query,
    "portal_type": "Data Ingestion",
    "simulation_state": "started",
    "use_relative_url": "use/big_data/ingestion/batch"
}
# uids of started batch Data Ingestions older than one minute
parent_uid_list = [x.getUid() for x in portal_catalog(**kw_dict)]
if len(parent_uid_list) != 0:
    # NOTE(review): this second kw_dict is built but never used in the
    # visible chunk — the script appears truncated here (presumably a
    # catalog search over Data Ingestion Lines follows); confirm against
    # the full source.
    kw_dict = {
        "portal_type": "Data Ingestion Line",
        "stock.quantity": '!=0',
        "resource_portal_type": "Data Product",
        "parent_uid": parent_uid_list
    }
def test_01_delivery_mode_on_sale_packing_list_and_invoice(
        self, quiet=0, run=run_all_test):
    """Check that delivery mode and incoterm set on a Sale Order are
    propagated to the Sale Packing List built from it, and then to the
    Sale Invoice Transaction built from the packing list.

    (The previous docstring described a divergence/source_asset_price
    scenario that this test does not exercise.)
    """
    if not run:
        return
    if not quiet:
        printAndLog(
            'test_01_delivery_mode_on_sale_packing_list_and_invoice')
    resource = self.portal.product_module.newContent(
        portal_type='Product',
        title='Resource',
        product_line='apparel')
    currency = self.portal.currency_module.newContent(
        portal_type='Currency',
        title='euro')
    currency.setBaseUnitQuantity(0.01)
    new_currency = \
        self.portal.currency_module.newContent(portal_type='Currency')
    new_currency.setReference('XOF')
    new_currency.setTitle('Francs CFA')
    new_currency.setBaseUnitQuantity(1.00)
    self.tic()  # execute transaction
    # EUR -> XOF exchange line, valid for one day only
    x_curr_ex_line = currency.newContent(
        portal_type='Currency Exchange Line',
        price_currency=new_currency.getRelativeUrl())
    x_curr_ex_line.setTitle('Euro to Francs CFA')
    x_curr_ex_line.setBasePrice(655.957)
    x_curr_ex_line.setStartDate(DateTime(2008, 10, 21))
    x_curr_ex_line.setStopDate(DateTime(2008, 10, 22))
    x_curr_ex_line.validate()
    self.createBusinessProcess(currency)
    self.tic()  # execute transaction
    # client trades in XOF while the order is priced in EUR
    client = self.portal.organisation_module.newContent(
        portal_type='Organisation', title='Client',
        price_currency=new_currency.getRelativeUrl(),
        default_address_region=self.default_region)
    vendor = self.portal.organisation_module.newContent(
        portal_type='Organisation', title='Vendor',
        default_address_region=self.default_region)
    # the order carries the delivery_mode/incoterm under test
    order = self.portal.sale_order_module.newContent(
        portal_type='Sale Order',
        source_value=vendor,
        source_section_value=vendor,
        destination_value=client,
        destination_section_value=client,
        start_date=DateTime(2008, 10, 21),
        price_currency_value=currency,
        delivery_mode=self.mail_delivery_mode,
        incoterm=self.cpt_incoterm,
        specialise_value=self.business_process,
        title='Order')
    order.newContent(portal_type='Sale Order Line',
                     resource_value=resource,
                     quantity=5,
                     price=2)
    order.confirm()
    self.tic()
    self.buildPackingLists()
    # order -> packing list propagation
    related_packing_list = order.getCausalityRelatedValue(
        portal_type='Sale Packing List')
    self.assertNotEquals(related_packing_list, None)
    self.assertEqual(related_packing_list.getDeliveryMode(),
                     order.getDeliveryMode())
    self.assertEqual(related_packing_list.getIncoterm(),
                     order.getIncoterm())
    related_packing_list.start()
    related_packing_list.stop()
    self.tic()
    self.buildInvoices()
    # packing list -> invoice propagation
    related_invoice = related_packing_list.getCausalityRelatedValue(
        portal_type='Sale Invoice Transaction')
    self.assertNotEquals(related_invoice, None)
    self.assertEqual(related_invoice.getDeliveryMode(),
                     order.getDeliveryMode())
    self.assertEqual(related_invoice.getIncoterm(),
                     order.getIncoterm())
def generateMovementListForStockOptimisation(self, group_by_node=1, **kw):
  """Return a list of temporary movements for stock optimisation.

  For every (resource, variation) whose future inventory drops below the
  min_stock defined on the matching supply line, a refill movement of at
  least min_flow is generated.  Conversely, when auto-planned previous
  optimisations leave more stock than min_stock, negative movements are
  generated to cancel the excess.

  :param group_by_node: passed through to the inventory API queries.
  :param kw: extra inventory API criteria; node_uid / section_uid are
             filled in from this builder's destination when available.
  :return: list of temporary Movement objects (not persisted).
  """
  now = DateTime()
  movement_list = []
  # Restrict queries to this builder's destination node/section when set.
  for attribute, method in [('node_uid', 'getDestinationUid'),
                            ('section_uid', 'getDestinationSectionUid')]:
    value = getattr(self, method)()
    if value not in ("", None):
      kw[attribute] = value
  # We have to check the inventory for each stock movement date.
  # Inventory can be negative in some date, and positive in the future!
  # This must be done by subclassing OrderBuilder with a new inventory
  # algorithm.
  inventory_kw = kw.copy()
  inventory_kw.setdefault('group_by_variation', 1)
  inventory_kw.setdefault('group_by_resource', 1)
  inventory_kw.setdefault('group_by_section', 0)
  sql_list = self.portal_simulation.getFutureInventoryList(
      group_by_node=group_by_node, **inventory_kw)
  # min_flow and max_delay are stored on a supply line. By default
  # we can get them through a method having the right supply type prefix
  # like getPurchaseSupplyLineMinFlow. So we need to guess the supply
  # prefix from the delivery portal type built by this builder.
  delivery_type = self.getDeliveryPortalType()
  portal = self.getPortalObject()
  if delivery_type in portal.getPortalPurchaseTypeList():
    supply_prefix = 'purchase'
  elif delivery_type in portal.getPortalSaleTypeList():
    supply_prefix = 'sale'
  else:
    supply_prefix = 'internal'
  resource_portal_type_list = self.getResourcePortalTypeList()

  def newMovement(inventory_item, resource):
    # Create a temporary movement pre-filled with resource, destination
    # and variation; quantity and dates are set by the caller.
    movement = portal.portal_trash.newContent(
        portal_type="Movement", temp_object=True)
    resource_portal_type = resource.getPortalType()
    assert resource_portal_type in resource_portal_type_list, \
        "Builder %r does not support resource of type : %r" % (
        self.getRelativeUrl(), resource_portal_type)
    movement.edit(
        resource=inventory_item.resource_relative_url,
        # XXX FIXME define on a supply line
        # quantity_unit
        quantity_unit=resource.getQuantityUnit(),
        destination_value=self.getDestinationValue(),
        resource_portal_type=resource_portal_type,
        destination_section_value=self.getDestinationSectionValue())
    # define variation after resource is set
    movement.edit(variation_text=inventory_item.variation_text)
    return movement

  for inventory_item in sql_list:
    if inventory_item.inventory is not None:
      resource = portal.portal_catalog.getObject(
          inventory_item.resource_uid)
      # Get min_flow, max_delay, min_stock from the supply line.
      min_flow = 0
      max_delay = 0
      min_stock = 0
      if supply_prefix:
        min_flow = resource.getProperty(
            supply_prefix + '_supply_line_min_flow', 0)
        max_delay = resource.getProperty(
            supply_prefix + '_supply_line_max_delay', 0)
        min_stock = resource.getProperty(
            supply_prefix + '_supply_line_min_stock', 0)
      # Rounding avoids spurious refills from float noise.
      if round(inventory_item.inventory, 5) < min_stock:
        # Stock is (or will be) too low: build a refill movement ending
        # at the first date the stock alert threshold is crossed.
        stop_date = resource.getNextAlertInventoryDate(
            reference_quantity=min_stock,
            variation_text=inventory_item.variation_text,
            from_date=now,
            group_by_node=group_by_node, **kw)
        if stop_date is None:
          stop_date = now
        movement = newMovement(inventory_item, resource)
        movement.edit(
            start_date=stop_date - max_delay,
            stop_date=stop_date,
            quantity=max(min_flow, -inventory_item.inventory),
        )
        movement_list.append(movement)
      # We could need to cancel automated stock optimization if for some
      # reasons previous optimisations are obsolete
      elif round(inventory_item.inventory, 5) > min_stock:
        delta = inventory_item.inventory - min_stock
        node_uid = inventory_item.node_uid
        # if node_uid is provided, we have to look at all provided nodes
        if 'node_uid' in kw:
          node_uid = kw['node_uid']
        optimized_kw = {}
        if kw.get('group_by_variation', 1):
          optimized_kw['variation_text'] = inventory_item.variation_text
        # Walk previous auto-planned optimisations, newest first, and
        # cancel them until the excess (delta) is consumed.
        optimized_inventory_list = portal.portal_simulation.getInventoryList(
            resource_uid=inventory_item.resource_uid, node_uid=node_uid,
            simulation_state="auto_planned",
            sort_on=[("date", "descending")],
            group_by_node=group_by_node, **optimized_kw)
        for optimized_inventory in optimized_inventory_list:
          movement = newMovement(inventory_item, resource)
          quantity = min(delta, optimized_inventory.inventory)
          delta = delta - quantity
          movement.edit(start_date=optimized_inventory.date,
                        quantity=-quantity)
          movement_list.append(movement)
          if delta <= 0:
            break
  return movement_list
def test_01_source_total_asset_price_on_accounting_lines(self, quiet=0,
    run=run_all_test):
  """Tests that the delivery builder of the invoice transaction lines
  copies the source asset price on the accounting_lines of the invoice.

  The source section (vendor) accounts in XOF while the invoice is in
  EUR, so each accounting line must carry a source total asset price
  converted at the registered EUR->XOF rate (655.957), while the
  destination total asset price stays unset.
  """
  if not run: return
  if not quiet:
    printAndLog(
      'test_01_source_total_asset_price_on_accounting_lines')
  # Product sold by the order below.
  resource = self.portal.product_module.newContent(
      portal_type='Product',
      title='Resource',
      product_line='apparel')
  # Pricing currency of the order (EUR, precision 0.01).
  currency = self.portal.currency_module.newContent(
      portal_type='Currency',
      title='euro')
  currency.setBaseUnitQuantity(0.01)
  # Accounting currency of the vendor (XOF).
  new_currency = \
      self.portal.currency_module.newContent(portal_type='Currency')
  new_currency.setReference('XOF')
  new_currency.setTitle('Francs CFA')
  new_currency.setBaseUnitQuantity(1.00)
  self.tic()  # execute transaction
  # Fixed EUR -> XOF exchange rate valid on the order's start date.
  x_curr_ex_line = currency.newContent(
      portal_type='Currency Exchange Line',
      price_currency=new_currency.getRelativeUrl())
  x_curr_ex_line.setTitle('Euro to Francs CFA')
  x_curr_ex_line.setBasePrice(655.957)
  x_curr_ex_line.setStartDate(DateTime(2008, 10, 21))
  x_curr_ex_line.setStopDate(DateTime(2008, 10, 22))
  x_curr_ex_line.validate()
  self.createBusinessProcess(currency)
  self.tic()  # execute transaction
  client = self.portal.organisation_module.newContent(
      portal_type='Organisation', title='Client',
      default_address_region=self.default_region)
  # The vendor (source section) accounts in XOF, hence the conversion.
  vendor = self.portal.organisation_module.newContent(
      portal_type='Organisation', title='Vendor',
      price_currency=new_currency.getRelativeUrl(),
      default_address_region=self.default_region)
  order = self.portal.sale_order_module.newContent(
      portal_type='Sale Order',
      source_value=vendor,
      source_section_value=vendor,
      destination_value=client,
      destination_section_value=client,
      start_date=DateTime(2008, 10, 21),
      price_currency_value=currency,
      specialise_value=self.business_process,
      title='Order')
  order.newContent(portal_type='Sale Order Line',
                   resource_value=resource,
                   quantity=1,
                   price=2)
  order.confirm()
  self.tic()
  self.buildPackingLists()
  related_packing_list = order.getCausalityRelatedValue(
      portal_type='Sale Packing List')
  self.assertNotEquals(related_packing_list, None)
  related_packing_list.start()
  related_packing_list.stop()
  self.tic()
  self.buildInvoices()
  related_invoice = related_packing_list.getCausalityRelatedValue(
      portal_type='Sale Invoice Transaction')
  self.assertNotEquals(related_invoice, None)
  related_invoice.start()
  self.tic()
  # Collect the accounting movements built on the invoice.
  line_list = related_invoice.contentValues(
      portal_type=self.portal.getPortalAccountingMovementTypeList())
  self.assertNotEquals(line_list, None)
  result_list = []
  for line in line_list:
    result_list.append((line.getSource(),
                        line.getSourceTotalAssetPrice()))
    # Destination side has no price_currency conversion configured.
    self.assertEqual(line.getDestinationTotalAssetPrice(), None)
  # price=2, VAT 19.6%, converted to XOF at 655.957 and rounded
  # (XOF base unit quantity is 1.00).
  self.assertEquals(
    sorted(result_list),
    sorted([
      ('account_module/customer', round(2 * (1 + 0.196) * 655.957)),
      ('account_module/receivable_vat', round(-2 * 0.196 * 655.957)),
      ('account_module/sale', round(-2 * 655.957))
    ])
  )
  self.assertEqual(len(related_invoice.checkConsistency()), 0)
def test_01_quantity_unit_on_sale_packing_list(
    self, quiet=0, run=run_all_test):
  """Check that each Sale Packing List movement keeps the quantity unit
  of the Sale Order Line it was built from, when one product is ordered
  with two different quantity units (pieces and mass).
  """
  if not run: return
  if not quiet:
    printAndLog(
      'test_01_quantity_unit_on_sale_packing_list')
  # One product that can be measured either in pieces or by mass.
  product = self.portal.product_module.newContent(
      portal_type='Product',
      title='Resource',
      product_line='apparel')
  product.setQuantityUnitList([self.unit_piece_quantity_unit,
                               self.mass_quantity_unit])
  trade_currency = self.portal.currency_module.newContent(
      portal_type='Currency',
      title='euro')
  trade_currency.setBaseUnitQuantity(0.01)
  self.createBusinessProcess(trade_currency)
  self.tic()  # execute transaction
  buyer = self.portal.organisation_module.newContent(
      portal_type='Organisation', title='Client',
      default_address_region=self.default_region)
  seller = self.portal.organisation_module.newContent(
      portal_type='Organisation', title='Vendor',
      default_address_region=self.default_region)
  sale_order = self.portal.sale_order_module.newContent(
      portal_type='Sale Order',
      source_value=seller,
      source_section_value=seller,
      destination_value=buyer,
      destination_section_value=buyer,
      start_date=DateTime(2008, 10, 21),
      price_currency_value=trade_currency,
      delivery_mode=self.mail_delivery_mode,
      incoterm=self.cpt_incoterm,
      specialise_value=self.business_process,
      title='Order')
  # Two lines for the same product, differing only by quantity unit.
  piece_line = sale_order.newContent(
      portal_type='Sale Order Line',
      resource_value=product,
      quantity_unit=self.unit_piece_quantity_unit,
      quantity=5,
      price=3)
  mass_line = sale_order.newContent(
      portal_type='Sale Order Line',
      resource_value=product,
      quantity_unit=self.mass_quantity_unit,
      quantity=1.5,
      price=2)
  sale_order.confirm()
  self.tic()
  self.buildPackingLists()
  packing_list = sale_order.getCausalityRelatedValue(
      portal_type='Sale Packing List')
  self.assertNotEquals(packing_list, None)
  # Sort movements by causality id so they line up with the order lines.
  delivery_movement_list = sorted(
      packing_list.getMovementList(),
      key=lambda movement: movement.getCausalityId())
  self.assertEqual(len(delivery_movement_list), 2)
  self.assertEqual(delivery_movement_list[0].getQuantityUnit(),
                   piece_line.getQuantityUnit())
  self.assertEqual(delivery_movement_list[1].getQuantityUnit(),
                   mass_line.getQuantityUnit())
def folderitems(self):
    """Build and decorate the analyses listing rows.

    Extends the base listing's folderitems with analysis-specific data:
    result/uncertainty/detection-limit rendering, method and instrument
    selectors, interim (calculation) fields, attachments, due-date and
    verification icons, and Dry Matter columns.  Also mutates view state
    (self.categories, self.interim_fields, self.columns, self.review_states,
    self.items) as a side effect and returns the decorated items list.
    """
    rc = getToolByName(self.context, REFERENCE_CATALOG)
    bsc = getToolByName(self.context, 'bika_setup_catalog')
    workflow = getToolByName(self.context, 'portal_workflow')
    mtool = getToolByName(self.context, 'portal_membership')
    checkPermission = mtool.checkPermission
    # Editing is gated by field vs lab point-of-capture permissions.
    if not self.allow_edit:
        can_edit_analyses = False
    else:
        if self.contentFilter.get('getPointOfCapture', '') == 'field':
            can_edit_analyses = checkPermission(EditFieldResults, self.context)
        else:
            can_edit_analyses = checkPermission(EditResults, self.context)
        self.allow_edit = can_edit_analyses
    self.show_select_column = self.allow_edit
    context_active = isActive(self.context)

    self.categories = []
    items = super(AnalysesView, self).folderitems(full_objects = True)

    # manually skim retracted analyses from the list
    new_items = []
    for i, item in enumerate(items):
        # self.contentsMethod may return brains or objects.
        if not ('obj' in items[i]):
            continue
        obj = hasattr(items[i]['obj'], 'getObject') and \
            items[i]['obj'].getObject() or \
            items[i]['obj']
        if workflow.getInfoFor(obj, 'review_state') == 'retracted' \
            and not checkPermission(ViewRetractedAnalyses, self.context):
            continue
        new_items.append(item)
    items = new_items

    methods = self.get_methods_vocabulary()

    self.interim_fields = {}
    self.interim_columns = {}
    self.specs = {}
    show_methodinstr_columns = False
    for i, item in enumerate(items):
        # self.contentsMethod may return brains or objects.
        obj = hasattr(items[i]['obj'], 'getObject') and \
            items[i]['obj'].getObject() or \
            items[i]['obj']
        if workflow.getInfoFor(obj, 'review_state') == 'retracted' \
            and not checkPermission(ViewRetractedAnalyses, self.context):
            continue

        result = obj.getResult()
        service = obj.getService()
        calculation = service.getCalculation()
        unit = service.getUnit()
        keyword = service.getKeyword()

        if self.show_categories:
            cat = obj.getService().getCategoryTitle()
            items[i]['category'] = cat
            if cat not in self.categories:
                self.categories.append(cat)

        # Check for InterimFields attribute on our object,
        interim_fields = hasattr(obj, 'getInterimFields') \
            and obj.getInterimFields() or []
        # kick some pretty display values in.
        for x in range(len(interim_fields)):
            interim_fields[x]['formatted_value'] = \
                format_numeric_result(obj, interim_fields[x]['value'])
        self.interim_fields[obj.UID()] = interim_fields
        # Base row values; Result/Uncertainty/DetectionLimit are filled
        # below only when the user may view results.
        items[i]['service_uid'] = service.UID()
        items[i]['Service'] = service.Title()
        items[i]['Keyword'] = keyword
        items[i]['Unit'] = format_supsub(unit) if unit else ''
        items[i]['Result'] = ''
        items[i]['formatted_result'] = ''
        items[i]['interim_fields'] = interim_fields
        items[i]['Remarks'] = obj.getRemarks()
        items[i]['Uncertainty'] = ''
        items[i]['DetectionLimit'] = ''
        items[i]['retested'] = obj.getRetested()
        items[i]['class']['retested'] = 'center'
        items[i]['result_captured'] = self.ulocalized_time(
            obj.getResultCaptureDate(), long_format=0)
        items[i]['calculation'] = calculation and True or False
        try:
            items[i]['Partition'] = obj.getSamplePartition().getId()
        except AttributeError:
            items[i]['Partition'] = ''
        # Reference analyses expire with their parent reference sample.
        if obj.portal_type == "ReferenceAnalysis":
            items[i]['DueDate'] = self.ulocalized_time(
                obj.aq_parent.getExpiryDate(), long_format=0)
        else:
            items[i]['DueDate'] = self.ulocalized_time(
                obj.getDueDate(), long_format=1)
        cd = obj.getResultCaptureDate()
        items[i]['CaptureDate'] = cd and self.ulocalized_time(
            cd, long_format=1) or ''
        items[i]['Attachments'] = ''

        item['allow_edit'] = []
        client_or_lab = ""

        # QC analyses get a distinguishing row class and take their
        # sample-type uid from the parent reference sample.
        tblrowclass = items[i].get('table_row_class');
        if obj.portal_type == 'ReferenceAnalysis':
            items[i]['st_uid'] = obj.aq_parent.UID()
            items[i]['table_row_class'] = ' '.join([tblrowclass, 'qc-analysis']);
        elif obj.portal_type == 'DuplicateAnalysis' and \
            obj.getAnalysis().portal_type == 'ReferenceAnalysis':
            items[i]['st_uid'] = obj.aq_parent.UID()
            items[i]['table_row_class'] = ' '.join([tblrowclass, 'qc-analysis']);
        else:
            # Resolve the sample according to the view's context type.
            sample = None
            if self.context.portal_type == 'AnalysisRequest':
                sample = self.context.getSample()
            elif self.context.portal_type == 'Worksheet':
                if obj.portal_type in ('DuplicateAnalysis', 'RejectAnalysis'):
                    sample = obj.getAnalysis().getSample()
                else:
                    sample = obj.aq_parent.getSample()
            elif self.context.portal_type == 'Sample':
                sample = self.context
            st_uid = sample.getSampleType().UID() if sample else ''
            items[i]['st_uid'] = st_uid

        if checkPermission(ManageBika, self.context):
            service_uid = service.UID()
            latest = rc.lookupObject(service_uid).version_id
            items[i]['Service'] = service.Title()
            items[i]['class']['Service'] = "service_title"
            # Show version number of out-of-date objects
            # No: This should be done in another column, if at all.
            # The (vX) value confuses some more fragile forms.
            # if hasattr(obj, 'reference_versions') and \
            #    service_uid in obj.reference_versions and \
            #    latest != obj.reference_versions[service_uid]:
            #     items[i]['after']['Service'] = "(v%s)" % \
            #          (obj.reference_versions[service_uid])

        # choices defined on Service apply to result fields.
        choices = service.getResultOptions()
        if choices:
            item['choices']['Result'] = choices

        # permission to view this item's results
        can_view_result = \
            getSecurityManager().checkPermission(ViewResults, obj)

        # permission to edit this item's results
        # Editing Field Results is possible while in Sample Due.
        poc = self.contentFilter.get("getPointOfCapture", 'lab')
        can_edit_analysis = self.allow_edit and context_active and \
            ((poc == 'field' and
              getSecurityManager().checkPermission(EditFieldResults, obj)) or
             (poc != 'field' and
              getSecurityManager().checkPermission(EditResults, obj)))

        allowed_method_states = ['to_be_sampled',
                                 'to_be_preserved',
                                 'sample_received',
                                 'sample_registered',
                                 'sampled',
                                 'assigned']

        # Prevent from being edited if the instrument assigned
        # is not valid (out-of-date or uncalibrated), except if
        # the analysis is a QC with assigned status
        can_edit_analysis = can_edit_analysis \
            and (obj.isInstrumentValid() \
                 or (obj.portal_type == 'ReferenceAnalysis' \
                     and item['review_state'] in allowed_method_states))

        if can_edit_analysis:
            items[i]['allow_edit'].extend(['Analyst',
                                           'Result',
                                           'Remarks'])
            # if the Result field is editable, our interim fields are too
            for f in self.interim_fields[obj.UID()]:
                items[i]['allow_edit'].append(f['keyword'])

            # if there isn't a calculation then result must be re-testable,
            # and if there are interim fields, they too must be re-testable.
            if not items[i]['calculation'] or \
               (items[i]['calculation'] and self.interim_fields[obj.UID()]):
                items[i]['allow_edit'].append('retested')

        # TODO: Only the labmanager must be able to change the method
        # can_set_method = getSecurityManager().checkPermission(SetAnalysisMethod, obj)
        can_set_method = can_edit_analysis \
            and item['review_state'] in allowed_method_states
        # Analysis-level method wins over the service default.
        method = obj.getMethod() \
            if hasattr(obj, 'getMethod') and obj.getMethod() \
            else service.getMethod()

        # Display the methods selector if the AS has at least one
        # method assigned
        item['Method'] = ''
        item['replace']['Method'] = ''
        if can_set_method:
            voc = self.get_methods_vocabulary(obj)
            if voc:
                # The service has at least one method available
                item['Method'] = method.UID() if method else ''
                item['choices']['Method'] = voc
                item['allow_edit'].append('Method')
                show_methodinstr_columns = True
            elif method:
                # This should never happen
                # The analysis has set a method, but its parent
                # service hasn't any method available O_o
                item['Method'] = method.Title()
                item['replace']['Method'] = "<a href='%s'>%s</a>" % \
                    (method.absolute_url(), method.Title())
                show_methodinstr_columns = True
        elif method:
            # Edition not allowed, but method set
            item['Method'] = method.Title()
            item['replace']['Method'] = "<a href='%s'>%s</a>" % \
                (method.absolute_url(), method.Title())
            show_methodinstr_columns = True

        # TODO: Instrument selector dynamic behavior in worksheet Results
        # Only the labmanager must be able to change the instrument to be used. Also,
        # the instrument selection should be done in accordance with the method selected
        # can_set_instrument = service.getInstrumentEntryOfResults() and getSecurityManager().checkPermission(SetAnalysisInstrument, obj)
        can_set_instrument = service.getInstrumentEntryOfResults() \
            and can_edit_analysis \
            and item['review_state'] in allowed_method_states

        item['Instrument'] = ''
        item['replace']['Instrument'] = ''
        if service.getInstrumentEntryOfResults():
            instrument = None
            # If the analysis has an instrument already assigned, use it
            if service.getInstrumentEntryOfResults() \
                and hasattr(obj, 'getInstrument') \
                and obj.getInstrument():
                instrument = obj.getInstrument()
            # Otherwise, use the Service's default instrument
            elif service.getInstrumentEntryOfResults():
                instrument = service.getInstrument()

            if can_set_instrument:
                # Edition allowed
                voc = self.get_instruments_vocabulary(obj)
                if voc:
                    # The service has at least one instrument available
                    item['Instrument'] = instrument.UID() if instrument else ''
                    item['choices']['Instrument'] = voc
                    item['allow_edit'].append('Instrument')
                    show_methodinstr_columns = True
                elif instrument:
                    # This should never happen
                    # The analysis has an instrument set, but the
                    # service hasn't any available instrument
                    item['Instrument'] = instrument.Title()
                    item['replace']['Instrument'] = "<a href='%s'>%s</a>" % \
                        (instrument.absolute_url(), instrument.Title())
                    show_methodinstr_columns = True
            elif instrument:
                # Edition not allowed, but instrument set
                item['Instrument'] = instrument.Title()
                item['replace']['Instrument'] = "<a href='%s'>%s</a>" % \
                    (instrument.absolute_url(), instrument.Title())
                show_methodinstr_columns = True
        else:
            # Manual entry of results, instrument not allowed
            item['Instrument'] = _('Manual')
            msgtitle = t(_(
                "Instrument entry of results not allowed for ${service}",
                mapping={"service": safe_unicode(service.Title())},
            ))
            item['replace']['Instrument'] = \
                '<a href="#" title="%s">%s</a>' % (msgtitle, t(_('Manual')))

        # Sets the analyst assigned to this analysis
        if can_edit_analysis:
            analyst = obj.getAnalyst()
            # widget default: current user
            if not analyst:
                analyst = mtool.getAuthenticatedMember().getUserName()
            items[i]['Analyst'] = analyst
            item['choices']['Analyst'] = self.getAnalysts()
        else:
            items[i]['Analyst'] = obj.getAnalystName()

        # If the user can attach files to analyses, show the attachment col
        can_add_attachment = \
            getSecurityManager().checkPermission(AddAttachment, obj)
        if can_add_attachment or can_view_result:
            attachments = ""
            if hasattr(obj, 'getAttachment'):
                for attachment in obj.getAttachment():
                    af = attachment.getAttachmentFile()
                    icon = af.getBestIcon()
                    attachments += "<span class='attachment' attachment_uid='%s'>" % (attachment.UID())
                    if icon:
                        attachments += "<img src='%s/%s'/>" % (self.portal_url, icon)
                    attachments += '<a href="%s/at_download/AttachmentFile"/>%s</a>' % (attachment.absolute_url(), af.filename)
                    if can_edit_analysis:
                        attachments += "<img class='deleteAttachmentButton' attachment_uid='%s' src='%s'/>" % (attachment.UID(), "++resource++bika.lims.images/delete.png")
                    attachments += "</br></span>"
            # strip the trailing "</br></span>" and close the outer span
            items[i]['replace']['Attachments'] = attachments[:-12] + "</span>"

        # Only display data bearing fields if we have ViewResults
        # permission, otherwise just put an icon in Result column.
        if can_view_result:
            items[i]['Result'] = result
            scinot = self.context.bika_setup.getScientificNotationResults()
            dmk = self.context.bika_setup.getResultsDecimalMark()
            items[i]['formatted_result'] = obj.getFormattedResult(
                sciformat=int(scinot), decimalmark=dmk)

            # LIMS-1379 Allow manual uncertainty value input
            # https://jira.bikalabs.com/browse/LIMS-1379
            fu = format_uncertainty(
                obj, result, decimalmark=dmk, sciformat=int(scinot))
            fu = fu if fu else ''
            if can_edit_analysis and service.getAllowManualUncertainty() == True:
                unc = obj.getUncertainty(result)
                item['allow_edit'].append('Uncertainty')
                items[i]['Uncertainty'] = unc if unc else ''
                items[i]['before']['Uncertainty'] = '± ';
                items[i]['after']['Uncertainty'] = '<em class="discreet" style="white-space:nowrap;"> %s</em>' % items[i]['Unit'];
            elif fu:
                items[i]['Uncertainty'] = fu
                items[i]['before']['Uncertainty'] = '± ';
                items[i]['after']['Uncertainty'] = '<em class="discreet" style="white-space:nowrap;"> %s</em>' % items[i]['Unit'];

            # LIMS-1700. Allow manual input of Detection Limits
            # LIMS-1775. Allow to select LDL or UDL defaults in results with readonly mode
            # https://jira.bikalabs.com/browse/LIMS-1700
            # https://jira.bikalabs.com/browse/LIMS-1775
            if can_edit_analysis and \
                hasattr(obj, 'getDetectionLimitOperand') and \
                hasattr(service, 'getDetectionLimitSelector') and \
                service.getDetectionLimitSelector() == True:
                isldl = obj.isBelowLowerDetectionLimit()
                isudl = obj.isAboveUpperDetectionLimit()
                dlval = ''
                if isldl or isudl:
                    dlval = '<' if isldl else '>'
                item['allow_edit'].append('DetectionLimit')
                item['DetectionLimit'] = dlval
                choices = [{'ResultValue': '<', 'ResultText': '<'},
                           {'ResultValue': '>', 'ResultText': '>'}]
                item['choices']['DetectionLimit'] = choices
                self.columns['DetectionLimit']['toggle'] = True
                srv = obj.getService()
                defdls = {'min': srv.getLowerDetectionLimit(),
                          'max': srv.getUpperDetectionLimit(),
                          'manual': srv.getAllowManualDetectionLimit()}
                defin = '<input type="hidden" id="DefaultDLS.%s" value=\'%s\'/>'
                defin = defin % (obj.UID(), json.dumps(defdls))
                item['after']['DetectionLimit'] = defin

            # LIMS-1769. Allow to use LDL and UDL in calculations.
            # https://jira.bikalabs.com/browse/LIMS-1769
            # Since LDL, UDL, etc. are wildcards that can be used
            # in calculations, these fields must be loaded always
            # for 'live' calculations.
            if can_edit_analysis:
                dls = {'default_ldl': 'none',
                       'default_udl': 'none',
                       'below_ldl': False,
                       'above_udl': False,
                       'is_ldl': False,
                       'is_udl': False,
                       'manual_allowed': False,
                       'dlselect_allowed': False}
                if hasattr(obj, 'getDetectionLimits'):
                    dls['below_ldl'] = obj.isBelowLowerDetectionLimit()
                    # NOTE(review): 'above_udl' is assigned from
                    # isBelowLowerDetectionLimit(); looks like it should be
                    # isAboveUpperDetectionLimit() — confirm before changing.
                    dls['above_udl'] = obj.isBelowLowerDetectionLimit()
                    dls['is_ldl'] = obj.isLowerDetectionLimit()
                    dls['is_udl'] = obj.isUpperDetectionLimit()
                    dls['default_ldl'] = service.getLowerDetectionLimit()
                    dls['default_udl'] = service.getUpperDetectionLimit()
                    dls['manual_allowed'] = service.getAllowManualDetectionLimit()
                    dls['dlselect_allowed'] = service.getDetectionLimitSelector()
                dlsin = '<input type="hidden" id="AnalysisDLS.%s" value=\'%s\'/>'
                dlsin = dlsin % (obj.UID(), json.dumps(dls))
                item['after']['Result'] = dlsin
        else:
            # No ViewResults permission: hide the result behind an icon.
            items[i]['Specification'] = ""
            if 'Result' in items[i]['allow_edit']:
                items[i]['allow_edit'].remove('Result')
            items[i]['before']['Result'] = \
                '<img width="16" height="16" ' + \
                'src="%s/++resource++bika.lims.images/to_follow.png"/>' % \
                (self.portal_url)

        # Everyone can see valid-ranges
        spec = self.get_analysis_spec(obj)
        if spec:
            min_val = spec.get('min', '')
            min_str = ">{0}".format(min_val) if min_val else ''
            max_val = spec.get('max', '')
            max_str = "<{0}".format(max_val) if max_val else ''
            error_val = spec.get('error', '')
            error_str = "{0}%".format(error_val) if error_val else ''
            rngstr = ",".join([x for x in [min_str, max_str, error_str] if x])
        else:
            rngstr = ""
        items[i]['Specification'] = rngstr

        # Add this analysis' interim fields to the interim_columns list
        for f in self.interim_fields[obj.UID()]:
            if f['keyword'] not in self.interim_columns and not f.get('hidden', False):
                self.interim_columns[f['keyword']] = f['title']
            # and to the item itself
            items[i][f['keyword']] = f
            items[i]['class'][f['keyword']] = 'interim'

        # check if this analysis is late/overdue
        resultdate = obj.aq_parent.getDateSampled() \
            if obj.portal_type == 'ReferenceAnalysis' \
            else obj.getResultCaptureDate()
        duedate = obj.aq_parent.getExpiryDate() \
            if obj.portal_type == 'ReferenceAnalysis' \
            else obj.getDueDate()
        items[i]['replace']['DueDate'] = \
            self.ulocalized_time(duedate, long_format=1)
        if items[i]['review_state'] not in ['to_be_sampled',
                                            'to_be_preserved',
                                            'sample_due',
                                            'published']:
            if (resultdate and resultdate > duedate) \
                or (not resultdate and DateTime() > duedate):
                items[i]['replace']['DueDate'] = '%s <img width="16" height="16" src="%s/++resource++bika.lims.images/late.png" title="%s"/>' % \
                    (self.ulocalized_time(duedate, long_format=1),
                     self.portal_url,
                     t(_("Late Analysis")))

        # Submitting user may not verify results (admin can though)
        if items[i]['review_state'] == 'to_be_verified' and \
           not checkPermission(VerifyOwnResults, obj):
            user_id = getSecurityManager().getUser().getId()
            self_submitted = False
            try:
                # Walk the workflow history backwards to find who
                # performed the most recent 'submit'.
                review_history = list(workflow.getInfoFor(obj, 'review_history'))
                review_history.reverse()
                for event in review_history:
                    if event.get('action') == 'submit':
                        if event.get('actor') == user_id:
                            self_submitted = True
                        break
                if self_submitted:
                    items[i]['after']['state_title'] = \
                        "<img src='++resource++bika.lims.images/submitted-by-current-user.png' title='%s'/>" % \
                        (t(_("Cannot verify: Submitted by current user")))
            except WorkflowException:
                pass

        # add icon for assigned analyses in AR views
        if self.context.portal_type == 'AnalysisRequest':
            obj = items[i]['obj']
            if obj.portal_type in ['ReferenceAnalysis',
                                   'DuplicateAnalysis'] or \
               workflow.getInfoFor(obj, 'worksheetanalysis_review_state') == 'assigned':
                br = obj.getBackReferences('WorksheetAnalysis')
                if len(br) > 0:
                    ws = br[0]
                    items[i]['after']['state_title'] = \
                        "<a href='%s'><img src='++resource++bika.lims.images/worksheet.png' title='%s'/></a>" % \
                        (ws.absolute_url(),
                         t(_("Assigned to: ${worksheet_id}",
                             mapping={'worksheet_id': safe_unicode(ws.id)})))

    # the TAL requires values for all interim fields on all
    # items, so we set blank values in unused cells
    for item in items:
        for field in self.interim_columns:
            if field not in item:
                item[field] = ''

    # XXX order the list of interim columns
    interim_keys = self.interim_columns.keys()
    interim_keys.reverse()

    # add InterimFields keys to columns
    for col_id in interim_keys:
        if col_id not in self.columns:
            self.columns[col_id] = {'title': self.interim_columns[col_id],
                                    'input_width': '6',
                                    'input_class': 'ajax_calculate numeric',
                                    'sortable': False}

    if can_edit_analyses:
        new_states = []
        for state in self.review_states:
            # InterimFields are displayed in review_state
            # They are anyway available through View.columns though.
            # In case of hidden fields, the calcs.py should check calcs/services
            # for additional InterimFields!!
            pos = 'Result' in state['columns'] and \
                state['columns'].index('Result') or len(state['columns'])
            for col_id in interim_keys:
                if col_id not in state['columns']:
                    state['columns'].insert(pos, col_id)
            # retested column is added after Result.
            pos = 'Result' in state['columns'] and \
                state['columns'].index('Uncertainty') + 1 or len(state['columns'])
            state['columns'].insert(pos, 'retested')
            new_states.append(state)
        self.review_states = new_states
        # Allow selecting individual analyses
        self.show_select_column = True

    # Dry Matter.
    # The Dry Matter column is never enabled for reference sample contexts
    # and refers to getReportDryMatter in ARs.
    if items and \
       (hasattr(self.context, 'getReportDryMatter') and \
        self.context.getReportDryMatter()):

        # look through all items
        # if the item's Service supports ReportDryMatter, add getResultDM().
        for item in items:
            if item['obj'].getService().getReportDryMatter():
                item['ResultDM'] = item['obj'].getResultDM()
            else:
                item['ResultDM'] = ''
            if item['ResultDM']:
                item['after']['ResultDM'] = "<em class='discreet'>%</em>"

        # modify the review_states list to include the ResultDM column
        new_states = []
        for state in self.review_states:
            pos = 'Result' in state['columns'] and \
                state['columns'].index('Uncertainty') + 1 or len(state['columns'])
            state['columns'].insert(pos, 'ResultDM')
            new_states.append(state)
        self.review_states = new_states

    self.categories.sort()

    # self.json_specs = json.dumps(self.specs)
    self.json_interim_fields = json.dumps(self.interim_fields)
    self.items = items

    # Method and Instrument columns must be shown or hidden at the
    # same time, because the value assigned to one causes
    # a value reassignment to the other (one method can be performed
    # by different instruments)
    self.columns['Method']['toggle'] = show_methodinstr_columns
    self.columns['Instrument']['toggle'] = show_methodinstr_columns

    return items
def test_01_diverged_purchase_packing_list_source_total_asset_price(
    self, quiet=0, run=run_all_test):
  """
  tests that when the purchase packing list is divergent on the quantity
  and that the resource on simulation movements is different from the
  price currency of the destination section, the asset price is updated
  as we solve the divergence and accept the decision.

  NOTE(review): the assertions below check getSourceTotalAssetPrice()
  on the invoice transaction movements, while this docstring mentions
  destination_asset_price — confirm which side is intended.
  """
  if not run: return
  if not quiet:
    printAndLog(
      'test_01_diverged_purchase_packing_list_source_total_asset_price')
  # Product purchased by the order below.
  resource = self.portal.product_module.newContent(
      portal_type='Product',
      title='Resource',
      product_line='apparel')
  # Pricing currency of the order (EUR, precision 0.01).
  currency = self.portal.currency_module.newContent(
      portal_type='Currency',
      title='euro')
  currency.setBaseUnitQuantity(0.01)
  # Accounting currency of the vendor (XOF).
  new_currency = \
      self.portal.currency_module.newContent(portal_type='Currency')
  new_currency.setReference('XOF')
  new_currency.setTitle('Francs CFA')
  new_currency.setBaseUnitQuantity(1.00)
  self.tic()  # execute transaction
  # Fixed EUR -> XOF exchange rate valid on the order's start date.
  x_curr_ex_line = currency.newContent(
      portal_type='Currency Exchange Line',
      price_currency=new_currency.getRelativeUrl())
  x_curr_ex_line.setTitle('Euro to Francs CFA')
  x_curr_ex_line.setBasePrice(655.957)
  x_curr_ex_line.setStartDate(DateTime(2008, 10, 21))
  x_curr_ex_line.setStopDate(DateTime(2008, 10, 22))
  x_curr_ex_line.validate()
  self.createBusinessProcess(currency)
  self.tic()  # execute transaction
  client = self.portal.organisation_module.newContent(
      portal_type='Organisation', title='Client',
      default_address_region=self.default_region)
  # The vendor accounts in XOF, hence the conversion below.
  vendor = self.portal.organisation_module.newContent(
      portal_type='Organisation', title='Vendor',
      price_currency=new_currency.getRelativeUrl(),
      default_address_region=self.default_region)
  order = self.portal.purchase_order_module.newContent(
      portal_type='Purchase Order',
      source_value=vendor,
      source_section_value=vendor,
      destination_value=client,
      destination_section_value=client,
      start_date=DateTime(2008, 10, 21),
      price_currency_value=currency,
      specialise_value=self.business_process,
      title='Order')
  order.newContent(portal_type='Purchase Order Line',
                   resource_value=resource,
                   quantity=5,
                   price=2)
  order.confirm()
  self.tic()
  self.buildPackingLists()
  related_packing_list = order.getCausalityRelatedValue(
      portal_type='Purchase Packing List')
  self.assertNotEquals(related_packing_list, None)
  related_packing_list_line_list = related_packing_list.getMovementList()
  related_packing_list_line = related_packing_list_line_list[0]
  self.assertEqual(related_packing_list_line.getQuantity(), 5.0)
  # Reduce the delivered quantity so the packing list diverges from the
  # order (5 ordered vs 3 delivered).
  related_packing_list_line.edit(quantity=3.0)
  self.tic()
  self.assertEqual(related_packing_list.getCausalityState(), 'diverged')
  # Accept the new quantity: the simulation tree must be re-expanded
  # with quantity=3 down to the invoice transaction level.
  self._solveDivergence(related_packing_list, 'quantity', 'accept')
  self.tic()
  related_packing_list.updateCausalityState()
  related_packing_list.start()
  related_packing_list.stop()
  self.tic()
  # Walk the simulation tree: order -> delivery -> invoice ->
  # invoice transaction rule, to reach the accounting movements.
  related_applied_rule = order.getCausalityRelatedValue(
      portal_type='Applied Rule')
  order_movement = related_applied_rule.contentValues()[0]
  delivery_applied_rule = order_movement.contentValues()[0]
  delivery_movement = delivery_applied_rule.contentValues()[0]
  invoice_applied_rule = delivery_movement.contentValues()[0]
  invoice_movement = invoice_applied_rule.contentValues()[0]
  invoice_transaction_applied_rule = invoice_movement.contentValues()[0]
  result_list = []
  for invoice_transaction_movement in \
      invoice_transaction_applied_rule.contentValues():
    result_list.append((invoice_transaction_movement.getSource(),
                        invoice_transaction_movement.getSourceTotalAssetPrice()))
  # price=2, accepted quantity=3, VAT 19.6%, converted at 655.957.
  self.assertEquals(
    sorted(result_list),
    sorted([
      ('account_module/customer', 2 * 3 * (1 + 0.196) * 655.957),
      ('account_module/receivable_vat', -2 * 3 * 0.196 * 655.957),
      ('account_module/sale', -2 * 3 * 655.957)
    ])
  )
class SyndicationTool(UniqueObject, SimpleItem, ActionProviderBase):
    """ The syndication tool manages the site-wide policy for syndication
    of folder content as RSS.

    It stores the site-wide defaults (update period, update frequency,
    update base date, max items, and whether syndication is allowed at
    all) and manages the per-folder 'syndication_information' subobjects
    that override those defaults.
    """

    __implements__ = ActionProviderBase.__implements__

    id = 'portal_syndication'
    meta_type = 'Default Syndication Tool'

    _actions = (ActionInformation(
                    id='syndication',
                    title='Syndication',
                    action=Expression(
                        text='string:${folder_url}/synPropertiesForm'),
                    condition=Expression(text='python: folder is object'),
                    permissions=(ManageProperties, ),
                    category='object',
                    visible=1),
                )

    # Default site-wide values.
    isAllowed = 0                # syndication is off by default
    syUpdatePeriod = 'daily'
    syUpdateFrequency = 1
    syUpdateBase = DateTime()    # evaluated once, at class-definition time
    max_items = 15

    security = ClassSecurityInfo()

    # ZMI methods
    manage_options = (ActionProviderBase.manage_options +
                      ({'label': 'Overview',
                        'action': 'overview',
                        'help': ('CMFDefault',
                                 'Syndication-Tool_Overview.stx')},
                       {'label': 'Properties',
                        'action': 'propertiesForm',
                        'help': ('CMFDefault',
                                 'Syndication-Tool_Properties.stx')},
                       {'label': 'Policies',
                        'action': 'policiesForm',
                        'help': ('CMFDefault',
                                 'Syndication-Tool_Policies.stx')},
                       {'label': 'Reports',
                        'action': 'reportForm',
                        'help': ('CMFDefault',
                                 'Syndication-Tool_Reporting.stx')}))

    security.declareProtected(ManagePortal, 'overview')
    overview = HTMLFile('synOverview', _dtmldir)

    security.declareProtected(ManagePortal, 'propertiesForm')
    propertiesForm = HTMLFile('synProps', _dtmldir)

    security.declareProtected(ManagePortal, 'policiesForm')
    policiesForm = HTMLFile('synPolicies', _dtmldir)

    security.declareProtected(ManagePortal, 'reportForm')
    reportForm = HTMLFile('synReports', _dtmldir)

    security.declareProtected(ManagePortal, 'editProperties')
    def editProperties(self, updatePeriod=None, updateFrequency=None,
                       updateBase=None, isAllowed=None, max_items=None,
                       REQUEST=None):
        """ Edit the site-wide defaults on the SyndicationTool.

        A falsy value for a parameter resets the corresponding instance
        attribute so the class-level default shows through again.
        'updateBase' may be passed as a string and is coerced to DateTime.
        """
        if isAllowed is not None:
            self.isAllowed = isAllowed

        if updatePeriod:
            self.syUpdatePeriod = updatePeriod
        else:
            # Reset to the class default.  Deleting an attribute that was
            # never set on the instance raises AttributeError (older
            # Persistence implementations raised KeyError), so tolerate
            # both instead of only KeyError as before.
            try:
                del self.syUpdatePeriod
            except (AttributeError, KeyError):
                pass

        if updateFrequency:
            self.syUpdateFrequency = updateFrequency
        else:
            try:
                del self.syUpdateFrequency
            except (AttributeError, KeyError):
                pass

        if updateBase:
            if type(updateBase) is type(''):
                updateBase = DateTime(updateBase)
            self.syUpdateBase = updateBase
        else:
            try:
                del self.syUpdateBase
            except (AttributeError, KeyError):
                pass

        if max_items:
            self.max_items = max_items
        else:
            try:
                del self.max_items
            except (AttributeError, KeyError):
                pass

        if REQUEST is not None:
            REQUEST['RESPONSE'].redirect(
                self.absolute_url()
                + '/propertiesForm'
                + '?manage_tabs_message=Tool+Updated.')

    security.declarePublic('editSyInformationProperties')
    def editSyInformationProperties(self, obj, updatePeriod=None,
                                    updateFrequency=None, updateBase=None,
                                    max_items=None, REQUEST=None):
        """ Edit the syndication properties of 'obj' (not site-wide).

        The values are held on the object's 'syndication_information'
        subobject; unset parameters fall back to the tool defaults.
        Raises AccessControl_Unauthorized when the caller lacks the
        ManageProperties permission on 'obj'.
        """
        if not _checkPermission(ManageProperties, obj):
            raise AccessControl_Unauthorized
        syInfo = getattr(obj, 'syndication_information', None)
        if syInfo is None:
            # NOTE: string exceptions are deprecated; kept as-is so
            # existing callers that catch them keep working.
            raise 'Syndication is Disabled'

        if updatePeriod:
            syInfo.syUpdatePeriod = updatePeriod
        else:
            syInfo.syUpdatePeriod = self.syUpdatePeriod

        if updateFrequency:
            syInfo.syUpdateFrequency = updateFrequency
        else:
            syInfo.syUpdateFrequency = self.syUpdateFrequency

        if updateBase:
            if type(updateBase) is type(''):
                updateBase = DateTime(updateBase)
            syInfo.syUpdateBase = updateBase
        else:
            syInfo.syUpdateBase = self.syUpdateBase

        if max_items:
            syInfo.max_items = max_items
        else:
            syInfo.max_items = self.max_items

    security.declarePublic('enableSyndication')
    def enableSyndication(self, obj):
        """ Enable syndication for 'obj' by attaching a
        SyndicationInformation subobject initialised from the tool
        defaults.
        """
        if not self.isSiteSyndicationAllowed():
            raise 'Syndication is Disabled'
        if hasattr(aq_base(obj), 'syndication_information'):
            raise 'Syndication Information Exists'
        syInfo = SyndicationInformation()
        obj._setObject('syndication_information', syInfo)
        # Re-fetch so we work on the (wrapped) persistent copy.
        syInfo = obj._getOb('syndication_information')
        syInfo.syUpdatePeriod = self.syUpdatePeriod
        syInfo.syUpdateFrequency = self.syUpdateFrequency
        syInfo.syUpdateBase = self.syUpdateBase
        syInfo.max_items = self.max_items
        syInfo.description = "Channel Description"

    security.declarePublic('disableSyndication')
    def disableSyndication(self, obj):
        """ Disable syndication for 'obj' by removing its
        syndication_information subobject.
        """
        syInfo = getattr(obj, 'syndication_information', None)
        if syInfo is None:
            raise 'This object does not have Syndication Information'
        obj._delObject('syndication_information')

    security.declarePublic('getSyndicatableContent')
    def getSyndicatableContent(self, obj):
        """ Return the syndicatable items of 'obj'.

        Folderish items may implement 'synContentValues' to customise
        the listing; otherwise PortalFolder.contentValues is used.
        """
        if hasattr(obj, 'synContentValues'):
            values = obj.synContentValues()
        else:
            values = PortalFolder.contentValues(obj)
        return values

    security.declarePublic('buildUpdatePeriods')
    def buildUpdatePeriods(self):
        """ Return the (id, title) pairs of possible update periods
        for the RSS 'sy' namespace.
        """
        updatePeriods = (('hourly', 'Hourly'),
                         ('daily', 'Daily'),
                         ('weekly', 'Weekly'),
                         ('monthly', 'Monthly'),
                         ('yearly', 'Yearly'))
        return updatePeriods

    security.declarePublic('isSiteSyndicationAllowed')
    def isSiteSyndicationAllowed(self):
        """ Return the site-wide syndication policy flag. """
        return self.isAllowed

    security.declarePublic('isSyndicationAllowed')
    def isSyndicationAllowed(self, obj=None):
        """ Return whether syndication is enabled for 'obj'.

        True only when 'obj' carries a syndication_information subobject
        AND the site-wide policy allows syndication.
        NOTE(review): aq_base(None) when 'obj' is omitted looks
        unintended -- confirm callers always pass an object.
        """
        syInfo = getattr(aq_base(obj), 'syndication_information', None)
        if syInfo is None:
            return 0
        return self.isSiteSyndicationAllowed()

    security.declarePublic('getUpdatePeriod')
    def getUpdatePeriod(self, obj=None):
        """ Return the update period for the RSS 'sy' namespace.

        Taken from 'obj' when given and syndication-enabled, else from
        the tool default.  NOTE: need to add checks for sitewide
        policies.
        """
        if not self.isSiteSyndicationAllowed():
            raise 'Syndication is Not Allowed'
        if obj is None:
            return self.syUpdatePeriod
        syInfo = getattr(obj, 'syndication_information', None)
        if syInfo is not None:
            return syInfo.syUpdatePeriod
        return 'Syndication is Not Allowed'

    security.declarePublic('getUpdateFrequency')
    def getUpdateFrequency(self, obj=None):
        """ Return the update frequency (a positive integer) for the
        'sy' namespace.

        Taken from 'obj' when given and syndication-enabled, else from
        the tool default.  NOTE: need to add checks for sitewide
        policies.
        """
        if not self.isSiteSyndicationAllowed():
            raise 'Syndication is not Allowed'
        if obj is None:
            return self.syUpdateFrequency
        syInfo = getattr(obj, 'syndication_information', None)
        if syInfo is not None:
            return syInfo.syUpdateFrequency
        return 'Syndication is not Allowed'

    security.declarePublic('getUpdateBase')
    def getUpdateBase(self, obj=None):
        """ Return the base date (ISO format) used together with the
        update period and frequency to compute a publishing schedule.
        """
        if not self.isSiteSyndicationAllowed():
            raise 'Syndication is not Allowed'
        if obj is None:
            when = self.syUpdateBase
            return when.ISO()
        syInfo = getattr(obj, 'syndication_information', None)
        if syInfo is not None:
            when = syInfo.syUpdateBase
            return when.ISO()
        return 'Syndication is not Allowed'

    security.declarePublic('getHTML4UpdateBase')
    def getHTML4UpdateBase(self, obj):
        """ Return the update base as an HTML4-formatted DateTime. """
        if not self.isSiteSyndicationAllowed():
            raise 'Syndication is not Allowed'
        if obj is None:
            # Bug fix: previously read the undefined global
            # 'syUpdateBase' (NameError); use the tool default, matching
            # getUpdateBase above.
            when = self.syUpdateBase
            return when.HTML4()
        syInfo = getattr(obj, 'syndication_information', None)
        if syInfo is not None:
            when = syInfo.syUpdateBase
            return when.HTML4()
        return 'Syndication is not Allowed'

    def getMaxItems(self, obj=None):
        """ Return the maximum number of items to display in the
        syndication, from 'obj' when given, else the tool default.
        """
        if not self.isSiteSyndicationAllowed():
            raise 'Syndication is not Allowed'
        if obj is None:
            return self.max_items
        syInfo = getattr(obj, 'syndication_information', None)
        if syInfo is not None:
            return syInfo.max_items
        return 'Syndication is not Allowed'
def test_01_simulation_movement_source_asset_price(self, quiet=0,
                                                   run=run_all_test):
  """
  Checks that when the resource on the invoice transaction simulation
  movements (the price currency) differs from the price currency of the
  source section, the source total asset price is set on the movement
  (converted through the Currency Exchange Line rate) while the
  destination total asset price is left unset.
  """
  if not run: return
  if not quiet:
    printAndLog('test_01_simulation_movement_source_asset_price')
  # Resource traded on the order.
  resource = self.portal.product_module.newContent(
      portal_type='Product',
      title='Resource',
      product_line='apparel')
  # Price currency of the order (EUR, base unit 0.01).
  currency = self.portal.currency_module.newContent(
      portal_type='Currency', title='euro')
  currency.setBaseUnitQuantity(0.01)
  # Accounting currency of the vendor (XOF, base unit 1).
  new_currency = \
    self.portal.currency_module.newContent(portal_type='Currency')
  new_currency.setReference('XOF')
  new_currency.setTitle('Francs CFA')
  new_currency.setBaseUnitQuantity(1.00)
  self.tic()  # execute transaction
  # EUR -> XOF exchange rate, valid on the order's start date only.
  x_curr_ex_line = currency.newContent(
      portal_type='Currency Exchange Line',
      price_currency=new_currency.getRelativeUrl())
  x_curr_ex_line.setTitle('Euro to Francs CFA')
  x_curr_ex_line.setBasePrice(655.957)
  x_curr_ex_line.setStartDate(DateTime(2008, 10, 21))
  x_curr_ex_line.setStopDate(DateTime(2008, 10, 22))
  x_curr_ex_line.validate()
  self.createBusinessProcess(currency)
  self.tic()  # execute transaction
  client = self.portal.organisation_module.newContent(
      portal_type='Organisation', title='Client',
      default_address_region=self.default_region)
  # The vendor's price currency (XOF) differs from the order's price
  # currency (EUR), which triggers the source asset-price conversion.
  vendor = self.portal.organisation_module.newContent(
      portal_type='Organisation', title='Vendor',
      price_currency=new_currency.getRelativeUrl(),
      default_address_region=self.default_region)
  order = self.portal.sale_order_module.newContent(
      portal_type='Sale Order',
      source_value=vendor,
      source_section_value=vendor,
      destination_value=client,
      destination_section_value=client,
      start_date=DateTime(2008, 10, 21),
      price_currency_value=currency,
      specialise_value=self.business_process,
      title='Order')
  order.newContent(portal_type='Sale Order Line',
                   resource_value=resource,
                   quantity=1,
                   price=2)
  order.confirm()
  self.tic()
  self.buildPackingLists()
  # Walk the simulation tree: order rule -> delivery rule -> invoice
  # rule -> invoice transaction rule.
  related_applied_rule = order.getCausalityRelatedValue(
      portal_type='Applied Rule')
  order_movement = related_applied_rule.contentValues()[0]
  delivery_applied_rule = order_movement.contentValues()[0]
  delivery_movement = delivery_applied_rule.contentValues()[0]
  invoice_applied_rule = delivery_movement.contentValues()[0]
  invoice_movement = invoice_applied_rule.contentValues()[0]
  invoice_transaction_applied_rule = invoice_movement.contentValues()[0]
  invoice_transaction_movement = \
    invoice_transaction_applied_rule.contentValues()[0]
  # The accounting movement's resource is the order's price currency.
  self.assertEqual(currency,
                   invoice_transaction_movement.getResourceValue())
  self.assertEqual(currency, delivery_movement.getPriceCurrencyValue())
  # Source side is converted with the EUR->XOF rate (655.957); the
  # destination side stays unset since no conversion applies there.
  self.assertEquals \
    (invoice_transaction_movement.getSourceTotalAssetPrice(),
     -655.957*invoice_transaction_movement.getTotalPrice())
  self.assertEquals \
    (invoice_transaction_movement.getDestinationTotalAssetPrice(),
     None)