def get_filters(self, facets=None):
    """Extract the search query and active facet filters from the request.

    Returns a ``(query, filters)`` tuple.  ``query`` defaults to ``'*'``
    when the request carries no ``query`` parameter.  ``filters`` maps
    facet names to values coerced according to each facet's aggregation
    type; request values may be single strings or lists of strings.
    """
    query = self.request.get('query', '*')
    filters = {}
    if facets is not None:
        for facet in facets:
            # Renamed from 'filter' to avoid shadowing the builtin.
            value = self.request.get(facet, None)
            if value is None:
                continue
            agg_type = facets[facet].agg_type
            if agg_type == 'date_histogram':
                # Date facets are coerced to python datetime objects.
                if isinstance(value, list):
                    filters[facet] = [
                        DateTime.DateTime(v).asdatetime() for v in value]
                else:
                    filters[facet] = DateTime.DateTime(value).asdatetime()
            elif agg_type in ('histogram', 'range'):
                # Both numeric aggregation types take integer values; the
                # original code had two identical branches for them.
                if isinstance(value, list):
                    filters[facet] = [int(float(v)) for v in value]
                else:
                    filters[facet] = int(float(value))
            else:
                # Plain term filter: pass the raw value through untouched.
                filters[facet] = value
    return query, filters
def getYearsBeforeAndAfter(self):
    "find the number of years on either side of the entries we have in records and archives from now"
    nowYear = DateTime.DateTime().year()

    def year_bounds(tree):
        # (min year, max year) of a date-keyed tree; fall back to the
        # current year when the tree is absent so the spread is zero.
        if tree is None:
            return nowYear, nowYear
        return (DateTime.DateTime(tree.minKey()).year(),
                DateTime.DateTime(tree.maxKey()).year())

    minRecordYear, maxRecordYear = year_bounds(self.records)
    minArchiveYear, maxArchiveYear = year_bounds(self.archive)

    earliest = min(minRecordYear, minArchiveYear)
    latest = max(maxRecordYear, maxArchiveYear)
    # Always report at least one year on each side of "now".
    minYear = max(nowYear - earliest, 1)
    maxYear = max(latest - nowYear, 1)
    return minYear, maxYear
def meanphase(sdate, k): """Calculates time of the mean new Moon for a given base date. This argument K to this function is the precomputed synodic month index, given by: K = (year - 1900) * 12.3685 where year is expressed as a year and fractional year. """ # Time in Julian centuries from 1900 January 0.5 if not hasattr(sdate, 'jdn'): delta_t = sdate - DateTime.DateTime(1900, 1, 1, 12).jdn t = delta_t / 36525 else: delta_t = sdate - DateTime.DateTime(1900, 1, 1, 12) t = delta_t.days / 36525 # square for frequent use t2 = t * t # and cube t3 = t2 * t nt1 = (2415020.75933 + c.synodic_month * k + 0.0001178 * t2 - 0.000000155 * t3 + 0.00033 * dsin(166.56 + 132.87 * t - 0.009173 * t2)) return nt1
def currententries(self):
    """ test method

    Build a list of talk dicts from self.getResults(), with several
    pre-formatted date/time strings, sorted by start time ascending.

    Improvement: each timestamp is now parsed once (the original called
    DateTime(start_time) four separate times per entry).
    """
    results_set = self.getResults()
    allItems = []
    if results_set is not None:
        for x in results_set:
            description = x.get('abstract_filtered', '')
            Title = x.get('title')
            # 'venue' and 'series' are nested mappings when present.
            if x.get('venue'):
                place_array = x.get('venue', '')
                venue = place_array.get('name', '')
            else:
                venue = ""
            if x.get('series'):
                series_array = x.get('series', '')
                series = series_array.get('name', '')
            else:
                series = ""
            talk_id = x.get('id', '')
            series_id = x.get('series_id')
            speaker = x.get('name_of_speaker', '')
            start_time = x.get('start_time', '')
            end_time = x.get('end_time', '')
            special_message = x.get('special_message', '')
            # Parse each timestamp once instead of once per format string.
            start_dt = DateTime(start_time)
            end_dt = DateTime(end_time)
            # e.g. "Monday, 1 April 2019" (leading zero on the day removed)
            fm_startdate = start_dt.strftime('%A, %d %B %Y').replace(', 0', ', ')
            # e.g. "2pm" / "2:30pm"
            fm_starttime = start_dt.strftime('%I:%M%p').strip('0').replace(':00', '').lower()
            fm_endtime = end_dt.strftime('%I:%M%p').strip('0')
            fm_startday = start_dt.strftime('%d').lstrip('0')
            fm_startmonth = start_dt.strftime('%B %Y')
            link = "http://talks.ox.ac.uk/talk/index/%s" % (talk_id)
            ical = "http://talks.ox.ac.uk/talk/vcal/%s" % (talk_id)
            allItems.append({'description': description,
                             'Title': Title,
                             'speaker': speaker,
                             'venue': venue,
                             'series': series,
                             'series_id': series_id,
                             'talk_id': talk_id,
                             'ical': ical,
                             'link': link,
                             'start_time': start_time,
                             'end_time': end_time,
                             'special_message': special_message,
                             'fm_startdate': fm_startdate,
                             'fm_starttime': fm_starttime,
                             'fm_startday': fm_startday,
                             'fm_startmonth': fm_startmonth,
                             'fm_endtime': fm_endtime, })
    allItems.sort(key=lambda x: x["start_time"], reverse=False)
    return allItems
def _get_instance_info(self, instance):
    """get instance info"""
    effective = instance.EffectiveDate
    # Use the effective date when it is a usable scalar value; otherwise
    # fall back to the creation date.
    if effective and effective != 'None' and not isinstance(effective, tuple):
        date = DateTime.DateTime(effective)
    else:
        date = DateTime.DateTime(instance.CreationDate)
    return date, instance.review_state
def test(): # MWF 1-3pm r1 = Recurrence() r1.set_weekday(0) r1.add_weekday(2) r1.add_weekday(4) r1.set_hhmm_range(13, 00, 15, 00) print "r1:", r1 r2 = Recurrence() r2.set_timetuple((1990, 9, 24)) print "r2:", r2 print "r1 intersect r2:", r1.intersect(r2) # TTh 2-4pm r3 = Recurrence() r3.set_weekday(1) r3.add_weekday(3) r3.set_hhmm_range(14, 00, 16, 00) print "r3:", r3 print "r1 intersect r3:", r1.intersect(r3) start = DateTime.DateTime(1990, 9, 24) end = DateTime.DateTime(1990, 10, 13) step = DateTime.TimeDelta(hours=13) result = r1.enumerate(start, end, step) print "(enumerate) dates between", start, "&", end, print "(step %s) covered by r1:" % step for r in result: print " ", r list = [] s = start + 0 while s <= end: list.append(s) s = s + step result = r1.filter(list) print "(filter) dates between", start, "&", end, print "(step %s) covered by r1:" % step for r in result: print " ", r step = DateTime.TimeDelta(hours=1) result = r1.enumerate(start, end, step) print "dates between", start, "&", end, "(step %s) covered by r1:" % step for r in result: print " ", r print "timing intersection test:" t = time.clock() for i in range(1000): x = r1.intersect(r2) x = r1.intersect(r3) print "%.4f seconds per intersection" % ((time.clock() - t) / 2000)
def create_event(self):
    """Create and return a four-day 'Great event' Event in the test portal."""
    setRoles(self.portal, TEST_USER_ID, ['Manager'])
    event_id = self.portal.invokeFactory('Event', 'event')
    event = self.portal[event_id]
    # Starts now, ends four days later.
    event.start = DateTime.DateTime()
    event.end = DateTime.DateTime() + 4
    event.setTitle('Great event')
    event.setText('Something awesome is going on!')
    return event
def test_hit_count_tool_increment_and_get_counts(self):
    """Exercise registerObject / incrementCounts / getHitCounts end to end:
    new registrations start at 0, increments accumulate per object id, and
    a non-dict mapping is rejected with TypeError."""
    # Make sure that there are no registered objects.
    hit_counts = self.hit_count_tool.getHitCounts()
    self.assertEqual(len(hit_counts), 0)
    # Register an object.
    self.hit_count_tool.registerObject(self.folder.getId(),
                                       DateTime.DateTime())
    # Make sure that incrementCounts raises a TypeError when supplied with
    # a mapping that isn't a dict.
    mapping = None
    self.assertRaises(TypeError, self.hit_count_tool.incrementCounts,
                      mapping)
    # Make sure that incrementCounts succeeds when supplied with an empty
    # mapping.
    mapping = {}
    self.hit_count_tool.incrementCounts(mapping)
    # Make sure that the newly registered object has a hit count of 0.
    hit_counts = self.hit_count_tool.getHitCounts()
    self.assertEqual(len(hit_counts), 1)
    self.assertEqual(hit_counts[0], (self.folder.getId(), 0))
    # Increment the hit count for the registered object, and make sure that
    # it gets updated.
    mapping = {self.folder.getId(): 1}
    self.hit_count_tool.incrementCounts(mapping)
    hit_counts = self.hit_count_tool.getHitCounts()
    self.assertEqual(len(hit_counts), 1)
    self.assertEqual(hit_counts[0], (self.folder.getId(), 1))
    # Register another object.
    self.hit_count_tool.registerObject(self.doc.getId(),
                                       DateTime.DateTime())
    hit_counts = self.hit_count_tool.getHitCounts()
    self.assertEqual(len(hit_counts), 2)
    # Increment the hit count for the first registered object.
    mapping = {self.folder.getId(): 1}
    self.hit_count_tool.incrementCounts(mapping)
    hit_counts = self.hit_count_tool.getHitCounts()
    self.assertEqual(hit_counts[0], (self.folder.getId(), 2))
    # Make sure that the second registered object still has a hit count of 0.
    self.assertEqual(hit_counts[1], (self.doc.getId(), 0))
    # Make sure that we can get hit counts by object ID:
    self.assertEqual(
        self.hit_count_tool.getHitCountForObject(self.folder.getId()), 2)
    self.assertEqual(
        self.hit_count_tool.getHitCountForObject(self.doc.getId()), 0)
def getInitialDate(self):
    "get the initial start date for this object"
    # First preference: a configured script supplies the date.
    script_path = self.getConfig('scriptPath')
    if script_path:
        script = self.getCompoundDocContainer().restrictedTraverse(
            script_path, None)
        if script is not None:
            return self.changeCallingContext(script)()
    # Second preference: a named attribute/method on self supplies it.
    attr_name = self.getConfig('attributeNameForCustomCreation')
    if attr_name:
        custom = getattr(self, attr_name)()
        if custom:
            return DateTime.DateTime(custom)
    # Fallback: the current time.
    return DateTime.DateTime()
def restartSparql(self, spq_path):
    """Refreshes a sparql query and schedules it in the async queue;
       the argument is the relative path of sparql object
    """
    async_service = queryUtility(IAsyncService)
    if async_service is None:
        logger.warn("Can't restartSparql. plone.app.async NOT installed!")
        return
    p_catalog = getToolByName(self.context, 'portal_catalog')
    # Look up the Sparql object by catalog path.  Assumes at least one
    # match -- an empty result set would raise IndexError here.
    spq_brain = p_catalog.searchResults(portal_type='Sparql',
                                        path=spq_path)[0]
    spq_ob = spq_brain.getObject()
    # Queries with refresh rate 'Once' are never rescheduled.
    if spq_ob and spq_ob.getRefresh_rate() != 'Once':
        spq_ob.scheduled_at = DateTime.DateTime()
        logger.info('[Restarting Sparql]: %s', spq_brain.getPath())
        try:
            # Queue the refresh job on the default ('') async queue,
            # tagged 'sparql', keyed to the timestamp we just stamped.
            async_queue = async_service.getQueues()['']
            async_service.queueJobInQueue(async_queue, ('sparql', ),
                                          async_updateLastWorkingResults,
                                          spq_ob,
                                          scheduled_at=spq_ob.scheduled_at,
                                          bookmarks_folder_added=False)
        except Exception, e:
            # Best-effort: log and continue rather than break the caller.
            logger.error("Got exception %s when restarting sparql %s",
                         e, spq_brain.getPath())
def __init__(self):
    """Initialise empty hit-count bookkeeping structures."""
    self._hits = {}
    self._recent_hit_counts = []
    self._hit_counts = []
    self._recent_daily_averages = []
    self._daily_averages = []
    # All three timestamps start out as the same moment.
    now = DateTime.DateTime()
    self._startdate = now
    self._inc_begin = now
    self._inc_end = now
def __init__(self, name, data, script, parent):
    """Populate this record from *data*, optionally transformed by *script*.

    *name* is the record's key: a float-compatible timestamp
    (seconds since the epoch) that also becomes the record id.
    """
    payload = data if script is None else script(self, data, parent)
    self.__dict__.update(payload)
    self.record_id = name
    self.id = repr(name)
    self.recordDate = DateTime.DateTime(float(name))
def DateFromString(text, formats=_date_formats, defaultdate=None, DateTime=DateTime): """ DateFromString(text, [formats, defaultdate]) Returns a DateTime instance reflecting the date given in text. A possibly included time part is ignored. formats and defaultdate work just like for DateTimeFromString(). """ _text, day, month, year, datestyle = _parse_date(text, formats, defaultdate) if datestyle == 'unknown' and \ 'unknown' not in formats: raise ValueError,\ 'Failed to parse "%s": found "%s" date' % \ (origtext, datestyle) try: return DateTime.DateTime(year, month, day) except DateTime.RangeError, why: raise DateTime.RangeError,\ 'Failed to parse "%s": %s' % (text, why)
def check_for_timeout():
    """Return the users whose group-based session timeout has elapsed.

    A user's effective timeout is the maximum 'timeout' property across
    all of the user's groups; a user is flagged once the time since their
    last-activity property exceeds that value.  Users whose groups define
    no timeout at all (max stays -1) are never flagged.
    """
    # get current UTC time
    utcnow = DateTime.DateTime(time.time(), 'UTC').timeTime()
    # plone.app.registry: name of the member property holding last activity
    registry = getUtility(IRegistry)
    property_name = registry[
        'niteoweb.grouptimeout.interfaces.ITimeout.propertyName'
    ]
    timeout_users = []
    for user in api.user.get_users():
        # Effective timeout = largest timeout among the user's groups.
        # (The original also tracked which group held the max and then
        # re-fetched its 'timeout' property -- the max value itself is
        # all that is needed.)
        max_timeout = -1
        for group in api.group.get_groups(user=user):
            group_timeout = group.getProperty('timeout')
            if group_timeout > max_timeout:
                max_timeout = group_timeout
        utcuser = user.getProperty(property_name).toZone('UTC').timeTime()
        # Flag the user once their idle time exceeds the effective timeout.
        if max_timeout != -1 and utcnow - utcuser > max_timeout:
            timeout_users.append(user)
    return timeout_users
def getPromotions(self):
    """ Get the 5 promotions to show on top """
    pl = self.getCurrentLanguage()
    context = aq_inner(self.context)
    # Non-English views work from the canonical (English) translation.
    if pl != 'en':
        context = context.getCanonical()
    collections = context.restrictedTraverse('megatopics-collections', None)
    if not collections:
        return None
    topics = collections.getFolderContents(contentFilter={
        'portal_type': 'Topic',
        'sort_on': 'getObjPositionInParent'})
    now = DateTime.DateTime()
    ret_themes = []
    for topic in topics:
        tobj = topic.getObject()
        entries = []
        for brain in tobj.queryCatalog():
            url = brain.getURL()
            # do not show expired content
            if brain.expires <= now:
                continue
            can_url = brain.getObject().getCanonical().absolute_url()
            entries.append((can_url, url, brain))
        # One dict per topic, keyed by the topic's title.
        ret_themes.append({tobj.Title(): entries})
    return ret_themes
def testStringDateFormatting(self):
    """Rendering a String template should expand both the name
    (capitalised and spacified) and the date's year/month/day parts."""
    import DateTime
    template = String("%(name capitalize spacify)s is "
                      "%(date fmt=year)s/%(date fmt=month)s/%(date fmt=day)s")
    rendered = template(date=DateTime.DateTime("2001-04-27"),
                        name='the_date')
    expected = 'The date is 2001/4/27'
    assert rendered == expected, rendered
def update_modification_date(obj):
    """Stamp *obj*'s modification date with the current time.

    Does nothing when *obj* is None, so callers never fail here.
    """
    if obj is not None:
        obj.setModificationDate(DateTime.DateTime())
def setupRhaptosSite(app=None, id=portal_name, quiet=0,
                     with_default_memberarea=1):
    '''Creates a Rhaptos site.

    Idempotent per app object: does nothing when an object with *id*
    already exists.  Logs in as the portal owner for the duration of the
    setup and logs out again before committing.
    '''
    if not hasattr(aq_base(app), id):
        _start = time.time()
        if not quiet:
            ZopeTestCase._print('Adding Rhaptos Site ... ')
        # Add user and log in
        app.acl_users._doAddUser(portal_owner, '', ['Manager'], [])
        user = app.acl_users.getUserById(portal_owner).__of__(app.acl_users)
        newSecurityManager(None, user)
        # Add Rhaptos Site
        factory = app.manage_addProduct['RhaptosSite']
        # Derive a unique-ish suffix from the current timestamp so each
        # test run gets its own repository database name.
        u_str = str(DateTime.DateTime()).replace(' ', '').replace(
            '.', '').replace(':', '').replace('/', '').replace('-',
                                                               '').lower()
        factory.manage_addRhaptosSite(id, '',
                                      create_userfolder=1,
                                      dbauser='******',
                                      dbuser='******',
                                      dbname='testrepository' + u_str)
        # Precreate default memberarea for performance reasons
        if with_default_memberarea:
            PloneTestCase._setupHomeFolder(app[id], default_user)
        # Log out
        noSecurityManager()
        transaction.commit()
        if not quiet:
            ZopeTestCase._print('done (%.3fs)\n' % (time.time() - _start, ))
def set(self, instance, value, **kwargs):
    """Write *value* to the identically named field on the proxied object.

    Resolves the proxy target for *instance*, delegates the write to the
    target's same-named field, and stamps the modification date on both
    the target and *instance*.  Returns None (and only logs) when no
    proxy target can be resolved; raises KeyError when the target lacks
    the field.
    """
    # Resolve the proxied target (e.g. via 'context.getSample()' on an AR).
    target = self.get_proxy(instance)
    if not target:
        logger.debug("Expression '{}' did not return a valid Proxy Object on {}"
                     .format(self.proxy, instance))
        return None
    # The target must carry a field of the same name as this one.
    field_name = self.getName()
    target_field = target.getField(field_name)
    if target_field is None:
        raise KeyError("Object '{}' with id '{}' has no field named '{}'".format(
            target.portal_type, target.getId(), field_name))
    # Delegate the actual write to the target's field.
    target_field.set(target, value, **kwargs)
    # Touch both objects with the same timestamp.
    now = DateTime.DateTime()
    target.setModificationDate(now)
    instance.setModificationDate(now)
def invalidateWorkingResult(self):
    """ invalidate working results

    Clears the stored results and cache, snapshots the change in
    portal_repository, then queues an async job to recompute the
    last working results.
    """
    self.setSparql_results("")
    self.invalidateSparqlCacheResults()
    pr = getToolByName(self, 'portal_repository')
    comment = "Invalidated last working result"
    comment = comment.encode('utf')
    try:
        pr.save(obj=self, comment=comment)
    except FileTooLargeToVersionError:
        # NOTE(review): 'view' is not defined anywhere in this scope --
        # if this handler ever runs it will raise NameError, not show a
        # portal message.  Confirm where 'view' was meant to come from.
        commands = view.getCommandSet('plone')
        commands.issuePortalMessage(
            """Changes Saved. Versioning for this file has been disabled because it is too large.""",
            msgtype="warn")
    async_service = queryUtility(IAsyncService)
    if async_service is None:
        logger.warn(
            "Can't invalidateWorkingResult. plone.app.async NOT installed!"
        )
        return
    # Stamp the schedule time and queue the refresh on the default queue.
    self.scheduled_at = DateTime.DateTime()
    async_queue = async_service.getQueues()['']
    async_service.queueJobInQueue(async_queue, ('sparql', ),
                                  async_updateLastWorkingResults,
                                  self,
                                  scheduled_at=self.scheduled_at,
                                  bookmarks_folder_added=False)
def setDate(self, date):
    "set the DateTime object"
    # BUG FIX: the bare 'except:' also swallowed SystemExit and
    # KeyboardInterrupt; catch only ordinary exceptions from parsing.
    try:
        data = DateTime.DateTime(date)
    except Exception:
        # Unparseable input falls back to an empty value.
        data = ""
    self.setObject('data', data)
def processMaintenance(self, form):
    """process the maintenance

    Reads optional integer day counts from *form* and deletes orders
    older than the corresponding cutoff: all orders, completed orders,
    and uncompleted orders.  Deletion counts are recorded in
    self.REQUEST.other.
    """
    now = DateTime.DateTime()

    def _days_from(key):
        # Parse an integer day count from the form.  Returns None when the
        # field is missing (int(None) raises TypeError) or malformed
        # (int('x') raises ValueError).
        # BUG FIX: the original caught only ValueError, so a missing form
        # field raised an uncaught TypeError.
        try:
            return int(form.get(key, None))
        except (TypeError, ValueError):
            return None

    deleteAllDays = _days_from('deleteAllDays')
    if deleteAllDays is not None:
        self.REQUEST.other['deletedAll'] = self.deleteOrders(
            stop=now - deleteAllDays) or 0
    deleteCompletedDays = _days_from('deleteCompletedDays')
    if deleteCompletedDays is not None:
        self.REQUEST.other['deletedCompleted'] = self.deleteOrders(
            stop=now - deleteCompletedDays, completed=1) or 0
    deleteUnCompletedDays = _days_from('deleteUnCompletedDays')
    if deleteUnCompletedDays is not None:
        self.REQUEST.other['deletedUnCompleted'] = self.deleteOrders(
            stop=now - deleteUnCompletedDays, completed=0) or 0
def ParseDateTimeGMT(arpastring, parse_arpadatetime=arpadatetimeRE.match):
    """ ParseDateTimeGMT(arpastring)

        Returns a DateTime instance reflecting the given ARPA date
        converting it to UTC (timezones are honored).
    """
    s = arpastring.strip()
    date = parse_arpadatetime(s)
    if not date:
        raise ValueError, 'wrong format or unknown time zone'
    litday, day, litmonth, month, year, hour, minute, second, zone = \
        date.groups()
    # Two-digit years are widened to a full century.
    if len(year) == 2:
        year = DateTime.add_century(int(year))
    else:
        year = int(year)
    # The month may be literal ('Jan') or numeric.
    if litmonth:
        litmonth = litmonth.lower()
        try:
            month = litmonthtable[litmonth]
        except KeyError:
            raise ValueError, 'wrong month format'
    else:
        month = int(month)
    day = int(day)
    hour = int(hour)
    minute = int(minute)
    # Seconds are optional in the ARPA format.
    if second is None:
        second = 0.0
    else:
        second = float(second)
    offset = Timezone.utc_offset(zone)
    # litday is ignored
    # Subtracting the zone offset converts local time to UTC.
    return DateTime.DateTime(year, month, day, hour, minute, second) - offset
def set_modificationTime(self, time=None):
    """ Set the modification time.

        Takes either a datetime object or a date/time string; defaults
        to the current time when no value is given.  Returns the stored
        DateTime instance.
    """
    import DateTime
    if time:
        # by marshalling the time to a string, we can be reasonably
        # sure that DateTime will be able to deal with it, even if it
        # was a DateTime object to begin with.
        self._modification_time = DateTime.DateTime(str(time))
    else:
        self._modification_time = DateTime.DateTime()
    return self._modification_time
def ParseDate(isostring, parse_isodate=isodateRE.match, strip=string.strip,
              atoi=string.atoi, atof=string.atof):
    """ParseDate(isostring)

       Returns a DateTime instance reflecting the given ISO date. A time
       part may not be included.
    """
    s = strip(isostring)
    date = parse_isodate(s)
    if not date:
        raise ValueError, 'wrong format, use YYYY-MM-DD'
    year, month, day = date.groups()
    year = atoi(year)
    # Month and day default to 1 when omitted (e.g. 'YYYY' or 'YYYY-MM').
    if month is None:
        month = 1
    else:
        month = atoi(month)
    if day is None:
        day = 1
    else:
        day = atoi(day)
    return DateTime.DateTime(year, month, day)
def finishOrder(self, sessionData=None, username=None):
    "finish the order on this object which involves changing the ownership and setting who the customer is"
    # Default to the currently logged-in user and the current session.
    if username is None:
        username = getSecurityManager().getUser().getUserName()
    if sessionData is None:
        sessionData = self.getSession()
    # Resolve the order object from the checkout URL stored in the session
    # (strip the request base to get a traversal-relative path).
    url = sessionData['checkoutObject'].replace(self.REQUEST.BASE0 + '/', '')
    order = self.getCompoundDocContainer().restrictedTraverse(url, None)
    #needed explicitely here since in this method we may have found the order not from the session data
    self.setOrder(order)
    # Only finalize when the user actually owns the order.
    if username in order.users_with_local_role('Owner'):
        order.customerLogin.data = username
        order.orderDate.data = DateTime.DateTime()
        self.recordOrderSelectionDetails(order)
        order.profile = ''
        if sessionData:
            self.storeFreightInfo(order, sessionData)
        self.storePriceInfo(order)
        self.clearCart()
        # Ensure the order has a CatalogManager before cataloging it.
        if order.CatalogManager is None:
            order.addRegisteredObject('CatalogManager', 'CatalogManager')
        order.CatalogManager.append(self.orderCatalog)
        order.index_object()
def ParseDate(arpastring, parse_arpadate=arpadateRE.match,
              strip=string.strip, atoi=string.atoi, atof=string.atof,
              lower=string.lower):
    """ParseDate(arpastring)

       Returns a DateTime instance reflecting the given ARPA date. Only the
       date part is parsed, any time part will be ignored. The instance's
       time is set to 0:00:00.
    """
    s = strip(arpastring)
    date = parse_arpadate(s)
    if not date:
        raise ValueError, 'wrong format'
    litday, day, litmonth, month, year = date.groups()
    # Two-digit years are widened to a full century.
    if len(year) == 2:
        year = DateTime.add_century(atoi(year))
    else:
        year = atoi(year)
    # The month may be literal ('Jan') or numeric.
    if litmonth:
        litmonth = lower(litmonth)
        try:
            month = litmonthtable[litmonth]
        except KeyError:
            raise ValueError, 'wrong month format'
    else:
        month = atoi(month)
    day = atoi(day)
    # litday and timezone are ignored
    return DateTime.DateTime(year, month, day)
def populatorLoader(self, string):
    "load the data into this object if it matches me"
    # Empty/None input is silently ignored.
    if not string:
        return
    try:
        self.setObject('data', DateTime.DateTime(string))
    except ValueError:
        # Values DateTime cannot parse are skipped.
        # NOTE(review): assumes the parse failure surfaces as ValueError --
        # confirm DateTime's parse exception actually subclasses it.
        pass
def configure_logger(device_name):
    """Build a logger for *device_name*: a colourised stdout handler plus,
    unless disabled via options, a per-device timestamped file handler."""
    global options
    global COLOR_INDEX
    logger = logging.getLogger(device_name)
    logger.setLevel(logging.DEBUG)
    # Rotate through the palette so each device gets its own colour.
    color = COLOR_LIST[COLOR_INDEX % len(COLOR_LIST)]
    console_formatter = logging.Formatter(
        color + colorama.Style.BRIGHT + STREAM_LOG_FORMAT +
        colorama.Style.RESET_ALL)
    COLOR_INDEX = (COLOR_INDEX + 1) % len(COLOR_LIST)
    console = logging.StreamHandler(sys.stdout)
    console.setFormatter(console_formatter)
    console.setLevel(options.log_level)
    logger.addHandler(console)
    if not options.no_logfile:
        dt = DateTime.DateTime()
        # e.g. dev_21-123-14-05_output.log (yy-dayOfYear-hour-minute)
        logfile = "{}_{}-{}-{}-{}_output.log".format(
            device_name, dt.yy(), dt.dayOfYear(), dt.hour(), dt.minute())
        logfile = os.path.join(LOG_DIR, logfile)
        file_handler = logging.FileHandler(logfile)
        file_handler.setLevel(logging.DEBUG)
        file_handler.setFormatter(logging.Formatter(FILE_LOG_FORMAT))
        logger.addHandler(file_handler)
    return logger
def validator(self, context, val):
    """Return (val, None) when *val* parses as a DateTime, otherwise
    (None, self.validationError)."""
    try:
        DateTime.DateTime(val)
        return val, None
    except DateTime.DateTime.SyntaxError:
        return None, self.validationError