def ParseDate(arpastring,parse_arpadate=arpadateRE.match):
    """ParseDate(arpastring)

    Returns a DateTime instance reflecting the given ARPA date.
    Only the date part is parsed, any time part will be ignored.
    The instance's time is set to 0:00:00.

    Raises ValueError for unparseable input or an unknown month name.
    NOTE: Python 2 ``raise`` syntax; ``arpadateRE``, ``litmonthtable``
    and ``DateTime`` are module-level names defined elsewhere.
    """
    s = arpastring.strip()
    date = parse_arpadate(s)
    if not date:
        raise ValueError,'wrong format'
    litday,day,litmonth,month,year = date.groups()
    # Two-digit years go through the module's century heuristic.
    if len(year) == 2:
        year = DateTime.add_century(int(year))
    else:
        year = int(year)
    # A literal month name ("Jan", ...) takes precedence over a numeric one.
    if litmonth:
        litmonth = litmonth.lower()
        try:
            month = litmonthtable[litmonth]
        except KeyError:
            raise ValueError,'wrong month format'
    else:
        month = int(month)
    day = int(day)
    # litday and timezone are ignored
    return DateTime.DateTime(year,month,day)
def ParseDateTime(arpastring,parse_arpadatetime=arpadatetimeRE.match):
    """ParseDateTime(arpastring)

    Returns a DateTime instance reflecting the given ARPA date
    assuming it is local time (timezones are silently ignored).

    Raises ValueError for unparseable input or an unknown month name.
    NOTE: Python 2 ``raise`` syntax.
    """
    s = arpastring.strip()
    date = parse_arpadatetime(s)
    if not date:
        raise ValueError,'wrong format or unknown time zone'
    litday,day,litmonth,month,year,hour,minute,second,zone = date.groups()
    # Two-digit years go through the module's century heuristic.
    if len(year) == 2:
        year = DateTime.add_century(int(year))
    else:
        year = int(year)
    # A literal month name takes precedence over a numeric one.
    if litmonth:
        litmonth = litmonth.lower()
        try:
            month = litmonthtable[litmonth]
        except KeyError:
            raise ValueError,'wrong month format'
    else:
        month = int(month)
    day = int(day)
    hour = int(hour)
    minute = int(minute)
    # Seconds are optional in the pattern.
    if second is None:
        second = 0.0
    else:
        second = float(second)
    # litday and timezone are ignored
    return DateTime.DateTime(year,month,day,hour,minute,second)
def phase_hunt(sdate=None):
    """Find time of phases of the moon which surround the given date.

    Five phases are found, starting and ending with the new moons
    which bound the current lunation.

    @param sdate: a DateTime instance or a Julian Day Number; defaults
        to the current time.  (Fix: the original default
        ``DateTime.now()`` was evaluated once at import time, so the
        "current date" was frozen at module load -- it is now resolved
        on every call.)
    @return: list of five truephase() results
    """
    if sdate is None:
        sdate = DateTime.now()
    if not hasattr(sdate, 'jdn'):
        sdate = DateTime.DateTimeFromJDN(sdate)
    adate = sdate + DateTime.RelativeDateTime(days=-45)
    # k1: index of the mean new moon preceding adate (1900 epoch,
    # 12.3685 lunations per year).
    k1 = floor((adate.year + ((adate.month - 1) * (1.0/12.0)) - 1900) * 12.3685)
    nt1 = meanphase(adate, k1)
    adate = nt1
    sdate = sdate.jdn
    # Walk forward one synodic month at a time until sdate is bracketed
    # by consecutive mean new moons.
    while 1:
        adate = adate + c.synodic_month
        k2 = k1 + 1
        nt2 = meanphase(adate, k2)
        if nt1 <= sdate < nt2:
            break
        nt1 = nt2
        k1 = k2
    phases = list(map(truephase,
                      [k1, k1, k1, k1, k2],
                      [0/4.0, 1/4.0, 2/4.0, 3/4.0, 0/4.0]))
    return phases
def test8():
    """Demo: DocumentTemplate ``String`` rendering with ``fmt=``
    date specifiers.  (Python 2 print statement.)"""
    import DateTime
    html=String("""
%(name capitalize spacify)s is %(date fmt=year)s/%(date fmt=month)s/%(date fmt=day)s
""")
    print html(date=DateTime.DateTime(), name='todays_date')
def testStringDateFormatting(self):
    """String class: fmt=year/month/day specifiers render a DateTime."""
    import DateTime
    from DocumentTemplate.DT_HTML import String
    template = String("%(name capitalize spacify)s is "
                      "%(date fmt=year)s/%(date fmt=month)s/%(date fmt=day)s")
    rendered = template(date=DateTime.DateTime("2001-04-27"), name='the_date')
    self.assertEqual(rendered, 'The date is 2001/4/27')
def testDTMLDateFormatting(self):
    """DTML fmt=year/month/day variables render a DateTime."""
    import DateTime
    template = self.doc_class("<dtml-var name capitalize spacify> is "
                              "<dtml-var date fmt=year>/<dtml-var date "
                              "fmt=month>/<dtml-var date fmt=day>")
    rendered = template(date=DateTime.DateTime("1995-12-25"),
                        name='christmas_day')
    self.assertEqual(rendered, 'Christmas day is 1995/12/25')
def toDateTime(thing):
    """Coerce *thing* to a DateTime object (pass through if it already is one)."""
    return thing if isinstance(thing, DateTime) else DateTime(thing)
def _test():
    """Smoke test: print Easter Sunday for the next ten years in both
    ISO and ARPA string formats.  (Python 2 print statements.)"""
    import ISO,ARPA
    year = DateTime.now().year
    print 'Easter Sunday for the next few years'
    for i in range(10):
        easter = EasterSunday(year+i)
        print 'ISO:',ISO.str(easter),' ARPA:', ARPA.str(easter)
def get_entry(caseNum, ticks):
    """Return the (comments, analysis) annotation rows for *caseNum* at
    time *ticks* (converted to an ISO GMT string for the query).

    NOTE(review): SQL is built by string concatenation -- injection
    risk if caseNum/ticks are user-controlled; parameterize if the
    driver supports it.
    """
    thisDate = DateTime.gmtime(ticks)
    findDate = DateTime.ISO.strGMT(thisDate)
    entries = mydb.query("SELECT comments, analysis from annotations where ( validtime = '"+findDate+"' and casenum = '"+caseNum+"' )")
    entries = entries.getresult()
    return entries
def clean(self, days, transaction_threshold=100):
    """Delete FeedFeederItem objects older than *days* days.

    Example: /planets/@@feed-mega-cleanup?days=30

    A ZODB commit is issued every *transaction_threshold* deletions so
    the commit buffer does not grow too long (timewise, memory wise).

    Source: http://opensourcehacker.com/2011/08/28/automatically-removing-old-items-from-a-plone-site/

    @param days: if item has been created before than this many days ago it is deleted
    @param transaction_threshold: How often we commit - for every nth item
    """
    folder = self.context.aq_inner
    alsoProvides(self.request, IDisableCSRFProtection)
    # DateTime deltas are days as floating points
    end = DateTime.DateTime() - days
    start = DateTime.DateTime(2000, 1, 1)
    date_range_query = {'query': (start, end), 'range': 'min:max'}
    logger.info("Beginning feed clean up for items older than {0} "
                "({1} days)".format(end, days))
    brains = folder.portal_catalog.queryCatalog(
        {"portal_type": "FeedFeederItem",
         "getFeedItemUpdated": date_range_query,
         "sort_on": "getFeedItemUpdated"})
    deleted = 0
    for brain in brains:
        deleted += 1
        item = brain.getObject()
        logger.info("Deleting: %s %s", item.absolute_url(),
                    item.getFeedItemUpdated())
        item.aq_parent.manage_delObjects([item.getId()])
        if deleted % transaction_threshold == 0:
            # Flush now and then so the transaction stays bounded.
            logger.info("Committing transaction")
            transaction.commit()
    msg = "{0} items removed.".format(deleted)
    logger.info(msg)
    return msg
def _getEffectiveDate(self, struct): """Extract the effective date from the struct, or return a default value. """ ed = struct.get('pubDate', struct.get('pubdate', None)) if ed is None: ed = struct.get('dateCreated', struct.get('datecreated', DateTime.DateTime())) return ed
def test7():
    """Demo: HTML template with legacy <!--#var--> syntax and fmt=
    date specifiers.  (Python 2 print statement.)"""
    import DateTime
    html=HTML("""
<!--#var name capitalize spacify--> is <!--#var date fmt=year-->/<!--#var date fmt=month-->/<!--#var date fmt=day-->
""")
    html.names({'name':'name', 'date':'date'})
    print html(date=DateTime.DateTime(), name='todays_date')
def test_getSignature_dict_date_as_datetime(self):
    """A datetime value is signed via its %Y%m%d form; the input dict
    is left untouched."""
    now = DateTime.DateTime().asdatetime()
    payload = {'key': now}
    expected = sha1(now.strftime('%Y%m%d') + '+' + self.service_password)
    self.assertEqual(self.service._getSignature(payload, ['key']), expected)
    # dict was not mutated by signing
    self.assertEqual(payload['key'], now)
def migrate_sparqls(context):
    """ Migrate sparqls for async update of last working results

    For each Sparql object without a ``scheduled_at`` attribute, sets a
    refresh rate and queues an async refresh job: immediately when no
    cached rows exist, otherwise delayed by one day (skipped entirely
    for 'Once' objects that already have rows).  Objects with an
    ``arg_spec`` or already carrying ``scheduled_at`` are skipped.
    Returns a completion message; returns None (after a warning) when
    plone.app.async is unavailable.
    """
    catalog = getToolByName(context, 'portal_catalog')
    brains = catalog.searchResults(portal_type='Sparql')
    logger.info('Migrating %s Sparqls ...', len(brains))
    already_migrated = 0
    has_args = 0
    async_service = queryUtility(IAsyncService)
    if async_service is None:
        logger.warn("Can't migrate_sparqls. plone.app.async NOT installed!")
        return
    for brain in brains:
        obj = brain.getObject()
        if getattr(obj, 'arg_spec', ''):
            # Parameterised sparqls are not refreshed unattended.
            has_args += 1
            continue
        if hasattr(obj, 'scheduled_at'):
            # Presence of scheduled_at marks a previous migration run.
            already_migrated += 1
            continue
        obj.refresh_rate = 'Daily'
        if getattr(obj, 'sparql_static', False):
            obj.refresh_rate = 'Once'
        cached_result = getattr(obj, 'cached_result', {})
        rows = cached_result.get('result', {}).get('rows', [])
        obj.scheduled_at = DateTime.DateTime()
        async_queue = async_service.getQueues()['']
        if not rows:
            # No cached data: refresh right away.
            async_service.queueJobInQueue(
                async_queue, ('sparql',),
                async_updateLastWorkingResults,
                obj,
                scheduled_at=obj.scheduled_at,
                bookmarks_folder_added=False
            )
        else:
            if obj.refresh_rate != 'Once':
                # Data exists: postpone the refresh by one day.
                before = datetime.datetime.now(pytz.UTC)
                delay = before + datetime.timedelta(days=1)
                async_service.queueJobInQueueWithDelay(
                    None, delay, async_queue, ('sparql',),
                    async_updateLastWorkingResults,
                    obj,
                    scheduled_at=obj.scheduled_at,
                    bookmarks_folder_added=False
                )
    logger.info('Migrated %s Sparqls ...',
                len(brains) - already_migrated - has_args)
    logger.info('Sparqls with arguments: %s...', has_args)
    logger.info('Already Migrated %s Sparqls ...', already_migrated)
    return "Sparql Migration Done"
def add_entry(ticks, comments, analysis, caseNum):
    """Replace the annotation row for (*ticks*, *caseNum*): single
    quotes in the text become acute accents, the old row is deleted and
    the new one inserted.  (Python 2: regsub module, print statement.)

    NOTE(review): SQL is built by string concatenation; the quote
    substitution is the only escaping -- injection risk remains.
    """
    comments = regsub.gsub("'","´", comments)
    analysis = regsub.gsub("'","´", analysis)
    thisDate = DateTime.gmtime(ticks)
    findDate = DateTime.ISO.strGMT(thisDate)
    delete = mydb.query("delete from annotations WHERE validtime = '"+findDate+"' and casenum = '"+caseNum+"' ")
    insert = mydb.query("insert into annotations (validtime, comments, analysis, casenum) values('"+findDate+"','"+comments+"','"+analysis+"','"+caseNum+"')")
    print 'DONE'
def mayBeUnmaintained(self):
    """Return True if there hasn't been a release in over 360 days.

    NOTE(review): threshold is 360, not a strict 365 -- confirm the
    slack is intentional.
    """
    lastRelease = self.getLatestReleaseDate()
    # No release date known at all: do not flag.
    if not lastRelease:
        return False
    # DateTime subtraction yields days as a floating-point number.
    if DateTime.DateTime() - lastRelease > 360:
        return True
    return False
def DateTime(self):
    """return a DateTime object representing self

    Raises ValueError when the recurrence is null, or is a range rather
    than one concrete date and time.  The stored time value is split
    with /60 and %60 into hours and minutes (minutes since midnight --
    Python 2 integer division).
    """
    if self.isnull():
        raise ValueError, "null recurrence"
    if self.isrange():
        raise ValueError, "not a concrete date and time"
    t = self.time[0][0]
    h = t / 60
    m = t % 60
    d = DateTime.DateTime(self.year[0][0], self.month[0][0],
                          self.day[0][0], h, m)
    return d
def __str__(self): if type(self.date) is int: d = DateTime.DateTimeFromJDN(self.date) else: d = self.date s = "%s for %s, %s (%%%.2f illuminated)" %\ (self.__class__, d.strftime(), self.phase_text, self.illuminated * 100) return s
def getData(symbol, startdate=0): symbol = string.lower(symbol) adate = [] open_tmp = [] high_tmp = [] low_tmp = [] close_tmp = [] vol_tmp = [] have_vol = 0 if startdate == 0: startdate = DateTime.DateTime(1900,1,1) if db[symbol].has_key('StartDate'): if startdate < db[symbol]['StartDate']: startdate = db[symbol]['StartDate'] else: raise 'DB not set properly - no startdate' dir = os.path.join(data_dir, symbol) nowyear = DateTime.now().year for i in range(nowyear - startdate.year + 1): filename = os.path.join(dir, `nowyear-i`, 'daily.dat') infile = open(filename, 'r') filebuf = infile.read() infile.close l = string.split(filebuf,'\n') i=0 while i<(len(l)): nums = string.split(l[i], ',') if (len(nums) >= 5): date = ds_toDate(nums[0]) if (date >= startdate) or startdate==0: adate.append(date) open_tmp.append(float(nums[1])) high_tmp.append(float(nums[2])) low_tmp.append(float(nums[3])) close_tmp.append(float(nums[4])) else: pass if (len(nums) == 6): have_vol = 1 vol_tmp.append(float(nums[5])) else: have_vol = 0 i = i + 1 return adate, Numeric.array(open_tmp), Numeric.array(high_tmp), \ Numeric.array(low_tmp), Numeric.array(close_tmp), \ Numeric.array(vol_tmp)
def makeData():
    """Generate 1000 random ledger rows: (id, date-in-1999, account, amount)."""
    rows = []
    accounts = ['Cash', 'Fixed Assets', 'Income', 'Expenditure', 'Etc. Etc.']
    for seq in range(1000):
        # NOTE: random.* call order kept (choice, random, randint x2)
        # so seeded runs reproduce the original sequence.
        account = random.choice(accounts)
        amount = random.random() * 1000
        when = DateTime.DateTime(1999, random.randint(1, 12),
                                 random.randint(1, 28))
        rows.append((seq + 1, when, account, amount))
    return rows
def __init__(self, date=None):
    """MoonPhase constructor.

    @param date: a DateTime instance, or anything DateTimeFrom
        accepts; defaults to the current time.  (Fix: the original
        default ``DateTime.now()`` was evaluated once at import time,
        freezing "now" at module load -- it is now resolved per call.)
    """
    if date is None:
        date = DateTime.now()
    if isinstance(date, DateTime.DateTimeType):
        self.date = date
    else:
        self.date = DateTime.DateTimeFrom(date)
    # phase() returns a dict of phase attributes merged onto self.
    self.__dict__.update(phase(self.date))
    self.phase_text = phase_string(self.phase)
def to_str_date(self, date):
    """Converts the date to a string

    :param date: DateTime object or ISO date string
    :returns: locale date string
    """
    dt = DateTime.DateTime(date)
    try:
        formatted = dt.strftime(self.date_format_long)
    except ValueError:
        # Fall back to the default string form when the format fails.
        formatted = str(dt)
    return formatted
def WaitTillDocumentReady():
    """Poll the current browser page until document.readyState is
    "complete".

    Returns True when the document becomes ready, False when the
    elapsed time (tracked in project variables, minutes) exceeds
    PageLoadTimeOut.  NOTE(review): relies on TestComplete globals
    (Project, Log, BrowserFactry); ``DateTime`` here is the
    TestComplete time helper, not the Python module.
    """
    IsDocumentReady = False
    TimeElapsed = int(Project.Variables.VariableByName['TimeElapsed'])
    PageLoadTimeOut = int(Project.Variables.VariableByName['PageLoadTimeOut'])
    ObjPage = BrowserFactry.GetPageObject()
    TimeStarted = DateTime.GetCurrentTime()
    #Waiter.Wait(1,"Wait For Page Load Completely")
    #if((ObjPage.contentDocument is not None) or (ObjPage.contentDocument.readyState == "complete")):
    #IsDocumentReady = True
    while ((ObjPage.contentDocument is None) or (ObjPage.contentDocument.readyState != "complete")):
        # Give up once the timeout is exceeded.
        if (TimeElapsed > PageLoadTimeOut):
            Log.Message("Page Object is not ready")
            return IsDocumentReady
        ObjPage = BrowserFactry.GetPageObject()
        TimeElapsed = DateTime.GetTimeDiffInMinutes(TimeStarted, DateTime.GetCurrentTime())
    IsDocumentReady = True
    return IsDocumentReady
def Main():
    """CGI handler: delete the spec question at the form's time
    (zticks, converted to an ISO GMT string), then redirect back to
    the index page.

    NOTE(review): SQL is built by string concatenation from form input
    -- injection risk.  Also: the DELETE filters on validTime only,
    not caseNum -- confirm that is intended.
    """
    form = cgi.FormContent()
    zticks = str(form["zticks"][0])
    caseNum = str(form["caseNum"][0])
    nowDate = DateTime.gmtime(zticks)
    strTicks = DateTime.ISO.strGMT(nowDate)
    delete = mydb.query("DELETE from specquestions WHERE validTime = '"+strTicks+"' ")
    style.jump_page('index.py?caseNum='+caseNum)
def __init__(self, context, request):
    """Bind catalog/membrane tools and resolve the report date.

    The report date comes from the request's ``report_date`` attribute
    when present, otherwise now; expiry is 30 days before it.
    """
    self.context = context
    self.request = request
    self.catalog = getToolByName(self.context, 'portal_catalog')
    self.membrane_tool = getToolByName(self.context, 'membrane_tool')
    self.expiry_days = 30
    requested = getattr(self.request, 'report_date', None)
    self.report_date = (DateTime.DateTime(requested) if requested
                        else DateTime.now())
    self.expiry_date = self.report_date - self.expiry_days
def query(self):
    """Find recent News Items under this folder (and any configured
    extra newsfolders), newest first.

    Items created within the registry-configured ``archivzeit`` window
    are returned.

    Fix: the original built extra paths with
    ``u''.join(path_tuple, '/')`` -- ``str.join`` takes a single
    iterable, so this raised TypeError whenever a newsfolder was
    configured.  Paths are now joined with '/' separators like the
    context path above.
    """
    registry = getUtility(IRegistry)
    archivzeit = registry[
        'nva.folderbehaviors.interfaces.ISchmuckbilder.archivzeit']
    end = DateTime.DateTime(
    ) + 0.1  # If we have some clock skew peek a little to the future
    start = DateTime.DateTime() - archivzeit
    date_range_query = {'query': (start, end), 'range': 'min:max'}
    pathes = []
    path = u'/'.join(self.context.getPhysicalPath())
    pathes.append(path)
    if hasattr(self.context, 'newsfolder'):
        if self.context.newsfolder:
            for i in self.context.newsfolder:
                # BUG FIX: join the physical path tuple with '/'.
                pathes.append(u'/'.join(i.to_object.getPhysicalPath()))
    brains = api.content.find(portal_type="News Item",
                              path=pathes,
                              created=date_range_query,
                              sort_on='created',
                              sort_order='reverse')
    return brains
def test_hit_count_tool_register_and_list_objects(self):
    """Registering lists objects; re-registering does not duplicate."""
    tool = self.hit_count_tool
    folder_id = self.folder.getId()
    doc_id = self.doc.getId()
    # First registration.
    tool.registerObject(folder_id, DateTime.DateTime())
    self.assertEqual(len(tool.listRegisteredObjects()), 1)
    self.assertEqual(tool.listRegisteredObjects()[0], folder_id)
    # Second object.
    tool.registerObject(doc_id, DateTime.DateTime())
    self.assertEqual(len(tool.listRegisteredObjects()), 2)
    self.assertEqual(tool.listRegisteredObjects()[0], doc_id)
    # Re-registering the first object keeps the count at two.
    tool.registerObject(folder_id, DateTime.DateTime())
    self.assertEqual(len(tool.listRegisteredObjects()), 2)
    self.assertEqual(tool.listRegisteredObjects()[1], folder_id)
def addInvitation(self, address, proj_id):
    """Record a pending invitation for *address* on project *proj_id*
    in both the by-address and by-project indexes, timestamped now.
    Existing entries are not overwritten.
    """
    now = DateTime.now()
    invitekeymap = self.getInvitesByEmailAddress(address)
    if proj_id not in invitekeymap:
        invitekeymap[proj_id] = now
    # Re-assigned (presumably so persistence machinery notices the
    # mutation -- confirm).
    self._by_address[address] = invitekeymap
    by_project = self.getInvitesByProject(proj_id)
    if address not in by_project:
        by_project[address] = now
    self._by_project[proj_id] = by_project
    # NOTE(review): if invitekeymap is a plain dict this raises
    # AttributeError (dicts have no ``.key``); possibly meant to return
    # the mapping itself or a specific key -- confirm intent.
    return invitekeymap.key
def sparqlTriggerAsync(self, cached_data):
    """ Verify if cached data exists, if not trigger async job """
    missing = not cached_data or (cached_data and not cached_data.get_size())
    if missing:
        # Stamp the schedule time and kick off the refresh of the
        # other cached formats.
        self.context.last_scheduled_at = DateTime.DateTime()
        self.context._updateOtherCachedFormats(
            self.context.last_scheduled_at,
            self.context.endpoint_url,
            self.context.query)
        api.portal.show_message(
            message="The data will be updated shortly. Please retry later.",
            request=self.request)
    return self.request.response.redirect(self.context.absolute_url())
def unpack(self, name, field, value):
    """Unpacks the value from the basic json types to the objects which
    are stored on the field later.
    """
    temporal = self._provided_by_one_of(field, [
        schema.interfaces.ITime,
        schema.interfaces.IDatetime,
    ])
    if temporal and value:
        return DateTime.DateTime(value).asdatetime()
    if self._provided_by_one_of(field, [schema.interfaces.IDate]) and value:
        return DateTime.DateTime(value).asdatetime().date()
    if self._provided_by_one_of(field, [INamedFileField]):
        if value and isinstance(value, dict):
            return field._type(data=base64.decodestring(value['data']),
                               filename=value['filename'])
    return value
def start_ipython(options):
    """Open an interactive IPython session against the serial devices
    in *options.devices*.

    Sets up colorama-colored usage text, optional session logging under
    LOG_DIR (file name built from the current DateTime), one
    Interactive/Uart wrapper per COM port, then embeds IPython.  All
    devices are closed and the process exits when the session ends.
    """
    colorama.init()
    comports = options.devices
    d = list()
    # Print out a mini intro to the interactive session --
    # Start with white and then magenta to keep the session white
    # (workaround for a bug in ipython)
    colors = {
        "c_default": colorama.Fore.WHITE + colorama.Style.BRIGHT,
        "c_highlight": colorama.Fore.YELLOW + colorama.Style.BRIGHT,
        "c_text": colorama.Fore.CYAN + colorama.Style.BRIGHT
    }
    print(USAGE_STRING.format(**colors))
    if not options.no_logfile and not os.path.exists(LOG_DIR):
        print("Creating log directory: {}".format(os.path.abspath(LOG_DIR)))
        os.mkdir(LOG_DIR)
    for dev_com in comports:
        d.append(
            Interactive(
                Uart(port=dev_com,
                     baudrate=options.baudrate,
                     device_name=dev_com.split("/")[-1])))
    device = d[0]
    send = device.acidev.write_aci_cmd  # NOQA: Ignore unused variable
    # Set iPython configuration
    ipython_config = traitlets.config.get_config()
    if options.no_logfile:
        ipython_config.TerminalInteractiveShell.logstart = False
        ipython_config.InteractiveShellApp.db_log_output = False
    else:
        dt = DateTime.DateTime()
        # Log name: year-dayofyear-hour-minute of session start.
        logfile = "{}/{}-{}-{}-{}_interactive_session.log".format(
            LOG_DIR, dt.yy(), dt.dayOfYear(), dt.hour(), dt.minute())
        ipython_config.TerminalInteractiveShell.logstart = True
        ipython_config.InteractiveShellApp.db_log_output = True
        ipython_config.TerminalInteractiveShell.logfile = logfile
    ipython_config.TerminalInteractiveShell.confirm_exit = False
    ipython_config.InteractiveShellApp.multiline_history = True
    ipython_config.InteractiveShellApp.log_level = logging.DEBUG
    IPython.embed(config=ipython_config)
    for dev in d:
        dev.close()
    raise SystemExit(0)
def load_user_info(self, user_name):
    """Load cached per-user API data (map, events, weather, news) from
    Users/<user_name>/<user_name>API.json and attach fresh helpers."""
    path = "Users/%s/%sAPI.json" % (user_name, user_name)
    with open(path) as handle:
        data = json.load(handle)
    self.rt = data['map']
    self.calendarEvents = data['events']
    self.weather_dict = data['weather']
    self.news_data = data['news']
    self.datetime = DateTime.DateTime()
    self.feed = feeds.Feeds()
def ParseDateTimeGMT(arpastring, parse_arpadatetime=arpadatetimeRE.match,
                     strip=string.strip, atoi=string.atoi, atof=string.atof,
                     lower=string.lower):
    """ParseDateTimeGMT(arpastring)

    Returns a DateTime instance reflecting the given ARPA date
    converting it to UTC (timezones are honored).

    Raises ValueError for unparseable input, an unknown time zone or
    month name.  NOTE: Python 2 ``raise`` syntax; the string.*
    defaults are presumably the classic default-argument local-binding
    speed trick.
    """
    s = strip(arpastring)
    date = parse_arpadatetime(s)
    if not date:
        raise ValueError, 'wrong format or unknown time zone'
    litday, day, litmonth, month, year, hour, minute, second, zone = date.groups(
    )
    # Two-digit years go through the module's century heuristic.
    if len(year) == 2:
        year = DateTime.add_century(atoi(year))
    else:
        year = atoi(year)
    # A literal month name takes precedence over a numeric one.
    if litmonth:
        litmonth = lower(litmonth)
        try:
            month = litmonthtable[litmonth]
        except KeyError:
            raise ValueError, 'wrong month format'
    else:
        month = atoi(month)
    day = atoi(day)
    hour = atoi(hour)
    minute = atoi(minute)
    # Seconds are optional in the pattern.
    if second is None:
        second = 0.0
    else:
        second = atof(second)
    # Normalize to UTC by subtracting the zone's offset.
    offset = Timezone.utc_offset(zone)
    # litday is ignored
    return DateTime.DateTime(year, month, day, hour, minute, second) - offset
def _dateConvertFromDB(d):
    """Convert a date/time/timestamp string from the DB to a DateTime value.

    Formats tried in order:
      * '%Y-%m-%d'             -> date only
      * '%H:%M:%S'             -> time only
      * '%H:%M:%S' + tz tail   -> (DateTime, tz) tuple for "time with tz"
      * '%Y-%m-%d %H:%M:%S'    -> full timestamp

    Returns None for a NULL column; re-raises the parse error when
    nothing matches.  Fix: ``d == None`` -> ``d is None``.
    """
    if d is None:
        return None
    try:
        return DateTime.strptime(d, '%Y-%m-%d') #just Y/M/D
    except:  # noqa -- strptime's error type here is unverified; kept broad
        pass
    try:
        return DateTime.strptime(d, '%H:%M:%S') #just hh:mm:ss
    except:
        pass
    # Split a trailing '-NN' timezone offset off the value.
    dashind = string.rindex(d, '-')
    tz = d[dashind:]
    d = d[:dashind]
    try:
        return DateTime.strptime(d, '%H:%M:%S'), tz # timetz
    except:
        pass
    # NO -- it was already stripped off, above! -- js Thu Aug 9 11:51:23 2001
    #strip off offset from gmt
    #d = d[:string.rindex(d, '-')]
    try:
        return DateTime.strptime(d, '%Y-%m-%d %H:%M:%S') # full date
    except:
        #print "date passed to convert function: |%s|" % d
        raise
def test_get_localized_time(self):
    """Test getting the localized time."""
    # set the expected localized date format
    self._set_localization_date_format()
    moment = DateTime.DateTime(1999, 12, 31, 23, 59)
    # long format: date and time
    self.assertEqual(
        portal.get_localized_time(datetime=moment, long_format=True),
        'Dec 31, 1999 11:59 PM')
    # time only
    self.assertEqual(
        portal.get_localized_time(datetime=moment, time_only=True),
        '11:59 PM')
    # default: date only
    self.assertEqual(
        portal.get_localized_time(datetime=moment),
        'Dec 31, 1999')
def Main():
    """CGI page: edit the spec question stored for the form's time
    (zticks) and case number; renders a pre-filled edit form posting
    to change.py.  (Python 2 print statements.)

    NOTE(review): SQL is built by string concatenation from form input
    -- injection risk.
    """
    style.header("Edit Questions", "white")
    form = cgi.FormContent()
    zticks = form["zticks"][0]
    caseNum = form["caseNum"][0]
    zticks = int( float(zticks) )
    nowDate = DateTime.gmtime(zticks)
    nice_date = nowDate.strftime("%x %H Z")
    strTicks = DateTime.ISO.strGMT(nowDate)
    add_entry( strTicks )
    print '<H2 align="CENTER">Edit Question for '+nice_date+':</H2>'
    print '<HR>'
    print '<a href="del.py?caseNum='+caseNum+'&zticks='+str(zticks)+'">Delete this question from DB</a>'
    entry = mydb.query("SELECT * from "+table_str+" WHERE validTime = '"+strTicks+"' ").dictresult()
    # Unpack the single expected row into the form fields.
    question = entry[0]["question"]
    type = entry[0]["type"]
    optiona = entry[0]["optiona"]
    optionb = entry[0]["optionb"]
    optionc = entry[0]["optionc"]
    optiond = entry[0]["optiond"]
    optione = entry[0]["optione"]
    optionf = entry[0]["optionf"]
    answer = entry[0]["answer"]
    cor_comments = entry[0]["correct"]
    wro_comments = entry[0]["wrong"]
    print '<form method="POST" action="change.py">'
    print '<input type="hidden" name="validTime" value="'+strTicks+'">'
    print '<input type="hidden" name="caseNum" value="'+caseNum+'">'
    mk_question(question)
    mk_type(type)
    mk_optiona(optiona)
    mk_optionb(optionb)
    mk_optionc(optionc)
    mk_optiond(optiond)
    mk_optione(optione)
    mk_optionf(optionf)
    mk_answer(answer)
    mk_cor_comments(cor_comments)
    mk_wro_comments(wro_comments)
    print '<input type="SUBMIT" value="Make Changes">'
    print '</form></body></html>'
def run(self):
    '''Crawler entry point.

    Returns 0 on success, -1 when every proxy-pool IP has failed,
    1 on an unknown failure.
    '''
    try:
        self.okProxy = True
        self.appList = []
        self.isStart = True
        # Walk backwards from today, one day at a time.
        if self.search:
            print(self.search)
            for day in range(self.day):
                # Convert the day offset to a '2020-03-11'-style date.
                date = DateTime.getDate(day + 1)
                for key in self.search:
                    # Crawl each keyword from the set for this date.
                    self.get_search_day_Info(date, key)
                    # Throttle: pause 3 seconds between crawls.
                    time.sleep(3)
        else:
            # Keyword-less crawl.
            for day in range(self.day):
                #print(day)
                date = DateTime.getDate(day + 1)
                self.get_noserach_day_Info(date)
                time.sleep(3)
        self.isStart = False
        return 0
    except:
        if not self.okProxy:
            # Every proxy-pool IP has gone stale.
            print("失效的IP")
            return -1
        print("未知错误")
        return 1
def loadSymbol(self, symbol):
    """Load five years of price history for *symbol* into the chart
    widgets, resizing/scrolling the views; emits a statusMessage and
    shows a dialog for an unknown symbol.
    """
    if Avidus.ds.hasSymbol(symbol):
        import DateTime
        # Fetch from five years ago until now.
        date = DateTime.now() - DateTime.RelativeDate(years=+5)
        self.mData.setData(Avidus.ds.getData(symbol, date), symbol)
    else:
        message = 'Invalid symbol'
        self.emit(PYSIGNAL("statusMessage"), (message, ))
        QMessageBox.information(self, 't a c t', """Invalid Symbol""")
        return
    self.xsize = self.mX.UpdateData(self.mData, self.width())
    if self.splitter.width() != self.xsize:
        # View width changed: clear charts, resize, then repopulate.
        self.mChart.UpdateData(DataSet())
        for i in range(len(self.mIndCharts)):
            self.mIndCharts[i].UpdateData(DataSet())
        self.resizeContents(self.xsize, self.visibleHeight())
        self.splitter.resize(self.xsize, self.visibleHeight())
        # move to the far right
        self.center(4000, 0, 0, 0)
    #print 'got a size of: ', self.width(), self.height()
    #print 'got a size of: ', self.mChart.width(), self.mChart.height()
    #print 'got a size of: ', self.splitter.width(), self.splitter.height()
    self.mChart.UpdateData(self.mData)
    for i in range(len(self.mIndCharts)):
        self.mIndCharts[i].UpdateData(self.mData)
    # move to the far right
    self.center(4000, 0, 0, 0)
    message = symbol + ' loaded.'
    self.emit(PYSIGNAL("statusMessage"), (message, ))
    self.ticker.insertItem(symbol, 0)
def checkCollection(self, device):
    """
    See how old the data is that we've collected

    @param device: device to collect against
    @type device: string
    @return: is the SNMP status number > 0 and is the last collection
        time + collage older than now?
    @type: boolean
    """
    stale_boundary = device.getSnmpLastCollection() + self.collage
    recently_collected = (device.getSnmpStatusNumber() > 0
                          and stale_boundary >= DateTime.DateTime())
    if not recently_collected:
        return True
    self.log.info("Skipped collection of %s" % device.id)
    return False
def __init__(self, date=None):
    """MoonPhase constructor.

    Give me a date, as either a Julian Day Number or a DateTime
    object; defaults to the current time.  (Fix: the original default
    ``DateTime.now()`` was evaluated once at import time, freezing
    "now" at module load -- it is now resolved per call.)
    """
    if date is None:
        date = DateTime.now()
    if not isinstance(date, DateTime.DateTimeType):
        self.date = DateTime.DateTimeFromJDN(date)
    else:
        self.date = date
    # phase() returns a dict of phase attributes merged onto self.
    self.__dict__.update(phase(self.date))
    self.phase_text = phase_string(self.phase)
def generateUniqueId(type_name):
    """ generateUniqueIds for sparqls """
    now = DateTime.DateTime()
    # Timestamp down to sub-second precision via the tail of millis().
    stamp = '%s.%s' % (now.strftime('%Y-%m-%d'), str(now.millis())[7:])
    # Four random digits to break ties within the same instant.
    noise = str(random())[2:6]
    if type_name is not None:
        prefix = (type_name.replace(' ', '_') + '.').lower()
    else:
        prefix = ''
    return prefix + stamp + noise
def add_query(self, query, result_query, new=None):
    """Store or refresh a (query, result) pair keyed by the query's
    generated key.

    ``new`` non-None marks a call from edit: it bumps the reference
    counter instead of refreshing results.  Returns a human-readable
    string describing what changed.
    NOTE(review): when ``new`` is not None, a changed result is NOT
    stored (the elif branch is skipped) -- confirm that is intended.
    """
    # store as string the changes made
    result = ""
    # Generate _list for query and result. New-attribute
    # is for refcounter update. We need to know
    # if the call was from edit
    result_key = KeyGenerator(result_query).generate()
    query_key = KeyGenerator(query).generate()
    # convert dicts to persistent
    query = to_persistent(query)
    result_query = to_persistent(result_query)
    # If we already have the same query
    if self.tool_data.has_key(query_key):
        # Check if the result has changed
        result = str(DateTime.DateTime()
                     ) + " NO CHANGES for key '" + query_key + "'"
        if new != None:
            # new reference
            self.tool_data[query_key]['refcounter'] += 1
            result = str(DateTime.DateTime(
            )) + " Created new reference in key '" + query_key + "'"
        elif self.tool_data[query_key]['result_key'] != result_key:
            self.tool_data[query_key]['result_key'] = result_key
            self.tool_data[query_key]['result_query'] = result_query
            result = str(DateTime.DateTime()
                         ) + " RESULTS UPDATED for key '" + query_key + "'"
    else:
        # If there isn't any query linked we create a new one
        _dict = {
            'result_key': result_key,
            'result_query': result_query,
            'query': query,
            'refcounter': 1
        }
        self.tool_data[query_key] = to_persistent(_dict)
    return result
def WeekTime(year, isoweek=1, isoday=1, hour=0, minute=0, second=0.0):
    """Week(year,isoweek=1,isoday=1,hour=0,minute=0,second=0.0)

    Returns a DateTime instance pointing to the given ISO week and
    day.  isoday defaults to 1, which corresponds to Monday in the
    ISO numbering.  The time part is set as given.
    """
    jan1 = DateTime.DateTime(year, 1, 1, hour, minute, second)
    offset = 7 * (isoweek - 1) + isoday - 1
    if jan1.iso_week[0] == year:
        # Jan 1 falls in ISO week 1 of this year: back up to its Monday.
        return jan1 + (offset - jan1.day_of_week)
    # Jan 1 belongs to the previous ISO year: advance to the next Monday.
    return jan1 + (offset + 7 - jan1.day_of_week)
def test_hit_count_tool_reset_counts(self):
    """resetHitCounts clears all recorded hit counts."""
    tool = self.hit_count_tool
    # Nothing registered initially.
    self.assertEqual(len(tool.getHitCounts()), 0)
    # One registration shows up in the counts.
    tool.registerObject(self.folder.getId(), DateTime.DateTime())
    self.assertEqual(len(tool.getHitCounts()), 1)
    # Resetting empties the counts again.
    tool.resetHitCounts()
    self.assertEqual(len(tool.getHitCounts()), 0)
def get_entry(caseNum, ticks):
    """Fetch the comments/analysis annotation rows for *caseNum* at
    time *ticks*, preferring the most specific source: the per-class
    custom table, then the second DB's annotations, then the first
    DB's.

    Fix: the final fallback returned ``entries3``, which is known
    empty at that point; it now returns ``entries1``.
    NOTE(review): SQL is built by string concatenation -- injection
    risk if caseNum/className are user-controlled.
    """
    thisDate = DateTime.gmtime(ticks)
    findDate = DateTime.ISO.strGMT(thisDate)
    entries1 = mydb.query("SELECT comments, analysis from annotations where ( validtime = '"+findDate+"' and casenum = '"+caseNum+"' )")
    entries2 = mydb1.query("SELECT comments, analysis from annotations where ( validtime = '"+findDate+"' and casenum = '"+caseNum+"' )")
    entries3 = mydb1.query("SELECT comments, analysis from annotations_custom where ( validtime = '"+findDate+"' and casenum = '"+caseNum+"' and className = '"+className+"')")
    entries1 = entries1.dictresult()
    entries2 = entries2.dictresult()
    entries3 = entries3.dictresult()
    if len(entries3) > 0:
        return entries3
    if len(entries2) > 0:
        return entries2
    # BUG FIX: was ``return entries3`` -- always [] here.
    return entries1
def printTime(secs = 0):
    """Emit the 'Current Time / Date' HTML block for timestamp *secs*;
    0 renders dashes.  (Python 2 print statements.)

    NOTE(review): despite the name, *secs* is parsed as a GMT date
    string by ParseDateTimeGMT -- confirm callers' type.
    """
    if secs != 0:
        nowDate = DateTime.ISO.ParseDateTimeGMT(secs)
        nowTuple = nowDate.tuple()
        localDate = DateTime.gm2local(nowDate)
        localTuple = localDate.tuple()
        dateStr = time.strftime("%B %d, %Y", localTuple )
        # Local time plus the GMT hour in parentheses, e.g. "( 18 Z )".
        timeStr = time.strftime("%I:%M %p [%Z]", localTuple )+' ( '+str(nowTuple[3])+' Z )'
    else:
        dateStr = " -- "
        timeStr = " -- "
    print '<H2><CENTER><FONT COLOR="#000000">C</FONT><FONT SIZE="+1">urrent</FONT> <FONT COLOR="#000000">T</FONT><FONT SIZE="+1">ime</FONT>:'
    print '<FONT COLOR="#b0020f">'+timeStr+'</FONT>'
    print ' '
    print '<FONT COLOR="#000000">D</FONT><FONT SIZE="+1">ate</FONT>:'
    print '<FONT COLOR="#b0020f">'+dateStr+'</FONT></CENTER></H2><BR>'
def __init__(self, description, results):
    """construct with the description fetched from a database cursor
    and with the result rows from a query

    Builds self.data as a tuple of dicts keyed by lower-cased column
    name.  DATE columns are converted via DateTime.gmtime(float(item))
    (presumably ticks/epoch seconds -- confirm); RAW columns are
    stringified.
    NOTE: ``map(None, ...)`` is the Python 2 zip-with-None-padding
    idiom; this module is Python 2 only.
    """
    data = []
    for result in results:
        row = {}
        for item, desc in map(None, result, description):
            if desc[1] == 'DATE':
                if item is not None:
                    item = DateTime.gmtime(float(item))
            elif desc[1] == 'RAW':
                item = str(item)
            row[string.lower(desc[0])] = item
        data.append(row)
    self.data = tuple(data)
def downloadCart(self):
    """Zip the cart's catalogued items for download and record a
    download comment; shows a portal message either way."""
    if self.isEmpty():
        self.context.plone_utils.addPortalMessage(self.msg['isEmpty'])
        return
    brains = []
    for uid, _value in self.cart.items():
        matches = self.context.portal_catalog(UID=uid)
        if len(matches) != 0:
            brains.append(matches[0])
    FileCartZip(self.request, brains)
    comment = dict(
        user=getSecurityManager().getUser().getId(),
        date=DateTime.now(),
        comment=self.request.form['filecart_download_comment'],
    )
    utility = getUtility(interfaces.IFileCartCommentsUtility)
    utility.commentDownload(self.context, brains, comment)
    self.context.plone_utils.addPortalMessage(self.msg['download'])
def process(file):
    """Parse one station data file (named '<station id>.txt') and
    append GEMPAK-formatted observations to /tmp/<YYYYMMDD>.fil.

    Only comma lines with enough tokens are used; times are shifted
    +6 hours to GMT and only :00/:20/:40 minutes are kept.
    """
    f = open(file, 'r').readlines()
    i = int( file[:-4] )  # station id taken from the filename stem
    # Loop over these lines
    for line in f:
        tokens = re.split(",",line)
        if (len(tokens) > 4):
            # Simple test to make sure line is long
            # For now, we are ignoring the Max/Min lines
            myDate = tokens[1]
            myTime = tokens[0]
            thisTime = DateTime.strptime(myDate +" "+ myTime, "%m/%d/%y %H:%M")
            myMinute = int(myTime[3:5])
            if (myMinute == 0 or myMinute == 20 or myMinute == 40):
                # Local -> GMT: fixed +6h shift (presumably US Central
                # standard time; no DST handling -- TODO confirm).
                gmtTime = thisTime + DateTime.RelativeDateTime(hours=+6)
                filePre = gmtTime.strftime("%Y%m%d")
                gempakTime = gmtTime.strftime("%y%m%d/%H%M")
                fref = open('/tmp/'+filePre+'.fil', 'a')
                try:
                    fref.write(" "+ snetConv[i] +" "+ gempakTime +" ")
                    tempF = int( (tokens[6])[:-1] )
                    RH = int( (tokens[7])[:3] )
                    dirTxt = tokens[2]
                    mph = (tokens[3])[:-3]
                    # mph -> knots (1.1507 mph per knot).
                    knots = round(float(mph) / 1.1507, 2)
                    alti = tokens[8]
                    p24i = (tokens[9])[:-2]
                    srad = int( (tokens[4])[:-1] ) * 10
                    drct = txt2drct[dirTxt]
                    dwpf = dewPoint(float(tempF), float(RH) )
                    fref.write( str(tempF) +" "+ str(RH) +" "+str(dwpf) +" " \
                        + str(drct)+" "+ str(knots) +" "+ str(p24i) +" "+ str(srad)+"\n")
                except KeyError:
                    # Unknown station/direction key: leave the partial line.
                    hi = "hello"
                fref.close()
def addSymbol(symbol): sybmol = string.lower(symbol) symbols.append(symbol) dir = os.path.join(data_dir, symbol) try: os.chdir(dir) except: os.makedirs(dir) startyear = db['startdate'].year for i in range(DateTime.now().year - startyear + 1): datepath = os.path.join(dir, `startyear+i`) try: os.chdir(datepath) except: os.makedirs(datepath) updateSymbol(symbol) db['symbols'] = symbols
def Main():
    """CGI page: Campbell data plotter -- renders the data-type and
    date-navigation table, then the plot for the requested time unless
    that time is in the future.  (Python 2 print statements.)
    """
    form = cgi.FormContent()
    now, secs = decide_time(form)  # Figure out the time wanted by the form values
    data = "c11, c12"  # Default to temperature maps
    if form.has_key("data"):
        data = form["data"][0]
    # Underscores in the form value stand in for spaces.
    data = regsub.gsub('_',' ', data)
    req_time = DateTime.localtime(secs)  # Convert this into a DateTime format
    style.header("Campbell Data Plotter","white")  # Setup HTML
    print '<TABLE width="100%">'
    print '<TR><TH bgcolor="red"><font color="white">Select Data Type:</TH>'
    print '<TH colspan="6" bgcolor="blue"><font color="white">Navigation:</TH>'
    print '</TR>'
    print '<TR><TD>'
    data_options(data, secs)
    print '</TD><TD>'
    prev_year(data, req_time)
    print '</TD><TD>'
    prev_month(data, req_time)
    print '</TD><TD>'
    prev_day(data, req_time)
    print '</TD><TD>'
    next_day(data, req_time)
    print '</TD><TD>'
    next_month(data, req_time)
    print '</TD><TD>'
    next_year(data, req_time)
    print '</TD></TR>'
    print '</TABLE>'
    if float(now) < float(secs) :
        print '<H1>You expect me to plot into the future? I am good, but not that good :)</H1>'
    else:
        plot.plot(data, secs)
    print '<BR><BR><a href="/campbell/index.html">ISU Campbell Homepage</a>'
def loadSymbol(self, symbol):
    """Load five years of price history for *symbol* into the chart
    widgets, resizing/scrolling the views; emits a statusMessage and
    shows a dialog for an unknown symbol.
    """
    if Avidus.ds.hasSymbol(symbol):
        import DateTime
        # Fetch from five years ago until now.
        date = DateTime.now() - DateTime.RelativeDate(years=+5)
        self.mData.setData(Avidus.ds.getData(symbol, date), symbol)
    else:
        message = 'Invalid symbol'
        self.emit(PYSIGNAL("statusMessage"), (message,))
        QMessageBox.information(self, 't a c t', """Invalid Symbol""")
        return
    self.xsize = self.mX.UpdateData(self.mData,self.width())
    if self.splitter.width() != self.xsize:
        # View width changed: clear charts, resize, then repopulate.
        self.mChart.UpdateData(DataSet())
        for i in range(len(self.mIndCharts)):
            self.mIndCharts[i].UpdateData(DataSet())
        self.resizeContents(self.xsize, self.visibleHeight())
        self.splitter.resize(self.xsize, self.visibleHeight())
        # move to the far right
        self.center(4000,0,0,0)
    #print 'got a size of: ', self.width(), self.height()
    #print 'got a size of: ', self.mChart.width(), self.mChart.height()
    #print 'got a size of: ', self.splitter.width(), self.splitter.height()
    self.mChart.UpdateData(self.mData)
    for i in range(len(self.mIndCharts)):
        self.mIndCharts[i].UpdateData(self.mData)
    # move to the far right
    self.center(4000,0,0,0)
    message = symbol + ' loaded.'
    self.emit(PYSIGNAL("statusMessage"), (message,))
    self.ticker.insertItem(symbol,0)
def Main(): form = cgi.FormContent() try: year = int(form["year"][0]) day = int(form["day"][0]) month = int(form["month"][0]) ztime = int(form["ztime"][0]) version = form["version"][0] zswitch = form["zswitch"][0] except: style.SendError("Bzz, give me a date please...") myDate = DateTime.mktime(year, month, day, ztime, 0, 0, '','','0') secs = myDate.ticks() time_tuple = myDate.tuple() myDateStr = DateTime.ISO.strGMT(myDate) SEVERE2.setupPage() SEVERE2.printTime(myDateStr) dir_format = myDate.strftime("/archivewx/data/%Y_%m_%d/") print '<BASE HREF="http://www.pals.iastate.edu'+dir_format+'">' if version == "basic": functs.dbComments(myDateStr, "comments", "News and Notes:", zswitch) else: functs.db_comments_417(secs, time_tuple, "comments", "News and Notes:", "mt417") functs.mk_data(time_tuple, 1) if version == "basic": functs.dbComments(myDateStr, "analysis", "Meteorological Analysis:", zswitch) else: functs.db_comments_417(secs, time_tuple, "analysis", "Meteorological Analysis:", "mt417") functs.finishPage()
def ParseDateTimeGMT(arpastring,parse_arpadatetime=arpadatetimeRE.match, strip=string.strip,atoi=string.atoi,atof=string.atof, lower=string.lower): """ParseDateTimeGMT(arpastring) Returns a DateTime instance reflecting the given ARPA date converting it to UTC (timezones are honored). """ s = strip(arpastring) date = parse_arpadatetime(s) if not date: raise ValueError,'wrong format or unknown time zone' litday,day,litmonth,month,year,hour,minute,second,zone = date.groups() if len(year) == 2: year = DateTime.add_century(atoi(year)) else: year = atoi(year) if litmonth: litmonth = lower(litmonth) try: month = litmonthtable[litmonth] except KeyError: raise ValueError,'wrong month format' else: month = atoi(month) day = atoi(day) hour = atoi(hour) minute = atoi(minute) if second is None: second = 0.0 else: second = atof(second) offset = Timezone.utc_offset(zone) # litday is ignored return DateTime.DateTime(year,month,day,hour,minute,second) - offset
def ds_toDate(datestring):
    """Parse a 'DD-Mon-YY' date string into a DateTime object."""
    # TODO: malformed input currently propagates the parser's own
    # exception; consider catching and re-raising something friendlier.
    #return(DateTimeFrom(datestring))
    fmt = "%d-%b-%y"
    return DateTime.strptime(datestring, fmt)
def phase(phase_date=None):
    """Calculate phase of moon as a fraction:

    The argument is the time for which the phase is requested,
    expressed in either a DateTime or by Julian Day Number; it
    defaults to the current time when omitted.

    Returns a dictionary containing the terminator phase angle as a
    percentage of a full circle (i.e., 0 to 1), the illuminated
    fraction of the Moon's disc, the Moon's age in days and fraction,
    the distance of the Moon from the centre of the Earth, and the
    angular diameter subtended by the Moon as seen by an observer at
    the centre of the Earth."""

    # BUGFIX: the previous default (phase_date=DateTime.now()) was
    # evaluated once at module import, so argument-less calls always
    # used the stale import-time clock.  Resolve "now" at call time.
    if phase_date is None:
        phase_date = DateTime.now()

    # Calculation of the Sun's position

    # date within the epoch
    if hasattr(phase_date, "jdn"):
        day = phase_date.jdn - c.epoch
    else:
        day = phase_date - c.epoch

    # Mean anomaly of the Sun
    N = fixangle((360/365.2422) * day)
    # Convert from perigee coordinates to epoch 1980
    M = fixangle(N + c.ecliptic_longitude_epoch - c.ecliptic_longitude_perigee)

    # Solve Kepler's equation
    Ec = kepler(M, c.eccentricity)
    Ec = sqrt((1 + c.eccentricity) / (1 - c.eccentricity)) * tan(Ec/2.0)
    # True anomaly
    Ec = 2 * todeg(atan(Ec))
    # Suns's geometric ecliptic longuitude
    lambda_sun = fixangle(Ec + c.ecliptic_longitude_perigee)

    # Orbital distance factor
    F = ((1 + c.eccentricity * cos(torad(Ec))) / (1 - c.eccentricity**2))

    # Distance to Sun in km
    sun_dist = c.sun_smaxis / F
    sun_angular_diameter = F * c.sun_angular_size_smaxis

    ########
    #
    # Calculation of the Moon's position

    # Moon's mean longitude
    moon_longitude = fixangle(13.1763966 * day + c.moon_mean_longitude_epoch)

    # Moon's mean anomaly
    MM = fixangle(moon_longitude - 0.1114041 * day - c.moon_mean_perigee_epoch)

    # Moon's ascending node mean longitude
    # MN = fixangle(c.node_mean_longitude_epoch - 0.0529539 * day)

    evection = 1.2739 * sin(torad(2*(moon_longitude - lambda_sun) - MM))

    # Annual equation
    annual_eq = 0.1858 * sin(torad(M))

    # Correction term
    A3 = 0.37 * sin(torad(M))

    MmP = MM + evection - annual_eq - A3

    # Correction for the equation of the centre
    mEc = 6.2886 * sin(torad(MmP))

    # Another correction term
    A4 = 0.214 * sin(torad(2 * MmP))

    # Corrected longitude
    lP = moon_longitude + evection + mEc - annual_eq + A4

    # Variation
    variation = 0.6583 * sin(torad(2*(lP - lambda_sun)))

    # True longitude
    lPP = lP + variation

    #
    # Calculation of the Moon's inclination
    # unused for phase calculation.

    # Corrected longitude of the node
    # NP = MN - 0.16 * sin(torad(M))

    # Y inclination coordinate
    # y = sin(torad(lPP - NP)) * cos(torad(c.moon_inclination))

    # X inclination coordinate
    # x = cos(torad(lPP - NP))

    # Ecliptic longitude (unused?)
    # lambda_moon = todeg(atan2(y,x)) + NP

    # Ecliptic latitude (unused?)
    # BetaM = todeg(asin(sin(torad(lPP - NP)) * sin(torad(c.moon_inclination))))

    #######
    #
    # Calculation of the phase of the Moon

    # Age of the Moon, in degrees
    moon_age = lPP - lambda_sun

    # Phase of the Moon
    moon_phase = (1 - cos(torad(moon_age))) / 2.0

    # Calculate distance of Moon from the centre of the Earth
    moon_dist = (c.moon_smaxis * (1 - c.moon_eccentricity**2))\
                / (1 + c.moon_eccentricity * cos(torad(MmP + mEc)))

    # Calculate Moon's angular diameter
    moon_diam_frac = moon_dist / c.moon_smaxis
    moon_angular_diameter = c.moon_angular_size / moon_diam_frac

    # Calculate Moon's parallax (unused?)
    # moon_parallax = c.moon_parallax / moon_diam_frac

    res = {
        'phase': fixangle(moon_age) / 360.0,
        'illuminated': moon_phase,
        'age': c.synodic_month * fixangle(moon_age) / 360.0,
        'distance': moon_dist,
        'angular_diameter': moon_angular_diameter,
        'sun_distance': sun_dist,
        'sun_angular_diameter': sun_angular_diameter
    }
    return res
def _test():
    """Smoke-test the DateTime, DateTimeDelta and Time string parsers.

    Runs each parser over a table of (input, expected) pairs and
    prints any mismatch.  A reference of 'ignore' skips the comparison
    for that entry; a reference of None means the parse is expected to
    raise.  Timing for each parser run is printed at the end.
    """
    import sys
    t = DateTime.now()
    print 'Testing DateTime Parser...'
    l = [
        # Literal formats
        ('Sun Nov 6 08:49:37 1994', '1994-11-06 08:49:37.00'),
        ('sun nov 6 08:49:37 1994', '1994-11-06 08:49:37.00'),
        ('sUN NOV 6 08:49:37 1994', '1994-11-06 08:49:37.00'),
        ('Sunday, 06-Nov-94 08:49:37 GMT', '1994-11-06 08:49:37.00'),
        ('Sun, 06 Nov 1994 08:49:37 GMT', '1994-11-06 08:49:37.00'),
        ('06-Nov-94 08:49:37', '1994-11-06 08:49:37.00'),
        ('06-Nov-94', '1994-11-06 00:00:00.00'),
        ('06-NOV-94', '1994-11-06 00:00:00.00'),
        ('November 19 08:49:37', '%s-11-19 08:49:37.00' % t.year),
        ('Nov. 9', '%s-11-09 00:00:00.00' % t.year),
        ('Sonntag, der 6. November 1994, 08:49:37 GMT', '1994-11-06 08:49:37.00'),
        ('6. November 2001, 08:49:37', '2001-11-06 08:49:37.00'),
        ('sep 6', '%s-09-06 00:00:00.00' % t.year),
        ('sep 6 2000', '2000-09-06 00:00:00.00'),
        ('September 29', '%s-09-29 00:00:00.00' % t.year),
        ('Sep. 29', '%s-09-29 00:00:00.00' % t.year),
        ('6 sep', '%s-09-06 00:00:00.00' % t.year),
        ('29 September', '%s-09-29 00:00:00.00' % t.year),
        ('29 Sep.', '%s-09-29 00:00:00.00' % t.year),
        ('sep 6 2001', '2001-09-06 00:00:00.00'),
        ('Sep 6, 2001', '2001-09-06 00:00:00.00'),
        ('September 6, 2001', '2001-09-06 00:00:00.00'),
        ('sep 6 01', '2001-09-06 00:00:00.00'),
        ('Sep 6, 01', '2001-09-06 00:00:00.00'),
        ('September 6, 01', '2001-09-06 00:00:00.00'),
        ('30 Apr 2006 20:19:00', '2006-04-30 20:19:00.00'),
        # ISO formats
        ('1994-11-06 08:49:37', '1994-11-06 08:49:37.00'),
        ('010203', '2001-02-03 00:00:00.00'),
        ('2001-02-03 00:00:00.00', '2001-02-03 00:00:00.00'),
        ('2001-02 00:00:00.00', '2001-02-01 00:00:00.00'),
        ('2001-02-03', '2001-02-03 00:00:00.00'),
        ('2001-02', '2001-02-01 00:00:00.00'),
        ('20000824/2300', '2000-08-24 23:00:00.00'),
        ('20000824/0102', '2000-08-24 01:02:00.00'),
        ('20000824', '2000-08-24 00:00:00.00'),
        ('20000824/020301', '2000-08-24 02:03:01.00'),
        ('20000824 020301', '2000-08-24 02:03:01.00'),
        ('-20000824 020301', '-2000-08-24 02:03:01.00'),
        ('20000824T020301', '2000-08-24 02:03:01.00'),
        ('20000824 020301', '2000-08-24 02:03:01.00'),
        ('2000-08-24 02:03:01.00', '2000-08-24 02:03:01.00'),
        ('T020311', '%s 02:03:11.00' % t.date),
        ('2003-12-9', '2003-12-09 00:00:00.00'),
        ('03-12-9', '2003-12-09 00:00:00.00'),
        ('003-12-9', '0003-12-09 00:00:00.00'),
        ('0003-12-9', '0003-12-09 00:00:00.00'),
        ('2003-1-9', '2003-01-09 00:00:00.00'),
        ('03-1-9', '2003-01-09 00:00:00.00'),
        ('003-1-9', '0003-01-09 00:00:00.00'),
        ('0003-1-9', '0003-01-09 00:00:00.00'),
        # US formats
        ('06/11/94 08:49:37', '1994-06-11 08:49:37.00'),
        ('11/06/94 08:49:37', '1994-11-06 08:49:37.00'),
        ('9/23/2001', '2001-09-23 00:00:00.00'),
        ('9-23-2001', '2001-09-23 00:00:00.00'),
        ('9/6', '%s-09-06 00:00:00.00' % t.year),
        ('09/6', '%s-09-06 00:00:00.00' % t.year),
        ('9/06', '%s-09-06 00:00:00.00' % t.year),
        ('09/06', '%s-09-06 00:00:00.00' % t.year),
        ('9/6/2001', '2001-09-06 00:00:00.00'),
        ('09/6/2001', '2001-09-06 00:00:00.00'),
        ('9/06/2001', '2001-09-06 00:00:00.00'),
        ('09/06/2001', '2001-09-06 00:00:00.00'),
        ('9-6-2001', '2001-09-06 00:00:00.00'),
        ('09-6-2001', '2001-09-06 00:00:00.00'),
        ('9-06-2001', '2001-09-06 00:00:00.00'),
        ('09-06-2001', '2001-09-06 00:00:00.00'),
        ('2002/05/28 13:10:56.1147 GMT+2', '2002-05-28 13:10:56.11'),
        ('1970/01/01', '1970-01-01 00:00:00.00'),
        ('20021025 12:00 PM', '2002-10-25 12:00:00.00'),
        ('20021025 12:30 PM', '2002-10-25 12:30:00.00'),
        ('20021025 12:00 AM', '2002-10-25 00:00:00.00'),
        ('20021025 12:30 AM', '2002-10-25 00:30:00.00'),
        ('20021025 1:00 PM', '2002-10-25 13:00:00.00'),
        ('20021025 2:00 AM', '2002-10-25 02:00:00.00'),
        ('Thursday, February 06, 2003 12:40 PM', '2003-02-06 12:40:00.00'),
        ('Mon, 18 Sep 2006 23:03:00', '2006-09-18 23:03:00.00'),
        # European formats
        ('6.11.2001, 08:49:37', '2001-11-06 08:49:37.00'),
        ('06.11.2001, 08:49:37', '2001-11-06 08:49:37.00'),
        ('06.11. 08:49:37', '%s-11-06 08:49:37.00' % t.year),
        #('21/12/2002', '2002-12-21 00:00:00.00'),
        #('21/08/2002', '2002-08-21 00:00:00.00'),
        #('21-08-2002', '2002-08-21 00:00:00.00'),
        #('13/01/03', '2003-01-13 00:00:00.00'),
        #('13/1/03', '2003-01-13 00:00:00.00'),
        #('13/1/3', '2003-01-13 00:00:00.00'),
        #('13/01/3', '2003-01-13 00:00:00.00'),
        # Time only formats
        ('01:03', '%s 01:03:00.00' % t.date),
        ('01:03:11', '%s 01:03:11.00' % t.date),
        ('01:03:11.50', '%s 01:03:11.50' % t.date),
        ('01:03:11.50 AM', '%s 01:03:11.50' % t.date),
        ('01:03:11.50 PM', '%s 13:03:11.50' % t.date),
        ('01:03:11.50 a.m.', '%s 01:03:11.50' % t.date),
        ('01:03:11.50 p.m.', '%s 13:03:11.50' % t.date),
        # Invalid formats
        ('6..2001, 08:49:37', '%s 08:49:37.00' % t.date),
        ('9//2001', 'ignore'),
        ('06--94 08:49:37', 'ignore'),
        ('20000824020301', 'ignore'),
        ('20-03 00:00:00.00', 'ignore'),
        ('9/2001', 'ignore'),
        ('9-6', 'ignore'),
        ('09-6', 'ignore'),
        ('9-06', 'ignore'),
        ('09-06', 'ignore'),
        ('20000824/23', 'ignore'),
        ('November 1994 08:49:37', 'ignore'),
        ('Nov. 94', 'ignore'),
        ('Mon, 18 Sep 2006 23:03:00 +1234567890', 'ignore'),
        ]

    # Add Unicode versions
    # (the NameError branch keeps this working on builds without unicode)
    try:
        unicode
    except NameError:
        pass
    else:
        k = []
        for text, result in l:
            k.append((unicode(text), result))
        l.extend(k)

    for text, reference in l:
        try:
            value = DateTimeFromString(text)
        except:
            if reference is None:
                continue
            else:
                value = str(sys.exc_info()[1])
        valid_datetime = validateDateTimeString(text)
        valid_date = validateDateString(text)
        if str(value) != reference and \
           not reference == 'ignore':
            print 'Failed to parse "%s"' % text
            print ' expected: %s' % (reference or '<exception>')
            print ' parsed: %s' % value
        elif _debug:
            print 'Parsed "%s" successfully' % text
        if _debug:
            if not valid_datetime:
                print ' "%s" failed date/time validation' % text
            if not valid_date:
                print ' "%s" failed date validation' % text

    et = DateTime.now()
    print 'done. (after %f seconds)' % ((et-t).seconds)

    ###

    print 'Testing DateTimeDelta Parser...'
    t = DateTime.now()
    l = [
        # Literal formats
        ('Sun Nov 6 08:49:37 1994', '08:49:37.00'),
        ('1 day, 8 hours, 49 minutes, 37 seconds', '1:08:49:37.00'),
        ('10 days, 8 hours, 49 minutes, 37 seconds', '10:08:49:37.00'),
        ('8 hours, 49 minutes, 37 seconds', '08:49:37.00'),
        ('49 minutes, 37 seconds', '00:49:37.00'),
        ('37 seconds', '00:00:37.00'),
        ('37.5 seconds', '00:00:37.50'),
        ('8 hours later', '08:00:00.00'),
        ('2 days', '2:00:00:00.00'),
        ('2 days 23h', '2:23:00:00.00'),
        ('2 days 23:57', '2:23:57:00.00'),
        ('2 days 23:57:13', '2:23:57:13.00'),
        ('', '00:00:00.00'),
        # ISO formats
        ('1994-11-06 08:49:37', '08:49:37.00'),
        ('10:08:49:37', '10:08:49:37.00'),
        ('08:49:37', '08:49:37.00'),
        ('08:49', '08:49:00.00'),
        ('-10:08:49:37', '-10:08:49:37.00'),
        ('-08:49:37', '-08:49:37.00'),
        ('-08:49', '-08:49:00.00'),
        ('- 10:08:49:37', '-10:08:49:37.00'),
        ('- 08:49:37', '-08:49:37.00'),
        ('- 08:49', '-08:49:00.00'),
        ('10:08:49:37.5', '10:08:49:37.50'),
        ('08:49:37.5', '08:49:37.50'),
        ('10:8:49:37', '10:08:49:37.00'),
        ('8:9:37', '08:09:37.00'),
        ('8:9', '08:09:00.00'),
        ('8', '00:00:00.00'),
        # Invalid formats
        #('', None),
        #('8', None),
        ]
    for text, reference in l:
        try:
            value = DateTimeDeltaFromString(text)
        except:
            if reference is None:
                continue
            else:
                value = str(sys.exc_info()[1])
        if str(value) != reference and \
           not reference == 'ignore':
            print 'Failed to parse "%s"' % text
            print ' expected: %s' % (reference or '<exception>')
            print ' parsed: %s' % value
        elif _debug:
            print 'Parsed "%s" successfully' % text

    et = DateTime.now()
    print 'done. (after %f seconds)' % ((et-t).seconds)

    ###

    print 'Testing Time Parser...'
    t = DateTime.now()
    l = [
        # Standard formats
        ('08:49:37 AM', '08:49:37.00'),
        ('08:49:37 PM', '20:49:37.00'),
        ('12:00:00 AM', '00:00:00.00'),
        ('12:00:00 PM', '12:00:00.00'),
        ('8:09:37', '08:09:37.00'),
        ('8:09', '08:09:00.00'),
        # ISO formats
        ('08:49:37', '08:49:37.00'),
        ('08:49', '08:49:00.00'),
        ('08:49:37.5', '08:49:37.50'),
        ('08:49:37,5', '08:49:37.50'),
        ('08:09', '08:09:00.00'),
        # Invalid formats
        ('', None),
        ('8:9:37', 'XXX Should give an exception'),
        ('08:9:37', 'XXX Should give an exception'),
        ('8:9', None),
        ('8', None),
        ]
    for text, reference in l:
        try:
            value = TimeFromString(text, formats=('standard', 'iso'))
        except:
            if reference is None:
                continue
            else:
                value = str(sys.exc_info()[1])
        if str(value) != reference and \
           not reference == 'ignore':
            print 'Failed to parse "%s"' % text
            print ' expected: %s' % (reference or '<exception>')
            print ' parsed: %s' % value
        elif _debug:
            print 'Parsed "%s" successfully' % text

    et = DateTime.now()
    print 'done. (after %f seconds)' % ((et-t).seconds)
def ds_updateSymbolData(symbol, begin_date, start=0):
    """Fetch daily data for *symbol* from *begin_date*'s year to now and
    write it into per-year 'daily.dat' files, then record the symbol's
    date bookkeeping in the db mapping.

    start -- when true, also store 'StartDate' in the symbol's db entry.
    """
    symbol = string.lower(symbol)
    lastdate = DateTime.now()
    # Nothing to do if the requested range starts in the future.
    if begin_date > lastdate:
        return
    else:
        # Always fetch from January 1st of the begin year.
        date = DateTime.DateTime(begin_date.year, 1, 1)
    print 'Getting data for ', symbol, ' from ', str(date)
    infile = di.getData(symbol, date, lastdate)
    # First line is a header and is discarded.
    throwawayline = infile.readline()
    indata = infile.readlines()
    index = 0
    got_start = 0
    startyear = date.year
    endyear = lastdate.year
    curdate = lastdate
    # Walk the rows newest-year-first; rows are assumed sorted so that a
    # row belonging to an older year marks the end of the current year.
    for i in range(endyear - startyear + 1):
        print 'Adding year: ', `endyear - i`
        datastring = ''
        while index < len(indata):
            line = indata[index]
            nums = string.split(line, ',')
            if len(nums) >= 5:
                d = ds_toDate(nums[0])
                if d.year < (endyear - i):
                    # Row belongs to an earlier year: current year done.
                    break
                else:
                    index = index+1
                    curdate = ds_toDate(nums[0])
                    datastring = datastring + line
            # NOTE(review): a row with fewer than 5 comma fields never
            # advances `index`, which would loop forever -- verify the
            # feed cannot emit such rows.
        dir = os.path.join(data_dir, symbol)
        datfilename = os.path.join(dir, `curdate.year`, 'daily.dat')
        print 'Writing to: ', datfilename, len(datastring)
        datfile = open(datfilename, 'w')
        datfile.write(datastring)
        datfile.close()
        # We didn't get all the data we asked for. Set the start
        # date and exit our loop
        if index >= len(indata):
            begin_date = curdate
            got_start = 1
            print 'Hit got start for ', symbol
            break
    # Update the symbol's bookkeeping record.
    if db.has_key(symbol):
        symbdata = db[symbol]
    else:
        symbdata = {}
    if start:
        symbdata['StartDate'] = begin_date
    if not symbdata.has_key('GotStart'):
        if got_start:
            symbdata['GotStart'] = 1
    symbdata['LastDate'] = lastdate
    db[symbol] = symbdata
def _test(): import DateTime d = DateTime.now() for lang in (English,German,French,Spanish,Portuguese): print lang.__class__.__name__,':',lang.ctime(d)
def IsoTime(context):
    """Return the current time formatted as an ISO string.

    The *context* argument is unused; it is kept for caller
    compatibility.
    """
    import DateTime
    return DateTime.ISO.str(DateTime.now())