def AddMonths(date, months):
    """Return *date* shifted forward by *months* months.

    The day of month is preserved where possible and clamped to the
    target month's length (e.g. Jan 31 + 1 month -> Feb 28).
    """
    day_of_month = date.day
    for _ in range(months):
        # Jump to the last day of the current month; adding one day
        # then lands on the first day of the following month.
        date = DateTime.Date(date.year, date.month,
                             date.GetDaysInMonth()) + 1
        # Restore the original day, clamped to this month's length.
        date = DateTime.Date(date.year, date.month,
                             min(day_of_month, date.GetDaysInMonth()))
    return date
def noia_check():
    """Asserts that status matches history"""
    # Paranoia check: for every person, the totals recorded in
    # paid_quota_status must equal the sum of all historic
    # payment/pagecount rows.  Differences are logged as warnings.
    ppq = PaidPrinterQuotas.PaidPrinterQuotas(db)
    # calculate total for 'free, paid and total' pages and compare
    # with status. Unfortunately summing kroner has no meaning.
    person_stats = {}
    for row in ppq.get_quota_status():
        # Key on person_id (long), or the string 'NULL' for rows with
        # no person (Python 2 'and/or' conditional idiom).
        pid = row['person_id'] and long(row['person_id']) or 'NULL'
        person_stats[pid] = {
            'free': int(row['free_quota']),
            'kroner': float(row['kroner']),
            'accum': int(row['accum_quota']),
            'total': int(row['total_pages'])
        }
    logger.debug("listed %i quota_status entries" % len(person_stats))
    unknown = []
    n = 0
    # Subtract every historic payment and pagecount row from the status
    # totals; a consistent database leaves (close to) zero everywhere.
    # The wide date range is intended to cover all existing rows.
    for row in (ppq.get_payment_stats(DateTime.Date(1980, 1, 1, 1, 1, 1),
                                      DateTime.Date(2020, 1, 1, 1, 1, 1),
                                      group_by=('person_id', )) +
                ppq.get_pagecount_stats(DateTime.Date(1980, 1, 1, 1, 1, 1),
                                        DateTime.Date(2020, 1, 1, 1, 1, 1),
                                        group_by=('person_id', ))):
        n += 1
        pid = row['person_id'] and long(row['person_id']) or 'NULL'
        if not person_stats.has_key(pid):
            # History row with no corresponding status entry.
            unknown.append(pid)
            continue
        tmp = person_stats[pid]
        tmp['free'] -= int(row['free'])
        tmp['accum'] -= int(row['accum'])
        tmp['kroner'] -= float(row['kroner'])
        tmp['total'] -= int(row['total'])
    logger.debug("listed %i quota_payment entries" % n)
    if unknown:
        logger.debug("No paid_quota_status entry for %s" % unknown)
    ok_count = 0
    for pid in person_stats.keys():
        ok = True
        for k in ('free', 'total', 'kroner', 'accum'):
            if (person_stats[pid][k] != 0 and
                    # we may be off by 1.0e-14
                    abs(person_stats[pid][k]) > 0.0001):
                logger.warn("noia check failed for %s: %s" %
                            (pid, repr(person_stats[pid])))
                ok = False
        if ok:
            ok_count += 1
    logger.debug("Found %i OK records" % ok_count)
def _parse_date(self, date):
    """Convert a written date into DateTime object.

    Possible syntaxes are:

        YYYY-MM-DD       (2005-04-03)
        YYYY-MM-DDTHH:MM (2005-04-03T02:01)
        THH:MM           (T02:01)

    Time of day defaults to midnight. If date is unspecified, the
    resulting time is between now and 24 hour into future.

    :param date: string, mx.DateTime instance, or a false value
    :returns: an mx.DateTime date, or None when no date was given
    :raises CerebrumError: on malformed or out-of-range input
    """
    if not date:
        # TBD: Is this correct behaviour? mx.DateTime.DateTime
        # objects allow comparison to None, although that is
        # hardly what we expect/want.
        return None
    if isinstance(date, DateTime.DateTimeType):
        # Why not just return date? Answer: We do some sanity
        # checks below.
        date = date.Format("%Y-%m-%dT%H:%M")
    if date.count('T') == 1:
        date, time = date.split('T')
        try:
            # 'minute' rather than 'min' to avoid shadowing the builtin.
            hour, minute = [int(x) for x in time.split(':')]
        except ValueError:
            raise CerebrumError("Time of day must be on format HH:MM")
        if date == '':
            now = DateTime.now()
            target = DateTime.Date(now.year, now.month, now.day,
                                   hour, minute)
            if target < now:
                # Requested time of day already passed today; use the
                # same time tomorrow instead.
                target += DateTime.DateTimeDelta(1)
            date = target.Format("%Y-%m-%d")
    else:
        hour = minute = 0
    try:
        y, m, d = [int(x) for x in date.split('-')]
    except ValueError:
        raise CerebrumError("Dates must be on format YYYY-MM-DD")
    # TODO: this should be a proper delta, but rather than using
    # pgSQL specific code, wait until Python has standardised on a
    # Date-type.
    if y > 2050:
        raise CerebrumError("Too far into the future: %r" % date)
    if y < 1800:
        raise CerebrumError("Too long ago: %r" % date)
    try:
        return DateTime.Date(y, m, d, hour, minute)
    except Exception:
        # Narrowed from a bare 'except:' so that KeyboardInterrupt /
        # SystemExit are not converted into a CerebrumError.
        raise CerebrumError("Illegal date: %r" % date)
def register_to(setfn, integer_datetimes):
    """Register mx.DateTime -> PostgreSQL binary converters via setfn.

    integer_datetimes selects between the server's integer-microsecond
    and float-second wire representations of the time types.
    """
    if integer_datetimes:
        pack_time = pgtype.pack_int_time
        pack_timestamp = pgtype.pack_int_timestamp
        pack_date = pgtype.pack_int_date
        pack_interval = pgtype.pack_int_interval
        usec_mul = 1000000L
    else:
        pack_time = pgtype.pack_flt_time
        pack_timestamp = pgtype.pack_flt_timestamp
        pack_date = pgtype.pack_flt_date
        pack_interval = pgtype.pack_flt_interval
        usec_mul = 1000000.0
    # PostgreSQL's timestamp/date epoch is 2000-01-01.
    timestamp_epoch = DateTime.DateTime(2000, 1, 1)
    date_epoch = DateTime.Date(2000, 1, 1)
    # NOTE(review): pack_date and date_epoch are assigned but never
    # used below -- no to_date converter is registered here; possibly
    # an omission.  TODO confirm against the matching register_from.

    def to_time(value):
        # Round to centiseconds before scaling to microseconds.
        return pack_time(round(value.seconds, 2) * usec_mul)
    setfn(DateTime.DateTimeDeltaType, to_time)

    def to_timestamp(value):
        delta = value - timestamp_epoch
        return pack_timestamp(round(delta.seconds, 2) * usec_mul)
    setfn(DateTime.DateTimeType, to_timestamp)

    def to_interval(value):
        # Collapse h:m:s into seconds and years into months; the wire
        # format carries (microseconds, days, months).
        seconds = value.seconds + ((value.hours * 60.0) +
                                   value.minutes) * 60.0
        months = value.months + (value.years * 12.0)
        return pack_interval(round(seconds, 2) * usec_mul,
                             value.days, months)
    setfn(DateTime.RelativeDateTime, to_interval)
def __init__(self, db=None, logger=None, dryrun=False, *rest, **kw):
    """
    Constructs a PasswordNotifier.

    :param Cerebrum.Database db:
        Database object to use.
        If `None`, this object will fetch a new db connection with
        `Factory.get('Database')`. This is the default.

    :param logging.Logger logger:
        Logger object to use.
        If `None`, this object will fetch a new logger with
        `Factory.get_logger('console')`. This is the default.

    :param bool dryrun:
        If this object should refrain from doing changes, and only print
        debug info. Default is `False`.
    """
    # NOTE: docstring previously said 'crontab' while the code fetches
    # the 'console' logger; the docstring now matches the code.
    self.logger = logger or Utils.Factory.get_logger('console')
    self.db = db or Utils.Factory.get("Database")()
    self.dryrun = bool(dryrun)
    # Fetch the local date once, so self.now and self.today cannot
    # disagree if we happen to run across midnight.
    today_ymd = time.localtime()[:3]
    self.now = self.db.Date(*today_ymd)
    self.today = dt.Date(*today_ymd)
    account = Utils.Factory.get("Account")(self.db)
    account.find_by_name(cereconf.INITIAL_ACCOUNTNAME)
    # Operator id used when quarantining ("splatting") accounts.
    self.splattee_id = account.entity_id
    self.constants = Utils.Factory.get('Constants')(db)
    self.splatted_users = []
def __init__(self, *args, **kwargs):
    """Build the attachments panel: grid, default date range, events."""
    aw.Panel.__init__(self, *args, **kwargs)
    wdr.AllegatiPanelFunc(self)
    self.gridall = AllegatiGrid(self.FindWindowById(wdr.ID_PANELGRID))
    # Default the two date filters to the whole year of the current
    # processing date (Jan 1 .. Dec 31).
    datelab = Env.Azienda.Esercizio.dataElab
    d1 = DateTime.Date(datelab.year, 1, 1)
    d2 = DateTime.Date(datelab.year, 12, 31)
    for cid, val in ((wdr.ID_DATA1, d1),
                     (wdr.ID_DATA2, d2)):
        self.FindWindowById(cid).SetValue(val)
    # Double-click on a grid row opens the row's detail handler.
    self.Bind(gl.EVT_GRID_CELL_LEFT_DCLICK, self.OnDClick, self.gridall)
    for cid, func in ((wdr.ID_UPDATE, self.OnUpdate),
                      (wdr.ID_PRINT, self.OnPrint)):
        self.Bind(wx.EVT_BUTTON, func, id=cid)
# Map raw sky-condition codes (1-128) to the compact 0-9 classes used
# by the solar model.  Missing codes (128 / -999) fall back to an
# estimate from total sky cover; unknown codes become 9.
_CCOND_CLASS = {
    5: 0, 7: 0,       # CLR
    25: 1, 43: 1,     # FEW or -SCT
    45: 2, 47: 2,     # SCT
    63: 3,            # -BKN
    65: 4, 67: 4,     # BKN
    83: 5,            # -OVC
    85: 6, 87: 6,     # OVC
    105: 8,           # -X
    125: 7,           # X
}


def RECLASS_CCOND(cc, coverage, ccdates, tsky):
    """Re-classify cloud conditions from 1-128 codes to 0-9 classes.

    For each (year, month, day, hour) entry in ccdates, every layer
    code in cc[year][julian_day][hour] is translated via _CCOND_CLASS
    and stored in the parallel coverage structure, which is returned.
    Codes 128/-999 are estimated from tsky via RECLASS_TSKY.
    """
    for day in ccdates:
        dc = DateTime.Date(day[0], day[1], day[2])
        j_day = dc.day_of_year
        # Hoist the repeated nested lookups for this hour; note that
        # cc and coverage may alias the same lists, which matches the
        # original element-by-element read/write order.
        codes = cc[day[0]][j_day][day[3]]
        out = coverage[day[0]][j_day][day[3]]
        for i in range(len(codes)):
            c = codes[i]
            if c in _CCOND_CLASS:
                out[i] = _CCOND_CLASS[c]
            elif c == 128 or c == -999:  # miss
                out[i] = RECLASS_TSKY(tsky[day[0]][j_day][day[3]])
            else:
                out[i] = 9  # unrecognized code
    return coverage
def truncate_log(to_date, logfilename, person_id=None):
    """Truncate printer-quota history up to to_date ('YYYY-MM-DD').

    Removed rows are appended to logfilename.  With person_id only that
    person is processed; otherwise every person with pagecount or
    payment activity in the range is truncated, plus a final pass with
    person_id=None which also resets the global balance.
    """
    pq_util = PPQUtil.PPQUtil(db)
    ppq = PaidPrinterQuotas.PaidPrinterQuotas(db)
    # 'YYYY-MM-DD' -> DateTime at midnight (padding supplies h:m:s).
    to_date = DateTime.Date(
        *([int(x) for x in (to_date + '-0-0-0').split('-')]))
    from_date = DateTime.Date(1980, 1, 1, 1, 1, 1)
    persons = {}
    if person_id:
        persons[person_id] = True
    else:
        # find potential victims
        for row in ppq.get_pagecount_stats(from_date, to_date,
                                           group_by=('person_id', )):
            if row['person_id'] is not None:
                persons[long(row['person_id'])] = True
        for row in ppq.get_payment_stats(from_date, to_date,
                                         group_by=('person_id', )):
            if row['person_id'] is not None:
                persons[long(row['person_id'])] = True
    out = open(logfilename, 'a')
    try:
        out.write("Truncate job started at %s\n" % time.asctime())
        for person_id in persons.keys() + [None]:
            removed, new_status = pq_util.truncate_log(
                person_id, to_date, 'quota_tools',
                reset_balance=(person_id is None))
            if not removed:
                continue
            logger.debug(
                "removed %i entries for %s" %
                (len(removed), db.pythonify_data(removed[0]['person_id'])))
            out.write("new balance: %s\n" % repr(new_status))
            for row in removed:
                row = dict([(k, db.pythonify_data(v))
                            for k, v in row.items()])
                row['tstamp'] = row['tstamp'].strftime('%Y-%m-%d %H:%M.%S')
                out.write("removed: %s\n" % repr(row))
            try:
                db.commit()
            except Exception:
                # Record the failure for this person, then propagate;
                # the finally below guarantees the log is closed.
                out.write("WARNING: Commit threw exception for this person\n")
                raise
    finally:
        # Previously the file was only closed on the success path,
        # leaking the handle (and buffered log lines) on any error.
        out.close()
def get_last_period_dates(self, company, date): """ return the start date and end date of the last period to display """ # return the first day and last day of the month if company.timesheet_range == 'month': start_date = DateTime.Date(date.tm_year, date.tm_mon, 1) end_date = start_date + DateTime.RelativeDateTime(months=+1) #return the first and last days of the week elif company.timesheet_range == 'week': start_date = DateTime.Date( date.tm_year, date.tm_mon, date.tm_mday) + DateTime.RelativeDateTime( weekday=(DateTime.Monday, 0)) end_date = DateTime.Date(date.tm_year, date.tm_mon, date.tm_mday) + DateTime.RelativeDateTime( weekday=(DateTime.Sunday, 0)) # return the first and last days of the year elif company.timesheet_range == 'year': start_date = DateTime.Date(date.tm_year, 1, 1) end_date = DateTime.Date(date.tm_year, 12, 31) return (start_date, end_date)
def OnCalendarCall(self, event):
    """Pop up the calendar dialog and store the picked date, if any."""
    if self._cal is None:
        # Anchor the dialog just below this control, in screen coords.
        anchor = self.GetParent().ClientToScreen(self.GetPosition())
        anchor[1] += 20
        picker = CalDialog(self, -1, "", anchor)
        picker.SetDate(self.GetValue())
        if picker.ShowModal() == 1:
            chosen = picker.date
            # wx months are 0-based; mx.DateTime months are 1-based.
            self.SetValue(DateTime.Date(chosen.GetYear(),
                                        chosen.GetMonth() + 1,
                                        chosen.GetDay()))
        picker.Destroy()
    event.Skip()
def _get_dates_ds(self):
    """Build the 'dates_and_times' test dataset with date, time and
    datetime columns covering ordinary, extreme and edge-case values."""
    date_rows = [
        (1956, 4, 23), (2003, 9, 30), (2002, 3, 1), (2000, 6, 21),
        (2009, 5, 27), (3003, 9, 11), (1903, 4, 2), (1803, 9, 9),
        (1803, 9, 9), (103, 9, 29),
    ]
    time_rows = [
        (1, 4, 23.1), (20, 9, 30.2), (8, 3, 1.3), (18, 6, 21.44),
        (0, 0, 0.0), (12, 9, 11.5), (19, 4, 2),
        (18, 9, 9.789876353663554648477647863563), (18, 9, 9),
        (23, 59, 59.9999999999999999999999),
    ]
    datetime_rows = [
        (1956, 4, 23, 23, 59, 59.9999999999999999999999),
        (2003, 9, 30, 18, 9, 9),
        (2002, 3, 1, 18, 9, 9.789876353663554648477647863563),
        (2000, 6, 21, 19, 4, 2),
        (2009, 5, 27, 12, 9, 11.5),
        (3003, 9, 11, 0, 0, 0.0),
        (1903, 4, 2, 18, 6, 21.44),
        (1803, 9, 9, 8, 3, 1.3),
        (1803, 9, 9, 20, 9, 30.2),
        (103, 9, 29, 1, 4, 23.1),
    ]
    ds = Dataset('dates_and_times')
    ds.addcolumnfromseq('a', label='Date 1', datatype='date',
                        data=[DateTime.Date(*row) for row in date_rows])
    ds.addcolumnfromseq('b', label='Time 1', datatype='time',
                        data=[DateTime.Time(*row) for row in time_rows])
    ds.addcolumnfromseq('c', label='Datetime 1', datatype='datetime',
                        data=[DateTime.DateTime(*row)
                              for row in datetime_rows])
    return ds
def __init__(self, arg):
    """Wrap *arg* as a date value.

    Accepts an mx.DateTime instance (stored as-is), another
    DatetimeFormat (value copied), any false value (stored as None),
    or a parseable date string.
    """
    # Guard clauses for the non-parsing cases.
    if isinstance(arg, DateTime.DateTimeType):
        self._value = arg
        return
    if isinstance(arg, DatetimeFormat):
        self._value = arg._value
        return
    if not arg:
        self._value = None
        return
    try:
        self._value = DateTime.Date(*parse_datetime(arg)[:3])
    except Error:
        raise Error('could not parse date "%s"' % arg)
    except DateTime.Error:
        raise Error('invalid date "%s"' % arg)
def register_from(setfn, integer_datetimes):
    """Register PostgreSQL binary -> mx.DateTime converters via setfn.

    integer_datetimes selects between the server's integer-microsecond
    and float-second wire representations of the time types.
    """
    if integer_datetimes:
        unpack_time = pgtype.unpack_int_time
        unpack_timestamp = pgtype.unpack_int_timestamp
        unpack_date = pgtype.unpack_int_date
        unpack_interval = pgtype.unpack_int_interval
    else:
        unpack_time = pgtype.unpack_flt_time
        unpack_timestamp = pgtype.unpack_flt_timestamp
        unpack_date = pgtype.unpack_flt_date
        unpack_interval = pgtype.unpack_flt_interval
    # PostgreSQL's timestamp/date epoch is 2000-01-01.
    # NOTE(review): pgtype.usec_mul is used unconditionally below,
    # whereas the matching register_to picks 1000000L vs 1000000.0
    # per integer_datetimes -- confirm usec_mul suits both paths.
    timestamp_epoch = DateTime.DateTime(2000, 1, 1)
    date_epoch = DateTime.Date(2000, 1, 1)

    def from_timestamp(buf):
        seconds = round(unpack_timestamp(buf) / pgtype.usec_mul, 2)
        delta = DateTime.DateTimeDeltaFromSeconds(seconds)
        return timestamp_epoch + delta
    setfn(pgoid.timestamp, from_timestamp)

    def from_time(buf):
        seconds = round(unpack_time(buf) / pgtype.usec_mul, 2)
        return DateTime.Time(seconds=seconds)
    setfn(pgoid.time, from_time)

    def from_date(buf):
        delta = DateTime.DateTimeDeltaFromDays(unpack_date(buf))
        return date_epoch + delta
    setfn(pgoid.date, from_date)

    def from_interval(buf):
        microseconds, days, months = unpack_interval(buf)
        seconds = round(microseconds / pgtype.usec_mul, 2)
        # Unfortunately, we can't use divmod here...
        hours = int(seconds / 3600.0)
        seconds = math.fmod(seconds, 3600.0)
        minutes = int(seconds / 60.0)
        seconds = math.fmod(seconds, 60.0)
        years = int(months / 12.0)
        months = int(math.fmod(months, 12))
        # Use keyword arguments: mx's RelativeDateTime constructor
        # interleaves absolute year/month/day parameters with the
        # relative ones, so six positional arguments would bind
        # hours/minutes/seconds to the wrong fields.
        return DateTime.RelativeDateTime(years=years, months=months,
                                         days=days, hours=hours,
                                         minutes=minutes,
                                         seconds=seconds)
    setfn(pgoid.interval, from_interval)
def GetValue(self, adapt_date=True, adapt_year=True):
    """Parse the masked date control into an mx.DateTime date.

    Returns None when the field does not hold a valid date.  With
    adapt_year, a missing year falls back to YEAR_DEFAULT; with
    adapt_date, the widget text is rewritten to the normalized date
    (preserving the current text selection).
    """
    out = None
    try:
        masked = self.maskedCtrl
        cdate = masked.GetValue()
        dd = int(cdate[0:2])
        mm = int(cdate[3:5])
        yyyy = int((cdate[6:10]).strip() or 0)
        if not yyyy and adapt_year:
            yyyy = YEAR_DEFAULT
        out = DateTime.Date(yyyy, mm, dd)
        if adapt_date:
            # Rewrite the widget only if its text differs from the
            # parsed (normalized) date.
            if int(cdate[0:2].strip() or 0) != out.day\
               or int(cdate[3:5].strip() or 0) != out.month\
               or int(cdate[6:10].strip() or 0) != out.year:
                # NOTE(review): zfill(2) on the 4-digit year looks odd
                # (zfill(4) expected?) -- behaviour kept; verify.
                ndate = str(out.day).zfill(2) + cdate[2] + str(
                    out.month).zfill(2) + cdate[5] + str(out.year).zfill(2)
                s1, s2 = masked.GetSelection()
                masked.SetValue(ndate)
                masked.SetSelection(s1, s2)
    except Exception:
        # Invalid/partial input is expected while the user is typing:
        # fall through and return None.  (Narrowed from a bare
        # 'except:' so KeyboardInterrupt is not swallowed.)
        pass
    return out
def SOLAR_MAIN(ICAO,sd,ed,stpr0,wthr0,dwpt0,visi0,ccnd0,chgt0,ceil0,tsky0):
    """Estimate hourly global solar radiation for station ICAO.

    sd/ed are (year, month, day, hour) start/end tuples; the remaining
    arguments are the hourly input series handles (station pressure,
    weather codes, dew point, visibility, cloud condition, cloud
    height, ceiling, total sky cover).  Returns (SR_out_dates, SR_out):
    parallel lists of [y, m, d, h] stamps and radiation values
    (in the unit selected by 'units' below, or -999 when missing).
    """
    # Define units 1=langleys; 2=MJ/m2; 3=W/m2; 4=BTU/ft2
    units=1
    # Define Temporal resolution of model output
    # 1=Daily; 2=Hourly
    temp_res=2
    # Missing Value and initial value for asos
    miss = -999; asos = 0
    # Value used when ceiling is "unlimited"
    unlimited = 100000.
    # Codes for haze and fog occurrences
    haze_list=[81,82]
    fog_list=[70,71,72,73,74,75,77,78]
    ######### Obtain the station information ###################################
    sinf = stationInfo()
    asos_date = sinf.getvar(ICAO, 'asos_date')
    lat = sinf.getvar(ICAO, 'lat')
    lon = sinf.getvar(ICAO, 'lon')
    utc_lapse = sinf.getvar(ICAO, 'gmt_offset')
    snow_station = sinf.getvar(ICAO, 'snow_station')
    ############################################################################
    ######### Find nearest snow station if not identified ######################
    if snow_station == None:
        varmajor = 11; detailed_check = 1
        snow_station = getNearestCoop (lat,lon,varmajor,sd,ed,detailed_check)
        sinf.setvar(ICAO, 'snow_station', snow_station)
    ############################################################################
    ######### Set utc_lapse to 5 if not available ##############################
    if utc_lapse == -999:
        print 'Error: no gmt offset available; using 5'
        utc_lapse = 5
    ############################################################################
    ######### Needed since MORECS does full days ###############################
    if ed[3] > 0:
        # Round a partial final day up to midnight of the next day.
        ed = (DateTime.DateTime(*ed) +
              DateTime.RelativeDate(days=+1,hour=0)).tuple()[:3]
    ############################################################################
    ### Initialize hourly data dictionaries, keyed [year][julian_day][hour]
    wx,ceil={},{}
    condition,height={},{}
    pres,dew,vis,tsky,snow={},{},{},{},{}
    for yr in range(sd[0],ed[0]+1):
        wx[yr]={}; ceil[yr]={}
        condition[yr]={}; height[yr]={}
        pres[yr]={}; dew[yr]={}; vis[yr]={}; tsky[yr]={}; snow[yr]={}
        for d in range(1,367):
            wx[yr][d]={}; ceil[yr][d]={}
            condition[yr][d]={}; height[yr][d]={}
            pres[yr][d]={};
            dew[yr][d]={}; vis[yr][d]={}; tsky[yr][d]={}
            for h in range(24):
                wx[yr][d][h]=[]
                condition[yr][d][h]=[]; height[yr][d][h]=[]
    ### Determine number of 30-day periods ###
    d1=DateTime.Date(sd[0],sd[1],sd[2])
    d2=DateTime.Date(ed[0],ed[1],ed[2])
    num_periods=int(math.floor((d2.absdate-d1.absdate-1)/30.0))+1
    SR_out_dates=[]; SR_out=[]
    ### Loop through 30-day periods
    for period in range(num_periods):
        # Create Date List for this period
        date_list=[]
        ds=DateTime.Date(sd[0],sd[1],sd[2])+DateTime.RelativeDate(days=int(30.0*period))
        date_list.append([ds.year,ds.month,ds.day])
        for p_inc in range(1,31):
            dn = ds + DateTime.RelativeDate(days=p_inc)
            if dn <= d2:
                date_list.append([dn.year,dn.month,dn.day])
            else:
                break
        ################ For obtaining hourly data via TSVars ###################
        # Append hour=0 to turn [y,m,d] into the [y,m,d,h] form the
        # TSVar helpers expect.
        vsd,ved = date_list[0],date_list[-1]
        vsd.append(0)
        ved.append(0)
        pres_list,pres_dates = getHourlyVars(ICAO, 'stpr', stpr0, vsd, ved)
        wx_list,wx_dates = getHourlyVars(ICAO, 'wthr', wthr0, vsd, ved)
        dew_list,dew_dates = getHourlyVars(ICAO, 'dwpt', dwpt0, vsd, ved)
        vis_list,vis_dates = getHourlyVars(ICAO, 'visi', visi0, vsd, ved)
        tsky_list,tsky_dates = getHourlyVars(ICAO, 'tsky', tsky0, vsd, ved)
        ceil_list,ceil_dates = getHourlyVars(ICAO, 'ceil', ceil0, vsd, ved)
        condition_list,condition_dates = getHourlyVars(ICAO, 'ccnd', ccnd0, vsd, ved)
        height_list,height_dates = getHourlyVars(ICAO, 'chgt', chgt0, vsd, ved)
        #########################################################################
        ############### Specify Unlimited Ceiling ('Unl') as 100000 feet ########
        ############### No longer needed, getting ceiling as float       ########
        #for c in range(len(ceil_list)):
        #    if string.find(ceil_list[c],'Unl')!=-1: ceil_list[c]='1000.0'
        ############### Obtain Snow Cover via TSVars ############################
        # Obtain snow cover data for this station
        var_min_list=[1,4]
        if snow_station != -1:
            snow_list,snow_dates,valid_snow = \
                solar_routines.GET_DAILY_TSVAR(snow_station,date_list[0],date_list[-1],11,var_min_list)
        else:
            valid_snow = 0
        if valid_snow:
            pass
        else:
            # No snow data available: fill the period with missing values.
            snow_list=[]
            snow_dates=date_list
            for i in range(len(snow_dates)):
                snow_list.append(miss)
        #########################################################################
        ################ Convert lists to dictionaries ##########################
        for i in range(len(snow_dates)):
            yr=snow_dates[i][0]; month=snow_dates[i][1]; day=snow_dates[i][2]
            ds = DateTime.Date(yr,month,day)
            j_day = ds.day_of_year
            snow[yr][j_day]=float(snow_list[i])
        for i in range(len(pres_dates)):
            yr=pres_dates[i][0]; month=pres_dates[i][1]; day=pres_dates[i][2]; hr=pres_dates[i][3]
            ds = DateTime.Date(yr,month,day)
            j_day = ds.day_of_year
            if pres_list[i] != miss:
                # NOTE(review): 3387/1000 looks like an inches-Hg ->
                # kPa conversion -- confirm units of 'stpr'.
                pres[yr][j_day][hr]=pres_list[i]*3387.0/1000.0
            else:
                pres[yr][j_day][hr]=pres_list[i]
            dew[yr][j_day][hr]=dew_list[i]
            vis[yr][j_day][hr]=vis_list[i]
            tsky[yr][j_day][hr]=tsky_list[i]
#            ceil[yr][j_day][hr]=float(ceil_list[i])
            ceil[yr][j_day][hr]=ceil_list[i]
            condition[yr][j_day][hr]=condition_list[i].value()
            height[yr][j_day][hr]=height_list[i].value()
            wx[yr][j_day][hr]=wx_list[i].value()
            # Normalize the per-hour cloud-layer lists: pad to four
            # layers and map negative entries to the missing value.
            if len(condition[yr][j_day][hr])==0:
                condition[yr][j_day][hr]=[-999.0,-999.0,-999.0,-999.0]
            for x in range(4):
                try:
                    if condition[yr][j_day][hr][x]<0:
                        condition[yr][j_day][hr][x]=-999.0
                except:
                    condition[yr][j_day][hr].append(5)
            if len(height[yr][j_day][hr])==0:
                height[yr][j_day][hr]=[-999.0,-999.0,-999.0,-999.0]
            for x in range(4):
                try:
                    if height[yr][j_day][hr][x]<0:
                        height[yr][j_day][hr][x]=-999.0
                    else:
                        # Heights kept in hundreds of feet below.
                        height[yr][j_day][hr][x]=height[yr][j_day][hr][x]/100.0
                except:
                    height[yr][j_day][hr].append(-999.0)
        #########################################################################
        ## Re-classify model definitions of sky condition 0-9 instead of 1-128 ##
        cover=condition; cover_dates=condition_dates
        coverage = solar_routines.RECLASS_CCOND(condition,cover,condition_dates,tsky)
        #########################################################################
        ### Loop through hourly data
        for d in date_list:
            # Last date of the period is also the next period's first
            # date; skip it to avoid double output.
            if d==date_list[-1]: continue
            trans,cl_alb={},{}
            # Check if asos is commissioned at this station for this date (d)
            if (d[0]>asos_date[0]) or \
               (d[0]==asos_date[0] and d[1]>asos_date[1]) or \
               (d[0]==asos_date[0] and d[1]==asos_date[1] and d[2]>asos_date[2]):
                asos=1
            else:
                asos=0
            # define julian day
            d_cal = DateTime.Date(d[0],d[1],d[2])
            d_cal_next = d_cal+DateTime.RelativeDate(days=1)
            julian_day=d_cal.day_of_year
            julian_day_next=d_cal_next.day_of_year
            if len(height[d[0]][julian_day][0]) == 0:
                # No cloud data at all for this day: emit missing values.
                for h in range(24):
                    sr_date = [d[0],d[1],d[2],h]
                    SR_out_dates.append(sr_date)
                    SR_out.append(miss)
                continue
            # assign snow depth, obtained previously from nearby coop station
            try:
                snodpth=snow[d[0]][julian_day]
            except:
                snodpth=miss
            try:
                snodpth_next=snow[d[0]][julian_day_next]
            except:
                snodpth_next=miss
            # assign snow depth by averaging today's and next day's snow depth
            if (snodpth!=miss and snodpth_next!=miss):
                sno=int((snodpth+snodpth_next)/2.0)
            elif (snodpth==miss and snodpth_next!=miss):
                sno=snodpth_next
            elif (snodpth!=miss and snodpth_next==miss):
                sno=snodpth
            else:
                sno=0
            haze_hourly=[]; fog_hourly=[]
            for i in range(24):
                numlyr = 0
                # determine if fog or haze is present
                haze,fog=0,0
                for w in wx[d[0]][julian_day][i]:
                    if w in haze_list: haze=1
                    if w in fog_list: fog=1
                haze_hourly.append(haze); fog_hourly.append(fog)
                # If cloud base height is missing, coverage is -BKN, -OVC
                # or -X and ceiling is unlimited, assign a height of 20,000 ft
                for l in range(3):  # loops through lowest 3 cloud layers
                    if (height[d[0]][julian_day][i][l]<0.0 and \
                        (coverage[d[0]][julian_day][i][l]==3 or \
                         coverage[d[0]][julian_day][i][l]==4 or \
                         coverage[d[0]][julian_day][i][l]==5 or \
                         coverage[d[0]][julian_day][i][l]==6) and \
                        ceil[d[0]][julian_day][i]==unlimited):
                        # 200 = 20,000 ft expressed in hundreds of feet.
                        ceil[d[0]][julian_day][i] = 200.
                    if height[d[0]][julian_day][i][l]!=miss:
                        numlyr = numlyr+1
                # If cloud base height is missing, calls the subroutine ceiling to
                # obtain an estimate based on the ceiling height
                for l in range(3):  # loops through lowest 3 cloud layers
                    if (height[d[0]][julian_day][i][l]<0.0 and \
                        coverage[d[0]][julian_day][i][l]!=9 and \
                        coverage[d[0]][julian_day][i][l]!=0 and \
                        ceil[d[0]][julian_day][i]!=miss):
                        zhight=random.random()
                        height_new = solar_routines.CEILING(height[d[0]][julian_day][i],\
                            coverage[d[0]][julian_day][i],ceil[d[0]][julian_day][i],zhight)
                        height[d[0]][julian_day][i]=height_new
                        break
                # Hours with a height given but no coverage will be assigned
                # a coverage of obscured
                if (numlyr==1 and height[d[0]][julian_day][i][0]!=miss and \
                    coverage[d[0]][julian_day][i][0]==9 and vis[d[0]][julian_day][i]<=1):
                    coverage[d[0]][julian_day][i][0] = 7
                # call routine to get the cloud transmission values
                trans[i],cl_alb[i] = \
                    solar_routines.TRAN(d[0],d[1],asos,haze,fog,\
                        vis[d[0]][julian_day][i],height[d[0]][julian_day][i],\
                        coverage[d[0]][julian_day][i],ceil[d[0]][julian_day][i],miss)
#            sys.stdout.write('%s: Trans: ' % d[0:3])
#            for kle in range(0,24):
#                sys.stdout.write(' %5.2f' % (trans[kle]))
#            sys.stdout.write('\n')
#            sys.stdout.write('%s Cloud ht:' % d[0:3])
#            for kle in range(0,24):
#                top = max(height[d[0]][julian_day][kle])
#                sys.stdout.write(' %5d' % (int(top)))
#            sys.stdout.write('\n')
            # call routine to calculate global solar radiation values
            sun_top,sun_clear,sun_cloud,zenith = \
                solar_routines.SOLAR_CALC_H(d[0],d[1],julian_day,\
                    pres[d[0]][julian_day],dew[d[0]][julian_day],trans,cl_alb,\
                    height[d[0]][julian_day],coverage[d[0]][julian_day],\
                    vis[d[0]][julian_day],sno,haze_hourly, fog_hourly,\
                    lat,lon,miss,utc_lapse,ICAO,asos)
            # Append one output value per hour, converted to 'units'.
            for h in range(24):
                sr_date=[d[0],d[1],d[2],h]
                SR_out_dates.append(sr_date)
                if sun_cloud[h] == miss or sun_cloud[h] == -99:
#                    print 'Missing solar',ICAO,sr_date,trans[h],height[d[0]][julian_day][h],coverage[d[0]][julian_day][h],vis[d[0]][julian_day][h],pres[d[0]][julian_day][h],dew[d[0]][julian_day][h],sno,fog_hourly[h],haze_hourly[h]
#                    print 'Missing solar',ICAO,sr_date,ceil[d[0]][julian_day][h]
                    SR_out.append(miss)
                elif units==1:
                    SR_out.append(sun_cloud[h]*23.88)   # langleys
                elif units==2:
                    SR_out.append(sun_cloud[h])         # MJ/m2
                elif units==3:
                    SR_out.append(sun_cloud[h]/0.0036)  # W/m2
                elif units==4:
                    SR_out.append(sun_cloud[h]*88.054)  # BTU/ft2
                else:
                    pass
    return SR_out_dates, SR_out
def generate_task_realisasi_bulanan(self, cr, uid, ids, context=None):
    """Generate monthly ('bulanan') realization tasks for the wizard.

    For each wizard record, validates the employee's HR data, then
    creates one project.task per month for lama_kegiatan months
    starting from date_start (or January 1st of the current year),
    skipping months that already have a non-draft task and replacing
    draft duplicates.  Returns an act_window back to the wizard.
    """
    if context is None:
        context = {}
    task = {}
    task_pool = self.pool.get('project.task')
    stage_pool = self.pool.get('project.task.type')
    for task_generate in self.browse(cr, uid, ids, context=context):
        #check Duplicate
        #Init Field
        target_category = 'bulanan'
        description = ''
        lama_kegiatan = task_generate.lama_kegiatan  # number of months
        user_id = task_generate.user_id.id
        target_period_year = task_generate.target_period_year
        target_period_month = 'xx'
        date_start = 'xx'
        date_end = 'xx'
        company_id = None
        currency_id = None
        user_id_bkd = None
        employee = self.get_employee_from_user_id(cr, uid, task_generate)
        # Only the task owner may run this wizard.
        if user_id != uid:
            raise osv.except_osv(
                _('Invalid Action!'),
                _('Anda Tidak Memiliki Priviledge Untuk Proses Ini.'))
        if not employee:
            raise osv.except_osv(
                _('Invalid Action, Data Pegawai Tidak Lengkap'),
                _('Proses Tidak Dapat Dilanjutkan Karena Ada Beberapa Informasi Kepegawaian Belum Diisi, Khususnya Data Pejabat Penilai Dan Atasan Banding.'
                  ))
        else:
            company = employee.company_id
            company_id = company.id
            currency_id = employee.company_id.currency_id
            #print "company_id : ",company_id,' - ',currency_id
        if not company_id:
            raise osv.except_osv(
                _('Invalid Action, Data Pegawai Tidak Lengkap'),
                _('Proses Tidak Dapat Dilanjutkan Karena Unit Dinas Pegawai Belum Dilengkapi.'
                  ))
        #print "employee parent : ",employee.parent_id
        # Resolve the BKD reviewer: wizard value first, then the
        # company default.
        if not task_generate.user_id_bkd:
            if not company.user_id_bkd:
                raise osv.except_osv(
                    _('Invalid Action, Data Dinas Kurang Lengkap'),
                    _('Staff Pemeriksa Dari BKD Tidak Tersedia Untuk Unit Anda, Silahkan hubungi Admin Atau isi Data Pemeriksa.'
                      ))
            else:
                user_id_bkd = company.user_id_bkd.id
        else:
            user_id_bkd = task_generate.user_id_bkd.id
        if not employee.user_id_atasan:
            raise osv.except_osv(
                _('Invalid Action, Data Pegawai Tidak Lengkap'),
                _('Proses Tidak Dapat Dilanjutkan Karena Data Pejabat Penilai Belum Terisi.'
                  ))
        if not employee.user_id_banding:
            raise osv.except_osv(
                _('Invalid Action, Data Pegawai Tidak Lengkap'),
                _('Proses Tidak Dapat Dilanjutkan Karena Data Pejabat Pengajuan Banding.'
                  ))
        # Supervisor/appeal reviewers: wizard values override the
        # employee record's defaults.
        user_id_atasan = task_generate.user_id_atasan.id
        user_id_banding = task_generate.user_id_banding.id
        if not task_generate.user_id_atasan.id:
            user_id_atasan = employee.user_id_atasan.user_id.id
        if not task_generate.user_id_banding.id:
            user_id_banding = employee.user_id_banding.user_id.id
        # Template values shared by every monthly task created below.
        task.update({
            'project_id': None,
            'user_id': user_id,
            'company_id': company_id,
            'description': description,
            'name': task_generate.name,
            'code': None,
            'target_category': target_category,
            #'sequence': target_obj.priority,
            'target_type_id': task_generate.target_type_id,
            'target_period_year': target_period_year,
            'user_id_atasan': user_id_atasan or False,
            'user_id_banding': user_id_banding or False,
            'user_id_bkd': user_id_bkd or False,
            'priority': '2',
            'currency_id': currency_id,
            'target_waktu': 0,
            'target_kualitas': 0,
            'target_jumlah_kuantitas_output': 0,
            'task_category': 'non_skp',
        })
        #Update Task Target Bulanan
        now = DateTime.today()
        first_task_id = None
        if task_generate.date_start:
            curr_date = DateTime.strptime(task_generate.date_start,
                                          '%Y-%m-%d')
        else:
            # Default start: January 1st of the current year.
            january = DateTime.Date(now.year, 1, 1)
            curr_date = DateTime.strptime(january.strftime('%Y-%m-%d'),
                                          '%Y-%m-%d')
        first_date = curr_date
        #print "THIS IS A DATE ",curr_date
        for i in range(0, lama_kegiatan):
            next_date = curr_date + DateTime.RelativeDateTime(months=i)
            target_period_month = next_date.strftime('%m')
            task.update({
                'target_period_month': target_period_month,
                'name': '%s %s' % (task_generate.name, target_period_month),
            })
            #Check Duplicate Do Not Create
            task_ids = task_pool.search(
                cr, uid,
                [('user_id', '=', user_id),
                 ('target_period_month', '=', target_period_month),
                 ('target_period_year', '=', target_period_year),
                 ('target_type_id', '=', task_generate.target_type_id),
                 ('work_state', '!=', 'draft')],
                context=None)
            if task_ids:
                # A non-draft task already exists for this month.
                continue
            else:
                #Delete Duplicate
                task_ids = task_pool.search(
                    cr, uid,
                    [('user_id', '=', user_id),
                     ('target_period_month', '=', target_period_month),
                     ('target_period_year', '=', target_period_year),
                     ('target_type_id', '=', task_generate.target_type_id),
                     ('work_state', '=', 'draft')],
                    context=None)
                task_pool.unlink(cr, uid, task_ids, context=None)
            date_start = 'xx'
            date_end = 'xx'
            stage_ids = stage_pool.search(cr, uid, [('sequence', '=', 0)],
                                          context=None)
            work_state = 'draft'
            if stage_ids:
                task.update({
                    'stage_id': stage_ids[0],
                    'work_state': work_state,
                    'state': 'draft',
                    'currency_id': currency_id
                })
            #insert task
            task_id = task_pool.create(cr, uid, task)
    return {
        'type': 'ir.actions.act_window',
        'view_type': 'form',
        'view_mode': 'form',
        'res_model': 'notification.generate.task',
        'target': 'new',
        'context': context,  #['notif_booking'],
    }
def runTest(self):
    """Round-trip None, then exercise both codecs with a recent,
    a past and a far-future date."""
    self.roundtrip(None)
    for year in (2007, 1900, 2200):
        self.both(DateTime.Date(year, 5, 8))
def solar_main_fcst2(stnid, sd, ed):
    """Estimate hourly solar radiation from forecast data for stnid.

    sd/ed are (year, month, day, hour) start/end tuples (mutated here
    to span full days).  Returns a dict mapping (y, m, d, h) tuples to
    radiation in langleys, or the missing value when unavailable.
    """
    # Initialize output lists
    SR_out = {}
    # Make all stations ASOS
    asos = 1
    # don't have this in metadata; assume we're in the East
    utc_lapse = 5
    ######### Needed since MORECS does full days ###############################
    if sd[3] > 0: sd[3] = 0
    if ed[3] < 23: ed[3] = 23
    # Create date list
    date_list = []
    ds = DateTime.Date(sd[0], sd[1], sd[2])
    d2 = DateTime.Date(ed[0], ed[1], ed[2])
    p_inc = 0
    while p_inc >= 0:
        dn = ds + DateTime.RelativeDate(days=p_inc)
        if dn <= d2:
            date_list.append([dn.year, dn.month, dn.day])
            p_inc = p_inc + 1
        else:
            break
    # get lat, lon and forecast data
    ll, hrly_fcst = get_srfcst_data(stnid, sd, ed)
    lat = ll[0]
    lon = ll[1]
    # don't have pressure forecasts
    pres_list = []
    for h in range(24):
        # NOTE(review): looks like standard pressure converted from
        # inches Hg -- confirm the units SOLAR_CALC_H expects.
        pres_list.append(30. * 3397. / 1000.)
    # set snow depth to zero
    sno = 0
    ### Loop through hourly data
    for d in date_list:
        trans = {}
        cl_alb = {}
        # define julian day
        julian_day = DateTime.Date(d[0], d[1], d[2]).day_of_year
        # fill in tsky (total sky cover per hour, miss when absent)
        tsky_list = []
        for h in range(24):
            if hrly_fcst['tsky'].has_key((d[0], d[1], d[2], h)):
                tsky_list.append(hrly_fcst['tsky'][(d[0], d[1], d[2], h)])
            else:
                tsky_list.append(miss)
        dwpt_list = []
        for i in range(24):
            # build a list of dewpoint values for the day
            if hrly_fcst['dwpt'].has_key((d[0], d[1], d[2], i)):
                dwpt_list.append(hrly_fcst['dwpt'][(d[0], d[1], d[2], i)])
            else:
                dwpt_list.append(miss)
            # determine if fog or haze is present
            haze, fog = 0, 0
            # add folllowing if/when wx forecasts become available
            # for w in wx[d[0]][julian_day][i]:
            #     if w in [81,82]: haze=1
            #     if w in [70,71,72,73,74,75,77,78]: fog=1
            # use equation to estimate the cloud transmission values
            if tsky_list[i] == miss:
                cl = estmiss(tsky_list, i, miss)
            else:
                cl = tsky_list[i]
            if cl == miss:
                # print 'tsky missing for:',d,i
                trans[i] = 1.0
                cl_alb[i] = miss
            else:
                # replace just this:
                # trans[i] = -2.8855*(cl**4) + 5.3559*(cl**3) - 3.4322*(cl**2) + 0.295*cl + 0.9999
                # with this:
                if cl < 0.75:
                    trans[i] = -2.8855 * (cl**4) + 5.3559 * (
                        cl**3) - 3.4322 * (cl**2) + 0.295 * cl + 0.9999
                else:
                    trans[
                        i] = 1.549 - 1.216 * cl  #added this condition 2/6/2013 -kle
                # refine #
                # and then this: (3/26/2012 -kle)
                # 0-20 -0.1
                # 20-30 -0.04
                # 30-60 -0.02
                # 60-100 -0.1
                if cl == 0.0:
                    pass
                elif cl <= 0.2 or cl > 0.6:
                    trans[i] = trans[i] - 0.1
                elif cl <= 0.3:
                    trans[i] = trans[i] - 0.04
                elif cl <= 0.6:
                    trans[i] = trans[i] - 0.02
                if trans[i] > 1: trans[i] = 0.9999
                # Need visibility for following adjustments
                # # Adjusts transmissivity value when visibility is less than 1 mile
                # visib = vis[d[0]][julian_day][i] #changed [h] ti [i] - kle 7/20/2010
                # if visib != miss and visib < 1.0:
                #     trans[i] = trans[i]*0.72
                # # Adjusts transmissivity if haze is reported with other than clear skies
                # elif haze==1:
                #     trans[i]=trans[i]*0.92
                # # if trans[i]==0.9999: # clear skies
                # # Adjusts clear sky transmissivity if visibility is less than
                # # 10 miles and no fog or haze is reported
                # if visib != miss and visib < 10.0 and (fog==0 and haze==0):
                #     trans[i] = 0.96
                # # Adjusts clear sky tranmissivity if fog or haze is reported
                # elif visib < 1.0 and (fog==1 or haze==1):
                #     trans[i] = 0.92
                # Cloud albedo
                if cl == 0:
                    cl_alb[i] = 0.0
                else:
                    cl_alb[i] = 0.5
        # call routine to calculate global solar radiation values
        sun_top,sun_clear,sun_cloud,zenith = SOLAR_CALC_H(d[0], d[1],\
            julian_day, pres_list, dwpt_list, trans, cl_alb,\
            miss,miss,miss,sno,miss,miss,lat,lon,miss,utc_lapse,miss,asos)
        for h in range(24):
            sr_date = (d[0], d[1], d[2], h)
            if sun_cloud[h] == miss or sun_cloud[h] == -99:
                SR_out[sr_date] = miss
            else:
                SR_out[sr_date] = sun_cloud[h] * 23.88  #in langleys
    return SR_out
def MonthEndDate(date):
    """Return the last day of *date*'s month as an mx.DateTime date."""
    assert isinstance(date, DateTime.DateTimeType),\
        "L'argomento deve essere di tipo mx.DateTime"
    last_day = date.GetDaysInMonth()
    return DateTime.Date(date.year, date.month, last_day)