def getSampleTimesInFormat(grid, timeformat, timezone="UTC", outformat="string"):
    """
    Return the sample times of a grid, formatted, as a list.

    Args:
        grid: a VisAD grid; its time set is read via GridUtil.getTimeSet().
        timeformat: a java.text.SimpleDateFormat-style pattern string.
        timezone: a Java time zone ID (default "UTC").
        outformat: "string"/"str", "float"/"flt" or "int"/"integer" --
            the type each formatted time is converted to.

    Returns:
        list of formatted times, one per time step in the grid's domain.

    Raises:
        VisADException: if outformat is not one of the recognized values.
    """
    from ucar.visad.data import CalendarDateTime
    from visad import VisADException
    from java.util import TimeZone
    dateTimes = CalendarDateTime.timeSetToArray(GridUtil.getTimeSet(grid))
    TIMEZONE = TimeZone.getTimeZone(timezone)
    # Normalize and validate the output format ONCE, before the loop --
    # the original recomputed str(outformat).lower() on every iteration.
    fmt = str(outformat).lower()
    if fmt in ("string", "str"):
        convert = str
    elif fmt in ("float", "flt"):
        convert = float
    elif fmt in ("int", "integer"):
        convert = Integer  # java.lang.Integer, implicitly available in Jython
    else:
        raise VisADException("Unrecognized output format")
    temp = []
    for i in range(grid.getDomainSet().getLength()):
        temp.append(convert(dateTimes[i].formattedString(timeformat, TIMEZONE)))
    return temp
def set_timezone(self, tz):
    """Set the timezone used for the start and ending dates.

    Raises:
        Exception: if ``tz`` is not a recognized Java time zone ID.
    """
    valid_ids = TimeZone.getAvailableIDs()
    if tz in valid_ids:
        self.timezone = tz
    else:
        raise Exception('Incompatible timezone.\n Application quitting.')
def to_java_calendar(value):
    """
    Converts any of the supported date types to ``java.util.Calendar``. If
    ``value`` does not have timezone information, the system default will be
    used.

    Examples:
        .. code-block::

            calendar_time = to_java_calendar(items["date_item"])

    Args:
        value: the value to convert

    Returns:
        java.util.Calendar: the converted value

    Raises:
        TypeError: if the type of ``value`` is not supported by this package
    """
    if isinstance(value, Calendar):
        return value
    zdt = to_java_zoneddatetime(value)
    calendar = Calendar.getInstance(TimeZone.getTimeZone(zdt.getZone().getId()))
    # Copy each date/time component; MONTH is zero-based in java.util.Calendar
    # and MILLISECOND is derived from the nanosecond field.
    components = (
        (Calendar.YEAR, zdt.getYear()),
        (Calendar.MONTH, zdt.getMonthValue() - 1),
        (Calendar.DAY_OF_MONTH, zdt.getDayOfMonth()),
        (Calendar.HOUR_OF_DAY, zdt.getHour()),
        (Calendar.MINUTE, zdt.getMinute()),
        (Calendar.SECOND, zdt.getSecond()),
        (Calendar.MILLISECOND, int(zdt.getNano() / 1000000)),
    )
    for field, amount in components:
        calendar.set(field, amount)
    return calendar
def to_joda_datetime(value):
    """
    Converts any of the supported date types to ``org.joda.time.DateTime``.
    If ``value`` does not have timezone information, the system default will
    be used.

    Examples:
        .. code-block::

            joda_time = to_joda_datetime(items["date_item"])

    Args:
        value: the value to convert

    Returns:
        org.joda.time.DateTime: the converted value

    Raises:
        TypeError: if the type of ``value`` is not suported by this package
    """
    if isinstance(value, DateTime):
        return value
    zdt = to_java_zoneddatetime(value)
    # Build from epoch milliseconds plus the equivalent Joda time zone.
    epoch_millis = zdt.toInstant().toEpochMilli()
    joda_zone = DateTimeZone.forTimeZone(TimeZone.getTimeZone(zdt.getZone()))
    return DateTime(epoch_millis, joda_zone)
def to_java_calendar(value):
    """Converts any known DateTime type to a ``java.util.calendar`` type.

    Args:
        value: any known DateTime value.

    Returns:
        | A ``java.util.calendar`` representing ``value``.
        | If ``value`` does not have timezone information, the system default
          will be used.

    Raises:
        TypeError: type of ``value`` is not recognized by this package.
    """
    if isinstance(value, Calendar):
        return value
    zoned = to_java_zoneddatetime(value)
    zone = TimeZone.getTimeZone(zoned.getZone().getId())
    result = Calendar.getInstance(zone)
    result.set(Calendar.YEAR, zoned.getYear())
    # java.util.Calendar months are zero-based, java.time months are one-based
    result.set(Calendar.MONTH, zoned.getMonthValue() - 1)
    result.set(Calendar.DAY_OF_MONTH, zoned.getDayOfMonth())
    result.set(Calendar.HOUR_OF_DAY, zoned.getHour())
    result.set(Calendar.MINUTE, zoned.getMinute())
    result.set(Calendar.SECOND, zoned.getSecond())
    # truncate nanoseconds to whole milliseconds
    result.set(Calendar.MILLISECOND, int(zoned.getNano() / 1000000))
    return result
def __init__(self, desc, tz=None):
    """Build a job from a JSON description.

    Args:
        desc: JSON object with "id", "scheds" (array of schedule objects)
            and "cmd" fields.
        tz: java.util.TimeZone to use; defaults to the JVM default zone.
    """
    # Evaluate the default lazily: a ``tz=TimeZone.getDefault()`` default
    # argument is captured once at definition time and would go stale if
    # the JVM default time zone changed afterwards.
    if tz is None:
        tz = TimeZone.getDefault()
    self.id = desc.getString("id")
    self.tz = tz
    self.last = None
    self.next = None
    self.scheds = []
    a = desc.getJSONArray("scheds")
    for i in range(a.length()):
        self.scheds.append(Sched(a.getJSONObject(i)))
    self.cmd = desc.getString("cmd")
def next(self, last, tz=None):
    """Return the next scheduled fire time after ``last``, or None.

    Args:
        last: java.util.Date of the previous run.
        tz: java.util.TimeZone for the calendar math; defaults to the JVM
            default zone (evaluated lazily -- a ``TimeZone.getDefault()``
            default argument would be captured once at definition time).

    Returns:
        java.util.Date of the next run, or None if none falls within the
        next year.
    """
    if tz is None:
        tz = TimeZone.getDefault()
    cal = Calendar.getInstance(tz, Locale.US)
    # Search no further than one year ahead.
    limit = cal.clone()
    limit.add(Calendar.YEAR, 1)
    cal.setTime(last)
    cal.add(Calendar.MINUTE, self.wait)
    n = self.findNext(cal, limit)
    if n is None:  # was ``n == None``; identity test is the correct idiom
        return None
    return n.getTime()
def __init__(self): dataDir = Settings.dataDir + 'ProgrammingEmail/CustomizeEmailHeaders/' # Create a instance of MailMessage class message = MailMessage() # Set subject of the message message.setSubject("New message created by Aspose.Email for Java") # Set Html body message.setHtmlBody("<b>This line is in bold.</b> <br/> <br/>" + "<font color=blue>This line is in blue color</font>") # Set sender information message.setFrom(MailAddress("*****@*****.**", "Sender Name", False)) # Add TO recipients message.getTo().add(MailAddress("*****@*****.**", "Recipient 1", False)) # Message subject message.setSubject("Customizing Email Headers") # Specify Date timeZone=TimeZone() calendar=Calendar() calendar = calendar.getInstance(timeZone.getTimeZone("GMT")) date = calendar.getTime() message.setDate(date) # Specify XMailer message.setXMailer("Aspose.Email") # Specify Secret Header message.getHeaders().add("secret-header", "mystery") # Save message to disc messageFormat=MessageFormat() message.save(dataDir + "MsgHeaders.msg", messageFormat.getMsg()) # Display Status print "Customized message headers Successfully."
def test_date(self):
    """Check that datetime.date converts to java.sql.Date via __tojava__."""
    self.assertTrue(hasattr(date, "__tojava__"))
    x = date(2007, 1, 3)
    y = x.__tojava__(Date)
    self.assertIsInstance(y, Date)
    # Note that java.sql.Date operates regarding to default timezone, so adjust offset
    off = TimeZone.getDefault().getRawOffset()
    # It's sufficient for the date to fit; we modulo away the time, so this test
    # won't run into TimeZone issues.
    # Compare whole days since the epoch on both sides.
    self.assertEqual((y.getTime()+off)//(1000*60*60*24),
                     (x - date(1970, 1, 1)).total_seconds()//(60*60*24))
def getSqlDateInGMT(date):
    """
    @type: long->str
    @param: date - timestamp
    """
    formatter = SimpleDateFormat()
    # Default Java Date.toString() format
    formatter.applyPattern('EEE MMM dd HH:mm:ss zzz yyyy')
    formatter.setTimeZone(TimeZone.getTimeZone("GMT"))
    return formatter.format(Date(date))
def set_tzdss(self, tz):
    '''
    Specifies time zone to use for data stored to a HEC-DSS file. If not
    specified, the data will be stored to the HEC-DSS file in the time zone
    specified in the USGS text. Valid values for dss_time_zone are valid
    shef_time_zone values plus any valid Java time zone ID
    (https://en.wikipedia.org/wiki/List_of_tz_database_time_zones).
    '''
    if tz not in TimeZone.getAvailableIDs():
        # Keep the deliberate exit-instead-of-raise behavior, but emit a
        # diagnostic first: the original died silently with status 1.
        sys.stderr.write('Incompatible timezone: %s\nApplication quitting.\n' % tz)
        sys.exit(1)
    self.tzdss = tz
def unmixIntervals(grid, cumTime=6, accum=True):
    """
    Unmix fields which are on mixed intervals.

    Assumes that every other timestep is twice the value of the previous.

    Args:
        grid: input grid (left unchanged; a clone is modified and returned).
        cumTime: 6 for 3/6 hour mixed intervals; 12 for 6/12 mixed intervals.
        accum: True for accumulated fields (current minus previous sample),
            False for averaged fields (2*current minus previous).

    Returns:
        a clone of ``grid`` with the mixed timesteps unmixed.
    """
    from ucar.visad.data import CalendarDateTime as cdt
    from java.util import TimeZone as tz
    ugrid = grid.clone()
    timeSet = GridUtil.getTimeSet(grid)
    timeArray = cdt.timeSetToArray(timeSet)
    # Hoisted loop invariants: the GMT zone object and each timestep's
    # hour-of-day (the original formatted every hour twice per iteration).
    gmt = tz.getTimeZone("GMT")
    hours = [int(t.formattedString("HH", gmt)) for t in timeArray]
    for time in range(1, len(timeArray)):
        prevhour = hours[time - 1]
        nowhour = hours[time]
        # A timestep ending on a cumTime boundary whose predecessor does not
        # is a "mixed" step that needs unmixing.
        if nowhour % cumTime == 0 and prevhour % cumTime != 0:
            if accum:
                tsgrid = sub(grid.getSample(time), grid.getSample(time - 1))
            else:
                tsgrid = sub(2 * grid.getSample(time), grid.getSample(time - 1))
            ugrid.setSample(time, tsgrid)
    return ugrid
def test_datetime_with_time_zone(self):
    """Insert a zoned datetime and verify the stored UTC value and the zone
    applied on read-back (the JVM default, GMT+4)."""
    oldDefaultTimeZone = TimeZone.getDefault()
    try:
        # Force a known JVM default zone so read-back behavior is deterministic.
        TimeZone.setDefault(TimeZone.getTimeZone("GMT+4"))
        c = tWithDateTimeZCursor(self.context)
        zoneId = ZoneId.of("GMT+2")
        #This is the datetime we will insert
        localDateTime = LocalDateTime.of(2017, Month.DECEMBER, 31, 22, 0, 0)
        zonedDateTime = ZonedDateTime.of(localDateTime, zoneId)
        #This is the datetime we expect the database to receive
        # (2017-12-31 22:00 GMT+2 == 2018-01-01 00:00 UTC)
        utcDateTime = LocalDateTime.of(2018, Month.JANUARY, 1, 0, 0, 0)
        c.eventDate = zonedDateTime
        c.insert()
        c.clear()
        c.first()
        # On select, the zone is expected to be the JVM default, not GMT+2.
        zoneIdAfterSelect = ZoneId.of("GMT+4")
        self.assertEquals(utcDateTime, c.eventDate.toLocalDateTime())
        self.assertEquals(zoneIdAfterSelect, c.eventDate.getZone())
    finally:
        # Always restore the process-wide default zone.
        TimeZone.setDefault(oldDefaultTimeZone)
def isDayTime(self, forecastDate, sunrise, sunset):
    """Return True when forecastDate's time-of-day falls between sunrise and
    sunset. Only the time is compared: the rise/set calendars are re-dated
    to the forecast's own day first."""
    utc = TimeZone.getTimeZone("UTC")
    cal = GregorianCalendar(utc)
    cal.setTime(forecastDate)

    riseCal = GregorianCalendar(utc)
    riseCal.setTime(sunrise)
    riseCal.set(cal.get(cal.YEAR), cal.get(cal.MONTH), cal.get(cal.DATE))

    setCal = GregorianCalendar(utc)
    setCal.setTime(sunset)
    setCal.set(cal.get(cal.YEAR), cal.get(cal.MONTH), cal.get(cal.DATE))

    # Night if strictly before sunrise or strictly after sunset.
    before_sunrise = cal.compareTo(riseCal) == -1
    after_sunset = cal.compareTo(setCal) == 1
    return not (before_sunrise or after_sunset)
def to_java_calendar(value):
    '''Returns java.util.calendar type (with system timezone if none specified).
    Accepts any date type used by this module'''
    if isinstance(value, Calendar):
        return value
    value_zoneddatetime = to_java_zoneddatetime(value)
    # Fixed: java.time.ZoneId exposes getId(), not getID().
    new_calendar = Calendar.getInstance(
        TimeZone.getTimeZone(value_zoneddatetime.getZone().getId()))
    # Fixed: the accessors below must be *called* -- the original passed the
    # bound methods themselves (e.g. ``getYear`` without parentheses) into
    # Calendar.set(), which expects int values.
    new_calendar.set(Calendar.YEAR, value_zoneddatetime.getYear())
    new_calendar.set(Calendar.MONTH, value_zoneddatetime.getMonthValue() - 1)
    new_calendar.set(Calendar.DAY_OF_MONTH, value_zoneddatetime.getDayOfMonth())
    new_calendar.set(Calendar.HOUR_OF_DAY, value_zoneddatetime.getHour())
    new_calendar.set(Calendar.MINUTE, value_zoneddatetime.getMinute())
    new_calendar.set(Calendar.SECOND, value_zoneddatetime.getSecond())
    new_calendar.set(Calendar.MILLISECOND, int(value_zoneddatetime.getNano() / 1000000))
    return new_calendar
def __init__(self, name, value):
    """Store ``name`` and ``value``, parsing ``value`` from an ISO-like
    date/datetime string into a java.util.Date when it is non-empty."""
    self._name = name
    if not value:
        # Empty/None values are kept as-is, unparsed.
        self._value = value
    else:
        if len(value) == 10:
            # date format (yyyy-MM-dd)
            # NOTE(review): this branch parses in the JVM default time zone,
            # unlike the UTC datetime branch below -- confirm intended.
            parsed_value = SimpleDateFormat('yyyy-MM-dd').parse(value)
        else:
            # assume datetime format
            # remove microseconds if necessary
            # (len 27 presumably means 'yyyy-MM-ddTHH:mm:ss.ffffffZ' -- verify)
            if len(value) == 27:
                value = '%sZ' % value[:19]
            sdf = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")
            sdf.setTimeZone(TimeZone.getTimeZone("UTC"))
            parsed_value = sdf.parse(value)
        self._value = parsed_value
def _parseDateString(installDateString):
    """Parse an install-date string into a java.util.Date.

    The pattern is chosen by matching the string length against each known
    pattern's length. Returns None for empty or unparseable input.
    """
    installationDateAsDate = None
    if installDateString:
        # ``fmt`` rather than ``format``: don't shadow the builtin.
        for fmt in ['yyyyMMdd', 'yyyyMMddHHmmss.SSSSSS-000',
                    'EEE dd MMM yyyy HH:mm:ss aa zzz']:
            if len(installDateString) == len(fmt):
                try:
                    # Import ParseException explicitly -- the original caught
                    # ``java.text.ParseException`` relying on a module-level
                    # ``java`` name that this function never imported.
                    from java.text import SimpleDateFormat, ParseException
                    from java.util import TimeZone
                    dateFormatter = SimpleDateFormat(fmt)
                    dateFormatter.setTimeZone(TimeZone.getTimeZone("GMT"))
                    installationDateAsDate = dateFormatter.parse(installDateString)
                except ParseException:
                    # could not parse the date in this format; leave as None
                    pass
    return installationDateAsDate
def getSampleTimesInFormat(grid, timeformat, timezone="UTC", outformat="string"):
    """
    A Helper function to return times of a grid in specified format as a list.

    Args:
        grid: a VisAD grid; its time set is read via GridUtil.getTimeSet().
        timeformat: a java.text.SimpleDateFormat-style pattern string.
        timezone: a Java time zone ID (default "UTC").
        outformat: "string"/"str", "float"/"flt" or "int"/"integer".

    Returns:
        list of formatted times, one per time step in the grid's domain.

    Raises:
        VisADException: if outformat is not recognized.
    """
    from ucar.visad.data import CalendarDateTime
    from visad import VisADException
    from java.util import TimeZone
    dateTimes = CalendarDateTime.timeSetToArray(GridUtil.getTimeSet(grid))
    TIMEZONE = TimeZone.getTimeZone(timezone)
    # Resolve the converter once; the original lowercased and compared
    # outformat on every loop iteration.
    fmt = str(outformat).lower()
    if fmt in ("string", "str"):
        convert = str
    elif fmt in ("float", "flt"):
        convert = float
    elif fmt in ("int", "integer"):
        convert = Integer  # java.lang.Integer, implicit in Jython
    else:
        raise VisADException("Unrecognized output format")
    temp = []
    for i in range(grid.getDomainSet().getLength()):
        temp.append(convert(dateTimes[i].formattedString(timeformat, TIMEZONE)))
    return temp
def _parseDateString(installDateString):
    """Parse an install-date string into a java.util.Date (GMT).

    A pattern is selected by matching the input's length against the
    pattern's length; returns None when nothing matches or parsing fails.
    """
    installationDateAsDate = None
    if installDateString:
        # ``fmt`` instead of ``format`` to avoid shadowing the builtin.
        for fmt in [
                'yyyyMMdd', 'yyyyMMddHHmmss.SSSSSS-000',
                'EEE dd MMM yyyy HH:mm:ss aa zzz'
        ]:
            if len(installDateString) == len(fmt):
                try:
                    # ParseException imported directly; the original caught
                    # ``java.text.ParseException`` via a ``java`` name this
                    # function never imported.
                    from java.text import SimpleDateFormat, ParseException
                    from java.util import TimeZone
                    dateFormatter = SimpleDateFormat(fmt)
                    dateFormatter.setTimeZone(TimeZone.getTimeZone("GMT"))
                    installationDateAsDate = dateFormatter.parse(
                        installDateString)
                except ParseException:
                    # could not parse the date in this format; leave as None
                    pass
    return installationDateAsDate
def to_joda_datetime(value):
    # type: (t.Any) -> JodaDateTime
    """
    Converts any of the supported date types to ``org.joda.time.DateTime``.
    If ``value`` does not have timezone information, the system default will
    be used.

    Examples:
        .. code-block::

            joda_time = to_joda_datetime(items["date_item"])

    Args:
        value: the value to convert

    Returns:
        org.joda.time.DateTime: the converted value
        None: if ``org.joda.time`` is not available

    Raises:
        TypeError: if the type of ``value`` is not suported by this package
    """
    if JodaDateTime is None:
        # Joda is absent: log who called us, then return None.
        frame = inspect.stack()[1]
        # NOTE(review): frame.filename/.lineno/.function require the named
        # FrameInfo tuples of Python 3.5+; under Python 2/Jython
        # inspect.stack() returns plain tuples -- confirm target runtime.
        getLogger("date").warn(
            "'{func}' ({file}:{line}) called 'to_joda_datetime' but Joda is not available"
            .format(file=frame.filename, line=frame.lineno, func=frame.function))
        del frame  # avoid keeping the frame (and its locals) alive
        return None
    if isinstance(value, JodaDateTime):
        return value
    value_zoneddatetime = to_java_zoneddatetime(value)
    return JodaDateTime(
        value_zoneddatetime.toInstant().toEpochMilli(),
        JodaDateTimeZone.forTimeZone(
            TimeZone.getTimeZone(value_zoneddatetime.getZone())))
def makeTimeSeriesContainer(station, interval, tz, records, decodeInfo):
    """Build a HEC TimeSeriesContainer from (millis, value) records.

    Args:
        station: station identifier (currently unused in this body).
        interval: time series interval, stored on the container.
        tz: time zone argument (currently unused; the zone comes from the
            module-level ``timezones``/``dssTimezone`` globals).
        records: sequence of (epoch-milliseconds, value-string) pairs.
        decodeInfo: (dd, info-dict) pair; info-dict supplies "DSS_FACTOR".

    Returns:
        a populated TimeSeriesContainer.
    """
    global timezones
    sdf = SimpleDateFormat("ddMMMyyyy, HH:mm")
    # Pick the output zone: the configured DSS zone (resolved lazily into the
    # timezones cache) or the USGS zone otherwise.
    if dssTimezone:
        if not timezones["DSS"]:
            timezones["DSS"] = TimeZone.getTimeZone(
                tzInfo[dssTimezone]["JAVA"])
        sdf.setTimeZone(timezones["DSS"])
    else:
        sdf.setTimeZone(timezones["USGS"])
    dd, decodeInfo = decodeInfo
    cal = Calendar.getInstance()
    t = HecTime()
    tsc = TimeSeriesContainer()
    tsc.interval = interval
    times = []
    values = []
    tsc.quality = None
    factor = decodeInfo["DSS_FACTOR"]
    for j in range(len(records)):
        millis, value = records[j]
        # Convert epoch millis -> zoned string -> HecTime integer value.
        cal.setTimeInMillis(millis)
        t.set(sdf.format(cal.getTime()))
        times.append(t.value())
        try:
            values.append(float(value) * factor)
        except:
            # Non-numeric value: store the HEC "missing" sentinel.
            values.append(Constants.UNDEFINED)
    tsc.times = times
    tsc.values = values
    # NOTE(review): times[0]/times[-1] raise IndexError for empty records.
    tsc.startTime = times[0]
    tsc.endTime = times[-1]
    tsc.numberValues = len(values)
    tsc.timeZoneID = sdf.getTimeZone().getID()
    tsc.timeZoneRawOffset = sdf.getTimeZone().getRawOffset()
    return tsc
# by Alexei Znamensky - russoz AT cpan.org # import sys, java from java.util import Date,TimeZone from java.text import SimpleDateFormat def log(msg): print >> sys.stderr, "=== ["+df.format(Date())+"]", msg if len(sys.argv) != 2: print >> sys.stderr, 'update-ear.py: <enterprise-app> <ear-file>' sys.exit(1) tzspec="$spec->{tz}" tz = TimeZone.getTimeZone(tzspec) df = SimpleDateFormat("yyyy.MM.dd HH:mm:ss.SSS z") df.setTimeZone(tz) appname=sys.argv[0] appear =sys.argv[1] options = [ "-update", "-appname", appname, "-update.ignore.new", "-verbose" ] try: log("Installing Application from "+appear) AdminApp.install( appear, options ) log("Installation completed") log("Saving configuration") if $spec->{really_do} == 1:
# This list has been explicitly chosen to: # * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE) # * Prefer ECDHE over DHE for better performance # * Prefer any AES-GCM over any AES-CBC for better performance and security # * Then Use HIGH cipher suites as a fallback # * Then Use 3DES as fallback which is secure but slow # * Disable NULL authentication, NULL encryption, MD5 MACs, DSS, and RC4 for # security reasons _RESTRICTED_SERVER_CIPHERS = ( 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:' 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:' '!eNULL:!MD5:!DSS:!RC4' ) _rfc2822_date_format = SimpleDateFormat("MMM dd HH:mm:ss yyyy z", Locale.US) _rfc2822_date_format.setTimeZone(TimeZone.getTimeZone("GMT")) _ldap_rdn_display_names = { # list from RFC 2253 "CN": "commonName", "E": "emailAddress", "L": "localityName", "ST": "stateOrProvinceName", "O": "organizationName", "OU": "organizationalUnitName", "C": "countryName", "STREET": "streetAddress", "DC": "domainComponent", "UID": "userid" }
def parse(self, stream):
    """Parse a yr.no XML forecast stream into a list of Forecast objects.

    Walks the document with an XmlPullParser, tracking which section
    ("location", "credit", "tabular") is open, and builds one Forecast per
    <time> element inside the "tabular" section.

    Args:
        stream: input stream handed to XmlPullParser.setInput().

    Returns:
        list of Forecast objects.
    """
    factory = XmlPullParserFactory.newInstance()
    parser = factory.newPullParser()
    parser.setInput(stream, None)
    eventType = parser.getEventType()
    section = ""
    sections = {"location", "credit", "tabular"}
    dateFormat = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss")
    dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"))
    sunrise = Date()
    sunset = Date()
    place = ""
    credit = ""
    forecasts = []
    forecast = Forecast()
    while eventType != XmlPullParser.END_DOCUMENT:
        eventType = parser.next()
        if eventType == XmlPullParser.START_TAG:
            name = parser.getName()
            if name in sections:
                section = name
            elif section != "":
                if name == "name":
                    # Advance to the text node inside <name>.
                    while eventType != XmlPullParser.TEXT:
                        eventType = parser.next()
                    place = parser.getText()
                elif name == "link":
                    credit = parser.getAttributeValue(None, "text")
                elif name == "time":
                    # Start of a forecast period.
                    forecast = Forecast()
                    forecast.place = place
                    forecast.credit = credit
                    from_ = parser.getAttributeValue(None, "from")
                    to_ = parser.getAttributeValue(None, "to")
                    forecast.from_ = dateFormat.parse(from_, ParsePosition(0))
                    forecast.to_ = dateFormat.parse(to_, ParsePosition(0))
                elif name == "symbol":
                    forecast.description = parser.getAttributeValue(
                        None, "name")
                    symbol = parser.getAttributeValue(None, "numberEx")
                    # Midpoint of the period, used for day/night selection.
                    forecast.midDate = Date(forecast.from_.getTime()/2 + \
                                            forecast.to_.getTime()/2)
                    try:
                        forecast.symbol = self.symbols[symbol]
                        continue
                    except KeyError:
                        pass
                    # Fall back to a day/night-qualified symbol name.
                    if self.isDayTime(forecast.midDate, sunrise, sunset):
                        symbol += "d"
                    else:
                        symbol += "n"
                    try:
                        forecast.symbol = self.symbols[symbol]
                    except KeyError:
                        forecast.symbol = -1
                elif name == "windSpeed":
                    forecast.windSpeed = parser.getAttributeValue(
                        None, "name")
                elif name == "temperature":
                    forecast.temperature = parser.getAttributeValue(
                        None, "value")
                    forecast.temperatureUnit = parser.getAttributeValue(
                        None, "unit")
                elif name == "sun":
                    rise = parser.getAttributeValue(None, "rise")
                    sset = parser.getAttributeValue(None, "set")
                    sunrise = dateFormat.parse(rise, ParsePosition(0))
                    sunset = dateFormat.parse(sset, ParsePosition(0))
        elif eventType == XmlPullParser.END_TAG:
            name = parser.getName()
            if name == section and name in sections:
                section = ""
            elif section == "tabular" and name == "time":
                # Fixed: ``forecasts`` is a Python list, which has no .add()
                # method -- the original ``forecasts.add(forecast)`` raised
                # AttributeError on the first completed forecast.
                forecasts.append(forecast)
    return forecasts
def main():
    """Retrieve CWMS time series as JSON from cwms-data.usace.army.mil and
    store them to a HEC-DSS file.

    Reads pathname mappings from a file (or stdin), fetches the data per
    office in batched URLs, and reports any series for which no data came
    back. Command line: -i/--in, -d/--dss-file, -v/--dss-version,
    -b/--begin, -e/--end, -z/--tz.
    """
    #-------#
    # setup #
    #-------#
    reTime = re.compile(
        "^((?:[0-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9]):([0-5][0-9]):([0-5][0-9])$",
        re.I)
    reDuration = re.compile(
        "^(-?)P(?=\d|T\d)(?:(\d+)Y)?(?:(\d+)M)?(?:(\d+)([DW]))?(?:T(?:(\d+)H)?(?:(\d+)M)?(?:(\d+(?:\.\d+)?)S)?)?$",
        re.I)
    reInputLine = re.compile(
        "^((\w+)\s*/\s*((?:[^.]+[.]){5}[^.]+?))\s*=\s*((?:/(.*)){6}/)$")
    inFileName = None
    dssFileName = "cwms-data.dss"
    dssVersion = 7
    beginTime = None
    endTime = None
    timeZoneStr = None
    timeZone = None
    timeseries = {}
    urlTemplate = "http://cwms-data.usace.army.mil/cwms-data/timeseries?office=%s&name=%s&begin=@&end=@&timezone=@&format=json"
    #--------------------------#
    # process the command line #
    #--------------------------#
    try:
        opts, args = getopt.getopt(
            sys.argv[1:], "i:d:v:b:e:z:",
            ["in=", "dss-file=", "dss-version=", "begin=", "end=", "tz="])
        if args:
            raise Exception("Unexpected argument(s) encountered: %s" % args)
    except Exception as exc:
        usage(str(exc))
    for opt, arg in opts:
        if opt in ("-i", "--in"):
            inFileName = arg
        elif opt in ("-d", "--dss-file"):
            dssFileName = arg
        elif opt in ("-v", "--dss-version"):
            dssVersion = arg
        elif opt in ("-b", "--begin"):
            beginTime = arg
        elif opt in ("-e", "--end"):
            endTime = arg
        elif opt in ("-z", "--tz"):
            timeZoneStr = arg
    #--------------------#
    # validate arguments #
    #--------------------#
    try:
        dssVersion = int(dssVersion)
        if dssVersion not in (6, 7):
            raise Exception
    except Exception:  # narrowed from a bare except
        usage("Invalid DSS version: %s" % dssVersion)
    if beginTime:
        if not reTime.match(beginTime) and not reDuration.match(beginTime):
            usage("Invalid time for begin_time: %s" % beginTime)
        urlTemplate = urlTemplate.replace("&begin=@", "&begin=%s" % beginTime)
    else:
        urlTemplate = urlTemplate.replace("&begin=@", "")
    if endTime:
        if not reTime.match(endTime) and not reDuration.match(endTime):
            usage("Invalid time for end_time: %s" % endTime)
        urlTemplate = urlTemplate.replace("&end=@", "&end=%s" % endTime)
    else:
        urlTemplate = urlTemplate.replace("&end=@", "")
    if timeZoneStr:
        try:
            # NOTE: TimeZone.getTimeZone() does not raise on unknown IDs (it
            # falls back to GMT), so this guard rarely fires.
            timeZone = TimeZone.getTimeZone(timeZoneStr).getID()
        except Exception:
            usage("Invalid time_zone: %s" % timeZoneStr)
    else:
        timeZone = TimeZone.getTimeZone("UTC").getID()
    urlTemplate = urlTemplate.replace("&timezone=@", "&timezone=%s" % timeZone)
    Heclib.zset("MLVL", "", 0)
    Heclib.zset("DSSV", "", dssVersion)
    output("%s %s (%s) starting up" % (progName, version, verDate))
    output("Will store data to %s" % dssFileName)
    #----------------------#
    # parse the input file #
    #----------------------#
    try:
        if inFileName:
            output("Reading %s" % inFileName)
            with open(inFileName) as f:
                lines = f.read().strip().split("\n")
        else:
            output("Reading stdin")
            lines = sys.stdin.read().strip().split("\n")
    except Exception as exc:
        usage(str(exc))
    # Keep non-blank, non-comment lines of the form "office/pathname = /.../"
    for line in [
            line for line in [line.strip() for line in lines]
            if line and not line.startswith("#")
    ]:
        m = reInputLine.match(line)
        if not m:
            output("Invalid input line: %s" % line)
            continue
        timeseries.setdefault(m.group(2), {})[m.group(3)] = m.group(4)
    #-------------------#
    # retrieve the data #
    #-------------------#
    jsonData = []
    names = []
    for office in sorted(timeseries.keys()):
        nameStr = ""
        count = 0
        for tsid in sorted(timeseries[office].keys()):
            # Batch tsids into |-separated groups, flushing before the URL
            # grows past ~1500 characters.
            if nameStr and len(nameStr) + len(tsid) > 1500:
                output("Retrieving %d time series for office %s" %
                       (count, office))
                url = urlTemplate % (office, nameStr)
                session = urllib2.urlopen(url.replace(" ", "%20"))
                names.append((office, nameStr))
                jsonData.append(session.read())
                session.close()
                nameStr = ""
                count = 0
            else:
                if nameStr:
                    nameStr += "|"
                nameStr += tsid
                count += 1
        if nameStr:
            # Flush the final partial batch for this office.
            output("Retrieving %d time series for office %s" %
                   (count, office))
            url = urlTemplate % (office, nameStr)
            session = urllib2.urlopen(url.replace(" ", "%20"))
            names.append((office, nameStr))
            jsonData.append(session.read())
            session.close()
            nameStr = ""
            count = 0
    #--------------#
    # write to DSS #
    #--------------#
    processed = set()
    if jsonData:
        dssFile = HecDss.open(dssFileName)
        for i in range(len(jsonData)):
            office, tsids = names[i]
            try:
                obj = json.loads(jsonData[i])
            except ValueError:
                tsids = tsids.split("|")
                output(
                    "Error retrieving data for these time series for %s:\n\t%s\nData = %s"
                    % (office, "\n\t".join(tsids), jsonData[i]))
                # Fixed: without this continue, ``obj`` was unbound (or stale
                # from a previous iteration) when the code below ran.
                continue
            timeSeriesData = obj["time-series"]["time-series"]
            for j in range(len(timeSeriesData)):
                tsid = timeSeriesData[j]["name"]
                pathname = timeseries[office][tsid]
                output("Processing %s/%s" % (office, tsid), newline=False)
                tsc = makeTimeSeriesContainer(timeSeriesData[j], timeZone,
                                              pathname)
                if tsc:
                    processed.add((office, tsid))
                    output(" .. %d values stored to %s" %
                           (tsc.numberValues, tsc.fullName),
                           continuation=True)
                    dssFile.put(tsc)
        dssFile.close()
    # Report every requested series for which nothing was stored.
    no_data = {}
    for office in timeseries.keys():
        for tsid in timeseries[office].keys():
            if (office, tsid) not in processed:
                no_data.setdefault(office, []).append(tsid)
    if no_data:
        output("No data were retrieved for the following:")
        for office in sorted(no_data.keys()):
            for tsid in sorted(no_data[office]):
                output(" %s/%s = %s" %
                       (office, tsid, timeseries[office][tsid]),
                       continuation=True)
    output("%s %s (%s) Done" % (progName, version, verDate))
def listADDEImages(localEntry=None, server=None, dataset=None,
                   descriptor=None, accounting=DEFAULT_ACCOUNTING,
                   location=None, coordinateSystem=CoordinateSystems.LATLON,
                   place=None, mag=None, position=None, unit=None, day=None,
                   time=None, debug=False, band=None, size=None,
                   showUrls=True):
    """Creates a list of ADDE images.

    Args:
        localEntry: Local ADDE dataset.
        server: ADDE server.
        dataset: ADDE dataset group name.
        descriptor: ADDE dataset descriptor.
        day: Day range. ('begin date', 'end date')
        time: ('begin time', 'end time')
        position: Position number. Values may be integers or the string "ALL".
            (default=0)
        band: McIDAS band number; only images that have matching band number
            will be returned.
        accounting: ('user', 'project number') User and project number
            required by servers using McIDAS accounting. default = ('idv','0')

    Returns:
        ADDE image matching the given criteria, if any.
    """
    # Either take server/dataset/descriptor from the local entry, or require
    # all three explicitly.
    if localEntry:
        server = localEntry.getAddress()
        dataset = localEntry.getGroup()
        descriptor = localEntry.getDescriptor().upper()
    elif (server is None) or (dataset is None) or (descriptor is None):
        raise TypeError(
            "must provide localEntry or server, dataset, and descriptor values."
        )
    if server == "localhost" or server == "127.0.0.1":
        port = EntryStore.getLocalPort()
    else:
        port = "112"
    # server = '%s:%s' % (server, port)
    user = accounting[0]
    proj = accounting[1]
    debug = str(debug).lower()
    # Each optional keyword is rewritten in place into its ADDE URL fragment
    # (empty string when absent).
    if mag:
        mag = '&MAG=%s %s' % (mag[0], mag[1])
    else:
        mag = ''
    if unit:
        origUnit = unit  # remember the caller's unit for the result dicts
        unit = '&UNIT=%s' % (unit)
    else:
        # origUnit = None
        unit = ''
    if place is Places.CENTER:
        place = '&PLACE=CENTER'
    elif place is Places.ULEFT:
        place = '&PLACE=ULEFT'
    else:
        # raise ValueError()
        place = ''
    if coordinateSystem is CoordinateSystems.LATLON:
        coordSys = 'LATLON'
    elif coordinateSystem is CoordinateSystems.AREA or coordinateSystem is CoordinateSystems.IMAGE:
        coordSys = 'LINELE'
    else:
        raise ValueError()
    if location:
        location = '&%s=%s %s' % (coordSys, location[0], location[1])
    else:
        location = ''
    if size:
        if size == 'ALL':
            size = '&SIZE=99999 99999'
        else:
            size = '&SIZE=%s %s' % (size[0], size[1])
    else:
        size = ''
    if time:
        time = '&TIME=%s %s I' % (time[0], time[1])
    else:
        time = ''
    if band:
        band = '&BAND=%s' % (str(band))
    else:
        band = '&BAND=ALL'
    if position is not None:
        if isinstance(position, int):
            position = '&POS=%s' % (position)
        elif isinstance(position, tuple):
            if len(position) != 2:
                raise ValueError(
                    'position range may only contain values for the beginning and end of a range.'
                )
            position = '&POS=%s %s' % (str(position[0]), str(position[1]))
        else:
            position = '&POS=%s' % (str(position).upper())
    else:
        position = '&POS=0'
    # Formatters for day/time fields of the results; 'Z' == UTC.
    tz = TimeZone.getTimeZone('Z')
    dateFormat = SimpleDateFormat()
    dateFormat.setTimeZone(tz)
    dateFormat.applyPattern('yyyyDDD')
    timeFormat = SimpleDateFormat()
    timeFormat.setTimeZone(tz)
    timeFormat.applyPattern('HH:mm:ss')
    addeUrlFormat = "adde://%(server)s/imagedirectory?&PORT=%(port)s&COMPRESS=gzip&USER=%(user)s&PROJ=%(proj)s&VERSION=1&DEBUG=%(debug)s&TRACE=0&GROUP=%(dataset)s&DESCRIPTOR=%(descriptor)s%(band)s%(location)s%(place)s%(size)s%(unit)s%(mag)s%(day)s%(time)s%(position)s"
    urls = []
    areaDirectories = []
    dates = _normalizeDates(day)
    # Issue one imagedirectory request per normalized date and flatten the
    # resulting AreaDirectory objects, remembering each one's source URL.
    for date in dates:
        formatValues = {
            'server': server,
            'port': port,
            'user': user,
            'proj': proj,
            'debug': debug,
            'dataset': dataset,
            'descriptor': descriptor,
            'band': band,
            'location': location,
            'place': place,
            'size': size,
            'unit': unit,
            'mag': mag,
            'day': date,
            'time': time,
            'position': position,
        }
        url = addeUrlFormat % formatValues
        if showUrls:
            print url
        adl = AreaDirectoryList(url)
        results = adl.getSortedDirs()
        for imageTimes in results:
            for areaDirectory in imageTimes:
                urls.append(url)
                areaDirectories.append(areaDirectory)
    # Build one result dict per (directory, band, calibration unit).
    temp = _AreaDirectoryList()
    for i, d in enumerate(areaDirectories):
        nominalTime = d.getNominalTime()
        tempDay = str(
            dateFormat.format(nominalTime, StringBuffer(), FieldPosition(0)))
        tempTime = str(
            timeFormat.format(nominalTime, StringBuffer(), FieldPosition(0)))
        bandList = list(d.getBands())
        # tempUnitList = list(d.getCalInfo()[0])
        # unitList = tempUnitList[::2]
        # unitDescList = tempUnitList[1::2]
        # calInfo = dict(zip(unitList, unitDescList))
        if unit:
            unitList = [origUnit]
        else:
            # getCalInfo()[0] alternates unit/description; take the units.
            unitList = map(str, list(d.getCalInfo()[0])[::2])
        for band in bandList:
            for calUnit in unitList:
                dt = {
                    'server': server,
                    'dataset': dataset,
                    'descriptor': descriptor,
                    'bandNumber': band,
                    'bandList': bandList,
                    'debug': debug,
                    'accounting': accounting,
                    'day': tempDay,
                    'time': tempTime,
                    'imageSize': (d.getLines(), d.getElements()),
                    'centerLocation': (d.getCenterLatitude(),
                                       d.getCenterLongitude()),
                    'resolution': (d.getCenterLatitudeResolution(),
                                   d.getCenterLongitudeResolution()),
                    'unitList': unitList,
                    'unitType': calUnit,
                    'bands': bandList,
                    'band-count': d.getNumberOfBands(),
                    'calinfo': map(str, list(d.getCalInfo()[0])),
                    'calibration-scale-factor': d.getCalibrationScaleFactor(),
                    'calibration-type': str(d.getCalibrationType()),
                    'calibration-unit-name': d.getCalibrationUnitName(),
                    'center-latitude': d.getCenterLatitude(),
                    'center-latitude-resolution':
                        d.getCenterLatitudeResolution(),
                    'center-longitude': d.getCenterLongitude(),
                    'center-longitude-resolution':
                        d.getCenterLongitudeResolution(),
                    'directory-block': list(d.getDirectoryBlock()),
                    'elements': d.getElements(),
                    'lines': d.getLines(),
                    'memo-field': str(d.getMemoField()),
                    'nominal-time': DateTime(d.getNominalTime()),
                    'sensor-id': d.getSensorID(),
                    'sensor-type': str(d.getSensorType()),
                    'source-type': str(d.getSourceType()),
                    'start-time': DateTime(d.getStartTime()),
                    'url': urls[i],
                }
                temp.append(dt)
    return temp
def listADDEImageTimes(localEntry=None, server=None, dataset=None,
                       descriptor=None, accounting=DEFAULT_ACCOUNTING,
                       location=None, coordinateSystem=CoordinateSystems.LATLON,
                       place=None, mag=None, position=None, unit=None,
                       day=None, time=None, debug=False, band=None, size=None,
                       showUrls=True):
    """List the unique nominal times of ADDE images matching the criteria.

    Accepts the same arguments as listADDEImages; returns a sorted list of
    {'day': 'yyyyDDD', 'time': 'HH:mm:ss'} dicts, one per distinct time.
    """
    if localEntry:
        server = localEntry.getAddress()
        dataset = localEntry.getGroup()
        descriptor = localEntry.getDescriptor().upper()
    elif (server is None) or (dataset is None) or (descriptor is None):
        raise TypeError(
            "must provide localEntry or server, dataset, and descriptor values."
        )
    if server == "localhost" or server == "127.0.0.1":
        port = EntryStore.getLocalPort()
    else:
        port = "112"
    # server = '%s:%s' % (server, port)
    user = accounting[0]
    proj = accounting[1]
    debug = str(debug).lower()
    # Each optional keyword is rewritten into its ADDE URL fragment.
    if mag:
        mag = '&MAG=%s %s' % (mag[0], mag[1])
    else:
        mag = ''
    if unit:
        origUnit = unit
        unit = '&UNIT=%s' % (unit)
    else:
        # origUnit = None
        unit = ''
    if place is Places.CENTER:
        place = '&PLACE=CENTER'
    elif place is Places.ULEFT:
        place = '&PLACE=ULEFT'
    else:
        # raise ValueError()
        place = ''
    if coordinateSystem is CoordinateSystems.LATLON:
        coordSys = 'LATLON'
    elif coordinateSystem is CoordinateSystems.AREA or coordinateSystem is CoordinateSystems.IMAGE:
        coordSys = 'LINELE'
    else:
        raise ValueError()
    if location:
        location = '&%s=%s %s' % (coordSys, location[0], location[1])
    else:
        location = ''
    if size:
        if size == 'ALL':
            size = '&SIZE=99999 99999'
        else:
            size = '&SIZE=%s %s' % (size[0], size[1])
    else:
        size = ''
    if time:
        time = '&TIME=%s %s I' % (time[0], time[1])
    else:
        time = ''
    if band:
        band = '&BAND=%s' % (str(band))
    else:
        band = '&BAND=ALL'
    if position is not None:
        if isinstance(position, int):
            position = '&POS=%s' % (position)
        elif isinstance(position, tuple):
            if len(position) != 2:
                raise ValueError(
                    'position range may only contain values for the beginning and end of a range.'
                )
            position = '&POS=%s %s' % (str(position[0]), str(position[1]))
        else:
            position = '&POS=%s' % (str(position).upper())
    else:
        position = '&POS=0'
    tz = TimeZone.getTimeZone('Z')
    # NOTE(review): dateFormat/timeFormat are configured but never used below
    # (formatting goes through dt.formattedString) -- likely vestigial.
    dateFormat = SimpleDateFormat()
    dateFormat.setTimeZone(tz)
    dateFormat.applyPattern('yyyyDDD')
    timeFormat = SimpleDateFormat()
    timeFormat.setTimeZone(tz)
    timeFormat.applyPattern('HH:mm:ss')
    addeUrlFormat = "adde://%(server)s/imagedirectory?&PORT=%(port)s&COMPRESS=gzip&USER=%(user)s&PROJ=%(proj)s&VERSION=1&DEBUG=%(debug)s&TRACE=0&GROUP=%(dataset)s&DESCRIPTOR=%(descriptor)s%(band)s%(location)s%(place)s%(size)s%(unit)s%(mag)s%(day)s%(time)s%(position)s"
    urls = []
    areaDirectories = []
    dates = _normalizeDates(day)
    # One imagedirectory request per normalized date.
    for date in dates:
        formatValues = {
            'server': server,
            'port': port,
            'user': user,
            'proj': proj,
            'debug': debug,
            'dataset': dataset,
            'descriptor': descriptor,
            'band': band,
            'location': location,
            'place': place,
            'size': size,
            'unit': unit,
            'mag': mag,
            'day': date,
            'time': time,
            'position': position,
        }
        url = addeUrlFormat % formatValues
        if showUrls:
            print url
        adl = AreaDirectoryList(url)
        results = adl.getSortedDirs()
        for imageTimes in results:
            for areaDirectory in imageTimes:
                urls.append(url)
                areaDirectories.append(areaDirectory)
    # Deduplicate nominal times and render each as day/time strings.
    uniques = set()
    times = []
    for d in areaDirectories:
        dt = DateTime(d.getNominalTime())
        if dt not in uniques:
            d = {
                'day': str(dt.formattedString('yyyyDDD', tz)),
                'time': str(dt.formattedString('HH:mm:ss', tz)),
            }
            times.append(d)
            uniques.add(dt)
    uniques = None
    return sorted(times)
def listADDEImageTimes(localEntry=None, server=None, dataset=None, descriptor=None, accounting=DEFAULT_ACCOUNTING, location=None, coordinateSystem=CoordinateSystems.LATLON, place=None, mag=None, position=None, unit=None, day=None, time=None, debug=False, band=None, size=None, showUrls=True): if localEntry: server = localEntry.getAddress() dataset = localEntry.getGroup() descriptor = localEntry.getDescriptor().upper() elif (server is None) or (dataset is None) or (descriptor is None): raise TypeError("must provide localEntry or server, dataset, and descriptor values.") if server == "localhost" or server == "127.0.0.1": port = EntryStore.getLocalPort() else: port = "112" # server = '%s:%s' % (server, port) user = accounting[0] proj = accounting[1] debug = str(debug).lower() if mag: mag = '&MAG=%s %s' % (mag[0], mag[1]) else: mag = '' if unit: origUnit = unit unit = '&UNIT=%s' % (unit) else: # origUnit = None unit = '' if place is Places.CENTER: place = '&PLACE=CENTER' elif place is Places.ULEFT: place = '&PLACE=ULEFT' else: # raise ValueError() place = '' if coordinateSystem is CoordinateSystems.LATLON: coordSys = 'LATLON' elif coordinateSystem is CoordinateSystems.AREA or coordinateSystem is CoordinateSystems.IMAGE: coordSys = 'LINELE' else: raise ValueError() if location: location = '&%s=%s %s' % (coordSys, location[0], location[1]) else: location = '' if size: if size == 'ALL': size = '&SIZE=99999 99999' else: size = '&SIZE=%s %s' % (size[0], size[1]) else: size = '' if time: time = '&TIME=%s %s I' % (time[0], time[1]) else: time = '' if band: band = '&BAND=%s' % (str(band)) else: band = '&BAND=ALL' if position is not None: if isinstance(position, int): position = '&POS=%s' % (position) elif isinstance(position, tuple): if len(position) != 2: raise ValueError('position range may only contain values for the beginning and end of a range.') position = '&POS=%s %s' % (str(position[0]), str(position[1])) else: position = '&POS=%s' % (str(position).upper()) else: 
position = '&POS=0' tz = TimeZone.getTimeZone('Z') dateFormat = SimpleDateFormat() dateFormat.setTimeZone(tz) dateFormat.applyPattern('yyyyDDD') timeFormat = SimpleDateFormat(); timeFormat.setTimeZone(tz) timeFormat.applyPattern('HH:mm:ss') addeUrlFormat = "adde://%(server)s/imagedirectory?&PORT=%(port)s&COMPRESS=gzip&USER=%(user)s&PROJ=%(proj)s&VERSION=1&DEBUG=%(debug)s&TRACE=0&GROUP=%(dataset)s&DESCRIPTOR=%(descriptor)s%(band)s%(location)s%(place)s%(size)s%(unit)s%(mag)s%(day)s%(time)s%(position)s" urls = [] areaDirectories = [] dates = _normalizeDates(day) for date in dates: formatValues = { 'server': server, 'port': port, 'user': user, 'proj': proj, 'debug': debug, 'dataset': dataset, 'descriptor': descriptor, 'band': band, 'location': location, 'place': place, 'size': size, 'unit': unit, 'mag': mag, 'day': date, 'time': time, 'position': position, } url = addeUrlFormat % formatValues if showUrls: print url adl = AreaDirectoryList(url) results = adl.getSortedDirs() for imageTimes in results: for areaDirectory in imageTimes: urls.append(url) areaDirectories.append(areaDirectory) uniques = set() times = [] for d in areaDirectories: dt = DateTime(d.getNominalTime()) if dt not in uniques: d = { 'day': str(dt.formattedString('yyyyDDD', tz)), 'time': str(dt.formattedString('HH:mm:ss', tz)), } times.append(d) uniques.add(dt) uniques = None return sorted(times)
def format_time_as_year_month_day(t):
    """Format ``t.date`` (a ``java.util.Date``) as ``"YYYY,M,D"`` in GMT.

    NOTE(review): ``java.util.Calendar.MONTH`` is zero-based (January == 0),
    so the middle field here is 0-11, not 1-12 — confirm callers expect that.
    """
    from java.util import Calendar, TimeZone
    # interpret the date in GMT regardless of the JVM's default time zone
    gmtCal = Calendar.getInstance(TimeZone.getTimeZone('GMT'))
    gmtCal.setTime(t.date)
    return "%d,%d,%d"%(gmtCal.get(Calendar.YEAR),gmtCal.get(Calendar.MONTH),gmtCal.get(Calendar.DATE))
# NOTE(review): this chunk continues a command-line option-parsing if/elif
# chain whose head (and enclosing option loop) lies outside this excerpt.
elif o == '-s':
    # queue a job id to start
    startlist.append( a )
elif o == '-k':
    # queue a job id to stop ("kill")
    stoplist.append( a )
elif o == '-h':
    # print usage and exit
    print sys.argv[0] + " [-options]"
    print "\t-h\n\t\tThis"
    print "\t-r\n\t\tRun all pending jobs."
    print "\t-l\n\t\tDump all jobs."
    print "\t-f FILENAME\n\t\tDefault: FILENAME=acroncfg.json"
    print "\t-s ID\n\t\tStart a job."
    print "\t-k ID\n\t\tStop a job."
    quit()

# Load the JSON configuration; the scheduler time zone defaults to the
# system zone unless the config supplies a "tz" entry.
config = loadjson( configfn )
tz = TimeZone.getDefault()
if config.has( "tz" ):
    tz = TimeZone.getTimeZone( config.getString( "tz" ) )

# Restore persisted job state, if the state file exists.
statefn = config.getString( "statefile" )
if os.path.isfile( statefn ):
    states = loadjson( statefn )
else:
    states = JSONObject()

# Build the job table keyed by job id, re-applying any saved state.
jobsdesc = config.getJSONArray( "jobs" )
jobs = {}
for i in range( jobsdesc.length() ):
    j = Job( jobsdesc.getJSONObject( i ), tz )
    if states.has( j.id ):
        j.setState( states.getJSONObject( j.id ) )
    jobs[j.id] = j
def evaluate(self, times, max_time, origins_csv, destinations_csv,
             csv_writer, split=500, do_merge=False):
    '''
    evaluate the shortest paths between origins and destinations

    uses the routing options set in setup() (run it first!)

    Parameters
    ----------
    times: list of date times, the desired start/arrival times for evaluation
    origins_csv: file with origin points
    destinations_csv: file with destination points
    csv_writer: CSVWriter, configured writer to write results
    split: int, optional, number of source points per processing slice
    do_merge: merge the results over time, only keeping the best connections
    max_time: maximum travel-time in seconds (the smaller this value,
              the smaller the shortest path tree, that has to be created;
              saves processing time)
    '''
    # NOTE(review): indentation of this body was reconstructed from a
    # whitespace-mangled source; statement nesting below should be verified
    # against the original script.
    origins = self.otp.loadCSVPopulation(origins_csv, LATITUDE_COLUMN, LONGITUDE_COLUMN)
    destinations = self.otp.loadCSVPopulation(destinations_csv, LATITUDE_COLUMN, LONGITUDE_COLUMN)
    # when routing "arrive by", the tree is built from the destinations
    sources = origins if not self.arrive_by else destinations
    n_slices = (sources.size() / split) + 1
    if n_slices > 1:
        print 'Splitting sources into {} part(s) with {} points each part'.format(n_slices, split)
    from_index = 0; to_index = 0; i = 1
    while True:
        # advance the [from_index, to_index) window over the source points
        if to_index >= sources.size():
            break
        from_index = to_index
        to_index += split
        if to_index >= sources.size():
            to_index = sources.size()
        sliced_sources = sources.get_slice(from_index, to_index)
        if n_slices > 1:
            print('calculating part {}/{}'.format(i, n_slices))
            i += 1
        if not self.arrive_by:
            origins = sliced_sources
        else:
            destinations = sliced_sources
        self.request.setOrigins(origins)
        self.request.setDestinations(destinations)
        self.request.setLogProgress(self.print_every_n_lines)
        if self.arrive_by:
            time_note = ' arrival time '
        else:
            time_note = 'start time '
        # # if evaluation is performed in a time window, routes exceeding the window will be ignored
        # # (worstTime already takes care of this, but the time needed to reach the snapped the OSM point is also taken into account here)
        # if len(times) > 1:
        #     print 'Cutoff set: routes with {}s exceeding the time window ({}) will be ignored (incl. time to reach OSM-net)'.format(time_note, times[-1])
        #     cutoff = times[-1]
        #     self.request.setCutoffTime(cutoff.year, cutoff.month, cutoff.day, cutoff.hour, cutoff.minute, cutoff.second)

        # iterate all times
        results = []  # dimension (if not merged): times x targets (origins resp. destinations)
        # NOTE(review): nothing in this excerpt ever appends to `results`, so
        # the merge branch below never runs — confirm against the original.
        sdf = SimpleDateFormat('HH:mm:ss')
        # NOTE(review): "GMT +2" is not a standard zone ID — verify Java does
        # not silently fall back to GMT here.
        sdf.setTimeZone(TimeZone.getTimeZone("GMT +2"))
        for t, date_time in enumerate(times):
            # compare seconds since epoch (different ways to get it from java/python date)
            epoch = datetime.utcfromtimestamp(0)
            time_since_epoch = (date_time - epoch).total_seconds()
            self.request.setDateTime(date_time.year, date_time.month, date_time.day, date_time.hour, date_time.minute, date_time.second)
            # has to be set every time after setting datetime (and also AFTER setting arriveby)
            self.request.setMaxTimeSec(max_time)
            msg = 'Starting evaluation of routes with ' + time_note + date_time.strftime(DATETIME_FORMAT)
            print msg
            results_dt = self.batch_processor.evaluate(self.request)
            # if there already was a calculation: merge it with new results
            if do_merge and len(results) > 0:
                for i, prev_result in enumerate(results[0]):
                    if prev_result is not None:
                        prev_result.merge(results_dt[i])
            # write and append if no merging is needed (saves memory)
            else:
                search_time = sdf.format(date_time)
                csv_writer.write(results_dt, additional_columns={'search_time': search_time}, append=True)
                for r in results_dt:
                    del(r)
        if do_merge:
            # flatten the results
            results = [r for res in results for r in res]
            csv_writer.write(results, append=False)
# Constants for the pointrel triple-store repository format.
DefaultValueType = "pointrel:text/utf-8"
delimiterStartAndEnd = '~'
delimiterMiddle = '-'
pointrelTripleIDPrefix = "pointrel://tripleID/"
# marker written for an empty value: delimiter, delimiter, zero length
EMPTY_MARKER = "%s%s0" % (delimiterStartAndEnd, delimiterStartAndEnd)
DEFAULT_REPOSITORY_EXTENSION = ".pointrel"
DEFAULT_REPOSITORY_NAME = "repository.pointrel"

# Times in ISO 8601
# http://www.cl.cam.ac.uk/~mgk25/iso-time.html
# SimpleDateFormat needs a local copy in each thread if multithreaded,
# so this shared instance should only be used single-threaded for now.
ISO8601TimestampFormat = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
ISO8601TimestampFormat.setTimeZone(TimeZone.getTimeZone("UTC"))

def stringForTimestamp(timestamp):
    # Render a timestamp as an ISO 8601 UTC string.
    return ISO8601TimestampFormat.format(timestamp)

def timestampForString(timestampString):
    # Parse an ISO 8601 UTC string back into a Timestamp.
    date = ISO8601TimestampFormat.parse(timestampString)
    timestamp = Timestamp(date.getTime())
    return timestamp

def generateUniqueReferenceForRepository():
    # Build a unique repository reference from a random UUID.
    randomUUID = UUID.randomUUID()
    newText = RepositoryReferenceScheme + randomUUID.toString()
    return newText

class Record:
    """A record in the pointrel repository (body continues beyond this excerpt)."""
def outputShefText(station, interval, tz, records, decodeInfo):
    '''
    Output the rdb-formatted data retrieved from the USGS

    Writes SHEF ".A" records (one per value) for irregular time series, or a
    single SHEF ".E" record (evenly spaced values) otherwise, via
    outputShefRecord. Uses module-level locations/timezones/tzInfo tables.

    station:    USGS station id (key into `locations`)
    interval:   minutes between values, or IRREGULAR_INTERVAL
    tz:         USGS time-zone code (overridden when shefTimezone is set)
    records:    sequence of (millis, value) pairs
    decodeInfo: (dd, parameter-info-dict) pair
    '''
    global locations, timezones
    dd, decodeInfo = decodeInfo
    cal = Calendar.getInstance()
    sdfDate = SimpleDateFormat("yyyyMMdd")
    sdfTime = SimpleDateFormat("HHmm")
    if shefTimezone:
        # a configured SHEF time zone overrides the zone reported by the USGS
        tz = tzInfo[shefTimezone]["SHEF"]
        if not timezones["SHEF"]:
            timezones["SHEF"] = TimeZone.getTimeZone(
                tzInfo[shefTimezone]["JAVA"])
        for sdf in (sdfDate, sdfTime):
            sdf.setTimeZone(timezones["SHEF"])
    else:
        for sdf in (sdfDate, sdfTime):
            sdf.setTimeZone(timezones["USGS"])
    recordCount = len(records)
    if interval == IRREGULAR_INTERVAL:
        # one ".A" (single-value) SHEF record per observation
        for i in range(recordCount):
            cal.setTimeInMillis(records[i][0])
            record = ".A %s %8s" % (locations[station]["SHEF_LOC"],
                                    sdfDate.format(cal.getTime()))
            record += " %s DH%4s /" % (tzInfo[tz]["SHEF"],
                                       sdfTime.format(cal.getTime()))
            try:
                # per-location parameter override, if configured
                param = locations[station]["SHEF_PARAMETER_OVERRIDES"][dd]
            except:
                param = decodeInfo["SHEF_PARAMETER"]
            factor = decodeInfo["SHEF_FACTOR"]
            # SHEF_UNIT indexes "ES": 0 -> English, 1 -> SI
            unitSystem = "ES"[decodeInfo["SHEF_UNIT"]]
            try:
                record += " DU%s / %s %s /" % (
                    unitSystem, param,
                    fmtFloat(float(records[i][1]) * factor))
            except:
                # non-numeric value: emit SHEF missing-value marker
                record += " %s M /" % (param)
            outputShefRecord(record)
    else:
        # one ".E" (evenly spaced) SHEF record starting at the first value
        cal.setTimeInMillis(records[0][0])
        try:
            param = locations[station]["SHEF_PARAMETER_OVERRIDES"][dd]
        except:
            param = decodeInfo["SHEF_PARAMETER"]
        factor = decodeInfo["SHEF_FACTOR"]
        unitSystem = "ES"[decodeInfo["SHEF_UNIT"]]
        record = ".E %s %8s" % (locations[station]["SHEF_LOC"],
                                sdfDate.format(cal.getTime()))
        record += " %s DH%4s / DU%s / %s /" % (tzInfo[tz]["SHEF"],
                                               sdfTime.format(cal.getTime()),
                                               unitSystem, param)
        # interval token: DIN = minutes, DIH = hours
        if interval % 60:
            record += " DIN+%2.2d /" % interval
        else:
            record += " DIH+%2.2d /" % (interval / 60)
        for j in range(recordCount):
            try:
                record += " %s /" % fmtFloat(float(records[j][1]) * factor)
            except:
                record += " M /"
        outputShefRecord(record)
script_name = "{}.py".format(arg2) # Get the watershed name for the slug ws_name = cavistatus.get_watershed().getName() if ws_name is None else ws_name ws_name_slug = re.sub(r'\s+|_', '-', ws_name).lower() tw = cavistatus.get_timewindow() if tw != None: st, et = tw print("Time window: {}".format(tw)) else: raise Exception('No forecast open on Modeling tab to get a timewindow.') st = HecTime(st, HecTime.MINUTE_GRANULARITY) st.showTimeAsBeginningOfDay(True) # Convert start to UTC print('Converting time window to UTC for API request.') ws_tz = cavistatus.get_timezone() HecTime.convertTimeZone(st, ws_tz, TimeZone.getTimeZone('UTC')) et = HecTime(et, HecTime.MINUTE_GRANULARITY) et.showTimeAsBeginningOfDay(True) # Convert end to UTC HecTime.convertTimeZone(et, ws_tz, TimeZone.getTimeZone('UTC')) after = '{}-{:02d}-{:02d}T{:02d}:{:02d}:00Z'.format(st.year(), st.month(), st.day(), st.hour(), st.minute()) before = '{}-{:02d}-{:02d}T{:02d}:{:02d}:00Z'.format(et.year(), et.month(), et.day(), et.hour(), et.minute()) # DSS filename and path if dssfilename is None: dssfilename = 'data.dss' if not dssfilename.endswith('.dss'): dssfilename += '.dss' if dsspath is None: dsspath = cavistatus.get_database_directory() dsspath = os.path.expandvars(dsspath) # Join the path and dbdss = os.path.join(dsspath, dssfilename) print('DSS: {}'.format(dbdss))
def authenticate(self, configurationAttributes, requestParameters, step):
    """Two-step authentication with forced password update on expiration.

    Step 1: validate username/password; read the user's
    oxPasswordExpirationDate and, if it has passed, stash a new expiration
    date (now + 90 days) in the session working parameters.
    Step 2: if the update button was pressed, store the new expiration date
    and the new password on the user entry.

    Returns True when the step succeeds, False otherwise.
    """
    # NOTE(review): indentation was reconstructed from a whitespace-mangled
    # source; in particular the placement of step 1's final `return True`
    # (inside vs. after the expiration check) should be verified.
    authenticationService = CdiUtil.bean(AuthenticationService)
    userService = CdiUtil.bean(UserService)
    identity = CdiUtil.bean(Identity)
    credentials = identity.getCredentials()
    if step == 1:
        print "Basic (with password update). Authenticate for step 1"
        user_name = credentials.getUsername()
        user_password = credentials.getPassword()
        logged_in = False
        if StringHelper.isNotEmptyString(
                user_name) and StringHelper.isNotEmptyString(
                user_password):
            logged_in = authenticationService.authenticate(
                user_name, user_password)
        if not logged_in:
            return False
        find_user_by_uid = authenticationService.getAuthenticatedUser()
        user_expDate = find_user_by_uid.getAttribute(
            "oxPasswordExpirationDate", False)
        if user_expDate == None:
            print "Basic (with password update). Authenticate for step 1. User has no oxPasswordExpirationDate date"
            return False
        dt = StaticUtils.decodeGeneralizedTime(user_expDate)
        # Get Current Date
        calendar = GregorianCalendar(TimeZone.getTimeZone("UTC"))
        now = calendar.getTime()
        if now.compareTo(dt) > 0:
            # password expired: Add 90 Days to current date and remember it
            # for step 2
            calendar.setTime(now)
            calendar.add(calendar.DATE, 90)
            dt_plus_90 = calendar.getTime()
            expDate = StaticUtils.encodeGeneralizedTime(dt_plus_90)
            identity.setWorkingParameter("expDate", expDate)
        return True
    elif step == 2:
        print "Basic (with password update). Authenticate for step 2"
        user = authenticationService.getAuthenticatedUser()
        if user == None:
            print "Basic (with password update). Authenticate for step 2. Failed to determine user name"
            return False
        user_name = user.getUserId()
        find_user_by_uid = userService.getUser(user_name)
        newExpDate = identity.getWorkingParameter("expDate")
        if find_user_by_uid == None:
            print "Basic (with password update). Authenticate for step 2. Failed to find user"
            return False
        print "Basic (with password update). Authenticate for step 2"
        update_button = requestParameters.get("loginForm:updateButton")
        # no update button pressed -> user declined the update; succeed as-is
        if ArrayHelper.isEmpty(update_button):
            return True
        find_user_by_uid.setAttribute("oxPasswordExpirationDate", newExpDate)
        new_password_array = requestParameters.get("new_password")
        if ArrayHelper.isEmpty(new_password_array) or StringHelper.isEmpty(
                new_password_array[0]):
            print "Basic (with password update). Authenticate for step 2. New password is empty"
            return False
        new_password = new_password_array[0]
        find_user_by_uid.setAttribute("userPassword", new_password)
        print "Basic (with password update). Authenticate for step 2. Attempting to set new user '%s' password" % user_name
        userService.updateUser(find_user_by_uid)
        print "Basic (with password update). Authenticate for step 2. Password updated successfully"
        return True
    else:
        return False
def makeTimeSeriesContainer(tsData, timeZone, pathname=None):
    '''
    Construct a TimeSeriesContainer object from a python dictionary that was
    created from a single "time-series" returned from the CWMS RADAR web
    service

    tsData:   dict with either "regular-interval-values" or
              "irregular-interval-values", plus a dot-delimited "name"
              (loc.param.paramType.interval.duration.version)
    timeZone: Java time-zone id used to interpret all times
    pathname: optional DSS pathname overriding the parts derived from the
              CWMS time-series id

    Returns the populated TimeSeriesContainer, or None if anything raised
    (the traceback is logged via output()).
    '''
    #---------------#
    # initial setup #
    #---------------#
    tsc = None
    try:
        tz = TimeZone.getTimeZone(timeZone)
        # RADAR times are ISO-8601; HecTime.set wants "ddMMMyyyy, HH:mm"
        sdf8601 = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX")
        sdfHecTime = SimpleDateFormat("ddMMMyyyy, HH:mm")
        cal = Calendar.getInstance()
        for obj in sdf8601, sdfHecTime, cal:
            obj.setTimeZone(tz)
        ht = HecTime()
        times, values, qualities = [], [], []
        #------------------#
        # process the data #
        #------------------#
        if tsData.has_key("regular-interval-values"):
            #----------------------------------------#
            # regular time series (a lot to process) #
            #----------------------------------------#
            rts = tsData["regular-interval-values"]
            intvlStr = rts["interval"]
            unit = rts["unit"].split()[0]
            # ISO-8601 duration: "PT<n>M"/"PT<n>H" for sub-day,
            # "P<n>D"/"P<n>M"/"P<n>Y" for day-and-larger intervals
            if intvlStr.startswith("PT"):
                intvlNum, intvlUnit = int(intvlStr[2:-1]), intvlStr[-1]
                try:
                    # factor converts the interval to minutes; field is the
                    # Calendar field stepped between values
                    factor, field = {
                        "M": (1, Calendar.MINUTE),
                        "H": (60, Calendar.HOUR_OF_DAY)
                    }[intvlUnit]
                except KeyError:
                    raise Exception("Unexpected interval: %s" % intvlStr)
            else:
                intvlNum, intvlUnit = int(intvlStr[1:-1]), intvlStr[-1]
                try:
                    factor, field = {
                        "Y": (1440 * 365, Calendar.YEAR),
                        "M": (1440 * 30, Calendar.MONTH),
                        "D": (1440, Calendar.DATE)
                    }[intvlUnit]
                except KeyError:
                    raise Exception("Unexpected interval: %s" % intvlStr)
            intvl = intvlNum * factor
            segmentCount = rts["segment-count"]
            cal.setTimeInMillis(
                sdf8601.parse(rts["segments"][0]["first-time"]).getTime())
            for i in range(segmentCount):
                for j in range(rts["segments"][i]["value-count"]):
                    ht.set(sdfHecTime.format(cal.getTimeInMillis()))
                    v, q = rts["segments"][i]["values"][j]
                    times.append(ht.value())
                    values.append(v)
                    qualities.append(q)
                    cal.add(field, intvlNum)
                if i < segmentCount - 1:
                    # pad the gap to the next segment with UNDEFINED values
                    nextBegin = sdf8601.parse(
                        rts["segments"][i + 1]["first-time"]).getTime()
                    time = cal.getTimeInMillis()
                    while time < nextBegin:
                        ht.set(sdfHecTime.format(time))
                        times.append(ht.value())
                        values.append(Constants.UNDEFINED)
                        qualities.append(0)
                        cal.add(field, intvlNum)
                        time = cal.getTimeInMillis()
        elif tsData.has_key("irregular-interval-values"):
            #------------------------------#
            # irregular time series (easy) #
            #------------------------------#
            its = tsData["irregular-interval-values"]
            unit = its["unit"].split()[0]
            intvl = 0
            for t, v, q in its["values"]:
                ht.set(sdfHecTime.format(sdf8601.parse(t)))
                times.append(ht.value())
                values.append(v)
                qualities.append(q)
        else:
            raise Exception("Time series has no values")
        #--------------------------------------------------#
        # code common to regular and irregular time series #
        #--------------------------------------------------#
        tsc = TimeSeriesContainer()
        tsc.times = times
        tsc.values = values
        tsc.quality = qualities
        tsc.numberValues = len(times)
        tsc.startTime = times[0]
        tsc.endTime = times[-1]
        tsc.interval = intvl
        tsc.units = unit
        tsc.timeZoneID = timeZone
        tsc.timeZoneRawOffset = tz.getRawOffset()
        name = tsData["name"]
        loc, param, paramType, intv, dur, ver = name.split(".")
        if pathname:
            #---------------------------#
            # use pathname if specified #
            #---------------------------#
            A, B, C, D, E, F = 1, 2, 3, 4, 5, 6
            parts = pathname.split("/")
            parts[D] = ''
            tsc.fullName = "/".join(parts)
            tsc.watershed = parts[A]
            # "-" separates main from sub location/parameter/version
            try:
                tsc.location, tsc.subLocation = parts[B].split("-", 1)
            except:
                tsc.location = parts[B]
            try:
                tsc.parameter, tsc.subParameter = parts[C].split("-", 1)
            except:
                tsc.parameter = parts[C]
            try:
                tsc.version, tsc.subVersion = parts[F].split("-", 1)
            except:
                tsc.version = parts[F]
        else:
            #--------------------------------------#
            # no pathname, use CWMS time series id #
            #--------------------------------------#
            try:
                tsc.location, tsc.subLocation = loc.split("-", 1)
            except:
                tsc.location = loc
            try:
                tsc.parameter, tsc.subParameter = param.split("-", 1)
            except:
                tsc.parameter = param
            try:
                tsc.version, tsc.subVersion = ver.split("-", 1)
            except:
                tsc.version = ver
        # map the CWMS parameter type onto the DSS data type
        tsc.type = {
            "Total": "PER-CUM",
            "Max": "PER-MAX",
            "Min": "PER-MIN",
            "Const": "INST-VAL",
            "Ave": "PER-AVER",
            "Inst": ("INST-VAL", "INST-CUM")[param.startswith("Precip")]
        }[paramType]
    except:
        # best-effort: log the traceback and return whatever was built
        output(traceback.format_exc())
    return tsc
def date_as_string(date):
    """Render a java.util.Date as 'yyyy-MM-dd HH:mm:ss' in UTC."""
    formatter = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    formatter.setTimeZone(TimeZone.getTimeZone("UTC"))
    return formatter.format(date)
# Pretend to be OpenSSL OPENSSL_VERSION = "OpenSSL 1.0.0 (as emulated by Java SSL)" OPENSSL_VERSION_NUMBER = 0x1000000 OPENSSL_VERSION_INFO = (1, 0, 0, 0, 0) CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED = list(range(3)) # Do not support PROTOCOL_SSLv2, it is highly insecure and it is optional _, PROTOCOL_SSLv3, PROTOCOL_SSLv23, PROTOCOL_TLSv1 = list(range(4)) _PROTOCOL_NAMES = { PROTOCOL_SSLv3: 'SSLv3', PROTOCOL_SSLv23: 'SSLv23', PROTOCOL_TLSv1: 'TLSv1'} _rfc2822_date_format = SimpleDateFormat("MMM dd HH:mm:ss yyyy z", Locale.US) _rfc2822_date_format.setTimeZone(TimeZone.getTimeZone("GMT")) _ldap_rdn_display_names = { # list from RFC 2253 "CN": "commonName", "L": "localityName", "ST": "stateOrProvinceName", "O": "organizationName", "OU": "organizationalUnitName", "C": "countryName", "STREET": "streetAddress", "DC": "domainComponent", "UID": "userid" } _cert_name_types = [
def listADDEImages(localEntry=None, server=None, dataset=None, descriptor=None,
                   accounting=DEFAULT_ACCOUNTING, location=None,
                   coordinateSystem=CoordinateSystems.LATLON, place=None,
                   mag=None, position=None, unit=None, day=None, time=None,
                   debug=False, band=None, size=None, showUrls=True):
    """Creates a list of ADDE images.

    Args:
        localEntry: Local ADDE dataset.
        server: ADDE server.
        dataset: ADDE dataset group name.
        descriptor: ADDE dataset descriptor.
        day: Day range. ('begin date', 'end date')
        time: ('begin time', 'end time')
        position: Position number. Values may be integers or the string "ALL". (default=0)
        band: McIDAS band number; only images that have matching band number will be returned.
        accounting: ('user', 'project number') User and project number required
            by servers using McIDAS accounting. default = ('idv','0')

    Returns:
        ADDE image matching the given criteria, if any.

    Raises:
        TypeError: if neither localEntry nor server/dataset/descriptor given.
        ValueError: for an unknown coordinateSystem or a bad position range.
    """
    if localEntry:
        server = localEntry.getAddress()
        dataset = localEntry.getGroup()
        descriptor = localEntry.getDescriptor().upper()
    elif (server is None) or (dataset is None) or (descriptor is None):
        raise TypeError("must provide localEntry or server, dataset, and descriptor values.")

    # local servers listen on a dynamically assigned port; remote ADDE uses 112
    if server == "localhost" or server == "127.0.0.1":
        port = EntryStore.getLocalPort()
    else:
        port = "112"
    # server = '%s:%s' % (server, port)

    user = accounting[0]
    proj = accounting[1]
    debug = str(debug).lower()

    # convert each optional argument into its ADDE URL fragment ('' if absent)
    if mag:
        mag = '&MAG=%s %s' % (mag[0], mag[1])
    else:
        mag = ''

    if unit:
        # remember the caller's unit; `unit` itself becomes the URL fragment
        origUnit = unit
        unit = '&UNIT=%s' % (unit)
    else:
        # origUnit = None
        unit = ''

    if place is Places.CENTER:
        place = '&PLACE=CENTER'
    elif place is Places.ULEFT:
        place = '&PLACE=ULEFT'
    else:
        # raise ValueError()
        place = ''

    if coordinateSystem is CoordinateSystems.LATLON:
        coordSys = 'LATLON'
    elif coordinateSystem is CoordinateSystems.AREA or coordinateSystem is CoordinateSystems.IMAGE:
        coordSys = 'LINELE'
    else:
        raise ValueError()

    if location:
        location = '&%s=%s %s' % (coordSys, location[0], location[1])
    else:
        location = ''

    if size:
        if size == 'ALL':
            size = '&SIZE=99999 99999'
        else:
            size = '&SIZE=%s %s' % (size[0], size[1])
    else:
        size = ''

    if time:
        time = '&TIME=%s %s I' % (time[0], time[1])
    else:
        time = ''

    if band:
        band = '&BAND=%s' % (str(band))
    else:
        band = '&BAND=ALL'

    if position is not None:
        if isinstance(position, int):
            position = '&POS=%s' % (position)
        elif isinstance(position, tuple):
            if len(position) != 2:
                raise ValueError('position range may only contain values for the beginning and end of a range.')
            position = '&POS=%s %s' % (str(position[0]), str(position[1]))
        else:
            position = '&POS=%s' % (str(position).upper())
    else:
        position = '&POS=0'

    # formatters for the day/time fields of each directory entry (UTC)
    tz = TimeZone.getTimeZone('Z')
    dateFormat = SimpleDateFormat()
    dateFormat.setTimeZone(tz)
    dateFormat.applyPattern('yyyyDDD')
    timeFormat = SimpleDateFormat();
    timeFormat.setTimeZone(tz)
    timeFormat.applyPattern('HH:mm:ss')

    addeUrlFormat = "adde://%(server)s/imagedirectory?&PORT=%(port)s&COMPRESS=gzip&USER=%(user)s&PROJ=%(proj)s&VERSION=1&DEBUG=%(debug)s&TRACE=0&GROUP=%(dataset)s&DESCRIPTOR=%(descriptor)s%(band)s%(location)s%(place)s%(size)s%(unit)s%(mag)s%(day)s%(time)s%(position)s"

    urls = []
    areaDirectories = []
    dates = _normalizeDates(day)
    for date in dates:
        formatValues = {
            'server': server,
            'port': port,
            'user': user,
            'proj': proj,
            'debug': debug,
            'dataset': dataset,
            'descriptor': descriptor,
            'band': band,
            'location': location,
            'place': place,
            'size': size,
            'unit': unit,
            'mag': mag,
            'day': date,
            'time': time,
            'position': position,
        }
        url = addeUrlFormat % formatValues
        if showUrls:
            print url
        adl = AreaDirectoryList(url)
        results = adl.getSortedDirs()
        # remember the source URL alongside every directory it produced
        for imageTimes in results:
            for areaDirectory in imageTimes:
                urls.append(url)
                areaDirectories.append(areaDirectory)

    # build one result dict per (directory, band, calibration unit) combination
    temp = _AreaDirectoryList()
    for i, d in enumerate(areaDirectories):
        nominalTime = d.getNominalTime()
        tempDay = str(dateFormat.format(nominalTime, StringBuffer(), FieldPosition(0)))
        tempTime = str(timeFormat.format(nominalTime, StringBuffer(), FieldPosition(0)))
        bandList = list(d.getBands())
        # tempUnitList = list(d.getCalInfo()[0])
        # unitList = tempUnitList[::2]
        # unitDescList = tempUnitList[1::2]
        # calInfo = dict(zip(unitList, unitDescList))
        if unit:
            unitList = [origUnit]
        else:
            # calInfo alternates unit / description; take every other entry
            unitList = map(str, list(d.getCalInfo()[0])[::2])
        for band in bandList:
            for calUnit in unitList:
                dt = {
                    'server': server,
                    'dataset': dataset,
                    'descriptor': descriptor,
                    'bandNumber': band,
                    'bandList': bandList,
                    'debug': debug,
                    'accounting': accounting,
                    'day': tempDay,
                    'time': tempTime,
                    'imageSize': (d.getLines(), d.getElements()),
                    'centerLocation': (d.getCenterLatitude(), d.getCenterLongitude()),
                    'resolution': (d.getCenterLatitudeResolution(), d.getCenterLongitudeResolution()),
                    'unitList': unitList,
                    'unitType': calUnit,
                    'bands': bandList,
                    'band-count': d.getNumberOfBands(),
                    'calinfo': map(str, list(d.getCalInfo()[0])),
                    'calibration-scale-factor': d.getCalibrationScaleFactor(),
                    'calibration-type': str(d.getCalibrationType()),
                    'calibration-unit-name': d.getCalibrationUnitName(),
                    'center-latitude': d.getCenterLatitude(),
                    'center-latitude-resolution': d.getCenterLatitudeResolution(),
                    'center-longitude': d.getCenterLongitude(),
                    'center-longitude-resolution': d.getCenterLongitudeResolution(),
                    'directory-block': list(d.getDirectoryBlock()),
                    'elements': d.getElements(),
                    'lines': d.getLines(),
                    'memo-field': str(d.getMemoField()),
                    'nominal-time': DateTime(d.getNominalTime()),
                    'sensor-id': d.getSensorID(),
                    'sensor-type': str(d.getSensorType()),
                    'source-type': str(d.getSourceType()),
                    'start-time': DateTime(d.getStartTime()),
                    'url': urls[i],
                }
                temp.append(dt)
    return temp
# Announce the configured outputs, resolve the Java time zones, and load the
# parameter-alias CSV.
if dssTimezone:
    log.output("Outputting to DSS file %s in %s time zone" % (dssFilename, dssTimezone))
else:
    log.output(
        "Outputting to DSS file %s in time zone used by USGS" % dssFilename)
if outputFormat & CWMS_DB:
    log.output("Outputting to CWMS database with store rule %s" % storeRule)
#-----------------------------------#
# set up the Java time zones to use #
#-----------------------------------#
# "USGS" stays None here; it is filled in later from the data itself
timezones = {"USGS": None, "SHEF": None, "DSS": None}
if shefTimezone:
    timezones["SHEF"] = TimeZone.getTimeZone(tzInfo[shefTimezone]["JAVA"])
if dssTimezone:
    timezones["DSS"] = TimeZone.getTimeZone(tzInfo[dssTimezone]["JAVA"])
#-----------------------------------------------#
# read the parameter aliases file, if it exists #
#-----------------------------------------------#
# NOTE(review): the bare string below is a no-op expression statement; it
# appears to describe the alias file's header format and was probably meant
# to be a comment.
"[USGS_PARAMETER],ALIAS"
parameterAliases = {}
aliasfile = open(paramAliasFilename, 'r')
lines = aliasfile.read().strip().replace('\r', '').split("\n")
aliasfile.close()
# first line holds the column names; remaining lines are alias entries
parameterAliasKeys = parseCsv(lines[0].upper())
for line in lines[1:]:
    line = line.strip()
    if not line or line.startswith("#"):
        continue
    fields = parseCsv(line)
def processLocation(location, locationInfo, seq=None, total=None):
    """
    Fetch, parse, and output/store USGS RDB time-series data for one location.

    ``location`` is either a path to a previously saved data file or a USGS
    site identifier passed through to ``getData`` (``seq``/``total`` are only
    forwarded to ``getData``).  ``locationInfo["PARAMETERS"]`` selects which
    USGS parameter columns are processed.  Output destinations are chosen by
    the module-level ``outputFormat`` bit mask (USGS_TEXT / SHEF_TEXT /
    DSS_FILE / CWMS_DB) and the outcome is recorded in exactly one of the
    module-level lists ``successful``, ``notfound``, ``nodata`` or
    ``haserror``.

    NOTE(review): relies on many module-level names not visible in this
    chunk (log, outputLevel, NONE, NORMAL, tzInfo, parameters, notfoundDb,
    IRREGULAR_INTERVAL, outputShefText, makeTimeSeriesContainer, storeToDss,
    storeToCwmsDb, getData) -- verify against the rest of the script.
    """
    global timezones, parameters, successful, notfound, nodata, haserror
    # A location that names an existing file is read directly; otherwise the
    # data is retrieved (presumably from the USGS web service -- see getData).
    if os.path.isfile(location):
        f = open(location)
        data = f.read().strip().replace('\r', '')
        f.close()
        # use the file's base name (sans extension) as the location id
        location = os.path.splitext(os.path.basename(location))[0]
    else:
        data = getData(location, seq, total)
    if not data:
        if outputLevel > NONE:
            log.output("*** No data ***")
        nodata.append(location)
        return
    tz = ""          # current time-zone code from the tz_cd column
    tzField = None   # index of the tz_cd column in the header
    paramsToOutput = locationInfo["PARAMETERS"]
    if outputFormat & USGS_TEXT:
        # raw pass-through of the retrieved text
        sys.stdout.write(data)
    if data.find("No sites/data found using the selection criteria specified"
                 ) != -1:
        if outputLevel > NONE:
            log.output("*** Site not found ***")
        notfound.append(location)
        notfoundDb[location] = "True"
        return
    if outputFormat & (SHEF_TEXT | DSS_FILE | CWMS_DB):
        if outputLevel > NORMAL:
            log.output(
                "Parsing the following data:\n---------------------------")
            log.output("%s\n" % data)
        #------------------------------------------------------------------------------#
        # read through all the comment lines, and find the location name and time zone #
        #------------------------------------------------------------------------------#
        # NOTE(review): stationText appears unused in the visible code.
        stationText = "# USGS %s " % location
        lines = data.replace("\r\n", "\n").split("\n")
        for i in range(len(lines)):
            line = lines[i].strip()
            if not line:
                continue
            if line[0] == "#":
                pass
            else:
                # first non-comment line: the header; i is reused below
                break
        else:
            # for/else: every line was blank or a comment -> no values at all
            if outputLevel > NONE:
                log.output("*** No values ***")
            nodata.append(location)
            return
        #-----------------------------------------------------#
        # process the 1st header line, describing field names #
        #-----------------------------------------------------#
        fields = lines[i].split()
        fieldCount = len(fields)
        if fieldCount < 3 or " ".join(
                fields[:3]) != "agency_cd site_no datetime":
            if outputLevel > NONE:
                log.output("*** Unexpected format on header line 1 ***")
                log.output("%s" % lines[i])
            haserror.append(location)
            return
        paramCount = fieldCount - 3
        if paramCount < 1:
            if outputLevel > NONE:
                log.output("*** No parameters ***")
            nodata.append(location)
            return
        paramNames = fields[:]
        fieldsToOutput = []  # column indices of the requested parameters
        decodeInfo = []      # (column-name prefix, parameter info) per column
        for j in range(3, len(paramNames)):
            if paramNames[j] == "tz_cd":
                tzField = j
            else:
                # column names look like "<nn>_<paramName>"; keep the name part
                paramName = paramNames[j].split("_", 1)[1]
                if paramName in paramsToOutput:
                    fieldsToOutput.append(j)
                    try:
                        decodeInfo.append(
                            (paramNames[j][:2], parameters[paramName]))
                    except:
                        if outputLevel > NONE:
                            log.output("*** Unexpected parameter %s ***" %
                                       paramName)
                        haserror.append(location)
                        return
        if not fieldsToOutput:
            if outputLevel > NONE:
                log.output("*** No data ***")
            nodata.append(location)
            return
        if not tzField:
            if outputLevel > NONE:
                log.output("*** No timezone field specified ***")
            haserror.append(location)
            return
        #--------------------------#
        # skip the 2nd header line #
        #--------------------------#
        i += 1
        #-----------------------------------------------------------------------------#
        # read the data records, keeping track of whether we have regular time-series #
        #-----------------------------------------------------------------------------#
        records = []  # (epoch millis, [value strings]) per data record
        sdf = SimpleDateFormat("yyyy-MM-dd HH:mm")
        cal = Calendar.getInstance()
        recordCount = 0
        for i in range(i + 1, len(lines)):
            if not lines[i].strip():
                continue
            fields = lines[i].split("\t")
            if fields[0] != "USGS":
                continue
            if fields[1] != location:
                if outputLevel > NONE:
                    log.output(
                        "*** Unexpected location on data record %d ***" %
                        recordCount)
                if outputLevel > NORMAL:
                    log.output("%s" % lines[i])
                haserror.append(location)
                return
            # track the tz_cd column; re-point the parser's time zone whenever
            # it changes (e.g. a DST transition mid-record-set)
            if not tz:
                tz = fields[tzField]
                timezones["USGS"] = TimeZone.getTimeZone(tzInfo[tz]["JAVA"])
                sdf.setTimeZone(timezones["USGS"])
                if outputLevel > NORMAL:
                    log.output("Initial time zone is %s" % tz)
            else:
                if fields[tzField] != tz:
                    if outputLevel > NORMAL:
                        log.output("Time zone switched from %s to %s" %
                                   (tz, fields[tzField]))
                    tz = fields[tzField]
                    timezones["USGS"] = TimeZone.getTimeZone(
                        tzInfo[tz]["JAVA"])
                    sdf.setTimeZone(timezones["USGS"])
            recordCount += 1
            cal.setTime(sdf.parse(fields[2]))
            valueFields = []
            for j in fieldsToOutput:
                valueFields.append(fields[j])
            records.append((cal.getTimeInMillis(), valueFields))
        if recordCount == 0:
            if outputLevel > NONE:
                log.output("*** No data ***")
            nodata.append(location)
            return
        #--------------------------------------------------#
        # create individual time series for each parameter #
        #--------------------------------------------------#
        intvl = [None for i in range(len(fieldsToOutput))]
        ts = [[] for i in range(len(fieldsToOutput))]
        for i in range(len(records)):
            millis, values = records[i]
            for j in range(len(values)):
                if values[j] != "":
                    ts[j].append((millis, values[j]))
        #--------------------------------------------------------------------#
        # analyze interval for each time series, allowing for missing values #
        #--------------------------------------------------------------------#
        for i in range(len(ts)):
            # histogram of successive-sample spacings, in whole minutes
            intervalCounts = {}
            for j in range(1, len(ts[i])):
                intv = (ts[i][j][0] - ts[i][j - 1][0]) / 60000
                count = intervalCounts.setdefault(intv, 0)
                intervalCounts[intv] = count + 1
            intvs = intervalCounts.keys()
            intvs.sort()
            if len(intvs) == 0:
                #------------------------------------------------#
                # not enough values (2) to determine an interval #
                #------------------------------------------------#
                intvl[i] = IRREGULAR_INTERVAL
            elif len(intvs) == 1:
                #---------------------------------#
                # only one interval found in data #
                #---------------------------------#
                intvl[i] = intvs[0]
            else:
                #----------------------------------#
                # multiple intervals found in data #
                #----------------------------------#
                for j in range(1, len(intvs)):
                    if intvs[j] % intvs[0]:
                        #-------------------------------------------------#
                        # interval is not a multiple of smallest interval #
                        #-------------------------------------------------#
                        intvl[i] = IRREGULAR_INTERVAL
                        break
                if intvl[i] is None:
                    #--------------------------------#
                    # interval still hasn't been set #
                    #--------------------------------#
                    if intervalCounts[intvs[0]] > 3 * max([
                            intervalCounts[intvs[x]]
                            for x in range(1, len(intvs))
                    ]):
                        #-----------------------------------------------------------#
                        # smallest interval accounts for > 75% of intervals, use it #
                        #-----------------------------------------------------------#
                        intvl[i] = intvs[0]
                    else:
                        #------------------------------------------------#
                        # can't determine a predominant regular interval #
                        #------------------------------------------------#
                        intvl[i] = IRREGULAR_INTERVAL
            #------------------------------------------------------------#
            # add in any missing values for regular interval time series #
            #------------------------------------------------------------#
            if intvl[i] != IRREGULAR_INTERVAL:
                # walk backwards so insertions don't disturb earlier indices
                for j in range(1, len(ts[i]))[::-1]:
                    intv = (ts[i][j][0] - ts[i][j - 1][0]) / 60000
                    intervalsToAdd = range(
                        (ts[i][j - 1][0] / 60000) + intvl[i],
                        ts[i][j][0] / 60000, intvl[i])
                    for k in intervalsToAdd[::-1]:
                        # filler samples carry an empty value string
                        ts[i].insert(j, (k * 60000, ""))
        #------------------------------#
        # output and/or store the data #
        #------------------------------#
        for i in range(len(ts)):
            if not intvl[i] or not ts[i]:
                continue
            if outputFormat & SHEF_TEXT:
                outputShefText(location, intvl[i], tz, ts[i], decodeInfo[i])
            if outputFormat & DSS_FILE:
                tsc = makeTimeSeriesContainer(location, intvl[i], tz, ts[i],
                                              decodeInfo[i])
                storeToDss(tsc, location, decodeInfo[i])
            if outputFormat & CWMS_DB:
                storeToCwmsDb(location, intvl[i], tz, ts[i], decodeInfo[i])
        successful.append(location)
) ) continue # This user is using the defer-zimlet searchparams = ZSearchParams( "inid: %s and date:<=+0minute" % (defer_folder_id) ) searchparams.setTypes(ZSearchParams.TYPE_MESSAGE) searchparams.setTimeZone( TimeZone.getTimeZone( sa.getPrefTimeZoneId()[0] ) ) searchparams.setLimit(9999) # Get E-Mails in the defer folder aged today and older results = mailbox.search(searchparams) if results.getHits().size() > 0: logging.info( "Found %d deferred mails" % ( results.getHits().size() )
# # THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS # FOR A PARTICULAR PURPOSE. THIS CODE AND INFORMATION ARE NOT SUPPORTED BY XEBIALABS. # from java.util import Date from java.text import ParseException from java.text import SimpleDateFormat from sets import Set from java.util import TimeZone formatter = SimpleDateFormat( snFormat ) xlrFormat = SimpleDateFormat( xlFormat ) formatter.setTimeZone(TimeZone.getTimeZone(snTimeZone)) startDate = snData[startField] print "------" try: print "Schedule Task=> date = %s" % (startDate) date = formatter.parse(startDate) print "Schedule Task=> date = %s" % (date) release = getCurrentRelease() releaseID = release.id phaseTitle=targetPhase taskTitle=targetTask print "Schedule Task=> Phase / Task = %s / %s" % ( phaseTitle, taskTitle ) phase = phaseApi.searchPhasesByTitle( phaseTitle, releaseID ) print "Schedule Task=> phase = %s" % ( phase ) phaseID = phase[0].id task = taskApi.searchTasksByTitle( taskTitle, phaseTitle, releaseID)
def convert_to_date(time_val):
    """
    Convert ``time_val`` (an object exposing ``.date.time`` epoch millis)
    to a ``java.util.Date``, shifted back by the default time zone's raw
    offset.

    NOTE(review): ``getRawOffset`` is the standard (non-DST) offset -- if
    DST-aware conversion is wanted, confirm with the callers.
    """
    from java.util import Date, TimeZone
    raw_offset_millis = TimeZone.getDefault().getRawOffset()
    epoch_millis = time_val.date.time - raw_offset_millis
    return Date(epoch_millis)
def listADDEImages(server, dataset, descriptor, accounting=DEFAULT_ACCOUNTING, location=None, coordinateSystem=CoordinateSystems.LATLON, place=None, mag=None, position=0, unit=None, day=None, time=None, debug=False, band=None, size=None): """Creates a list of ADDE images. Args: localEntry: Local ADDE dataset. server: ADDE server. dataset: ADDE dataset group name. descriptor: ADDE dataset descriptor. day: Day range. ('begin date', 'end date') time: ('begin time', 'end time') position: Position number. Values may be integers or the string "ALL". (default=0) band: McIDAS band number; only images that have matching band number will be returned. accounting: ('user', 'project number') User and project number required by servers using McIDAS accounting. default = ('idv','0') Returns: ADDE image matching the given criteria, if any. """ user = accounting[0] proj = accounting[1] debug = str(debug).lower() if mag: mag = '&MAG=%s %s' % (mag[0], mag[1]) else: mag = '' if unit: unit = '&UNIT=%s' % (unit) else: unit = '' if place is Places.CENTER: place = '&PLACE=CENTER' elif place is Places.ULEFT: place = '&PLACE=ULEFT' else: # raise ValueError() place = '' if coordinateSystem is CoordinateSystems.LATLON: coordSys = 'LATLON' elif coordinateSystem is CoordinateSystems.AREA or coordinateSystem is CoordinateSystems.IMAGE: coordSys = 'LINELE' else: raise ValueError() if location: location = '%s=%s %s' % (coordSys, location[0], location[1]) else: location = '' if size: if size == 'ALL': size = '&SIZE=99999 99999' else: size = '&SIZE=%s %s' % (size[0], size[1]) else: size = '' if time: time = '&TIME=%s %s I' % (time[0], time[1]) else: time = '' if band: band = '&BAND=%s' % (str(band)) else: band = '&BAND=ALL' tz = TimeZone.getTimeZone("Z") dateFormat = SimpleDateFormat() dateFormat.setTimeZone(tz) dateFormat.applyPattern('yyyyDDD') timeFormat = SimpleDateFormat(); timeFormat.setTimeZone(tz) timeFormat.applyPattern('HH:mm:ss') addeUrlFormat = 
"adde://%(server)s/imagedirectory?&PORT=112&COMPRESS=gzip&USER=%(user)s&PROJ=%(proj)s&VERSION=1&DEBUG=%(debug)s&TRACE=0&GROUP=%(dataset)s&DESCRIPTOR=%(descriptor)s%(band)s%(location)s%(place)s%(size)s%(unit)s%(mag)s%(day)s%(time)s&POS=%(position)s" areaDirectories = [] dates = _normalizeDates(day) if not dates: formatValues = { 'server': server, 'user': user, 'proj': proj, 'debug': debug, 'dataset': dataset, 'descriptor': descriptor, 'band': band, 'location': location, 'place': place, 'size': size, 'unit': unit, 'mag': mag, 'day': '', 'time': time, 'position': position, } url = addeUrlFormat % formatValues print url adl = AreaDirectoryList(url) dirs = adl.getSortedDirs() for areaDirectory in dirs[0]: areaDirectories.append(areaDirectory) else: for date in dates: urlDate = '&DAY=%s' % (date) formatValues = { 'server': server, 'user': user, 'proj': proj, 'debug': debug, 'dataset': dataset, 'descriptor': descriptor, 'band': band, 'location': location, 'place': place, 'size': size, 'unit': unit, 'mag': mag, 'day': urlDate, 'time': time, 'position': position, } url = addeUrlFormat % formatValues print url adl = AreaDirectoryList(url) dirs = adl.getSortedDirs() for areaDirectory in dirs[0]: areaDirectories.append(areaDirectory) temp = _AreaDirectoryList() for i, d in enumerate(areaDirectories): # print i, d.getBands(), d.getSensorType(), d.getCenterLatitude(), d.getCenterLongitude() print i, d nominalTime = d.getNominalTime() tempDay = dateFormat.format(nominalTime, StringBuffer(), FieldPosition(0)).toString() tempTime = timeFormat.format(nominalTime, StringBuffer(), FieldPosition(0)).toString() tempBand = list(d.getBands()) if len(tempBand) == 1: tempBand = tempBand[0] else: # raise Exception pass dt = { 'server': server, 'dataset': dataset, 'descriptor': descriptor, 'band': tempBand, 'debug': debug, 'accounting': accounting, 'day': tempDay, 'time': (tempTime, tempTime), 'coordinateSystem': CoordinateSystems.AREA, 'location': (d.getLines(), d.getElements()), } 
temp.append(dt) return temp