def getSqlDateInGMT(date):
    """Format an epoch-millisecond timestamp as a GMT date string.

    @type: long->str
    @param: date - timestamp
    """
    formatter = SimpleDateFormat()
    formatter.setTimeZone(TimeZone.getTimeZone("GMT"))
    # Mirror the default Java Date.toString() layout
    formatter.applyPattern('EEE MMM dd HH:mm:ss zzz yyyy')
    return formatter.format(Date(date))
def _parseDateString(installDateString):
    """Parse an installation-date string in one of several known formats.

    Each candidate SimpleDateFormat pattern is tried only when its literal
    length matches the input string's length; parsing is done in GMT.

    @param installDateString: raw date string (may be None or empty)
    @return: java.util.Date on success, or None when nothing matched
    """
    installationDateAsDate = None
    if installDateString:
        # 'pattern' instead of the original 'format', which shadowed a builtin
        for pattern in ['yyyyMMdd',
                        'yyyyMMddHHmmss.SSSSSS-000',
                        'EEE dd MMM yyyy HH:mm:ss aa zzz']:
            # cheap pre-filter: pattern length must equal input length
            if len(installDateString) == len(pattern):
                try:
                    # BUG FIX: the original caught java.text.ParseException
                    # without ever importing the 'java' package, which would
                    # raise NameError instead of being handled; import the
                    # exception class explicitly.
                    from java.text import SimpleDateFormat, ParseException
                    from java.util import TimeZone
                    dateFormatter = SimpleDateFormat(pattern)
                    dateFormatter.setTimeZone(TimeZone.getTimeZone("GMT"))
                    installationDateAsDate = dateFormatter.parse(installDateString)
                except ParseException:
                    # could not parse date with this pattern; try the next one
                    pass
    return installationDateAsDate
def __init__(self, name, value):
    """Store *name* and *value*, parsing non-empty values into dates.

    A 10-character value is treated as a plain date (yyyy-MM-dd);
    anything longer is assumed to be an ISO-style datetime in UTC,
    with microseconds stripped first when present (27-character form).
    """
    self._name = name
    if not value:
        # empty/None values are kept untouched
        self._value = value
        return
    if len(value) == 10:
        # date-only format
        self._value = SimpleDateFormat('yyyy-MM-dd').parse(value)
    else:
        # assume datetime format; drop microseconds when necessary
        if len(value) == 27:
            value = '%sZ' % value[:19]
        parser = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")
        parser.setTimeZone(TimeZone.getTimeZone("UTC"))
        self._value = parser.parse(value)
def _parseDateString(installDateString):
    """Parse an installation-date string in one of several known formats.

    Each candidate pattern is tried only when its literal length matches
    the input's length; parsing is done in GMT.  Returns a java.util.Date,
    or None when the input is empty or no pattern matched.
    """
    installationDateAsDate = None
    if installDateString:
        for format in [
                'yyyyMMdd', 'yyyyMMddHHmmss.SSSSSS-000',
                'EEE dd MMM yyyy HH:mm:ss aa zzz'
        ]:
            # cheap pre-filter: pattern length must equal input length
            if len(installDateString) == len(format):
                try:
                    from java.text import SimpleDateFormat
                    from java.util import TimeZone
                    dateFormatter = SimpleDateFormat(format)
                    dateFormatter.setTimeZone(TimeZone.getTimeZone("GMT"))
                    installationDateAsDate = dateFormatter.parse(
                        installDateString)
                # NOTE(review): this assumes the 'java' package is imported
                # at module level; otherwise the except clause itself raises
                # NameError — confirm against the file's imports.
                except java.text.ParseException:
                    # could not parse date
                    # print 'could not parse' + installDateString + ' as ' + format
                    pass
    return installationDateAsDate
def makeTimeSeriesContainer(station, interval, tz, records, decodeInfo):
    """Build a HEC TimeSeriesContainer from (millis, value) records.

    Times are rendered through a SimpleDateFormat in either the configured
    DSS time zone (cached in the module-level ``timezones`` dict) or the
    USGS source time zone, then converted to HecTime integer values.
    Values are scaled by the decode info's DSS_FACTOR; non-numeric values
    become Constants.UNDEFINED.

    Assumes ``records`` is non-empty (startTime/endTime index the first and
    last entries).
    """
    global timezones
    sdf = SimpleDateFormat("ddMMMyyyy, HH:mm")
    if dssTimezone:
        # lazily resolve and cache the configured DSS time zone
        if not timezones["DSS"]:
            timezones["DSS"] = TimeZone.getTimeZone(
                tzInfo[dssTimezone]["JAVA"])
        sdf.setTimeZone(timezones["DSS"])
    else:
        sdf.setTimeZone(timezones["USGS"])
    dd, decodeInfo = decodeInfo
    cal = Calendar.getInstance()
    t = HecTime()
    tsc = TimeSeriesContainer()
    tsc.interval = interval
    times = []
    values = []
    tsc.quality = None
    factor = decodeInfo["DSS_FACTOR"]
    for j in range(len(records)):
        millis, value = records[j]
        cal.setTimeInMillis(millis)
        t.set(sdf.format(cal.getTime()))
        times.append(t.value())
        try:
            values.append(float(value) * factor)
        # BUG FIX: was a bare except, which hid every error (including
        # programming mistakes); only conversion failures should map to
        # the undefined sentinel.
        except (ValueError, TypeError):
            values.append(Constants.UNDEFINED)
    tsc.times = times
    tsc.values = values
    tsc.startTime = times[0]
    tsc.endTime = times[-1]
    tsc.numberValues = len(values)
    tsc.timeZoneID = sdf.getTimeZone().getID()
    tsc.timeZoneRawOffset = sdf.getTimeZone().getRawOffset()
    return tsc
# This list has been explicitly chosen to: # * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE) # * Prefer ECDHE over DHE for better performance # * Prefer any AES-GCM over any AES-CBC for better performance and security # * Then Use HIGH cipher suites as a fallback # * Then Use 3DES as fallback which is secure but slow # * Disable NULL authentication, NULL encryption, MD5 MACs, DSS, and RC4 for # security reasons _RESTRICTED_SERVER_CIPHERS = ( 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:' 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:' '!eNULL:!MD5:!DSS:!RC4' ) _rfc2822_date_format = SimpleDateFormat("MMM dd HH:mm:ss yyyy z", Locale.US) _rfc2822_date_format.setTimeZone(TimeZone.getTimeZone("GMT")) _ldap_rdn_display_names = { # list from RFC 2253 "CN": "commonName", "E": "emailAddress", "L": "localityName", "ST": "stateOrProvinceName", "O": "organizationName", "OU": "organizationalUnitName", "C": "countryName", "STREET": "streetAddress", "DC": "domainComponent", "UID": "userid" }
def listADDEImages(localEntry=None,
                   server=None, dataset=None, descriptor=None,
                   accounting=DEFAULT_ACCOUNTING,
                   location=None,
                   coordinateSystem=CoordinateSystems.LATLON,
                   place=None,
                   mag=None,
                   position=None,
                   unit=None,
                   day=None,
                   time=None,
                   debug=False,
                   band=None,
                   size=None,
                   showUrls=True):
    """Creates a list of ADDE images.

    Args:
        localEntry: Local ADDE dataset.
        server: ADDE server.
        dataset: ADDE dataset group name.
        descriptor: ADDE dataset descriptor.
        day: Day range. ('begin date', 'end date')
        time: ('begin time', 'end time')
        position: Position number. Values may be integers or the string "ALL". (default=0)
        band: McIDAS band number; only images that have matching band number will be returned.
        accounting: ('user', 'project number') User and project number required by servers using McIDAS accounting. default = ('idv','0')

    Returns:
        ADDE image matching the given criteria, if any.
    """
    # A localEntry supplies server/dataset/descriptor as one object;
    # otherwise all three must be given explicitly.
    if localEntry:
        server = localEntry.getAddress()
        dataset = localEntry.getGroup()
        descriptor = localEntry.getDescriptor().upper()
    elif (server is None) or (dataset is None) or (descriptor is None):
        raise TypeError("must provide localEntry or server, dataset, and descriptor values.")

    # Local servers listen on a dynamically assigned port; remote ADDE uses 112.
    if server == "localhost" or server == "127.0.0.1":
        port = EntryStore.getLocalPort()
    else:
        port = "112"

    # server = '%s:%s' % (server, port)

    user = accounting[0]
    proj = accounting[1]
    debug = str(debug).lower()

    # Each optional keyword is rewritten in place into its ADDE URL fragment
    # (empty string when the option is unused).
    if mag:
        mag = '&MAG=%s %s' % (mag[0], mag[1])
    else:
        mag = ''

    if unit:
        # remember the caller's unit; it replaces the server's unit list below
        origUnit = unit
        unit = '&UNIT=%s' % (unit)
    else:
        # origUnit = None
        unit = ''

    if place is Places.CENTER:
        place = '&PLACE=CENTER'
    elif place is Places.ULEFT:
        place = '&PLACE=ULEFT'
    else:
        # raise ValueError()
        place = ''

    if coordinateSystem is CoordinateSystems.LATLON:
        coordSys = 'LATLON'
    elif coordinateSystem is CoordinateSystems.AREA or coordinateSystem is CoordinateSystems.IMAGE:
        coordSys = 'LINELE'
    else:
        raise ValueError()

    if location:
        location = '&%s=%s %s' % (coordSys, location[0], location[1])
    else:
        location = ''

    if size:
        if size == 'ALL':
            size = '&SIZE=99999 99999'
        else:
            size = '&SIZE=%s %s' % (size[0], size[1])
    else:
        size = ''

    if time:
        time = '&TIME=%s %s I' % (time[0], time[1])
    else:
        time = ''

    if band:
        band = '&BAND=%s' % (str(band))
    else:
        band = '&BAND=ALL'

    if position is not None:
        if isinstance(position, int):
            position = '&POS=%s' % (position)
        elif isinstance(position, tuple):
            if len(position) != 2:
                raise ValueError('position range may only contain values for the beginning and end of a range.')
            position = '&POS=%s %s' % (str(position[0]), str(position[1]))
        else:
            position = '&POS=%s' % (str(position).upper())
    else:
        position = '&POS=0'

    # Formatters for the day/time strings placed in the result dicts
    # ('Z' resolves to UTC).
    tz = TimeZone.getTimeZone('Z')

    dateFormat = SimpleDateFormat()
    dateFormat.setTimeZone(tz)
    dateFormat.applyPattern('yyyyDDD')

    timeFormat = SimpleDateFormat();
    timeFormat.setTimeZone(tz)
    timeFormat.applyPattern('HH:mm:ss')

    addeUrlFormat = "adde://%(server)s/imagedirectory?&PORT=%(port)s&COMPRESS=gzip&USER=%(user)s&PROJ=%(proj)s&VERSION=1&DEBUG=%(debug)s&TRACE=0&GROUP=%(dataset)s&DESCRIPTOR=%(descriptor)s%(band)s%(location)s%(place)s%(size)s%(unit)s%(mag)s%(day)s%(time)s%(position)s"

    urls = []
    areaDirectories = []

    # One directory query per normalized day; collect every matching area
    # directory together with the URL that produced it.
    dates = _normalizeDates(day)

    for date in dates:
        formatValues = {
            'server': server,
            'port': port,
            'user': user,
            'proj': proj,
            'debug': debug,
            'dataset': dataset,
            'descriptor': descriptor,
            'band': band,
            'location': location,
            'place': place,
            'size': size,
            'unit': unit,
            'mag': mag,
            'day': date,
            'time': time,
            'position': position,
        }
        url = addeUrlFormat % formatValues
        if showUrls:
            print url
        adl = AreaDirectoryList(url)
        results = adl.getSortedDirs()
        for imageTimes in results:
            for areaDirectory in imageTimes:
                urls.append(url)
                areaDirectories.append(areaDirectory)

    # Flatten each area directory into one metadata dict per band/unit pair.
    temp = _AreaDirectoryList()
    for i, d in enumerate(areaDirectories):
        nominalTime = d.getNominalTime()
        tempDay = str(dateFormat.format(nominalTime, StringBuffer(), FieldPosition(0)))
        tempTime = str(timeFormat.format(nominalTime, StringBuffer(), FieldPosition(0)))
        bandList = list(d.getBands())
        # tempUnitList = list(d.getCalInfo()[0])
        # unitList = tempUnitList[::2]
        # unitDescList = tempUnitList[1::2]
        # calInfo = dict(zip(unitList, unitDescList))
        if unit:
            unitList = [origUnit]
        else:
            # getCalInfo()[0] alternates unit / description; keep the units
            unitList = map(str, list(d.getCalInfo()[0])[::2])
        for band in bandList:
            for calUnit in unitList:
                dt = {
                    'server': server,
                    'dataset': dataset,
                    'descriptor': descriptor,
                    'bandNumber': band,
                    'bandList': bandList,
                    'debug': debug,
                    'accounting': accounting,
                    'day': tempDay,
                    'time': tempTime,
                    'imageSize': (d.getLines(), d.getElements()),
                    'centerLocation': (d.getCenterLatitude(), d.getCenterLongitude()),
                    'resolution': (d.getCenterLatitudeResolution(), d.getCenterLongitudeResolution()),
                    'unitList': unitList,
                    'unitType': calUnit,
                    'bands': bandList,
                    'band-count': d.getNumberOfBands(),
                    'calinfo': map(str, list(d.getCalInfo()[0])),
                    'calibration-scale-factor': d.getCalibrationScaleFactor(),
                    'calibration-type': str(d.getCalibrationType()),
                    'calibration-unit-name': d.getCalibrationUnitName(),
                    'center-latitude': d.getCenterLatitude(),
                    'center-latitude-resolution': d.getCenterLatitudeResolution(),
                    'center-longitude': d.getCenterLongitude(),
                    'center-longitude-resolution': d.getCenterLongitudeResolution(),
                    'directory-block': list(d.getDirectoryBlock()),
                    'elements': d.getElements(),
                    'lines': d.getLines(),
                    'memo-field': str(d.getMemoField()),
                    'nominal-time': DateTime(d.getNominalTime()),
                    'sensor-id': d.getSensorID(),
                    'sensor-type': str(d.getSensorType()),
                    'source-type': str(d.getSourceType()),
                    'start-time': DateTime(d.getStartTime()),
                    'url': urls[i],
                }
                temp.append(dt)
    return temp
def listADDEImageTimes(localEntry=None,
                       server=None, dataset=None, descriptor=None,
                       accounting=DEFAULT_ACCOUNTING,
                       location=None,
                       coordinateSystem=CoordinateSystems.LATLON,
                       place=None,
                       mag=None,
                       position=None,
                       unit=None,
                       day=None,
                       time=None,
                       debug=False,
                       band=None,
                       size=None,
                       showUrls=True):
    """List the unique day/time combinations of matching ADDE images.

    Accepts the same selection keywords as listADDEImages; returns a sorted
    list of {'day': 'yyyyDDD', 'time': 'HH:mm:ss'} dicts (UTC), one per
    distinct nominal time found.
    """
    # A localEntry supplies server/dataset/descriptor as one object;
    # otherwise all three must be given explicitly.
    if localEntry:
        server = localEntry.getAddress()
        dataset = localEntry.getGroup()
        descriptor = localEntry.getDescriptor().upper()
    elif (server is None) or (dataset is None) or (descriptor is None):
        raise TypeError("must provide localEntry or server, dataset, and descriptor values.")

    # Local servers listen on a dynamically assigned port; remote ADDE uses 112.
    if server == "localhost" or server == "127.0.0.1":
        port = EntryStore.getLocalPort()
    else:
        port = "112"

    # server = '%s:%s' % (server, port)

    user = accounting[0]
    proj = accounting[1]
    debug = str(debug).lower()

    # Each optional keyword is rewritten in place into its ADDE URL fragment
    # (empty string when the option is unused).
    if mag:
        mag = '&MAG=%s %s' % (mag[0], mag[1])
    else:
        mag = ''

    if unit:
        origUnit = unit
        unit = '&UNIT=%s' % (unit)
    else:
        # origUnit = None
        unit = ''

    if place is Places.CENTER:
        place = '&PLACE=CENTER'
    elif place is Places.ULEFT:
        place = '&PLACE=ULEFT'
    else:
        # raise ValueError()
        place = ''

    if coordinateSystem is CoordinateSystems.LATLON:
        coordSys = 'LATLON'
    elif coordinateSystem is CoordinateSystems.AREA or coordinateSystem is CoordinateSystems.IMAGE:
        coordSys = 'LINELE'
    else:
        raise ValueError()

    if location:
        location = '&%s=%s %s' % (coordSys, location[0], location[1])
    else:
        location = ''

    if size:
        if size == 'ALL':
            size = '&SIZE=99999 99999'
        else:
            size = '&SIZE=%s %s' % (size[0], size[1])
    else:
        size = ''

    if time:
        time = '&TIME=%s %s I' % (time[0], time[1])
    else:
        time = ''

    if band:
        band = '&BAND=%s' % (str(band))
    else:
        band = '&BAND=ALL'

    if position is not None:
        if isinstance(position, int):
            position = '&POS=%s' % (position)
        elif isinstance(position, tuple):
            if len(position) != 2:
                raise ValueError('position range may only contain values for the beginning and end of a range.')
            position = '&POS=%s %s' % (str(position[0]), str(position[1]))
        else:
            position = '&POS=%s' % (str(position).upper())
    else:
        position = '&POS=0'

    # 'Z' resolves to UTC for the output day/time strings below.
    tz = TimeZone.getTimeZone('Z')

    dateFormat = SimpleDateFormat()
    dateFormat.setTimeZone(tz)
    dateFormat.applyPattern('yyyyDDD')

    timeFormat = SimpleDateFormat();
    timeFormat.setTimeZone(tz)
    timeFormat.applyPattern('HH:mm:ss')

    addeUrlFormat = "adde://%(server)s/imagedirectory?&PORT=%(port)s&COMPRESS=gzip&USER=%(user)s&PROJ=%(proj)s&VERSION=1&DEBUG=%(debug)s&TRACE=0&GROUP=%(dataset)s&DESCRIPTOR=%(descriptor)s%(band)s%(location)s%(place)s%(size)s%(unit)s%(mag)s%(day)s%(time)s%(position)s"

    urls = []
    areaDirectories = []

    # One directory query per normalized day.
    dates = _normalizeDates(day)

    for date in dates:
        formatValues = {
            'server': server,
            'port': port,
            'user': user,
            'proj': proj,
            'debug': debug,
            'dataset': dataset,
            'descriptor': descriptor,
            'band': band,
            'location': location,
            'place': place,
            'size': size,
            'unit': unit,
            'mag': mag,
            'day': date,
            'time': time,
            'position': position,
        }
        url = addeUrlFormat % formatValues
        if showUrls:
            print url
        adl = AreaDirectoryList(url)
        results = adl.getSortedDirs()
        for imageTimes in results:
            for areaDirectory in imageTimes:
                urls.append(url)
                areaDirectories.append(areaDirectory)

    # Deduplicate by nominal time and render each as day/time strings.
    uniques = set()
    times = []

    for d in areaDirectories:
        dt = DateTime(d.getNominalTime())
        if dt not in uniques:
            d = {
                'day': str(dt.formattedString('yyyyDDD', tz)),
                'time': str(dt.formattedString('HH:mm:ss', tz)),
            }
            times.append(d)
            uniques.add(dt)

    uniques = None
    return sorted(times)
def parse(self, stream):
    """Parse an XML weather-forecast stream into a list of Forecast objects.

    Walks the document with an XmlPullParser, tracking the current section
    ("location", "credit", "tabular").  Each <time> element inside the
    tabular section becomes one Forecast carrying place, credit, the
    from/to interval (parsed as UTC), symbol, wind speed and temperature.
    Day/night symbol variants are chosen from the interval midpoint against
    the most recently seen sunrise/sunset times.
    """
    factory = XmlPullParserFactory.newInstance()
    parser = factory.newPullParser()
    parser.setInput(stream, None)
    eventType = parser.getEventType()
    section = ""
    sections = {"location", "credit", "tabular"}
    dateFormat = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss")
    dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"))
    sunrise = Date()
    sunset = Date()
    place = ""
    credit = ""
    forecasts = []
    forecast = Forecast()
    while eventType != XmlPullParser.END_DOCUMENT:
        eventType = parser.next()
        if eventType == XmlPullParser.START_TAG:
            name = parser.getName()
            if name in sections:
                section = name
            elif section != "":
                if name == "name":
                    # advance to the text node carrying the place name
                    while eventType != XmlPullParser.TEXT:
                        eventType = parser.next()
                    place = parser.getText()
                elif name == "link":
                    credit = parser.getAttributeValue(None, "text")
                elif name == "time":
                    # start a fresh forecast for this time interval
                    forecast = Forecast()
                    forecast.place = place
                    forecast.credit = credit
                    from_ = parser.getAttributeValue(None, "from")
                    to_ = parser.getAttributeValue(None, "to")
                    forecast.from_ = dateFormat.parse(from_,
                                                      ParsePosition(0))
                    forecast.to_ = dateFormat.parse(to_, ParsePosition(0))
                elif name == "symbol":
                    forecast.description = parser.getAttributeValue(
                        None, "name")
                    symbol = parser.getAttributeValue(None, "numberEx")
                    # midpoint of the interval, used for day/night selection
                    forecast.midDate = Date(forecast.from_.getTime()/2 + \
                                            forecast.to_.getTime()/2)
                    try:
                        forecast.symbol = self.symbols[symbol]
                        continue
                    except KeyError:
                        pass
                    # no direct match: try the day/night-suffixed variant
                    if self.isDayTime(forecast.midDate, sunrise, sunset):
                        symbol += "d"
                    else:
                        symbol += "n"
                    try:
                        forecast.symbol = self.symbols[symbol]
                    except KeyError:
                        forecast.symbol = -1
                elif name == "windSpeed":
                    forecast.windSpeed = parser.getAttributeValue(
                        None, "name")
                elif name == "temperature":
                    forecast.temperature = parser.getAttributeValue(
                        None, "value")
                    forecast.temperatureUnit = parser.getAttributeValue(
                        None, "unit")
                elif name == "sun":
                    rise = parser.getAttributeValue(None, "rise")
                    sset = parser.getAttributeValue(None, "set")
                    sunrise = dateFormat.parse(rise, ParsePosition(0))
                    sunset = dateFormat.parse(sset, ParsePosition(0))
        elif eventType == XmlPullParser.END_TAG:
            name = parser.getName()
            if name == section and name in sections:
                section = ""
            elif section == "tabular" and name == "time":
                # BUG FIX: was forecasts.add(forecast) — Python lists have
                # no add() method, so the first completed <time> element
                # raised AttributeError.
                forecasts.append(forecast)
    return forecasts
def listADDEImages(server, dataset, descriptor, accounting=DEFAULT_ACCOUNTING, location=None, coordinateSystem=CoordinateSystems.LATLON, place=None, mag=None, position=0, unit=None, day=None, time=None, debug=False, band=None, size=None): """Creates a list of ADDE images. Args: localEntry: Local ADDE dataset. server: ADDE server. dataset: ADDE dataset group name. descriptor: ADDE dataset descriptor. day: Day range. ('begin date', 'end date') time: ('begin time', 'end time') position: Position number. Values may be integers or the string "ALL". (default=0) band: McIDAS band number; only images that have matching band number will be returned. accounting: ('user', 'project number') User and project number required by servers using McIDAS accounting. default = ('idv','0') Returns: ADDE image matching the given criteria, if any. """ user = accounting[0] proj = accounting[1] debug = str(debug).lower() if mag: mag = '&MAG=%s %s' % (mag[0], mag[1]) else: mag = '' if unit: unit = '&UNIT=%s' % (unit) else: unit = '' if place is Places.CENTER: place = '&PLACE=CENTER' elif place is Places.ULEFT: place = '&PLACE=ULEFT' else: # raise ValueError() place = '' if coordinateSystem is CoordinateSystems.LATLON: coordSys = 'LATLON' elif coordinateSystem is CoordinateSystems.AREA or coordinateSystem is CoordinateSystems.IMAGE: coordSys = 'LINELE' else: raise ValueError() if location: location = '%s=%s %s' % (coordSys, location[0], location[1]) else: location = '' if size: if size == 'ALL': size = '&SIZE=99999 99999' else: size = '&SIZE=%s %s' % (size[0], size[1]) else: size = '' if time: time = '&TIME=%s %s I' % (time[0], time[1]) else: time = '' if band: band = '&BAND=%s' % (str(band)) else: band = '&BAND=ALL' tz = TimeZone.getTimeZone("Z") dateFormat = SimpleDateFormat() dateFormat.setTimeZone(tz) dateFormat.applyPattern('yyyyDDD') timeFormat = SimpleDateFormat(); timeFormat.setTimeZone(tz) timeFormat.applyPattern('HH:mm:ss') addeUrlFormat = 
"adde://%(server)s/imagedirectory?&PORT=112&COMPRESS=gzip&USER=%(user)s&PROJ=%(proj)s&VERSION=1&DEBUG=%(debug)s&TRACE=0&GROUP=%(dataset)s&DESCRIPTOR=%(descriptor)s%(band)s%(location)s%(place)s%(size)s%(unit)s%(mag)s%(day)s%(time)s&POS=%(position)s" areaDirectories = [] dates = _normalizeDates(day) if not dates: formatValues = { 'server': server, 'user': user, 'proj': proj, 'debug': debug, 'dataset': dataset, 'descriptor': descriptor, 'band': band, 'location': location, 'place': place, 'size': size, 'unit': unit, 'mag': mag, 'day': '', 'time': time, 'position': position, } url = addeUrlFormat % formatValues print url adl = AreaDirectoryList(url) dirs = adl.getSortedDirs() for areaDirectory in dirs[0]: areaDirectories.append(areaDirectory) else: for date in dates: urlDate = '&DAY=%s' % (date) formatValues = { 'server': server, 'user': user, 'proj': proj, 'debug': debug, 'dataset': dataset, 'descriptor': descriptor, 'band': band, 'location': location, 'place': place, 'size': size, 'unit': unit, 'mag': mag, 'day': urlDate, 'time': time, 'position': position, } url = addeUrlFormat % formatValues print url adl = AreaDirectoryList(url) dirs = adl.getSortedDirs() for areaDirectory in dirs[0]: areaDirectories.append(areaDirectory) temp = _AreaDirectoryList() for i, d in enumerate(areaDirectories): # print i, d.getBands(), d.getSensorType(), d.getCenterLatitude(), d.getCenterLongitude() print i, d nominalTime = d.getNominalTime() tempDay = dateFormat.format(nominalTime, StringBuffer(), FieldPosition(0)).toString() tempTime = timeFormat.format(nominalTime, StringBuffer(), FieldPosition(0)).toString() tempBand = list(d.getBands()) if len(tempBand) == 1: tempBand = tempBand[0] else: # raise Exception pass dt = { 'server': server, 'dataset': dataset, 'descriptor': descriptor, 'band': tempBand, 'debug': debug, 'accounting': accounting, 'day': tempDay, 'time': (tempTime, tempTime), 'coordinateSystem': CoordinateSystems.AREA, 'location': (d.getLines(), d.getElements()), } 
temp.append(dt) return temp
def evaluate(self, times, max_time, origins_csv, destinations_csv,
             csv_writer, split=500, do_merge=False):
    '''
    evaluate the shortest paths between origins and destinations
    uses the routing options set in setup() (run it first!)

    Parameters
    ----------
    times: list of date times, the desired start/arrival times for evaluation
    origins_csv: file with origin points
    destinations_csv: file with destination points
    csv_writer: CSVWriter, configured writer to write results
    do_merge: merge the results over time, only keeping the best connections
    max_time: maximum travel-time in seconds (the smaller this value, the
              smaller the shortest path tree, that has to be created;
              saves processing time)
    '''
    origins = self.otp.loadCSVPopulation(origins_csv, LATITUDE_COLUMN,
                                         LONGITUDE_COLUMN)
    destinations = self.otp.loadCSVPopulation(destinations_csv,
                                              LATITUDE_COLUMN,
                                              LONGITUDE_COLUMN)

    # route from origins or towards destinations depending on arrive_by
    sources = origins if not self.arrive_by else destinations
    n_slices = (sources.size() / split) + 1
    if n_slices > 1:
        print 'Splitting sources into {} part(s) with {} points each part'.format(n_slices, split)

    from_index = 0; to_index = 0; i = 1
    while True:
        # process the sources in slices of *split* points to bound memory use
        if to_index >= sources.size():
            break
        from_index = to_index
        to_index += split
        if to_index >= sources.size():
            to_index = sources.size()
        sliced_sources = sources.get_slice(from_index, to_index)

        if n_slices > 1:
            print('calculating part {}/{}'.format(i, n_slices))
            i += 1

        if not self.arrive_by:
            origins = sliced_sources
        else:
            destinations = sliced_sources
        self.request.setOrigins(origins)
        self.request.setDestinations(destinations)
        self.request.setLogProgress(self.print_every_n_lines)

        if self.arrive_by:
            time_note = ' arrival time '
        else:
            time_note = 'start time '

        # # if evaluation is performed in a time window, routes exceeding the window will be ignored
        # # (worstTime already takes care of this, but the time needed to reach the snapped the OSM point is also taken into account here)
        # if len(times) > 1:
        #     print 'Cutoff set: routes with {}s exceeding the time window ({}) will be ignored (incl. time to reach OSM-net)'.format(time_note, times[-1])
        #     cutoff = times[-1]
        #     self.request.setCutoffTime(cutoff.year, cutoff.month, cutoff.day, cutoff.hour, cutoff.minute, cutoff.second)

        # iterate all times
        results = []  # dimension (if not merged): times x targets (origins resp. destinations)
        sdf = SimpleDateFormat('HH:mm:ss')
        # NOTE(review): "GMT +2" (with a space) is not a valid Java time-zone
        # id; TimeZone.getTimeZone() silently falls back to plain GMT for
        # unknown ids — presumably "GMT+2" was intended; confirm.
        sdf.setTimeZone(TimeZone.getTimeZone("GMT +2"))
        for t, date_time in enumerate(times):
            # compare seconds since epoch (different ways to get it from java/python date)
            epoch = datetime.utcfromtimestamp(0)
            time_since_epoch = (date_time - epoch).total_seconds()

            self.request.setDateTime(date_time.year, date_time.month,
                                     date_time.day, date_time.hour,
                                     date_time.minute, date_time.second)
            # has to be set every time after setting datetime (and also AFTER setting arriveby)
            self.request.setMaxTimeSec(max_time)

            msg = 'Starting evaluation of routes with ' + time_note + date_time.strftime(DATETIME_FORMAT)
            print msg

            results_dt = self.batch_processor.evaluate(self.request)

            # if there already was a calculation: merge it with new results
            if do_merge and len(results) > 0:
                for i, prev_result in enumerate(results[0]):
                    if prev_result is not None:
                        prev_result.merge(results_dt[i])
            #write and append if no merging is needed (saves memory)
            else:
                search_time = sdf.format(date_time)
                csv_writer.write(results_dt,
                                 additional_columns={'search_time': search_time},
                                 append=True)
                for r in results_dt:
                    del(r)

        if do_merge:
            # flatten the results
            results = [r for res in results for r in res]
            csv_writer.write(results, append=False)
# Default MIME-style type tag for stored values.
DefaultValueType = "pointrel:text/utf-8"

# Delimiters used when composing/decomposing identifier strings.
delimiterStartAndEnd = '~'
delimiterMiddle = '-'

pointrelTripleIDPrefix = "pointrel://tripleID/"

# Marker meaning "no value": two end delimiters around a zero length.
EMPTY_MARKER = "%s%s0" % (delimiterStartAndEnd, delimiterStartAndEnd)

DEFAULT_REPOSITORY_EXTENSION = ".pointrel"
DEFAULT_REPOSITORY_NAME = "repository.pointrel"

# Times in ISO 8601
# http://www.cl.cam.ac.uk/~mgk25/iso-time.html
# SimpleDateFormat needs to have a local copy in each thread if multithreaded,
# so should use this in a single thread for now
ISO8601TimestampFormat = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
ISO8601TimestampFormat.setTimeZone(TimeZone.getTimeZone("UTC"))


def stringForTimestamp(timestamp):
    """Format a timestamp as an ISO 8601 UTC string."""
    return ISO8601TimestampFormat.format(timestamp)


def timestampForString(timestampString):
    """Parse an ISO 8601 UTC string back into a Timestamp."""
    date = ISO8601TimestampFormat.parse(timestampString)
    timestamp = Timestamp(date.getTime())
    return timestamp


def generateUniqueReferenceForRepository():
    """Build a globally unique repository reference from a random UUID."""
    randomUUID = UUID.randomUUID()
    newText = RepositoryReferenceScheme + randomUUID.toString()
    return newText


class Record:
def processLocation(location, locationInfo, seq=None, total=None):
    """Retrieve and process USGS tab-delimited data for one location.

    Reads the data either from a local file (when *location* names one,
    the base filename then becomes the location id) or via getData().
    Parses the header and data records, splits them into one time series
    per requested parameter, infers each series' interval (filling gaps
    for regular intervals), and emits the result in the configured output
    formats (USGS text, SHEF text, DSS file and/or CWMS database).
    Outcomes are recorded in the module-level lists: successful, notfound,
    nodata, haserror.
    """
    global timezones, parameters, successful, notfound, nodata, haserror
    if os.path.isfile(location):
        # a filename was passed instead of a site id
        f = open(location)
        data = f.read().strip().replace('\r', '')
        f.close()
        location = os.path.splitext(os.path.basename(location))[0]
    else:
        data = getData(location, seq, total)
    if not data:
        if outputLevel > NONE:
            log.output("*** No data ***")
        nodata.append(location)
        return
    tz = ""
    tzField = None
    paramsToOutput = locationInfo["PARAMETERS"]
    if outputFormat & USGS_TEXT:
        sys.stdout.write(data)
    if data.find("No sites/data found using the selection criteria specified"
                 ) != -1:
        if outputLevel > NONE:
            log.output("*** Site not found ***")
        notfound.append(location)
        notfoundDb[location] = "True"
        return
    if outputFormat & (SHEF_TEXT | DSS_FILE | CWMS_DB):
        if outputLevel > NORMAL:
            log.output(
                "Parsing the following data:\n---------------------------")
            log.output("%s\n" % data)
        #------------------------------------------------------------------------------#
        # read through all the comment lines, and find the location name and time zone #
        #------------------------------------------------------------------------------#
        stationText = "# USGS %s " % location
        lines = data.replace("\r\n", "\n").split("\n")
        for i in range(len(lines)):
            line = lines[i].strip()
            if not line:
                continue
            if line[0] == "#":
                pass
            else:
                break
        else:
            # every line was blank or a comment: nothing to parse
            if outputLevel > NONE:
                log.output("*** No values ***")
            nodata.append(location)
            return
        #-----------------------------------------------------#
        # process the 1st header line, describing field names #
        #-----------------------------------------------------#
        fields = lines[i].split()
        fieldCount = len(fields)
        if fieldCount < 3 or " ".join(
                fields[:3]) != "agency_cd site_no datetime":
            if outputLevel > NONE:
                log.output("*** Unexpected format on header line 1 ***")
                log.output("%s" % lines[i])
            haserror.append(location)
            return
        paramCount = fieldCount - 3
        if paramCount < 1:
            if outputLevel > NONE:
                log.output("*** No parameters ***")
            nodata.append(location)
            return
        paramNames = fields[:]
        fieldsToOutput = []
        decodeInfo = []
        for j in range(3, len(paramNames)):
            if paramNames[j] == "tz_cd":
                tzField = j
            else:
                # field names look like <code>_<parameter>; keep only the
                # parameters we were asked to output
                paramName = paramNames[j].split("_", 1)[1]
                if paramName in paramsToOutput:
                    fieldsToOutput.append(j)
                    try:
                        decodeInfo.append(
                            (paramNames[j][:2], parameters[paramName]))
                    except:
                        if outputLevel > NONE:
                            log.output("*** Unexpected parameter %s ***" %
                                       paramName)
                        haserror.append(location)
                        return
        if not fieldsToOutput:
            if outputLevel > NONE:
                log.output("*** No data ***")
            nodata.append(location)
            return
        if not tzField:
            if outputLevel > NONE:
                log.output("*** No timezone field specified ***")
            haserror.append(location)
            return
        #--------------------------#
        # skip the 2nd header line #
        #--------------------------#
        i += 1
        #-----------------------------------------------------------------------------#
        # read the data records, keeping track of whether we have regular time-series #
        #-----------------------------------------------------------------------------#
        records = []
        sdf = SimpleDateFormat("yyyy-MM-dd HH:mm")
        cal = Calendar.getInstance()
        recordCount = 0
        for i in range(i + 1, len(lines)):
            if not lines[i].strip():
                continue
            fields = lines[i].split("\t")
            if fields[0] != "USGS":
                continue
            if fields[1] != location:
                if outputLevel > NONE:
                    log.output(
                        "*** Unexpected location on data record %d ***" %
                        recordCount)
                if outputLevel > NORMAL:
                    log.output("%s" % lines[i])
                haserror.append(location)
                return
            if not tz:
                # the first record establishes the working time zone
                tz = fields[tzField]
                timezones["USGS"] = TimeZone.getTimeZone(tzInfo[tz]["JAVA"])
                sdf.setTimeZone(timezones["USGS"])
                if outputLevel > NORMAL:
                    log.output("Initial time zone is %s" % tz)
            else:
                if fields[tzField] != tz:
                    # time zone can change mid-record-set (e.g. DST)
                    if outputLevel > NORMAL:
                        log.output("Time zone switched from %s to %s" %
                                   (tz, fields[tzField]))
                    tz = fields[tzField]
                    timezones["USGS"] = TimeZone.getTimeZone(
                        tzInfo[tz]["JAVA"])
                    sdf.setTimeZone(timezones["USGS"])
            recordCount += 1
            cal.setTime(sdf.parse(fields[2]))
            valueFields = []
            for j in fieldsToOutput:
                valueFields.append(fields[j])
            records.append((cal.getTimeInMillis(), valueFields))
        if recordCount == 0:
            if outputLevel > NONE:
                log.output("*** No data ***")
            nodata.append(location)
            return
        #--------------------------------------------------#
        # create individual time series for each parameter #
        #--------------------------------------------------#
        intvl = [None for i in range(len(fieldsToOutput))]
        ts = [[] for i in range(len(fieldsToOutput))]
        for i in range(len(records)):
            millis, values = records[i]
            for j in range(len(values)):
                if values[j] != "":
                    ts[j].append((millis, values[j]))
        #--------------------------------------------------------------------#
        # analyze interval for each time series, allowing for missing values #
        #--------------------------------------------------------------------#
        for i in range(len(ts)):
            # histogram of successive-sample intervals, in minutes
            intervalCounts = {}
            for j in range(1, len(ts[i])):
                intv = (ts[i][j][0] - ts[i][j - 1][0]) / 60000
                count = intervalCounts.setdefault(intv, 0)
                intervalCounts[intv] = count + 1
            intvs = intervalCounts.keys()
            intvs.sort()
            if len(intvs) == 0:
                #------------------------------------------------#
                # not enough values (2) to determine an interval #
                #------------------------------------------------#
                intvl[i] = IRREGULAR_INTERVAL
            elif len(intvs) == 1:
                #---------------------------------#
                # only one interval found in data #
                #---------------------------------#
                intvl[i] = intvs[0]
            else:
                #----------------------------------#
                # multiple intervals found in data #
                #----------------------------------#
                for j in range(1, len(intvs)):
                    if intvs[j] % intvs[0]:
                        #-------------------------------------------------#
                        # interval is not a multiple of smallest interval #
                        #-------------------------------------------------#
                        intvl[i] = IRREGULAR_INTERVAL
                        break
                if intvl[i] is None:
                    #--------------------------------#
                    # interval still hasn't been set #
                    #--------------------------------#
                    if intervalCounts[intvs[0]] > 3 * max([
                            intervalCounts[intvs[x]]
                            for x in range(1, len(intvs))
                    ]):
                        #-----------------------------------------------------------#
                        # smallest interval accounts for > 75% of intervals, use it #
                        #-----------------------------------------------------------#
                        intvl[i] = intvs[0]
                    else:
                        #------------------------------------------------#
                        # can't determine a predominant regular interval #
                        #------------------------------------------------#
                        intvl[i] = IRREGULAR_INTERVAL
            #------------------------------------------------------------#
            # add in any missing values for regular interval time series #
            #------------------------------------------------------------#
            if intvl[i] != IRREGULAR_INTERVAL:
                # walk backwards so inserts don't disturb earlier indices
                for j in range(1, len(ts[i]))[::-1]:
                    intv = (ts[i][j][0] - ts[i][j - 1][0]) / 60000
                    intervalsToAdd = range(
                        (ts[i][j - 1][0] / 60000) + intvl[i],
                        ts[i][j][0] / 60000, intvl[i])
                    for k in intervalsToAdd[::-1]:
                        ts[i].insert(j, (k * 60000, ""))
        #------------------------------#
        # output and/or store the data #
        #------------------------------#
        for i in range(len(ts)):
            if not intvl[i] or not ts[i]:
                continue
            if outputFormat & SHEF_TEXT:
                outputShefText(location, intvl[i], tz, ts[i], decodeInfo[i])
            if outputFormat & DSS_FILE:
                tsc = makeTimeSeriesContainer(location, intvl[i], tz, ts[i],
                                              decodeInfo[i])
                storeToDss(tsc, location, decodeInfo[i])
            if outputFormat & CWMS_DB:
                storeToCwmsDb(location, intvl[i], tz, ts[i], decodeInfo[i])
        successful.append(location)
def date_as_string(date):
    """Render *date* as 'yyyy-MM-dd HH:mm:ss' in UTC."""
    formatter = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    formatter.setTimeZone(TimeZone.getTimeZone("UTC"))
    return formatter.format(date)
# wsadmin (Jython) script for updating an enterprise application EAR.
# NOTE(review): this file is a template — "$spec->{tz}" and
# "$spec->{really_do}" below are Perl placeholders substituted before the
# script is run; the raw file is not valid Python as-is.
import sys, java
from java.util import Date,TimeZone
from java.text import SimpleDateFormat


def log(msg):
    # timestamped stderr logging via the module-level formatter df
    print >> sys.stderr, "=== ["+df.format(Date())+"]", msg


# NOTE(review): under wsadmin, sys.argv holds only the script's arguments
# (no script name), hence argv[0]/argv[1] below — confirm.
if len(sys.argv) != 2:
    print >> sys.stderr, 'update-ear.py: <enterprise-app> <ear-file>'
    sys.exit(1)

# time zone comes from the deployment spec (template placeholder)
tzspec="$spec->{tz}"
tz = TimeZone.getTimeZone(tzspec)
df = SimpleDateFormat("yyyy.MM.dd HH:mm:ss.SSS z")
df.setTimeZone(tz)

appname=sys.argv[0]
appear =sys.argv[1]

options = [ "-update", "-appname", appname, "-update.ignore.new", "-verbose" ]

try:
    log("Installing Application from "+appear)
    AdminApp.install( appear, options )
    log("Installation completed")
    log("Saving configuration")
    # really_do acts as a dry-run switch (template placeholder)
    if $spec->{really_do} == 1:
        AdminConfig.save()
    else:
def listADDEImageTimes(localEntry=None, server=None, dataset=None, descriptor=None, accounting=DEFAULT_ACCOUNTING, location=None, coordinateSystem=CoordinateSystems.LATLON, place=None, mag=None, position=None, unit=None, day=None, time=None, debug=False, band=None, size=None, showUrls=True): if localEntry: server = localEntry.getAddress() dataset = localEntry.getGroup() descriptor = localEntry.getDescriptor().upper() elif (server is None) or (dataset is None) or (descriptor is None): raise TypeError( "must provide localEntry or server, dataset, and descriptor values." ) if server == "localhost" or server == "127.0.0.1": port = EntryStore.getLocalPort() else: port = "112" # server = '%s:%s' % (server, port) user = accounting[0] proj = accounting[1] debug = str(debug).lower() if mag: mag = '&MAG=%s %s' % (mag[0], mag[1]) else: mag = '' if unit: origUnit = unit unit = '&UNIT=%s' % (unit) else: # origUnit = None unit = '' if place is Places.CENTER: place = '&PLACE=CENTER' elif place is Places.ULEFT: place = '&PLACE=ULEFT' else: # raise ValueError() place = '' if coordinateSystem is CoordinateSystems.LATLON: coordSys = 'LATLON' elif coordinateSystem is CoordinateSystems.AREA or coordinateSystem is CoordinateSystems.IMAGE: coordSys = 'LINELE' else: raise ValueError() if location: location = '&%s=%s %s' % (coordSys, location[0], location[1]) else: location = '' if size: if size == 'ALL': size = '&SIZE=99999 99999' else: size = '&SIZE=%s %s' % (size[0], size[1]) else: size = '' if time: time = '&TIME=%s %s I' % (time[0], time[1]) else: time = '' if band: band = '&BAND=%s' % (str(band)) else: band = '&BAND=ALL' if position is not None: if isinstance(position, int): position = '&POS=%s' % (position) elif isinstance(position, tuple): if len(position) != 2: raise ValueError( 'position range may only contain values for the beginning and end of a range.' 
) position = '&POS=%s %s' % (str(position[0]), str(position[1])) else: position = '&POS=%s' % (str(position).upper()) else: position = '&POS=0' tz = TimeZone.getTimeZone('Z') dateFormat = SimpleDateFormat() dateFormat.setTimeZone(tz) dateFormat.applyPattern('yyyyDDD') timeFormat = SimpleDateFormat() timeFormat.setTimeZone(tz) timeFormat.applyPattern('HH:mm:ss') addeUrlFormat = "adde://%(server)s/imagedirectory?&PORT=%(port)s&COMPRESS=gzip&USER=%(user)s&PROJ=%(proj)s&VERSION=1&DEBUG=%(debug)s&TRACE=0&GROUP=%(dataset)s&DESCRIPTOR=%(descriptor)s%(band)s%(location)s%(place)s%(size)s%(unit)s%(mag)s%(day)s%(time)s%(position)s" urls = [] areaDirectories = [] dates = _normalizeDates(day) for date in dates: formatValues = { 'server': server, 'port': port, 'user': user, 'proj': proj, 'debug': debug, 'dataset': dataset, 'descriptor': descriptor, 'band': band, 'location': location, 'place': place, 'size': size, 'unit': unit, 'mag': mag, 'day': date, 'time': time, 'position': position, } url = addeUrlFormat % formatValues if showUrls: print url adl = AreaDirectoryList(url) results = adl.getSortedDirs() for imageTimes in results: for areaDirectory in imageTimes: urls.append(url) areaDirectories.append(areaDirectory) uniques = set() times = [] for d in areaDirectories: dt = DateTime(d.getNominalTime()) if dt not in uniques: d = { 'day': str(dt.formattedString('yyyyDDD', tz)), 'time': str(dt.formattedString('HH:mm:ss', tz)), } times.append(d) uniques.add(dt) uniques = None return sorted(times)
# Pretend to be OpenSSL OPENSSL_VERSION = "OpenSSL 1.0.0 (as emulated by Java SSL)" OPENSSL_VERSION_NUMBER = 0x1000000 OPENSSL_VERSION_INFO = (1, 0, 0, 0, 0) CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED = list(range(3)) # Do not support PROTOCOL_SSLv2, it is highly insecure and it is optional _, PROTOCOL_SSLv3, PROTOCOL_SSLv23, PROTOCOL_TLSv1 = list(range(4)) _PROTOCOL_NAMES = { PROTOCOL_SSLv3: 'SSLv3', PROTOCOL_SSLv23: 'SSLv23', PROTOCOL_TLSv1: 'TLSv1'} _rfc2822_date_format = SimpleDateFormat("MMM dd HH:mm:ss yyyy z", Locale.US) _rfc2822_date_format.setTimeZone(TimeZone.getTimeZone("GMT")) _ldap_rdn_display_names = { # list from RFC 2253 "CN": "commonName", "L": "localityName", "ST": "stateOrProvinceName", "O": "organizationName", "OU": "organizationalUnitName", "C": "countryName", "STREET": "streetAddress", "DC": "domainComponent", "UID": "userid" } _cert_name_types = [
def listADDEImages(localEntry=None, server=None, dataset=None,
                   descriptor=None, accounting=DEFAULT_ACCOUNTING,
                   location=None, coordinateSystem=CoordinateSystems.LATLON,
                   place=None, mag=None, position=None, unit=None, day=None,
                   time=None, debug=False, band=None, size=None,
                   showUrls=True):
    """Creates a list of ADDE images.

    Args:
        localEntry: Local ADDE dataset; when given, overrides server,
            dataset, and descriptor.
        server: ADDE server.
        dataset: ADDE dataset group name.
        descriptor: ADDE dataset descriptor.
        day: Day range. ('begin date', 'end date')
        time: ('begin time', 'end time')
        position: Position number. Values may be integers or the string
            "ALL". (default=0)
        band: McIDAS band number; only images that have matching band
            number will be returned.
        accounting: ('user', 'project number') User and project number
            required by servers using McIDAS accounting.
            default = ('idv','0')
        showUrls: When true, print each ADDE URL before requesting it.

    Returns:
        ADDE image matching the given criteria, if any.

    Raises:
        TypeError: if neither localEntry nor all of server/dataset/
            descriptor are supplied.
        ValueError: for an unsupported coordinateSystem or a position
            tuple that is not a (begin, end) pair.
    """
    # A local entry supplies the connection triple directly; otherwise all
    # three of server/dataset/descriptor must be given explicitly.
    if localEntry:
        server = localEntry.getAddress()
        dataset = localEntry.getGroup()
        descriptor = localEntry.getDescriptor().upper()
    elif (server is None) or (dataset is None) or (descriptor is None):
        raise TypeError(
            "must provide localEntry or server, dataset, and descriptor values."
        )

    # Local servers listen on a dynamically assigned port; remote ADDE
    # servers use the conventional port 112.
    if server == "localhost" or server == "127.0.0.1":
        port = EntryStore.getLocalPort()
    else:
        port = "112"
    # server = '%s:%s' % (server, port)

    user = accounting[0]
    proj = accounting[1]
    debug = str(debug).lower()

    # Each optional keyword is rewritten in place into its ADDE URL
    # fragment; an empty string means "not supplied".
    if mag:
        mag = '&MAG=%s %s' % (mag[0], mag[1])
    else:
        mag = ''

    if unit:
        # The raw unit is kept so it can be reported in each result dict.
        origUnit = unit
        unit = '&UNIT=%s' % (unit)
    else:
        # origUnit = None
        unit = ''

    if place is Places.CENTER:
        place = '&PLACE=CENTER'
    elif place is Places.ULEFT:
        place = '&PLACE=ULEFT'
    else:
        # raise ValueError()
        place = ''

    if coordinateSystem is CoordinateSystems.LATLON:
        coordSys = 'LATLON'
    elif coordinateSystem is CoordinateSystems.AREA or coordinateSystem is CoordinateSystems.IMAGE:
        coordSys = 'LINELE'
    else:
        raise ValueError()

    if location:
        location = '&%s=%s %s' % (coordSys, location[0], location[1])
    else:
        location = ''

    if size:
        if size == 'ALL':
            size = '&SIZE=99999 99999'
        else:
            size = '&SIZE=%s %s' % (size[0], size[1])
    else:
        size = ''

    if time:
        time = '&TIME=%s %s I' % (time[0], time[1])
    else:
        time = ''

    if band:
        band = '&BAND=%s' % (str(band))
    else:
        band = '&BAND=ALL'

    if position is not None:
        if isinstance(position, int):
            position = '&POS=%s' % (position)
        elif isinstance(position, tuple):
            if len(position) != 2:
                raise ValueError(
                    'position range may only contain values for the beginning and end of a range.'
                )
            position = '&POS=%s %s' % (str(position[0]), str(position[1]))
        else:
            position = '&POS=%s' % (str(position).upper())
    else:
        position = '&POS=0'

    # 'Z' == UTC; both formatters below render the image nominal times.
    tz = TimeZone.getTimeZone('Z')

    dateFormat = SimpleDateFormat()
    dateFormat.setTimeZone(tz)
    dateFormat.applyPattern('yyyyDDD')

    timeFormat = SimpleDateFormat()
    timeFormat.setTimeZone(tz)
    timeFormat.applyPattern('HH:mm:ss')

    addeUrlFormat = "adde://%(server)s/imagedirectory?&PORT=%(port)s&COMPRESS=gzip&USER=%(user)s&PROJ=%(proj)s&VERSION=1&DEBUG=%(debug)s&TRACE=0&GROUP=%(dataset)s&DESCRIPTOR=%(descriptor)s%(band)s%(location)s%(place)s%(size)s%(unit)s%(mag)s%(day)s%(time)s%(position)s"

    # Query each requested day; remember the URL for every directory so
    # each result dict can report the request that produced it.
    urls = []
    areaDirectories = []
    dates = _normalizeDates(day)
    for date in dates:
        formatValues = {
            'server': server,
            'port': port,
            'user': user,
            'proj': proj,
            'debug': debug,
            'dataset': dataset,
            'descriptor': descriptor,
            'band': band,
            'location': location,
            'place': place,
            'size': size,
            'unit': unit,
            'mag': mag,
            'day': date,
            'time': time,
            'position': position,
        }
        url = addeUrlFormat % formatValues
        if showUrls:
            print url
        adl = AreaDirectoryList(url)
        results = adl.getSortedDirs()
        for imageTimes in results:
            for areaDirectory in imageTimes:
                urls.append(url)
                areaDirectories.append(areaDirectory)

    # Expand every (area directory, band, calibration unit) combination
    # into one descriptive dict.
    temp = _AreaDirectoryList()
    for i, d in enumerate(areaDirectories):
        nominalTime = d.getNominalTime()
        tempDay = str(
            dateFormat.format(nominalTime, StringBuffer(), FieldPosition(0)))
        tempTime = str(
            timeFormat.format(nominalTime, StringBuffer(), FieldPosition(0)))
        bandList = list(d.getBands())
        # tempUnitList = list(d.getCalInfo()[0])
        # unitList = tempUnitList[::2]
        # unitDescList = tempUnitList[1::2]
        # calInfo = dict(zip(unitList, unitDescList))
        # Caller-requested unit wins; otherwise every even-indexed entry
        # of the calibration info (the unit codes) is offered.
        if unit:
            unitList = [origUnit]
        else:
            unitList = map(str, list(d.getCalInfo()[0])[::2])
        for band in bandList:
            for calUnit in unitList:
                dt = {
                    'server': server,
                    'dataset': dataset,
                    'descriptor': descriptor,
                    'bandNumber': band,
                    'bandList': bandList,
                    'debug': debug,
                    'accounting': accounting,
                    'day': tempDay,
                    'time': tempTime,
                    'imageSize': (d.getLines(), d.getElements()),
                    'centerLocation': (d.getCenterLatitude(),
                                       d.getCenterLongitude()),
                    'resolution': (d.getCenterLatitudeResolution(),
                                   d.getCenterLongitudeResolution()),
                    'unitList': unitList,
                    'unitType': calUnit,
                    'bands': bandList,
                    'band-count': d.getNumberOfBands(),
                    'calinfo': map(str, list(d.getCalInfo()[0])),
                    'calibration-scale-factor': d.getCalibrationScaleFactor(),
                    'calibration-type': str(d.getCalibrationType()),
                    'calibration-unit-name': d.getCalibrationUnitName(),
                    'center-latitude': d.getCenterLatitude(),
                    'center-latitude-resolution': d.getCenterLatitudeResolution(),
                    'center-longitude': d.getCenterLongitude(),
                    'center-longitude-resolution': d.getCenterLongitudeResolution(),
                    'directory-block': list(d.getDirectoryBlock()),
                    'elements': d.getElements(),
                    'lines': d.getLines(),
                    'memo-field': str(d.getMemoField()),
                    'nominal-time': DateTime(d.getNominalTime()),
                    'sensor-id': d.getSensorID(),
                    'sensor-type': str(d.getSensorType()),
                    'source-type': str(d.getSourceType()),
                    'start-time': DateTime(d.getStartTime()),
                    'url': urls[i],
                }
                temp.append(dt)
    return temp
# # THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS # FOR A PARTICULAR PURPOSE. THIS CODE AND INFORMATION ARE NOT SUPPORTED BY XEBIALABS. # from java.util import Date from java.text import ParseException from java.text import SimpleDateFormat from sets import Set from java.util import TimeZone formatter = SimpleDateFormat( snFormat ) xlrFormat = SimpleDateFormat( xlFormat ) formatter.setTimeZone(TimeZone.getTimeZone(snTimeZone)) startDate = snData[startField] print "------" try: print "Schedule Task=> date = %s" % (startDate) date = formatter.parse(startDate) print "Schedule Task=> date = %s" % (date) release = getCurrentRelease() releaseID = release.id phaseTitle=targetPhase taskTitle=targetTask print "Schedule Task=> Phase / Task = %s / %s" % ( phaseTitle, taskTitle ) phase = phaseApi.searchPhasesByTitle( phaseTitle, releaseID ) print "Schedule Task=> phase = %s" % ( phase ) phaseID = phase[0].id task = taskApi.searchTasksByTitle( taskTitle, phaseTitle, releaseID)
idpHost = props.get('shib.idp.host') idpPort = props.getInt('shib.idp.port', 0) if idpPort == 0: if httpScheme == 'http': idpPort = 80 if httpScheme == 'https': idpPort = 443 idpPath = props.get('shib.idp.path') idpBaseUrl = httpScheme + '://' + idpHost + ':' + str(idpPort) idpSSOEndpoint = idpBaseUrl + idpPath + '/profile/SAML2/POST/SSO' spId = props.get('shib.sp.id') utctz = TimeZone.getTimeZone("UTC") dateFormat = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'") dateFormat.setTimeZone(utctz) def executeSSOTest(): reqInstant = dateFormat.format(GregorianCalendar(utctz).getTime()) reqId = UUID.randomUUID().toString() authnRequest = props.get('shib.sp.authnreq') % (idpSSOEndpoint, reqId, reqInstant, spId) webClient = WebClient() webClient.setUseInsecureSSL(True) listreq = ArrayList() listreq.add( NameValuePair( "SAMLRequest", String(Base64.encodeBase64(String(authnRequest).getBytes()),