def setMembershipTime(session, groupName, memberName, enable_str=None, expire_str=None):
    """
    Set the enable and expiration dates for a group member.
    :param session:`The grouper session`
    :param groupName:`The fully qualified group name`
    :param memberName:`The fully qualified member name`
    :param enable_str:`The enable date/time in yyyy-mm-dd HH:MM:SS format`
    :param expire_str:`The expiration date/time in yyyy-mm-dd HH:MM:SS format`
    """
    fmt = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")

    # Convert an optional date string to epoch milliseconds (None passes through).
    def _to_millis(text):
        if text is None:
            return None
        return fmt.parse(text).getTime()

    enable_millis = _to_millis(enable_str)
    expire_millis = _to_millis(expire_str)

    grp = jython_grouper.getGroup(session, groupName)
    for membership in grp.memberships.toArray():
        if membership.member.name == memberName:
            membership.enabledTimeDb = enable_millis
            membership.disabledTimeDb = expire_millis
            membership.update()
            return True
    # No membership matched the requested member name.
    return False
def setupjob(job, args):
    """
    Set up a job to run on telemetry date ranges using data from HBase.

    Telemetry jobs expect two arguments, startdate and enddate, both in
    yyyymmdd format.
    """
    import java.text.SimpleDateFormat as SimpleDateFormat
    import java.util.Calendar as Calendar
    import com.mozilla.hadoop.hbase.mapreduce.MultiScanTableMapReduceUtil as MSTMRU
    import com.mozilla.util.Pair

    if len(args) != 2:
        raise Exception("Usage: <startdate-YYYYMMDD> <enddate-YYYYMMDD>")

    fmt = SimpleDateFormat(dateformat)
    start = Calendar.getInstance()
    start.setTime(fmt.parse(args[0]))
    end = Calendar.getInstance()
    end.setTime(fmt.parse(args[1]))

    columns = [com.mozilla.util.Pair('data', 'json')]
    scans = MSTMRU.generateBytePrefixScans(start, end, dateformat, columns, 500, False)
    MSTMRU.initMultiScanTableMapperJob('telemetry', scans, None, None, None, job)

    # inform HadoopDriver about the columns we expect to receive
    job.getConfiguration().set("org.mozilla.jydoop.hbasecolumns", "data:json")
def hbase_setupjob(job, args):
    """
    Set up a job to run on telemetry date ranges using data from HBase.

    Telemetry jobs expect two arguments, startdate and enddate, both in
    yyyymmdd format.
    """
    import java.text.SimpleDateFormat as SimpleDateFormat
    import java.util.Calendar as Calendar
    import com.mozilla.hadoop.hbase.mapreduce.MultiScanTableMapReduceUtil as MSTMRU
    import com.mozilla.util.Pair

    if len(args) != 2:
        raise Exception("Usage: <startdate-YYYYMMDD> <enddate-YYYYMMDD>")

    parser = SimpleDateFormat(dateformat)

    # Turn a yyyymmdd argument into a Calendar positioned at that date.
    def _as_calendar(text):
        cal = Calendar.getInstance()
        cal.setTime(parser.parse(text))
        return cal

    startdate = _as_calendar(args[0])
    enddate = _as_calendar(args[1])

    scans = MSTMRU.generateBytePrefixScans(
        startdate, enddate, dateformat,
        [com.mozilla.util.Pair('data', 'json')], 500, False)
    MSTMRU.initMultiScanTableMapperJob('telemetry', scans, None, None, None, job)

    # inform HadoopDriver about the columns we expect to receive
    job.getConfiguration().set("org.mozilla.jydoop.hbasecolumns", "data:json")
def setupjob(job, args):
    """
    Set up a job to run on a date range of directories.

    Jobs expect three arguments: a testpilot study name plus startdate and
    enddate, the dates in yyyy-MM-dd format.
    """
    import java.text.SimpleDateFormat as SimpleDateFormat
    import java.util.Date as Date
    import java.util.Calendar as Calendar
    import com.mozilla.util.DateUtil as DateUtil
    import com.mozilla.util.DateIterator as DateIterator
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat as FileInputFormat
    import org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat as MyInputFormat

    if len(args) != 3:
        raise Exception("Usage: <testpilot_study> <startdate-YYYY-MM-DD> <enddate-YYYY-MM-DD>")

    # Collects every visited day (as epoch millis) from DateUtil.iterateByDay.
    class DayCollector(DateIterator):
        def __init__(self):
            self._days = []

        def get(self):
            return self._days

        def see(self, aTime):
            self._days.append(aTime)

    fmt = SimpleDateFormat(dateformat)
    study = args[0]
    start = Calendar.getInstance()
    start.setTime(fmt.parse(args[1]))
    end = Calendar.getInstance()
    end.setTime(fmt.parse(args[2]))

    collector = DayCollector()
    DateUtil.iterateByDay(start.getTimeInMillis(), end.getTimeInMillis(), collector)

    # One input path per day, formatted with the module-level pathformat.
    paths = [pathformat % (study, fmt.format(Date(millis))) for millis in collector.get()]

    job.setInputFormatClass(MyInputFormat)
    FileInputFormat.setInputPaths(job, ",".join(paths))
    job.getConfiguration().set("org.mozilla.jydoop.mappertype", "TEXT")
def stringToDate(value):
    """
    Parse *value* using the pattern "dd MMM yyyy HH:mm".

    Information about how to build your own date pattern is here:
    http://java.sun.com/j2se/1.5.0/docs/api/java/text/SimpleDateFormat.html
    """
    return SimpleDateFormat("dd MMM yyyy HH:mm").parse(value)
def processCertInfo():
    """
    Scan logs/expiringCerts.txt for certificates nearing expiry and write an
    alert line for each to logs/cert.log.

    Relies on module-level configuration: alertDays, keystoresList and
    certAliasExcptList.
    """
    certInfo = {}
    dtFormat = SimpleDateFormat("E MMM dd HH:mm:ss yyyy")
    inFile = open('logs/expiringCerts.txt')
    logFile = open("logs/cert.log", "w")
    try:
        for line in inFile:
            if 'App Stripe' in line:
                certInfo['appStripe'] = line.split('=')[1].strip()
            if 'Keystore' in line:
                certInfo['keystore'] = line.split('=')[1].strip()
            if 'Alias' in line:
                certInfo['alias'] = line.split('=')[1].strip()
            if 'Certificate status' in line:
                certInfo['status'] = line.split('=')[1].strip()
            if 'Expiration Date' in line:
                # The Expiration Date record marks the end of one certificate's
                # info, so the current certificate is processed here.
                certInfo['expiryDt'] = line.split('=')[1].strip()
                # Remove "UTC " so SimpleDateFormat can parse the string.
                expiryDtFmtd = str(certInfo['expiryDt']).replace("UTC ", "")
                expiryDtObj = dtFormat.parse(expiryDtFmtd)
                currDtObj = Date()  # current date
                timeDiff = expiryDtObj.getTime() - currDtObj.getTime()
                daysBetween = (timeDiff / (1000 * 60 * 60 * 24))
                # Only alert for keystores mentioned in the properties file,
                # skipping explicitly excluded aliases.
                if daysBetween >= 0 and daysBetween <= int(alertDays) \
                        and certInfo['keystore'] in keystoresList \
                        and certInfo['alias'] not in certAliasExcptList:
                    logFile.write("Certificate in app stripe \"" +
                                  certInfo['appStripe'] + "\" and keystore \"" +
                                  certInfo['keystore'] + "\" with alias name \"" +
                                  certInfo['alias'] + "\" is expiring on " +
                                  certInfo['expiryDt'] + "\n")
                    logFile.write("\n")
                certInfo = {}
    finally:
        # FIX: the original never closed the input file and leaked the log
        # handle if parsing raised part-way through.
        inFile.close()
        logFile.close()
def flowFileDates(hmsRunName):
    # will not be accurate if there is more than one set of data--specify
    # start date in config file?
    """Find the dates of any FLOW files in the DSS catalog"""
    from java.text import SimpleDateFormat
    dateFormat = SimpleDateFormat("ddMMMyyyy")
    print("Getting dates from " + hmsRunName + "...")
    dates = []
    flowFiles = filter(
        lambda f: ((f.split('/')[3] == 'FLOW') and
                   (f.split('/')[6] == ('RUN:' + hmsRunName.upper()))),
        dss.getCatalogedPathnames())
    candidateDates = map(lambda x: x.split('/')[4], flowFiles)
    for d in candidateDates:
        # Only accept the strict ddMMMyyyy shape (e.g. 01JAN2001).
        if d[0:2].isdigit() and d[2:5].isalpha() and d[5:9].isdigit():
            date = dateFormat.parse(d)
            # BUG FIX: builtin any() takes a single iterable; the original
            # passed (lambda, dates) as two arguments, which raises TypeError.
            dateAlreadyFound = any(x.equals(date) for x in dates)
            if not dateAlreadyFound:
                dates.append(date)
    dates.sort(lambda a, b: a.compareTo(b))
    return map(lambda d: dateFormat.format(d).upper(), dates)
def stringToDate(value):
    """
    Convert a "dd MMM yyyy HH:mm" string into a java.util.Date.

    Pattern reference:
    http://java.sun.com/j2se/1.5.0/docs/api/java/text/SimpleDateFormat.html
    """
    parser = SimpleDateFormat("dd MMM yyyy HH:mm")
    return parser.parse(value)
def _parseStartDate(self, date):
    r'@types: str -> java.util.Date or None'
    try:
        return SimpleDateFormat("HHmmss yyyyMMdd").parse(date)
    except:
        # Parse failures are logged; None is returned implicitly.
        logger.warnException('Failed to convert start date: %s'
                             ' to HHmmss yyyyMMdd' % date)
def parse_time(time):
    r'@types: str->java.util.Date or None'
    try:
        from java.text import SimpleDateFormat
        return SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse(time)
    except:
        # Failure is logged; None is returned implicitly.
        logger.debug('Failed to parse time: %s' % time)
def setupjob(job, args):
    """
    Set up a job to run on a date range of directories.

    Jobs expect three arguments: a testpilot study name plus startdate and
    enddate, the dates in yyyy-MM-dd format.
    """
    # Jython: pull in the Java/Hadoop classes lazily, inside the function.
    import java.text.SimpleDateFormat as SimpleDateFormat
    import java.util.Date as Date
    import java.util.Calendar as Calendar
    import com.mozilla.util.DateUtil as DateUtil
    import com.mozilla.util.DateIterator as DateIterator
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat as FileInputFormat
    import org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat as MyInputFormat
    if len(args) != 3:
        raise Exception("Usage: <testpilot_study> <startdate-YYYY-MM-DD> <enddate-YYYY-MM-DD>")

    # use to collect up each date in the given range
    class MyDateIterator(DateIterator):
        def __init__(self):
            self._list = []

        def get(self):
            # Epoch-millisecond timestamps collected so far.
            return self._list

        def see(self, aTime):
            self._list.append(aTime)

    # 'dateformat' and 'pathformat' are module-level settings.
    sdf = SimpleDateFormat(dateformat)
    study = args[0]
    startdate = Calendar.getInstance()
    startdate.setTime(sdf.parse(args[1]))
    enddate = Calendar.getInstance()
    enddate.setTime(sdf.parse(args[2]))
    dates = MyDateIterator()
    # Visit every day between start and end, collecting each one.
    DateUtil.iterateByDay(startdate.getTimeInMillis(), enddate.getTimeInMillis(), dates)
    paths = []
    for d in dates.get():
        # One input path per day in the range.
        paths.append(pathformat % (study, sdf.format(Date(d))))
    job.setInputFormatClass(MyInputFormat)
    FileInputFormat.setInputPaths(job, ",".join(paths));
    job.getConfiguration().set("org.mozilla.jydoop.mappertype", "TEXT")
def parse_time(time):
    r'@types: str->java.util.Date or None'
    try:
        from java.text import SimpleDateFormat
        formatter = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
        parsed = formatter.parse(time)
        return parsed
    except:
        # Failure is logged; None is returned implicitly.
        logger.debug('Failed to parse time: %s' % time)
def _parseStartDate(self, date):
    r'@types: str -> java.util.Date or None'
    # Parse a "HHmmss yyyyMMdd" string (e.g. "231500 20120131"). On any
    # failure the exception is logged and None is returned implicitly.
    try:
        dateFormat = SimpleDateFormat("HHmmss yyyyMMdd")
        return dateFormat.parse(date)
    except:
        logger.warnException('Failed to convert start date: %s'
                             ' to HHmmss yyyyMMdd' % date)
def _parseDate(self, dateStr):
    # Try each known timestamp layout in turn; the first successful parse wins.
    patterns = ("MM/dd/yyyy HH:mm:ss", "MM.dd.yyyy HH:mm:ss", "yyyyMMddHHmmss")
    for pattern in patterns:
        try:
            logger.debug('Trying to parse date string %s with pattern %s' % (dateStr, pattern))
            return SimpleDateFormat(pattern).parse(dateStr)
        except:
            logger.warn('Failed parsing date %s with date format %s' % (dateStr, pattern))
def getFieldAsDate(field, pattern):
    """Return *field* as a java.util.Date, parsing strings with *pattern*.

    Dates pass through untouched; anything else yields None.
    """
    if isinstance(field, Date):
        return field
    if isinstance(field, unicode):
        return SimpleDateFormat(pattern).parse(field)
    return None
def findTimestamp(self, dateTup, timeTup): str_date = dateTup + " " + timeTup + " GMT" formatter = SimpleDateFormat("dd MMM yyyy HH:mm:ss z") p_date = formatter.parse(str_date) parsed_year = p_date.getYear() if parsed_year > 3000: print "parsed year is too large: " + str(parsed_year) return 0 else: sc_tmstmp = Timestamp(p_date.getTime()) return sc_tmstmp.getTime() / 1000
def findTimestamp (self, dateTup, timeTup): str_date = dateTup + " " + timeTup + " GMT" formatter = SimpleDateFormat("dd MMM yyyy HH:mm:ss z") p_date = formatter.parse(str_date) parsed_year = p_date.getYear() if parsed_year > 3000: print "parsed year is too large: " + str(parsed_year) return 0 else: sc_tmstmp = Timestamp (p_date.getTime()) return sc_tmstmp.getTime() / 1000
def createSimpleTask(phaseId, taskTypeValue, title, propertyMap):
    """
    Create a task of the given type in a phase, copying properties from
    propertyMap. Properties whose name contains "date" are parsed from
    "yyyy-MM-dd HH:mm:ss" strings when non-empty; other properties are
    copied through unchanged.
    """
    parenttaskType = Type.valueOf(taskTypeValue)
    parentTask = parenttaskType.descriptor.newInstance("nonamerequired")
    parentTask.setTitle(title)
    # BUG FIX: the pattern used "hh" (1-12 clock hour, which needs an am/pm
    # marker); "HH" is the 0-23 hour-of-day field these strings carry.
    sdf = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    for item in propertyMap:
        if item.lower().find("date") > -1:
            if propertyMap[item] is not None and len(propertyMap[item]) != 0:
                parentTask.setProperty(item, sdf.parse(propertyMap[item]))
        else:
            parentTask.setProperty(item, propertyMap[item])
    taskApi.addTask(phaseId, parentTask)
def createSimpleTask(phaseId, taskTypeValue, title, propertyMap):
    """
    Create a task of the given type in a phase, copying properties from
    propertyMap. Properties whose name contains "date" are parsed from
    "yyyy-MM-dd HH:mm:ss" strings when non-empty; other properties are
    copied through unchanged.
    """
    parenttaskType = Type.valueOf(taskTypeValue)
    parentTask = parenttaskType.descriptor.newInstance("nonamerequired")
    parentTask.setTitle(title)
    # BUG FIX: the pattern used "hh" (1-12 clock hour, which needs an am/pm
    # marker); "HH" is the 0-23 hour-of-day field these strings carry.
    sdf = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    for item in propertyMap:
        if item.lower().find("date") > -1:
            if propertyMap[item] is not None and len(propertyMap[item]) != 0:
                parentTask.setProperty(item, sdf.parse(propertyMap[item]))
        else:
            parentTask.setProperty(item, propertyMap[item])
    taskApi.addTask(phaseId, parentTask)
def _parseDate(self, dateStr):
    # Try each known timestamp layout in turn; the first successful parse is
    # returned. If none match, None is returned implicitly.
    for parsePattern in [
            "MM/dd/yyyy HH:mm:ss", "MM.dd.yyyy HH:mm:ss", "yyyyMMddHHmmss"
    ]:
        try:
            logger.debug('Trying to parse date string %s with pattern %s' %
                         (dateStr, parsePattern))
            dateFormat = SimpleDateFormat(parsePattern)
            return dateFormat.parse(dateStr)
        except:
            # Bare except: a Java ParseException is expected here, but any
            # other error is also swallowed and merely logged.
            logger.warn('Failed parsing date %s with date format %s' %
                        (dateStr, parsePattern))
def getPendingUpdates(self, oid):
    # Return the list of parked-version timestamps newer than the object's
    # last_modified date. Side effects: populates self.allUpdates,
    # self.pendingUpdateSize and self.allUpdateSize.
    storage = self.Services.getStorage()
    object = storage.getObject(oid)
    # Parked versions are indexed in a JSON file alongside the object payload.
    indexFile = File(object.getPath() + "/parked_Version_Index.json")
    self.pendingUpdates = []
    self.allUpdates = []
    if indexFile.exists():
        # Timestamps are compact yyyyMMddHHmmss strings.
        dateFormatter = SimpleDateFormat("yyyyMMddHHmmss")
        modifiedDate = dateFormatter.parse(object.getMetadata().getProperty("last_modified"))
        parkedVersions = JsonSimple(indexFile).getJsonArray()
        for version in parkedVersions:
            ts = version.get("timestamp")
            versionDate = dateFormatter.parse(ts)
            self.allUpdates.append(ts)
            # Only versions parked after the last modification are pending.
            if versionDate.after(modifiedDate):
                self.pendingUpdates.append(ts)
    object.close()
    self.pendingUpdateSize = len(self.pendingUpdates)
    self.allUpdateSize = len(self.allUpdates)
    return self.pendingUpdates
def tail_logfile(log_file, from_time): from java.text import SimpleDateFormat import re pattern = re.compile("\[(.*)\]") formatter = SimpleDateFormat("dd/MMM/yy:H:m:s Z") for line in log_file[1].split('\n'): mymatch = pattern.match(line) if mymatch: timestamp = mymatch.group(1) timestamp_object = formatter.parse(timestamp) if date_compare(from_time, timestamp_object) == 'Less': print line
def tail_logfile(log_file, from_time):
    # Print every line of log_file[1] whose leading [timestamp] parses to a
    # date that date_compare reports as after from_time.
    from java.text import SimpleDateFormat
    import re
    pattern = re.compile("\[(.*)\]")
    formatter = SimpleDateFormat("dd/MMM/yy:H:m:s Z")
    for line in log_file[1].split('\n'):
        mymatch = pattern.match(line)
        if mymatch:
            timestamp = mymatch.group(1)
            timestamp_object = formatter.parse(timestamp)
            # NOTE(review): presumably date_compare returns 'Less' when
            # from_time precedes the line's timestamp -- confirm at its
            # definition.
            if date_compare(from_time, timestamp_object) == 'Less':
                print line
def setupjob(job, args):
    """
    Set up a job to run on crash-stats date ranges.

    Expects two arguments:
      startdate (yymmdd)
      enddate (yymmdd)
    """
    import java.text.SimpleDateFormat as SimpleDateFormat
    import java.util.Calendar as Calendar
    import com.mozilla.hadoop.hbase.mapreduce.MultiScanTableMapReduceUtil as MSTMRU
    from com.mozilla.util import Pair
    if len(args) != 2:
        raise Exception("Usage: <startdate-yymmdd> <enddate-yymmdd>")
    startarg, endarg = args

    # 'dateformat' and 'columnlist' are module-level settings.
    sdf = SimpleDateFormat(dateformat)
    startdate = Calendar.getInstance()
    startdate.setTime(sdf.parse(startarg))
    enddate = Calendar.getInstance()
    enddate.setTime(sdf.parse(endarg))

    columns = [Pair(family, qualifier) for family, qualifier in columnlist]
    scans = MSTMRU.generateHexPrefixScans(startdate, enddate, dateformat,
                                          columns, 500, False)
    MSTMRU.initMultiScanTableMapperJob(
        'crash_reports', scans, None, None, None, job)

    # inform HadoopDriver about the columns we expect to receive
    job.getConfiguration().set("org.mozilla.jydoop.hbasecolumns",
                               ','.join(':'.join(column) for column in columnlist))
def setupjob(job, args):
    """
    Set up a job to run on crash-stats date ranges.

    Expects two arguments:
      startdate (yymmdd)
      enddate (yymmdd)
    """
    import java.text.SimpleDateFormat as SimpleDateFormat
    import java.util.Calendar as Calendar
    import com.mozilla.hadoop.hbase.mapreduce.MultiScanTableMapReduceUtil as MSTMRU
    from com.mozilla.util import Pair
    if len(args) != 2:
        raise Exception("Usage: <startdate-yymmdd> <enddate-yymmdd>")
    startarg, endarg = args

    # 'dateformat' and 'columnlist' are module-level settings.
    sdf = SimpleDateFormat(dateformat)
    startdate = Calendar.getInstance()
    startdate.setTime(sdf.parse(startarg))
    enddate = Calendar.getInstance()
    enddate.setTime(sdf.parse(endarg))

    columns = [Pair(family, qualifier) for family, qualifier in columnlist]
    scans = MSTMRU.generateHexPrefixScans(startdate, enddate, dateformat,
                                          columns, 500, False)
    MSTMRU.initMultiScanTableMapperJob(
        'crash_reports', scans, None, None, None, job)

    # inform HBaseDriver about the columns we expect to receive
    job.getConfiguration().set("org.mozilla.jydoop.hbasecolumns",
                               ','.join(':'.join(column) for column in columnlist))
def main(*args): layer = gvsig.currentLayer() store = layer.getFeatureStore() fset = store.getFeatureSet() for feature in fset: field = feature.get("HORA") pattern = "HH:mm:ss" #field = "23:10:10" formatter = SimpleDateFormat(pattern) newDate = formatter.parse(field) cal = Calendar.getInstance() cal.setTime(newDate) hour = cal.get(Calendar.HOUR_OF_DAY) print hour
def _parseDateString(installDateString):
    # Parse an install-date string in one of several known layouts, picking
    # the candidate format by exact string length. Returns a java.util.Date
    # (interpreted as GMT) or None when nothing matches.
    installationDateAsDate = None
    if installDateString:
        for format in ['yyyyMMdd', 'yyyyMMddHHmmss.SSSSSS-000',
                       'EEE dd MMM yyyy HH:mm:ss aa zzz']:
            # Length match is the cheap pre-filter before attempting a parse.
            if len(installDateString) == len(format):
                try:
                    from java.text import SimpleDateFormat
                    from java.util import TimeZone
                    dateFormatter = SimpleDateFormat(format)
                    dateFormatter.setTimeZone(TimeZone.getTimeZone("GMT"))
                    installationDateAsDate = dateFormatter.parse(installDateString)
                except java.text.ParseException:
                    # could not parse date
                    # print 'could not parse' + installDateString + ' as ' + format
                    pass
    return installationDateAsDate
def __init__(self, name, value):
    """Store *name*, converting date/datetime strings in *value* to Dates."""
    self._name = name
    if not value:
        # Empty/None values are stored untouched.
        self._value = value
        return
    if len(value) == 10:
        # Plain date: yyyy-MM-dd.
        self._value = SimpleDateFormat('yyyy-MM-dd').parse(value)
        return
    # Datetime: drop microseconds when present, then parse as UTC.
    if len(value) == 27:
        value = '%sZ' % value[:19]
    sdf = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")
    sdf.setTimeZone(TimeZone.getTimeZone("UTC"))
    self._value = sdf.parse(value)
def createSimpleTask(phaseId, taskTypeValue, title, propertyMap):
    """
    Create a simple task in the given phase.

    Properties whose name contains "date" are parsed from
    "yyyy-MM-dd HH:mm:ss" strings when non-empty; other properties are
    copied through unchanged.
    """
    parentTask = taskApi.newTask(taskTypeValue)
    parentTask.title = title
    parentTask.description = title
    # BUG FIX: the pattern used "hh" (1-12 clock hour, which needs an am/pm
    # marker); "HH" is the 0-23 hour-of-day field these strings carry.
    sdf = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    for item in propertyMap:
        if item.lower().find("date") > -1:
            if propertyMap[item] is not None and len(propertyMap[item]) != 0:
                parentTask.pythonScript.setProperty(item, sdf.parse(propertyMap[item]))
        else:
            parentTask.pythonScript.setProperty(item, propertyMap[item])
    taskApi.addTask(phaseId, parentTask)
def _parseSoftwareComponent(self, softwareComponentNode):
    """
    Build a SoftwareComponent from an XML node, coercing numeric fields and
    parsing the APPLIED timestamp (None when unparsable).
    """
    name = self._evalToString('NAME/@value', softwareComponentNode)
    vendor = self._evalToString('VENDOR/@value', softwareComponentNode)
    release = self._evalToString('RELEASE/@value', softwareComponentNode)
    serviceLevel = self._evalToString('SERVICELEVEL/@value', softwareComponentNode)
    if serviceLevel.isdigit():
        serviceLevel = int(serviceLevel)
    patchLevel = self._evalToString('PATCHLEVEL/@value', softwareComponentNode)
    if patchLevel.isdigit():
        patchLevel = int(patchLevel)
    counter = self._evalToString('COUNTER/@value', softwareComponentNode)
    provider = self._evalToString('PROVIDER/@value', softwareComponentNode)
    location = self._evalToString('LOCATION/@value', softwareComponentNode)
    applied = self._evalToString('APPLIED/@value', softwareComponentNode)
    try:
        # BUG FIX: the original pattern 'yyyyMMDDHHmmss' used 'DD' (day of
        # year); 'dd' (day of month) is what a yyyyMMdd timestamp contains.
        dateFormat = SimpleDateFormat('yyyyMMddHHmmss')
        applied = dateFormat.parse(applied)
    except:
        logger.warnException('Failed to parse applied date')
        applied = None
    return self.SoftwareComponent(name, vendor, release, serviceLevel,
                                  patchLevel, counter, provider, location,
                                  applied)
def parseSuccess(self, output): """ @types: str -> [DbSchemaResult] or [] """ result = [] format_ = SimpleDateFormat("yyyy-MM-dd HH:mm:ss Z") for line in self.stripHeader(output): line = line.strip() if line: line = line.strip("|") schemaInfo = re.split("\s*\|\s*", line) if len(schemaInfo) == 5: dateStr = "%s %s %s" % (schemaInfo[2], schemaInfo[3], self.__getTimeZone(schemaInfo[4])) date = None try: date = format_.parse(dateStr) except ParseException, ex: logger.debugException(ex.getMessage()) logger.warn('Cannot parse date "%s": %s' % (dateStr, ex.getMessage())) result.append(DbSchemaResult(schemaInfo[1].strip(), schemaInfo[0].strip(), date))
def getLastBootDate(self, bootDateFormat):
    # Determine the Windows last-boot time by running 'net stats srv' and
    # parsing the "Statistics since <date>" line. Returns a java.util.Date,
    # None when the line cannot be parsed, or raises ValueError when the
    # command itself fails.
    logger.debug("Discovering last boot date via net stats")
    output = self.shell.execCmd('net stats srv')  #@@CMD_PERMISION ntcmd protocol execution
    if output and self.shell.getLastCmdReturnCode() == 0:
        lines = output.split('\n')
        # get rid of empty lines:
        lines = [line.strip() for line in lines if line.strip()]
        # Second line contains 'Statistics since <date>' where date can be in 12 or 24 format
        dateLine = lines[1]
        bootDateStr = None
        # Date portion: three numeric groups joined by one consistent separator.
        matcher = re.search(r"\d{1,4}([./-])\d{1,4}\1\d{1,4}", dateLine)
        if matcher:
            bootDateStr = matcher.group()
        bootTimeStr = None
        bootTimeFormat = None
        # Time portion; an optional am/pm suffix decides the hour pattern.
        matcher = re.search(r"\d{1,2}:\d{2}( (a|p)m)?", dateLine, re.I)
        if matcher:
            bootTimeStr = matcher.group()
            ampm = matcher.group(1)
            if ampm:
                bootTimeFormat = "h:mm a"
            else:
                bootTimeFormat = "H:mm"
        if bootDateStr and bootDateFormat and bootTimeStr:
            resultDateStr = "%s %s" % (bootDateStr, bootTimeStr)
            resultDateFormat = "%s %s" % (bootDateFormat, bootTimeFormat)
            try:
                formatter = SimpleDateFormat(resultDateFormat)
                result = formatter.parse(resultDateStr)
                logger.debug('Date = %s' % result)
                return result
            except:
                logger.warn("Error parsing date string '%s' with format '%s'"
                            % (resultDateStr, resultDateFormat))
        return None
    raise ValueError, 'Failed getting data from net stats srv.'
def _parseDateString(installDateString):
    # Parse an install-date string in one of several known layouts, picking
    # the candidate format by exact string length. Returns a java.util.Date
    # (interpreted as GMT) or None when nothing matches.
    installationDateAsDate = None
    if installDateString:
        for format in [
                'yyyyMMdd', 'yyyyMMddHHmmss.SSSSSS-000',
                'EEE dd MMM yyyy HH:mm:ss aa zzz'
        ]:
            # Length match is the cheap pre-filter before attempting a parse.
            if len(installDateString) == len(format):
                try:
                    from java.text import SimpleDateFormat
                    from java.util import TimeZone
                    dateFormatter = SimpleDateFormat(format)
                    dateFormatter.setTimeZone(TimeZone.getTimeZone("GMT"))
                    installationDateAsDate = dateFormatter.parse(
                        installDateString)
                except java.text.ParseException:
                    # could not parse date
                    # print 'could not parse' + installDateString + ' as ' + format
                    pass
    return installationDateAsDate
def getContacts(self, userAccounts):
    # Parse the backing XML message log into Contact objects, grouping
    # messages by the sending account and tracking progress via
    # self.contactsLoadProgress (0-100). Returns None when aborted.
    self.contactsLoadProgress = 0
    contactAccountsTemp = {}
    nodes = self.xmlRoot.firstChild.childNodes
    for i in range(len(nodes)):
        # check if this action was aborted
        if self.isAborted():
            return None
        node = nodes[i]
        # Skip non-element nodes (text, comments).
        if not isinstance(node, minidom.Element):
            continue
        df = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
        date = df.parse(
            node.getElementsByTagName('timestamp')[0].firstChild.toxml())
        name = node.getElementsByTagName('name')
        if len(name) == 0:
            name = ""
        else:
            name = name[0].firstChild.toxml()
        uid = node.getElementsByTagName('from')[0].firstChild.toxml()
        ca = ContactAccount(0, name, uid, "", None, self.protocol)
        if not contactAccountsTemp.has_key(ca):
            contactAccountsTemp[ca] = []
        content = node.getElementsByTagName('body')[0].firstChild.toxml()
        msg = Message(0, None, content, date, True)
        contactAccountsTemp[ca].append(msg)
        self.messagesCount += 1
        self.contactsLoadProgress = i * 100 / len(nodes)
    contacts = []
    for ca in contactAccountsTemp.iterkeys():
        # Fold each account's message list into conversation objects.
        ca.conversations = ConversationHelper.messagesToConversations(
            contactAccountsTemp[ca], ca, userAccounts[0])
        cnt = Contact(0, "", "", ca.name)
        cnt.addContactAccount(ca)
        contacts.append(cnt)
    self.contactsLoadProgress = 100
    return contacts
def isHung(self):
    # Decide whether the server looks hung by measuring how stale the most
    # recent 'purge-client' entry in today's info log is. Returns True when
    # that entry is older than self.hangTimeout minutes.
    sdi = SimpleDateFormat('d MMM yyyy HH:mm:ss,SSS')
    today = datetime.datetime.today()
    # no need to check if too close to midnight -- don't bother handle wrapping
    if today.hour <= 1:
        log('INFO: hang check deferred until after 1AM')
        return False
    # formulate today's log file name
    lf = '/var/log/ems/info.%04d%02d%02d' % (today.year, today.month, today.day) + '.log'
    # get the last logged message (tac reads the file backwards, fgrep -m 1
    # stops at the first purge-client hit)
    fd = os.popen('tac %s | fgrep -m 1 purge-client' % lf, 'r')
    line = fd.readline()
    fd.close()
    # nothing logged
    if line is None:
        log('WARN: when checking for hang the log file line was empty')
        return False
    line = line.strip()
    # nothing logged
    if len(line) <= 0:
        log('WARN: when checking for hang the log file was empty')
        return False
    # Parse the timestamp at the start of the line, then compute its age.
    d = sdi.parse(line, ParsePosition(0))
    td = Date().getTime() - d.getTime()
    td = td / 1000  # milliseconds -> seconds
    td = td / 60  # seconds -> minutes
    # 60 minutes is too long, report hang
    return td > self.hangTimeout
def getContacts(self, userAccounts):
    # Parse the SMS export in self.messagesContent into Contact objects,
    # grouping messages by the sending account and tracking progress via
    # self.contactsLoadProgress (0-100). Returns None when aborted.
    self.contactsLoadProgress = 0
    contactAccountsTemp = {}
    sms = re.split('\<sms\>', self.messagesContent)
    # Captures: 1=sender name, 2=sender id in brackets, 3=body, 4=date string.
    pattern = '\<from\>(.*)\s*\[(.*)\]\<\/from\>\s*\<msg\>(.*)\<\/msg\>\s*\<date\>(.*)\<\/date\>'
    prog = re.compile(pattern)
    for i in range(len(sms)):
        # check if this action was aborted
        if self.isAborted():
            return None
        res = prog.search(sms[i])
        if res <> None:
            df = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
            date = df.parse(res.group(4).strip())
            name = unicode(res.group(1).strip(), 'utf-8')
            if name == None:
                name = ""
            ca = ContactAccount(0, name, res.group(2).strip(), "", None,
                                self.protocol)
            if not contactAccountsTemp.has_key(ca):
                contactAccountsTemp[ca] = []
            content = unicode(res.group(3).strip(), 'utf-8')
            msg = Message(0, None, content, date, True)
            contactAccountsTemp[ca].append(msg)
            self.messagesCount += 1
        self.contactsLoadProgress = i * 100 / len(sms)
    contacts = []
    for ca in contactAccountsTemp.iterkeys():
        # Fold each account's message list into conversation objects.
        ca.conversations = ConversationHelper.messagesToConversations(
            contactAccountsTemp[ca], ca, userAccounts[0])
        cnt = Contact(0, "", "", ca.name)
        cnt.addContactAccount(ca)
        contacts.append(cnt)
    self.contactsLoadProgress = 100
    return contacts
def getLastBootDate(self, bootDateFormat):
    # Determine the Windows last-boot time by running 'net stats srv' and
    # parsing the "Statistics since <date>" line. Returns a java.util.Date,
    # None when the line cannot be parsed, or raises ValueError when the
    # command itself fails.
    logger.debug("Discovering last boot date via net stats")
    output = self.shell.execCmd('net stats srv')#@@CMD_PERMISION ntcmd protocol execution
    if output and self.shell.getLastCmdReturnCode() == 0:
        lines = output.split('\n')
        # get rid of empty lines:
        lines = [line.strip() for line in lines if line.strip()]
        # Second line contains 'Statistics since <date>' where date can be in 12 or 24 format
        dateLine = lines[1]
        bootDateStr = None
        # Date portion: three numeric groups joined by one consistent separator.
        matcher = re.search(r"\d{1,4}([./-])\d{1,4}\1\d{1,4}", dateLine)
        if matcher:
            bootDateStr = matcher.group()
        bootTimeStr = None
        bootTimeFormat = None
        # Time portion; an optional am/pm suffix decides the hour pattern.
        matcher = re.search(r"\d{1,2}:\d{2}( (a|p)m)?", dateLine, re.I)
        if matcher:
            bootTimeStr = matcher.group()
            ampm = matcher.group(1)
            if ampm:
                bootTimeFormat = "h:mm a"
            else:
                bootTimeFormat = "H:mm"
        if bootDateStr and bootDateFormat and bootTimeStr:
            resultDateStr = "%s %s" % (bootDateStr, bootTimeStr)
            resultDateFormat = "%s %s" % (bootDateFormat, bootTimeFormat)
            try:
                formatter = SimpleDateFormat(resultDateFormat)
                result = formatter.parse(resultDateStr)
                logger.debug('Date = %s' % result)
                return result
            except:
                logger.warn("Error parsing date string '%s' with format '%s'" % (resultDateStr, resultDateFormat))
        return None
    raise ValueError, 'Failed getting data from net stats srv.'
def getContacts(self, userAccounts):
    # Parse the backing XML message log into Contact objects, grouping
    # messages by the sending account; progress is tracked in
    # self.contactsLoadProgress (0-100). Returns None when aborted.
    self.contactsLoadProgress = 0
    contactAccountsTemp = {}
    nodes = self.xmlRoot.firstChild.childNodes
    for i in range(len(nodes)):
        # check if this action was aborted
        if self.isAborted():
            return None
        node = nodes[i]
        # Skip non-element nodes (text, comments).
        if not isinstance(node, minidom.Element):
            continue
        df = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
        date = df.parse(node.getElementsByTagName('timestamp')[0].firstChild.toxml())
        name = node.getElementsByTagName('name')
        if len(name) == 0:
            name = ""
        else:
            name = name[0].firstChild.toxml()
        uid = node.getElementsByTagName('from')[0].firstChild.toxml()
        ca = ContactAccount(0, name, uid, "", None, self.protocol)
        if not contactAccountsTemp.has_key(ca):
            contactAccountsTemp[ca] = []
        content = node.getElementsByTagName('body')[0].firstChild.toxml()
        msg = Message(0, None, content, date, True)
        contactAccountsTemp[ca].append(msg)
        self.messagesCount += 1
        self.contactsLoadProgress = i * 100 /len(nodes)
    contacts = []
    for ca in contactAccountsTemp.iterkeys():
        # Fold each account's message list into conversation objects.
        ca.conversations = ConversationHelper.messagesToConversations(contactAccountsTemp[ca], ca, userAccounts[0])
        cnt = Contact(0, "", "", ca.name)
        cnt.addContactAccount(ca)
        contacts.append(cnt)
    self.contactsLoadProgress = 100
    return contacts
def parseSuccess(self, output): """ @types: str -> [DbSchemaResult] or [] """ result = [] format_ = SimpleDateFormat("yyyy-MM-dd HH:mm:ss Z") for line in self.stripHeader(output): line = line.strip() if line: line = line.strip("|") schemaInfo = re.split("\s*\|\s*", line) if len(schemaInfo) == 5: dateStr = "%s %s %s" % (schemaInfo[2], schemaInfo[3], self.__getTimeZone(schemaInfo[4])) date = None try: date = format_.parse(dateStr) except ParseException, ex: logger.debugException(ex.getMessage()) logger.warn("Cannot parse date \"%s\": %s" % (dateStr, ex.getMessage())) result.append( DbSchemaResult(schemaInfo[1].strip(), schemaInfo[0].strip(), date))
def flowFileDates(hmsRunName):
    # will not be accurate if there is more than one set of data--specify
    # start date in config file?
    """Find the dates of any FLOW files in the DSS catalog"""
    from java.text import SimpleDateFormat
    dateFormat = SimpleDateFormat("ddMMMyyyy")
    print("Getting dates from " + hmsRunName + "...")
    dates = []
    flowFiles = filter(lambda f: ((f.split('/')[3] == 'FLOW') and
                                  (f.split('/')[6] == ('RUN:' + hmsRunName.upper()))),
                       dss.getCatalogedPathnames())
    candidateDates = map(lambda x: x.split('/')[4], flowFiles)
    for d in candidateDates:
        # Only accept the strict ddMMMyyyy shape (e.g. 01JAN2001).
        if d[0:2].isdigit() and d[2:5].isalpha() and d[5:9].isdigit():
            date = dateFormat.parse(d)
            # BUG FIX: builtin any() takes a single iterable; the original
            # passed (lambda, dates) as two arguments, which raises TypeError.
            dateAlreadyFound = any(x.equals(date) for x in dates)
            if not dateAlreadyFound:
                dates.append(date)
    dates.sort(lambda a, b: a.compareTo(b))
    return map(lambda d: dateFormat.format(d).upper(), dates)
class ChartData:
    """
    Renders bar-chart PNGs for an admin-only page.

    __activate__ is the framework entry point: it checks authentication,
    validates the from/to date parameters and streams the requested chart.
    On any failure the response status is 404 and getErrorMsg() carries the
    message.
    """

    def __init__(self):
        pass

    def __activate__(self, context):
        # Framework entry point; 'context' supplies page, request, response
        # and posted form data.
        self.auth = context["page"].authentication
        self.response = context["response"]
        self.request = context["request"]
        self.dateFormatter = SimpleDateFormat("yyyy-MM-dd")
        self.errorMsg = ""
        if (self.auth.is_logged_in()):
            if (self.auth.is_admin()==True):
                self.chartName = context["formData"].get("chartName")
                self.buildBarChart(context)
            else:
                self.errorMsg = "Requires Admin / Librarian / Reviewer access."
        else:
            self.errorMsg = "Please login."
        if (self.errorMsg!=""):
            # Report the failure as plain text with a 404 status.
            self.response.setStatus(404)
            writer = self.response.getPrintWriter("text/plain; charset=UTF-8")
            writer.println(self.errorMsg)
            writer.close()

    def getErrorMsg(self):
        return self.errorMsg

    def buildBarChart(self, context):
        # Validate the date range, pick the chart layout by chartName, fill
        # it with data and stream the rendered PNG to the response.
        barChartData = None
        self.imgW = 550
        self.imgH = 400
        self.fromDtTxt = self.request.getParameter("from")
        self.toDtTxt = self.request.getParameter("to")
        if (self.fromDtTxt is None or self.toDtTxt is None):
            self.errorMsg = "Invalid date range."
            return
        self.fromDt = self.dateFormatter.parse(self.fromDtTxt)
        self.toDt = self.dateFormatter.parse(self.toDtTxt)
        if (self.fromDt.after(self.toDt)):
            self.errorMsg = "Invalid date range."
            return
        if (self.chartName=="records-by-stage-1"):
            barChartData = BarChartData(
                self.fromDtTxt + " to " + self.toDtTxt + "\n Records by Workflow Stage",
                "", "", BarChartData.LabelPos.SLANTED,
                BarChartData.LabelPos.HIDDEN, self.imgW, self.imgH, False)
        if (self.chartName=="records-by-stage-2"):
            barChartData = BarChartData(
                "", "", "", BarChartData.LabelPos.VERTICAL,
                BarChartData.LabelPos.RIGHT, self.imgW, self.imgH, True)
            barChartData.setUseSeriesColor(True)
        if (self.chartName=="records-by-month-1"):
            barChartData = BarChartData(
                self.fromDtTxt + " to " + self.toDtTxt + "\n Records Published by Month",
                "", "", BarChartData.LabelPos.HORIZONTAL,
                BarChartData.LabelPos.RIGHT, self.imgW, self.imgH, False)
        if (self.chartName=="records-by-month-2"):
            barChartData = BarChartData(
                "", "", "", BarChartData.LabelPos.HIDDEN,
                BarChartData.LabelPos.LEFT, self.imgW, self.imgH, False)
            barChartData.setUseSeriesColor(True)
        if (barChartData is None):
            self.errorMsg = "Invalid chart"
            return
        self.out = self.response.getOutputStream("image/png")
        barChartData.setBaseSeriesColor(Color(98, 157, 209))
        barChartData = self.getChartData(self.chartName, barChartData)
        ChartGenerator.renderPNGBarChart(self.out, barChartData)
        self.out.close()

    def getChartData(self, chartName, chartData):
        # NOTE(review): every data set below is hard-coded sample data; the
        # date range chosen by the user is not actually applied here.
        if (chartName=="records-by-stage-1"):
            chartData.addEntry(Integer(5), "", "Investigation")
            chartData.addEntry(Integer(10), "", "Metadata")
            chartData.addEntry(Integer(20), "", "Final Review")
            chartData.addEntry(Integer(150), "", "Published")
            chartData.addEntry(Integer(100), "", "Retired")
        if (chartName=="records-by-stage-2"):
            clrIdx = Color(18,45,69)
            clrRep = Color(18,101,69)
            clrReg = Color(89,45,85)
            clrCol = Color(89,100,85)
            clrDat = Color(23,106,113)
            # due to the series concept in JFreeChart, we'll add all unique rows first to set the colors
            # TODO: refactor to remove this limitation
            chartData.addEntry(Integer(5), "Catalogue/Index", "Investigation", clrIdx)
            chartData.addEntry(Integer(10), "Repository", "Investigation", clrRep)
            chartData.addEntry(Integer(40), "Registry", "Metadata Review", clrReg)
            chartData.addEntry(Integer(120), "Collection", "Published", clrCol)
            chartData.addEntry(Integer(20), "Dataset", "Metadata Review", clrDat)
            chartData.addEntry(Integer(20), "Repository", "Final Review", clrRep)
            chartData.addEntry(Integer(80), "Dataset", "Final Review", clrDat)
            chartData.addEntry(Integer(20), "Dataset", "Published", clrDat)
            chartData.addEntry(Integer(70), "Dataset", "Retired", clrDat)
        if (chartName=="records-by-month-1"):
            dataType = "2012 - Records \n Published by \n Month"
            chartData.addEntry(Integer(5), dataType, "Jan")
            chartData.addEntry(Integer(10), dataType, "Feb")
            chartData.addEntry(Integer(20), dataType, "Mar")
            chartData.addEntry(Integer(150), dataType, "Apr")
            chartData.addEntry(Integer(100), dataType, "May")
            chartData.addEntry(Integer(90), dataType, "Jun")
            chartData.addEntry(Integer(70), dataType, "Jul")
            chartData.addEntry(Integer(17), dataType, "Aug")
            chartData.addEntry(Integer(143), dataType, "Sep")
            chartData.addEntry(Integer(56), dataType, "Oct")
            chartData.addEntry(Integer(130), dataType, "Nov")
            chartData.addEntry(Integer(166), dataType, "Dec")
        if (chartName=="records-by-month-2"):
            chartData.addEntry(Integer(325), "Party", "Published Records", Color(98, 157, 209))
            chartData.addEntry(Integer(100), "Collection", "Published Records", Color(41,127,213))
            chartData.addEntry(Integer(70), "Activity", "Published Records", Color(127,143,169))
            chartData.addEntry(Integer(20), "Service", "Published Records", Color(45,127,217))
        return chartData
def getDate(dateString):
    """Parse a date string in MM/dd/yyyy format into a java.util.Date."""
    parser = SimpleDateFormat("MM/dd/yyyy")
    return parser.parse(dateString)
def setError(g, k):
    """Record a validation error in the module-level variables that the
    hosting application (Maximo automation-script convention) reads back."""
    global errorkey, errorgroup, params
    errorgroup = g
    errorkey = k

# --- script body: reject a duplicate meter test on the same test date ---
# NOTE(review): `mbo` and `fmt` are supplied by the automation-script
# context; `fmt` is presumably a date formatter -- confirm against the host.
iptestdate = mbo.getString("TESTDATE")
ipmetertestid = mbo.getString("CG_METERTESTID")
metertestset = mbo.getMboSet("CG_METERTESTMETERTEST")
meterassetcount = metertestset.count()
if (meterassetcount > 0):
    for i in range(meterassetcount):
        astmtrrec = metertestset.getMbo(i)
        testdateexist = astmtrrec.getString("TESTDATE")
        metertestid = astmtrrec.getString("CG_METERTESTID")
        # compare parsed dates so differing string formats still match
        parstestdate = fmt.parse(testdateexist)
        inpatestdate = fmt.parse(iptestdate)
        # same date but a different meter-test id -> duplicate
        if (parstestdate == inpatestdate and metertestid <> ipmetertestid):
            setError(
                "FAILURECODE",
                " Meter test already exists for this test date. Cannot have two meter tests for the same date"
            )

#dateformat="12/1/10"
def setupjob(job, args):
    """
    Set up a Hadoop job over telemetry data already exported to HDFS.

    :param job: the Hadoop Job object to configure
    :param args: two strings, startdate and enddate, both in yyyyMMdd format
    :raises Exception: on wrong argument count or a date range outside the
        ~14-day window of data retained in HDFS
    """
    import java.text.SimpleDateFormat as SimpleDateFormat
    import java.util.Date as Date
    import java.util.Calendar as Calendar
    import java.util.concurrent.TimeUnit as TimeUnit
    import com.mozilla.util.DateUtil as DateUtil
    import com.mozilla.util.DateIterator as DateIterator
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat as FileInputFormat
    import org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat as MyInputFormat
    if len(args) != 2:
        raise Exception("Usage: <startdate-YYYYMMDD> <enddate-YYYYMMDD>")

    # use to collect up each date in the given range
    class MyDateIterator(DateIterator):
        def __init__(self):
            self._list = []

        def get(self):
            return self._list

        def see(self, aTime):
            self._list.append(aTime)

    # `dateformat`, `hdfs_dateformat` and `hdfs_pathformat` are module-level
    # settings defined elsewhere in this file.
    sdf = SimpleDateFormat(dateformat)
    sdf_hdfs = SimpleDateFormat(hdfs_dateformat)
    startdate = Calendar.getInstance()
    startdate.setTime(sdf.parse(args[0]))
    enddate = Calendar.getInstance()
    enddate.setTime(sdf.parse(args[1]))
    nowdate = Calendar.getInstance()

    # HDFS only contains the last 2 weeks of data (up to yesterday)
    startMillis = startdate.getTimeInMillis()
    endMillis = enddate.getTimeInMillis()
    nowMillis = nowdate.getTimeInMillis()
    startDiff = nowMillis - startMillis
    if TimeUnit.DAYS.convert(startDiff, TimeUnit.MILLISECONDS) > 14:
        raise Exception(
            "HDFS Data only includes the past 14 days of history. Try again with more recent dates or use the HBase data directly."
        )
    endDiff = nowMillis - endMillis
    if TimeUnit.DAYS.convert(endDiff, TimeUnit.MILLISECONDS) < 1:
        raise Exception(
            "HDFS Data only includes data up to yesterday. For (partial) data for today, use the HBase data directly."
        )

    # expand the range into one HDFS input path per day
    dates = MyDateIterator()
    DateUtil.iterateByDay(startMillis, endMillis, dates)
    paths = []
    for d in dates.get():
        paths.append(hdfs_pathformat % (sdf_hdfs.format(Date(d))))
    job.setInputFormatClass(MyInputFormat)
    FileInputFormat.setInputPaths(job, ",".join(paths))
    # tell HadoopDriver the records are plain text
    job.getConfiguration().set("org.mozilla.jydoop.mappertype", "TEXT")
def getContacts(self, userAccounts):
    """Load Kadu buddies, contacts and chat messages for the given accounts.

    :param userAccounts: accounts whose contacts should be collected
    :return: list of Contact objects; also populates self.buddies,
        self.contacts, self.messagesCount and self.contactsLoadProgress
    """
    # --- pass 1: build Contact objects from the <Buddies> XML section ---
    buddiesxml = self.element.find("Buddies")
    contactslist = []
    for buddy in buddiesxml:
        # NOTE(review): element truthiness -- an element with no children is
        # falsy in ElementTree; presumably that is the intent here. Verify.
        if buddy:
            name = buddy.find("Display").text
            # first/last name are kept only when present and distinct from
            # the display name
            fname = ""
            if buddy.find("FirstName").text != name and buddy.find("FirstName").text != None:
                fname = buddy.find("FirstName").text
            lname = ""
            if buddy.find("LastName").text != name and buddy.find("LastName").text != None:
                lname = buddy.find("LastName").text
            c = Contact(0, fname, lname, name)
            self.buddies[buddy.attrib["uuid"]] = c
        else:
            c = Contact(0, "", "", "")
            self.buddies[buddy.attrib["uuid"]] = c

    # --- pass 2: attach ContactAccounts from the <Contacts> section ---
    contactsxml = self.element.find("Contacts")
    for contact in contactsxml:
        if self.accounts[contact.find("Account").text][0] in userAccounts:
            if contact.find("Buddy").text == None:
                c = Contact(0, "", "", contact.find("Id").text)
            else:
                c = self.buddies[contact.find("Buddy").text]
            if not c in contactslist:
                contactslist.append(c)
            ca = ContactAccount(
                0, "", contact.find("Id").text, "", c, self.accounts[contact.find("Account").text][0].getProtocol()
            )
            c.addContactAccount(ca)
            self.contacts[contact.attrib["uuid"]] = ca

    # --- pass 3: read messages per chat from the Kadu SQLite database ---
    # NOTE(review): connection/stmt/rs are never closed, and the chat uuid is
    # concatenated into the SQL string (injection-prone if uuids were ever
    # attacker-controlled). Worth a follow-up fix.
    connection = DriverManager.getConnection("jdbc:sqlite:" + self.dbFile)
    stmt = connection.createStatement()
    chatsxml = self.element.find("Chats")
    df = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss")
    for chat in chatsxml:
        if chat.find("Account") != None and self.accounts[chat.find("Account").text][0] in userAccounts:
            rs = stmt.executeQuery(
                "SELECT * FROM kadu_messages WHERE chat='" + chat.attrib["uuid"] + "' ORDER BY send_time"
            )
            msgs = []
            while rs.next():
                # last char of `attributes` flags direction: "0" => received
                if rs.getString("attributes")[-1] == "0":
                    recv = 1
                else:
                    recv = 0
                msgs.append(Message(0, None, rs.getString("content"), df.parse(rs.getString("send_time")), recv))
                self.messagesCount += 1
            if len(msgs) > 0:
                # group the flat message list into conversations
                self.contacts[chat.find("Contact").text].setConversations(
                    ConversationHelper.messagesToConversations(
                        msgs, self.contacts[chat.find("Contact").text], self.accounts[chat.find("Account").text][0]
                    )
                )
    self.contactsLoadProgress = 100
    return contactslist
def formatDate(date, sfmt="yyyy-MM-dd'T'HH:mm:ss", tfmt="dd/MM/yyyy"):
    """Re-format *date* from the source pattern *sfmt* to the target *tfmt*."""
    parsed = SimpleDateFormat(sfmt).parse(date)
    return SimpleDateFormat(tfmt).format(parsed)
def mustReader(paramMnemonics, startDate, endDate):
    """Fetch parameter time series from a MUST MySQL repository.

    :param paramMnemonics: list of parameter names (PNAME values) to read
    :param startDate: range start, "yyyy-MM-dd HH:mm:ss"
    :param endDate: range end, "yyyy-MM-dd HH:mm:ss"
    :return: a Product with one TableDataset per parameter
    """
    # NOTE(review): the connection-URL line below appears to have been
    # credential-scrubbed at some point (the `******` runs are not valid
    # Python); it presumably read
    #   "?user=" + Configuration.getProperty("vega.must.server.user") + ...
    # Restore before use.
    #
    driverName = "com.mysql.jdbc.Driver"
    #Class.forName(driverName)
    url = "jdbc:mysql://" + Configuration.getProperty( "vega.must.server.ip") + "/" + Configuration.getProperty( "vega.must.server.repository" ) + "?user="******"vega.must.server.user" ) + "&password="******"vega.must.server.password")
    # load the MySQL driver through the context classloader (Jython cannot
    # always see drivers registered with DriverManager)
    loader = Thread.currentThread().getContextClassLoader()
    c = loader.loadClass("com.mysql.jdbc.Driver")
    #print c
    driver = c.newInstance()
    #print driver
    con = driver.connect(url, java.util.Properties())
    #con = DriverManager.getConnection(url)
    stmt = con.createStatement()
    formatter = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    date1 = formatter.parse(endDate)
    date2 = formatter.parse(startDate)
    stringDate1 = formatter.format(date1)
    stringDate2 = formatter.format(date2)
    #
    p = Product()
    #ah=TableDataset()
    p.meta["type"] = StringParameter(value="MUST Parameter Product")
    p.meta["creator"] = StringParameter(value="MustClient")
    p.meta["description"] = StringParameter(value="MUST")
    p.meta["startDate"] = DateParameter(value=FineTime(date2))
    p.meta["endDate"] = DateParameter(value=FineTime(date1))
    #
    for pars in range(len(paramMnemonics)):
        #
        ah = TableDataset()
        parameterName = paramMnemonics[pars]
        p.meta[parameterName] = BooleanParameter(value=Boolean(True))
        # look up the parameter id and the table family holding its values
        sql = "select PID,DBTYPE from parameter where (PNAME='" + parameterName + "')"
        rs = stmt.executeQuery(sql)
        while (rs.next()):
            pid = rs.getInt(1)
            tableName = rs.getString(2) + "paramvalues"
        # pull all (datetime, value) samples in the requested window
        sql = "select datetime,value from %s where (pid=%s and datetime<'%s' and datetime>'%s')" % (
            tableName, pid, date1.time, date2.time)
        rs = stmt.executeQuery(sql)
        list = []
        dates = Long1d()
        values = Float1d()
        while (rs.next()):
            dates.append(rs.getLong(1))
            values.append(rs.getFloat(2))
        print "Retrieved parameter from MUST: ", parameterName
        if (pars == 0):
            #ah["Time"]=Column(data=dates,description="Time")
            ah.meta["creator"] = StringParameter(value="MustClient")
            ah.meta["description"] = StringParameter(value="MUST")
            ah.meta["startDate"] = DateParameter(value=FineTime(date2))
            ah.meta["endDate"] = DateParameter(value=FineTime(date1))
        # ah["Time"] = Column(data=dates, description="Time")
        ah[parameterName] = Column(data=values, description=parameterName)
        #
        p[parameterName] = ah
        rs.close()
    stmt.close()
    con.close()
    #
    return p
def formatVersion(self, dString):
    """Convert a compact yyyyMMddHHmmss stamp into dd/MM/yyyy HH:mm:ss."""
    src = SimpleDateFormat("yyyyMMddHHmmss")
    dst = SimpleDateFormat("dd/MM/yyyy HH:mm:ss")
    return dst.format(src.parse(dString))
def hdfs_setupjob(job, args):
    """
    Set up a Hadoop job over telemetry data already exported to HDFS.

    :param job: the Hadoop Job object to configure
    :param args: two strings, startdate and enddate, both in yyyyMMdd format
    :raises Exception: on wrong argument count or a date range outside the
        ~14-day window of data retained in HDFS
    """
    import java.text.SimpleDateFormat as SimpleDateFormat
    import java.util.Date as Date
    import java.util.Calendar as Calendar
    import java.util.concurrent.TimeUnit as TimeUnit
    import com.mozilla.util.DateUtil as DateUtil
    import com.mozilla.util.DateIterator as DateIterator
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat as FileInputFormat
    import org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat as MyInputFormat
    if len(args) != 2:
        raise Exception("Usage: <startdate-YYYYMMDD> <enddate-YYYYMMDD>")

    # use to collect up each date in the given range
    class MyDateIterator(DateIterator):
        def __init__(self):
            self._list = []

        def get(self):
            return self._list

        def see(self, aTime):
            self._list.append(aTime)

    # `dateformat`, `hdfs_dateformat` and `hdfs_pathformat` are module-level
    # settings defined elsewhere in this file.
    sdf = SimpleDateFormat(dateformat)
    sdf_hdfs = SimpleDateFormat(hdfs_dateformat)
    startdate = Calendar.getInstance()
    startdate.setTime(sdf.parse(args[0]))
    enddate = Calendar.getInstance()
    enddate.setTime(sdf.parse(args[1]))
    nowdate = Calendar.getInstance()

    # HDFS only contains the last 2 weeks of data (up to yesterday)
    startMillis = startdate.getTimeInMillis()
    endMillis = enddate.getTimeInMillis()
    nowMillis = nowdate.getTimeInMillis()
    startDiff = nowMillis - startMillis
    if TimeUnit.DAYS.convert(startDiff, TimeUnit.MILLISECONDS) > 14:
        raise Exception("HDFS Data only includes the past 14 days of history. Try again with more recent dates or use the HBase data directly.")
    endDiff = nowMillis - endMillis
    if TimeUnit.DAYS.convert(endDiff, TimeUnit.MILLISECONDS) < 1:
        raise Exception("HDFS Data only includes data up to yesterday. For (partial) data for today, use the HBase data directly.")

    # expand the range into one HDFS input path per day
    dates = MyDateIterator()
    DateUtil.iterateByDay(startMillis, endMillis, dates)
    paths = []
    for d in dates.get():
        paths.append(hdfs_pathformat % (sdf_hdfs.format(Date(d))))
    job.setInputFormatClass(MyInputFormat)
    FileInputFormat.setInputPaths(job, ",".join(paths))
    # NOTE(review): the sibling `setupjob` also sets
    # job.getConfiguration().set("org.mozilla.jydoop.mappertype", "TEXT");
    # its absence here looks like an omission -- confirm against the driver.
def formatDate(self, date):
    """Convert an ISO-8601-style timestamp string to dd/MM/yyyy."""
    parsed = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss").parse(date)
    return SimpleDateFormat("dd/MM/yyyy").format(parsed)
def transactionChange(sapUtils, mapDevcToOSH, siteOSH, SYSNAME, OSHVResult):
    """Report SAP transaction changes as transport/change CIs.

    :param sapUtils: SAP access helper providing getTransactionChange()
    :param mapDevcToOSH: map of development-class name -> OSH
    :param siteOSH: OSH of the SAP site
    :param SYSNAME: SAP system name
    :param OSHVResult: result vector the discovered objects are added to
    """
    mapTransportToOSH = HashMap()
    transactionChange = sapUtils.getTransactionChange()
    count = transactionChange.getRowCount()
    for row in range(count):
        # read all fields
        transaction = transactionChange.getCell(row, 0)
        devc = transactionChange.getCell(row, 1)
        objectName = transactionChange.getCell(row, 2)
        objectType = transactionChange.getCell(row, 3)
        objectDescription = transactionChange.getCell(row, 4)
        changeDescription = transactionChange.getCell(row, 5)
        date = transactionChange.getCell(row, 6)
        time = transactionChange.getCell(row, 7)
        user = transactionChange.getCell(row, 8)
        status = transactionChange.getCell(row, 9)
        changeRequest = transactionChange.getCell(row, 10)
        program = transactionChange.getCell(row, 12)
        screen = transactionChange.getCell(row, 13)
        programVersion = transactionChange.getCell(row, 14)
        targetSystem = transactionChange.getCell(row, 15)
        if logger.isDebugEnabled():
            logger.debug('--------------------------------------------')
            logger.debug('changeDescription = ', changeDescription)
            logger.debug('objectType = ', objectType)
            logger.debug('objectName = ', objectName)
            logger.debug('objectDescription = ', objectDescription)
            logger.debug('date = ', date)
            logger.debug('time = ', time)
            # NOTE(review): the next line looks credential-scrubbed (the
            # `******` run is not valid Python); it presumably read
            # logger.debug('user = ', user) followed by the divider line.
            logger.debug('user = '******'--------------------------------------------')
        sfDate = SimpleDateFormat('yyyy-MM-dd HH:mm:ss')
        dateObj = sfDate.parse(date + ' ' + time, ParsePosition(0))
        if devc is not None:
            devcOSH = mapDevcToOSH.get(devc)
            if devcOSH != None:
                # In case the application components were filtered then we need to send only the relevant
                # application components for this transport
                addAppCompHierarchy(devcOSH, OSHVResult)
                transactionOSH = buildTransaction(transaction, devc, program,
                                                  screen, programVersion,
                                                  devcOSH, siteOSH, SYSNAME,
                                                  OSHVResult)
                ticketStatus = ''
                #
                # L - Modifiable
                # D - Modifiable, Protected
                if status == 'L' or status == 'D':
                    # (1) = Plan
                    # (9) = Critical
                    ticketStatus = 'In progress'
                else:
                    # (2) = New change
                    # (7) = Major
                    ticketStatus = 'Closed'
                # one transport OSH per change request, cached in the map
                transportOSH = mapTransportToOSH.get(changeRequest)
                if transportOSH == None:
                    transportOSH = buildTransport(changeRequest, dateObj, time,
                                                  user, targetSystem,
                                                  changeDescription,
                                                  ticketStatus, siteOSH,
                                                  OSHVResult)
                    mapTransportToOSH.put(changeRequest, transportOSH)
                OSHVResult.add(
                    modeling.createLinkOSH('contains', transactionOSH, transportOSH))
                changeOSH = createTransportChange(transaction, objectType,
                                                  objectName, transportOSH,
                                                  OSHVResult)
                OSHVResult.add(
                    modeling.createLinkOSH('use', changeOSH, transactionOSH))
            else:
                logger.warn('can not find devclass OSH for [', devc, ']')
def transactionChange(sapUtils, mapDevcToOSH, siteOSH, SYSNAME, OSHVResult):
    """Report SAP transaction changes as transport/change CIs.

    (Duplicate of the identically named function above, kept byte-compatible;
    consider consolidating the two copies.)
    """
    mapTransportToOSH = HashMap()
    transactionChange = sapUtils.getTransactionChange()
    count = transactionChange.getRowCount()
    for row in range(count):
        # read all fields
        transaction = transactionChange.getCell(row,0)
        devc = transactionChange.getCell(row,1)
        objectName = transactionChange.getCell(row,2)
        objectType = transactionChange.getCell(row,3)
        objectDescription = transactionChange.getCell(row,4)
        changeDescription = transactionChange.getCell(row,5)
        date = transactionChange.getCell(row,6)
        time = transactionChange.getCell(row,7)
        user = transactionChange.getCell(row,8)
        status = transactionChange.getCell(row,9)
        changeRequest = transactionChange.getCell(row,10)
        program = transactionChange.getCell(row,12)
        screen = transactionChange.getCell(row,13)
        programVersion = transactionChange.getCell(row,14)
        targetSystem = transactionChange.getCell(row,15)
        if logger.isDebugEnabled():
            logger.debug('--------------------------------------------')
            logger.debug('changeDescription = ', changeDescription)
            logger.debug('objectType = ', objectType)
            logger.debug('objectName = ', objectName)
            logger.debug('objectDescription = ', objectDescription)
            logger.debug('date = ', date)
            logger.debug('time = ', time)
            # NOTE(review): the next line looks credential-scrubbed (the
            # `******` run is not valid Python); it presumably read
            # logger.debug('user = ', user) followed by the divider line.
            logger.debug('user = '******'--------------------------------------------')
        sfDate = SimpleDateFormat('yyyy-MM-dd HH:mm:ss')
        dateObj = sfDate.parse(date + ' ' + time,ParsePosition(0))
        if devc is not None:
            devcOSH = mapDevcToOSH.get(devc)
            if devcOSH != None:
                # In case the application components were filtered then we need to send only the relevant
                # application components for this transport
                addAppCompHierarchy(devcOSH, OSHVResult)
                transactionOSH = buildTransaction(transaction, devc, program, screen, programVersion, devcOSH, siteOSH, SYSNAME, OSHVResult)
                ticketStatus = ''
                #
                # L - Modifiable
                # D - Modifiable, Protected
                if status == 'L' or status == 'D':
                    # (1) = Plan
                    # (9) = Critical
                    ticketStatus = 'In progress'
                else:
                    # (2) = New change
                    # (7) = Major
                    ticketStatus = 'Closed'
                # one transport OSH per change request, cached in the map
                transportOSH = mapTransportToOSH.get(changeRequest)
                if transportOSH == None:
                    transportOSH = buildTransport(changeRequest,dateObj,time,user,targetSystem,changeDescription,ticketStatus, siteOSH, OSHVResult)
                    mapTransportToOSH.put(changeRequest,transportOSH)
                OSHVResult.add(modeling.createLinkOSH('contains',transactionOSH,transportOSH));
                changeOSH = createTransportChange(transaction,objectType,objectName,transportOSH, OSHVResult)
                OSHVResult.add(modeling.createLinkOSH('use',changeOSH,transactionOSH));
            else:
                logger.warn('can not find devclass OSH for [', devc, ']')
class ChartData:
    """Fascinator portal page script: renders a PNG bar chart for a named
    chart handler, restricted to logged-in admin users."""

    def __init__(self):
        pass

    def __activate__(self, context):
        # entry point called by the portal scripting framework
        self.auth = context["page"].authentication
        self.response = context["response"]
        self.request = context["request"]
        self.Services = context["Services"]
        self.dateFormatter = SimpleDateFormat("d/M/yyyy")
        self.errorMsg = ""
        if (self.auth.is_logged_in()):
            if (self.auth.is_admin()==True):
                self.chartName = context["formData"].get("chartName")
                self.buildBarChart(context)
            else:
                self.errorMsg = "Requires Admin / Librarian / Reviewer access."
        else:
            self.errorMsg = "Please login."
        # any failure above is reported as a plain-text 404 body
        if (self.errorMsg!=""):
            self.response.setStatus(404)
            writer = self.response.getPrintWriter("text/plain; charset=UTF-8")
            writer.println(self.errorMsg)
            writer.close()

    def getErrorMsg(self):
        # last error message, "" when the chart rendered successfully
        return self.errorMsg

    def buildBarChart(self, context):
        """Validate request parameters, load the configured chart handler by
        reflection and stream the rendered PNG to the response."""
        barChartData = None
        self.systemConfig = context["systemConfig"]
        # image size defaults, overridable via ?w= and ?h=
        self.imgW = 550
        self.imgH = 400
        if (self.request.getParameter("w") is not None):
            self.imgW = Integer.valueOf(self.request.getParameter("w"))
        if (self.request.getParameter("h") is not None):
            self.imgH = Integer.valueOf(self.request.getParameter("h"))
        self.fromDtTxt = self.request.getParameter("from")
        self.toDtTxt = self.request.getParameter("to")
        if (self.fromDtTxt is None or self.toDtTxt is None):
            self.errorMsg = "Invalid date range."
            return
        self.fromDt = self.dateFormatter.parse(self.fromDtTxt)
        self.toDt = self.dateFormatter.parse(self.toDtTxt)
        if (self.fromDt.after(self.toDt)):
            self.errorMsg = "Invalid date range."
            return
        self.out = self.response.getOutputStream("image/png")
        # the handler class name comes from the "charts" section of the
        # system configuration, keyed by chartName
        chartHandlerConfig = self.systemConfig.getObject("charts").get(self.chartName)
        className = chartHandlerConfig.get("className")
        chartHandlerClass = Class.forName(className)
        chartHandlerObject = chartHandlerClass.newInstance()
        # wire the handler up via reflection (setters then render)
        setSystemConfigMethod = chartHandlerClass.getMethod("setSystemConfig", self.get_class("com.googlecode.fascinator.common.JsonSimple"))
        setSystemConfigMethod.invoke(chartHandlerObject, self.systemConfig)
        setScriptingServiceMethod = chartHandlerClass.getMethod("setScriptingServices", self.get_class("com.googlecode.fascinator.portal.services.ScriptingServices"))
        setScriptingServiceMethod.invoke(chartHandlerObject, context['Services'])
        setFromDateMethod = chartHandlerClass.getMethod("setFromDate", self.get_class("java.util.Date"))
        setFromDateMethod.invoke(chartHandlerObject, self.fromDt)
        # NOTE: the same local is reused for the setToDate method below
        setFromDateMethod = chartHandlerClass.getMethod("setToDate", self.get_class("java.util.Date"))
        setFromDateMethod.invoke(chartHandlerObject, self.toDt)
        renderChartMethod = chartHandlerClass.getMethod("renderChart", self.get_class("java.io.OutputStream"))
        renderChartMethod.invoke(chartHandlerObject, self.out);
        self.out.close()

    # Standard Java Class forName seems to have issues at least with Interfaces.
    # This is an alternative method taken from http://stackoverflow.com/questions/452969/does-python-have-an-equivalent-to-java-class-forname
    def get_class(self, kls):
        # resolve a dotted class/interface name via Python imports
        parts = kls.split('.')
        module = ".".join(parts[:-1])
        m = __import__( module )
        for comp in parts[1:]:
            m = getattr(m, comp)
        return m
def cardDataSave(context, main=None, add=None, filterinfo=None, session=None, elementId=None, xformsdata=None):
    u'''Save handler for the edit card of the number-series dictionary.

    Reads the submitted XForms data, copies it onto a cursor over the
    number-series lines table, and inserts or updates depending on *add*
    ('add' or 'edit') and the caller's permissions.
    '''
    linesOfNumbersSeries = linesOfNumbersSeriesCursor(context)
    content = json.loads(xformsdata)["schema"]["numberSeries"]
    sdf = SimpleDateFormat("yyyy-MM-dd")
    # copy the submitted fields onto the cursor record
    linesOfNumbersSeries.seriesId = content["@seriesId"]
    linesOfNumbersSeries.numberOfLine = int(content["@numberOfLine"])
    if content["@startingDate"] == '':
        # empty starting date defaults to "today"
        linesOfNumbersSeries.startingDate = sdf.parse(
            datetime.datetime.fromtimestamp(time.time()).strftime("%Y-%m-%d"))
    else:
        linesOfNumbersSeries.startingDate = sdf.parse(content["@startingDate"])
    linesOfNumbersSeries.startingNumber = content["@startingNumber"]
    linesOfNumbersSeries.endingNumber = content["@endingNumber"]
    linesOfNumbersSeries.incrimentByNumber = content["@incrimentByNumber"]
    linesOfNumbersSeries.isOpened = content["@isOpened"] == "true"
    if content["@lastUsedDate"] <> '':
        linesOfNumbersSeries.lastUsedDate = sdf.parse(content["@lastUsedDate"])
    linesOfNumbersSeries.prefix = content["@prefix"]
    linesOfNumbersSeries.postfix = content["@postfix"]
    linesOfNumbersSeries.isFixedLength = content["@isFixedLength"] == "true"
    if add == 'add' and linesOfNumbersSeries.canInsert(
    ) and linesOfNumbersSeries.canModify():
        # insert-or-update path: try to insert, fall back to update with the
        # stored record version on key collision
        if content["@lastUsedNumber"] == '':
            linesOfNumbersSeries.lastUsedNumber = int(
                content["@startingNumber"])
        else:
            linesOfNumbersSeries.lastUsedNumber = int(
                content["@lastUsedNumber"])
        if not linesOfNumbersSeries.tryInsert():
            linesOfNumbersSeriesOld = linesOfNumbersSeriesCursor(context)
            linesOfNumbersSeriesOld.get(content["@seriesId"],
                                        int(content["@numberOfLine"]))
            linesOfNumbersSeries.recversion = linesOfNumbersSeriesOld.recversion
            linesOfNumbersSeries.update()
    elif add == 'add' and linesOfNumbersSeries.canInsert():
        # insert-only path (no modify permission)
        if content["@lastUsedNumber"] == '':
            linesOfNumbersSeries.lastUsedNumber = int(
                content["@startingNumber"])
        else:
            linesOfNumbersSeries.lastUsedNumber = int(
                content["@lastUsedNumber"])
        linesOfNumbersSeries.insert()
    elif add == 'edit' and linesOfNumbersSeries.canModify():
        linesOfNumbersSeriesOld = linesOfNumbersSeriesCursor(context)
        linesOfNumberSeriesTest = linesOfNumbersSeriesCursor(context)
        # the record being edited is identified via the grid context stored
        # in the session
        gridContext = json.loads(
            session)['sessioncontext']['related']['gridContext']
        gridContext = gridContext if isinstance(gridContext, list) else [gridContext]
        currentId = {}
        for gc in gridContext:
            if "currentRecordId" in gc.keys():
                currentId[gc["@id"]] = gc["currentRecordId"]
        linesOfNumbersSeriesOld.get(currentId["numbersSeriesGrid"],
                                    int(currentId["linesNumbersSeriesGrid"]))
        if content["@lastUsedNumber"] == '':
            linesOfNumbersSeries.lastUsedNumber = linesOfNumbersSeriesOld.lastUsedNumber
        else:
            linesOfNumbersSeries.lastUsedNumber = int(
                content["@lastUsedNumber"])
        if linesOfNumbersSeriesOld.numberOfLine == linesOfNumbersSeries.numberOfLine:
            # same line number: plain update of the existing record
            linesOfNumbersSeries.recversion = linesOfNumbersSeriesOld.recversion
            linesOfNumbersSeries.update()
        elif linesOfNumberSeriesTest.tryGet(
                linesOfNumbersSeries.seriesId,
                int(linesOfNumbersSeries.numberOfLine)):
            # line number changed to one that already exists
            context.error(u'Серия с данным номером уже существует.')
        elif linesOfNumbersSeries.canInsert():
            # line number changed: move the record (delete old, insert new)
            linesOfNumbersSeriesOld.delete()
            linesOfNumbersSeries.insert()
        else:
            raise CelestaException(u"Недостаточно прав для данной операции!")
    else:
        raise CelestaException(u"Недостаточно прав для данной операции!")
def getActiveTransactions(self):
    """Collect per-transaction workload statistics from an SAP system.

    Picks whichever SAPWL aggregation function the system provides, optionally
    resolves the users who executed each transaction, sums the workload
    statistics across all application servers, and returns a list of
    java.util.Properties (one per active transaction).
    """
    activeTransactions = []
    # find which of the two workload-aggregation RFCs exists on this system
    whereClauses = ArrayList()
    whereClauses.add("FUNCNAME IN ('SAPWL_TCODE_AGGREGATION','SAPWL_TCODE_AGGREGATION_COPY')");
    result = self.executeQuery("TFDIR", whereClauses, "FUNCNAME")#@@CMD_PERMISION sap protocol execution
    functionName = None
    if result.next():
        functionName = result.getString("FUNCNAME")
    if functionName == None:
        logger.warn('getActiveTransactions: active transaction function is not found')
        return activeTransactions
    # normalise the start date to yyyyMMdd (defaults to today)
    day = self.__client.getProperty('from_date')
    if day == None:
        today = Date()
        sfDate = SimpleDateFormat("yyyyMMdd")
        day = sfDate.format(today)
    elif day.find('/') != -1:
        try:
            sfDate = SimpleDateFormat("MM/dd/yyyy")
            parsedDate = sfDate.parse(day)
            sfDate = SimpleDateFormat("yyyyMMdd")
            day = sfDate.format(parsedDate)
        except:
            logger.reportWarning('Failed to parse date ', day)
    logger.debug('Parsed start date:', day)
    logger.debug('Active transactions from data:', day)
    # optionally build transaction -> users map from the stat records
    mapTransactionToUsers = None
    getUsers = Boolean.parseBoolean(self.__client.getProperty("get_users"))
    if getUsers:
        mapTransactionToUsers = HashMap()
        funcParams = HashMap()
        funcParams.put('READ_START_DATE', day)
        funcParams.put('READ_START_TIME', '000000')
        funcParams.put('READ_END_DATE', day)
        funcParams.put('READ_END_TIME', '235959')
        funcParams.put('READ_ONLY_MAINRECORDS', 'X')
        logger.debug('executing func:SAPWL_STATREC_FROM_REMOTE_SYS(', str(funcParams),')')
        fields = ArrayList()
        fields.add('TCODE')
        fields.add('ACCOUNT')
        usersResult = self.__client.executeFunction('SAPWL_STATREC_FROM_REMOTE_SYS', funcParams, 'NORMAL_RECORDS', fields)
        while usersResult.next():
            transaction = usersResult.getString('TCODE')
            if len(transaction) > 0:
                user = usersResult.getString("ACCOUNT");
                users = mapTransactionToUsers.get(transaction)
                if users == None:
                    users = HashMap()
                    mapTransactionToUsers.put(transaction,users)
                # NOTE(review): the map value stored per user is the map
                # itself -- only the key set is ever used, but this looks
                # like it was meant to be users.put(user, user).
                users.put(user,users);
    # iterate all servers of the first site and accumulate statistics
    self.getSites()
    site = self.getSites().getCell(0,0)
    servers = self.getServers(site)
    numServers = servers.getRowCount()
    transactionToStats = HashMap()
    for j in range(numServers):
        try:
            instance = servers.getCell(j,0);
            logger.debug('getActiveTransactions:executing function[' + functionName + '] for instance [' + instance + ']')
            if functionName == 'SAPWL_TCODE_AGGREGATION_COPY':
                # COPY variant: values come back as a flat stream, so each
                # records.next() advances to the next field of the same row
                records = self.callSapwlTcodeAggregationCopy(instance,day)
                while records.next():
                    transaction = (str(records.getString(0))).strip()
                    mapUsers = None
                    if mapTransactionToUsers != None:
                        mapUsers = mapTransactionToUsers.get(transaction)
                    if (transaction != None) and (len(transaction) > 0):
                        stats = transactionToStats.get(transaction)
                        if stats == None:
                            stats = TransactionStatistics(transaction)
                            transactionToStats.put(transaction,stats)
                        if mapUsers != None:
                            stats.users = ArrayList(mapUsers.keySet())
                        if records.next():
                            stats.steps = stats.steps + int(float(records.getString(0)))
                        if records.next():
                            stats.responseTime = stats.responseTime + int(float(records.getString(0)))
                        if records.next():
                            stats.cpuTime = stats.cpuTime + int(float(records.getString(0)))
                        if records.next():
                            stats.dbTime = stats.dbTime + int(float(records.getString(0)))
                        if records.next():
                            stats.guiTime = stats.guiTime + int(float(records.getString(0)))
                        if records.next():
                            stats.roundTrips = stats.roundTrips + int(float(records.getString(0)))
                        if records.next():
                            stats.text = (str(records.getString(0))).strip()
            else:
                # regular variant: named columns per record
                fields = ArrayList()
                fields.add('ENTRY_ID')
                fields.add('COUNT')
                fields.add('RESPTI')
                fields.add('CPUTI')
                fields.add('DBTIME')
                fields.add('GUITIME')
                fields.add('GUICNT')
                fields.add('TEXT')
                records = self.getApplicationStatistics(functionName, instance, day, fields)
                while records.next():
                    entryID = records.getString("ENTRY_ID");
                    transaction = self.getTransactionFromEntryID(entryID);
                    mapUsers = None
                    if mapTransactionToUsers != None:
                        mapUsers = mapTransactionToUsers.get(transaction)
                    if (transaction != None) and (len(transaction) > 0):
                        stats = transactionToStats.get(transaction)
                        if(stats == None):
                            stats = TransactionStatistics(transaction)
                            transactionToStats.put(transaction,stats)
                        if(mapUsers != None):
                            stats.users = ArrayList(mapUsers.keySet())
                        count = records.getString("COUNT")
                        stats.steps = stats.steps + int(count)
                        stats.responseTime = stats.responseTime + int(records.getString("RESPTI"))
                        stats.cpuTime = stats.cpuTime + int(records.getString("CPUTI"))
                        stats.dbTime = stats.dbTime + int(records.getString("DBTIME"))
                        stats.guiTime = stats.guiTime + int(records.getString("GUITIME"))
                        stats.roundTrips = stats.roundTrips + int(records.getString("GUICNT"))
                        stats.text = records.getString("TEXT")
        except:
            # NO_DATA_FOUND from SAP just means an empty window; anything
            # else is reported but does not abort the remaining servers
            msg = sys.exc_info()[1]
            strmsg = '%s' % msg
            if strmsg.find('NO_DATA_FOUND') != -1:
                logger.debug(strmsg)
                logger.reportWarning('No data found in the given time range')
            else:
                logger.debugException('Unexpected error getting transactions for function:' + str(functionName))
                logger.reportWarning('Unexpected error getting transactions for function:' + str(functionName) + ':' + strmsg)
    transactions = ArrayList(transactionToStats.keySet())
    logger.debug("getActiveTransactions: Found [" + str(transactions.size()) + "] active transactions")
    if logger.isDebugEnabled():
        logger.debug("getActiveTransactions: transactions = " + str(transactions))
    transactionsInfo = self.getTransactionsInfo(transactions)
    # convert the accumulated stats into Properties objects
    it = transactionToStats.values()
    for stats in it:
        prop = Properties()
        prop.setProperty('data_name', str(stats.transaction))
        prop.setProperty('dialog_steps', str(stats.steps))
        prop.setProperty('total_response_time', str(stats.responseTime))
        # NOTE(review): average_response_time is filled from
        # getAverageCPUTime() -- looks like a copy-paste bug; there is
        # presumably a getAverageResponseTime() that should be used.
        prop.setProperty('average_response_time', str(stats.getAverageCPUTime()))
        prop.setProperty('total_cpu_time', str(stats.cpuTime))
        prop.setProperty('average_cpu_time', str(stats.getAverageCPUTime()))
        prop.setProperty('round_trips', str(stats.roundTrips))
        prop.setProperty('total_db_time', str(stats.dbTime))
        prop.setProperty('average_db_time', str(stats.getAverageDBTime()))
        prop.setProperty('total_gui_time', str(stats.guiTime))
        prop.setProperty('average_gui_time', str(stats.getAverageGUITime()))
        prop.setProperty('text', stats.text)
        # NOTE(review): property name says "averagedbtime" but the value is
        # the user count -- confirm which one consumers expect.
        prop.setProperty('saptransaction_averagedbtime', str(stats.users.size()))
        info = transactionsInfo.get(stats.transaction)
        if info != None:
            prop.setProperty('devclass', info.devclass)
            prop.setProperty('program', info.program)
            prop.setProperty('screen', info.screen)
            # NOTE(review): empty property name; probably meant 'version'.
            prop.setProperty('', info.screen)
        else:
            prop.setProperty('devclass', "")
            prop.setProperty('program', "")
            prop.setProperty('screen', "")
            prop.setProperty('version', "")
        activeTransactions.append(prop)
    return activeTransactions
def formatDate(self, date):
    """Turn a yyyy-MM-dd'T'HH:mm:ss timestamp into a dd/MM/yyyy string."""
    sourceFormat = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss")
    targetFormat = SimpleDateFormat("dd/MM/yyyy")
    return targetFormat.format(sourceFormat.parse(date))
def makeTimeSeriesContainer(tsData, timeZone, pathname=None):
    '''
    Construct a TimeSeriesContainer object from a python dictionary that was
    created from a single "time-series" returned from the CWMS RADAR web
    service.

    :param tsData: dict for one time series (regular or irregular interval)
    :param timeZone: java TimeZone id used for parsing/formatting times
    :param pathname: optional DSS pathname; when given, its parts override
        the names derived from the CWMS time-series id
    :return: populated TimeSeriesContainer, or None if conversion failed
        (the traceback is emitted via output())
    '''
    #---------------#
    # initial setup #
    #---------------#
    tsc = None
    try:
        tz = TimeZone.getTimeZone(timeZone)
        sdf8601 = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX")
        sdfHecTime = SimpleDateFormat("ddMMMyyyy, HH:mm")
        cal = Calendar.getInstance()
        for obj in sdf8601, sdfHecTime, cal:
            obj.setTimeZone(tz)
        ht = HecTime()
        times, values, qualities = [], [], []
        #------------------#
        # process the data #
        #------------------#
        if tsData.has_key("regular-interval-values"):
            #----------------------------------------#
            # regular time series (a lot to process) #
            #----------------------------------------#
            rts = tsData["regular-interval-values"]
            intvlStr = rts["interval"]
            unit = rts["unit"].split()[0]
            # ISO-8601 duration: "PT..." is a time interval (minutes/hours),
            # "P..." a date interval (days/months/years)
            if intvlStr.startswith("PT"):
                intvlNum, intvlUnit = int(intvlStr[2:-1]), intvlStr[-1]
                try:
                    factor, field = {
                        "M": (1, Calendar.MINUTE),
                        "H": (60, Calendar.HOUR_OF_DAY)
                    }[intvlUnit]
                except KeyError:
                    raise Exception("Unexpected interval: %s" % intvlStr)
            else:
                intvlNum, intvlUnit = int(intvlStr[1:-1]), intvlStr[-1]
                try:
                    factor, field = {
                        "Y": (1440 * 365, Calendar.YEAR),
                        "M": (1440 * 30, Calendar.MONTH),
                        "D": (1440, Calendar.DATE)
                    }[intvlUnit]
                except KeyError:
                    raise Exception("Unexpected interval: %s" % intvlStr)
            intvl = intvlNum * factor  # interval in minutes
            segmentCount = rts["segment-count"]
            cal.setTimeInMillis(
                sdf8601.parse(rts["segments"][0]["first-time"]).getTime())
            for i in range(segmentCount):
                for j in range(rts["segments"][i]["value-count"]):
                    ht.set(sdfHecTime.format(cal.getTimeInMillis()))
                    v, q = rts["segments"][i]["values"][j]
                    times.append(ht.value())
                    values.append(v)
                    qualities.append(q)
                    cal.add(field, intvlNum)
                # pad the gap to the next segment with UNDEFINED values so
                # the series stays regular
                if i < segmentCount - 1:
                    nextBegin = sdf8601.parse(
                        rts["segments"][i + 1]["first-time"]).getTime()
                    time = cal.getTimeInMillis()
                    while time < nextBegin:
                        ht.set(sdfHecTime.format(time))
                        times.append(ht.value())
                        values.append(Constants.UNDEFINED)
                        qualities.append(0)
                        cal.add(field, intvlNum)
                        time = cal.getTimeInMillis()
        elif tsData.has_key("irregular-interval-values"):
            #------------------------------#
            # irregular time series (easy) #
            #------------------------------#
            its = tsData["irregular-interval-values"]
            unit = its["unit"].split()[0]
            intvl = 0
            for t, v, q in its["values"]:
                ht.set(sdfHecTime.format(sdf8601.parse(t)))
                times.append(ht.value())
                values.append(v)
                qualities.append(q)
        else:
            raise Exception("Time series has no values")
        #--------------------------------------------------#
        # code common to regular and irregular time series #
        #--------------------------------------------------#
        tsc = TimeSeriesContainer()
        tsc.times = times
        tsc.values = values
        tsc.quality = qualities
        tsc.numberValues = len(times)
        tsc.startTime = times[0]
        tsc.endTime = times[-1]
        tsc.interval = intvl
        tsc.units = unit
        tsc.timeZoneID = timeZone
        tsc.timeZoneRawOffset = tz.getRawOffset()
        name = tsData["name"]
        loc, param, paramType, intv, dur, ver = name.split(".")
        if pathname:
            #---------------------------#
            # use pathname if specified #
            #---------------------------#
            A, B, C, D, E, F = 1, 2, 3, 4, 5, 6
            parts = pathname.split("/")
            parts[D] = ''
            tsc.fullName = "/".join(parts)
            tsc.watershed = parts[A]
            try:
                tsc.location, tsc.subLocation = parts[B].split("-", 1)
            except:
                tsc.location = parts[B]
            try:
                tsc.parameter, tsc.subParameter = parts[C].split("-", 1)
            except:
                tsc.parameter = parts[C]
            try:
                tsc.version, tsc.subVersion = parts[F].split("-", 1)
            except:
                tsc.version = parts[F]
        else:
            #--------------------------------------#
            # no pathname, use CWMS time series id #
            #--------------------------------------#
            try:
                tsc.location, tsc.subLocation = loc.split("-", 1)
            except:
                tsc.location = loc
            try:
                tsc.parameter, tsc.subParameter = param.split("-", 1)
            except:
                tsc.parameter = param
            try:
                tsc.version, tsc.subVersion = ver.split("-", 1)
            except:
                tsc.version = ver
        # map the CWMS parameter type onto the DSS data type
        tsc.type = {
            "Total": "PER-CUM",
            "Max": "PER-MAX",
            "Min": "PER-MIN",
            "Const": "INST-VAL",
            "Ave": "PER-AVER",
            "Inst": ("INST-VAL", "INST-CUM")[param.startswith("Precip")]
        }[paramType]
    except:
        # best-effort conversion: report the traceback, return None
        output(traceback.format_exc())
    return tsc
# XL Release "Schedule Task" script (Jython 2.x): parse a start date string
# taken from a ServiceNow-style data map and set it as the scheduled start
# date of a named task in the current release.
#
# NOTE(review): this script depends on names defined outside this chunk
# (snFormat, xlFormat, snTimeZone, snData, startField, targetPhase,
# targetTask, getCurrentRelease, phaseApi, taskApi) — presumably injected by
# the XL Release scripting environment; confirm against the hosting config.
from java.util import Date
from java.text import ParseException
from java.text import SimpleDateFormat
from sets import Set
from java.util import TimeZone

# Incoming dates are parsed with snFormat in snTimeZone; outgoing dates are
# rendered with xlFormat (xlrFormat keeps the JVM default time zone).
formatter = SimpleDateFormat( snFormat )
xlrFormat = SimpleDateFormat( xlFormat )
formatter.setTimeZone(TimeZone.getTimeZone(snTimeZone))
startDate = snData[startField]
print "------"
try:
    print "Schedule Task=> date = %s" % (startDate)
    # SimpleDateFormat.parse raises ParseException on malformed input.
    date = formatter.parse(startDate)
    print "Schedule Task=> date = %s" % (date)
    release = getCurrentRelease()
    releaseID = release.id
    phaseTitle=targetPhase
    taskTitle=targetTask
    print "Schedule Task=> Phase / Task = %s / %s" % ( phaseTitle, taskTitle )
    # Resolve the phase by title within the current release; the first match
    # is used without checking for an empty result (IndexError if absent).
    phase = phaseApi.searchPhasesByTitle( phaseTitle, releaseID )
    print "Schedule Task=> phase = %s" % ( phase )
    phaseID = phase[0].id
    # Resolve the task by title within that phase; again first match wins.
    task = taskApi.searchTasksByTitle( taskTitle, phaseTitle, releaseID)
    print "Schedule Task=> task = %s" % ( task )
    taskID = task[0].id
    myTask = taskApi.getTask( taskID )
    # Force the task to wait for its scheduled start date before running.
    myTask.waitForScheduledStartDate = True
    myTask.scheduledStartDate = xlrFormat.format(date)
    # NOTE(review): the matching except/finally clause for this try is not in
    # this chunk — the script appears truncated here; verify the full source.
class DashboardData:
    """
    Page backing object for an administrator dashboard (Fascinator portal,
    Jython 2.x on the JVM).

    On activation it reads a "from"/"to" date range and a report name from
    the HTTP request, checks the caller is a logged-in admin, and either
    exports the dashboard charts as CSV (action == "export") or stores the
    context for rendering.

    NOTE(review): relies on Java classes (SimpleDateFormat, Calendar, Class,
    URLEncoder) imported elsewhere in this file.
    """
    def __init__(self):
        # All real initialisation happens in __activate__, which the portal
        # calls with the page context.
        pass

    def __activate__(self, context):
        """
        Entry point called by the portal with the page context.

        Reads request parameters, fills in a default date range
        (1 Jan of the current year .. today) and report name when absent,
        then dispatches to export or build based on the "action" parameter.
        Access failures are reported via self.errorMsg rather than raised.
        """
        self.auth = context["page"].authentication
        self.errorMsg = ""
        self.request = context["request"]
        self.response = context["response"]
        self.fromDtTxt = self.request.getParameter("from")
        self.toDtTxt = self.request.getParameter("to")
        self.reportName = self.request.getParameter("reportName")
        # Dates travel as day/month/year text, e.g. "1/1/2013".
        self.dateFormatter = SimpleDateFormat("d/M/yyyy")
        self.systemConfig = context["systemConfig"]
        if (self.fromDtTxt is None or self.toDtTxt is None):
            # Default range: 1 January of the current year up to today.
            # Calendar.MONTH is zero-based, hence the +1.
            curCal = Calendar.getInstance()
            self.fromDtTxt = "1/1/%s" % curCal.get(Calendar.YEAR)
            self.toDtTxt = "%s/%s/%s" % (curCal.get(Calendar.DAY_OF_MONTH), curCal.get(Calendar.MONTH)+1,curCal.get(Calendar.YEAR))
        if (self.reportName is None):
            self.reportName = "Dashboard Report"
        if (self.auth.is_logged_in()):
            if (self.auth.is_admin()==True):
                self.action = self.request.getParameter("action")
                if self.action == "export":
                    self.exportDashboard(context)
                else:
                    self.buildDashboard(context)
            else:
                self.errorMsg = "Requires Admin / Librarian / Reviewer access."
        else:
            self.errorMsg = "Please login."

    def exportDashboard(self, context):
        """
        Stream the dashboard's four charts to the response as a single CSV
        attachment.

        Re-reads "from"/"to" from the request (so an explicit export request
        must carry its own range), validates the range, and on any problem
        sets self.errorMsg and returns without writing output.

        Only format == "csv" is handled; any other format is silently a no-op.
        Chart handler classes are loaded by name from system config and driven
        via Java reflection (setSystemConfig / setScriptingServices /
        setFromDate / setToDate / renderCsv).
        """
        # NOTE(review): "format" shadows the Python builtin of the same name.
        format = self.request.getParameter("format")
        reportName = self.request.getParameter("reportName")
        self.fromDtTxt = self.request.getParameter("from")
        self.toDtTxt = self.request.getParameter("to")
        if (self.fromDtTxt is None or self.toDtTxt is None):
            self.errorMsg = "Invalid date range."
            return
        # SimpleDateFormat.parse raises ParseException on malformed text;
        # that propagates to the caller uncaught here.
        self.fromDt = self.dateFormatter.parse(self.fromDtTxt)
        self.toDt = self.dateFormatter.parse(self.toDtTxt)
        if (self.fromDt.after(self.toDt)):
            self.errorMsg = "Invalid date range."
            return
        if format == "csv":
            fileName = self.urlEncode(reportName)
            self.response.setHeader("Content-Disposition", "attachment; filename=%s-%s-%s.csv" % (fileName, self.fromDtTxt, self.toDtTxt))
            writer = self.response.getPrintWriter("text/csv; charset=UTF-8")
            charts = self.systemConfig.getJsonSimpleMap("charts")
            # Fixed set of chart ids; each must have a "className" entry in
            # the "charts" section of system config.
            chartNames = ["records-by-stage-1", "records-by-stage-2", "records-by-month-1", "records-by-month-2"]
            for chartName in chartNames:
                chartHandlerConfig = charts.get(chartName)
                className = chartHandlerConfig.getString("", "className")
                chartHandlerClass = Class.forName(className)
                chartHandlerObject = chartHandlerClass.newInstance()
                # Configure the handler via reflection: the parameter classes
                # are resolved with get_class (see note on that method below).
                setSystemConfigMethod = chartHandlerClass.getMethod("setSystemConfig", self.get_class("com.googlecode.fascinator.common.JsonSimple"))
                setSystemConfigMethod.invoke(chartHandlerObject, self.systemConfig)
                setScriptingServiceMethod = chartHandlerClass.getMethod("setScriptingServices", self.get_class("com.googlecode.fascinator.portal.services.ScriptingServices"))
                setScriptingServiceMethod.invoke(chartHandlerObject, context['Services'])
                setFromDateMethod = chartHandlerClass.getMethod("setFromDate", self.get_class("java.util.Date"))
                setFromDateMethod.invoke(chartHandlerObject, self.fromDt)
                # NOTE(review): the variable is reused for setToDate — the
                # name "setFromDateMethod" is misleading on the next two lines.
                setFromDateMethod = chartHandlerClass.getMethod("setToDate", self.get_class("java.util.Date"))
                setFromDateMethod.invoke(chartHandlerObject, self.toDt)
                renderChartMethod = chartHandlerClass.getMethod("renderCsv", self.get_class("java.io.Writer"), self.get_class("java.lang.String") )
                renderChartMethod.invoke(chartHandlerObject, writer, chartName);
            # Close once after all charts have been written.
            # NOTE(review): placement reconstructed from mangled source —
            # closing inside the loop would break subsequent iterations.
            writer.close()

    def getErrorMsg(self):
        # Template accessor: empty string when no error occurred.
        return self.errorMsg

    def buildDashboard(self, context):
        # Non-export path: keep the context for the velocity template to use.
        self.velocityContext = context

    def getFromDt(self):
        # Template accessor: raw "from" date text (d/M/yyyy), not a Date.
        return self.fromDtTxt

    def getToDt(self):
        # Template accessor: raw "to" date text (d/M/yyyy), not a Date.
        return self.toDtTxt

    def getDateRange(self):
        # Query-string fragment reproducing the current range, e.g.
        # "from=1/1/2013&to=31/12/2013" (values are not URL-encoded here).
        return "from="+ self.getFromDt() + "&to=" + self.getToDt()

    def getReportName(self):
        # Template accessor: report name from the request or the default.
        return self.reportName

    # Standard Java Class forName seems to have issues at least with Interfaces.
    # This is an alternative method taken from http://stackoverflow.com/questions/452969/does-python-have-an-equivalent-to-java-class-forname
    def get_class(self, kls):
        """
        Resolve a fully-qualified Java class name to its class object by
        importing the package as a Jython module and walking attributes.
        """
        parts = kls.split('.')
        module = ".".join(parts[:-1])
        # __import__ returns the top-level package; walk down the remaining
        # components (including the class name itself) via getattr.
        m = __import__( module )
        for comp in parts[1:]:
            m = getattr(m, comp)
        return m

    def urlEncode(self, text):
        # application/x-www-form-urlencoded encoding (java.net.URLEncoder);
        # used to make the report name safe for the attachment filename.
        return URLEncoder.encode(text, "utf-8")