def show_action_edit(self):
     """Expose today's year/month/day as request defaults and return the create-course template."""
     today = Date()
     # Format the same instant three times to pull out the numeric fields.
     year, month, day = [int(SimpleDateFormat(pattern).format(today))
                         for pattern in ("yyyy", "M", "d")]
     request.setAttribute("default_year", year)
     request.setAttribute("default_month", month)
     request.setAttribute("default_day", day)
     return "/WEB-INF/ftl/course/show_preparecourse_action_create.ftl"
Beispiel #2
0
    def __newDoc(self):
        """Register identity and housekeeping fields for the current payload in the index."""
        self.oid = self.object.getId()
        self.pid = self.payload.getId()
        metadataPid = self.params.getProperty("metaPid", "DC")

        self.utils.add(self.index, "storage_id", self.oid)
        if self.pid == metadataPid:
            # The metadata payload stands for the object itself.
            self.itemType = "object"
        else:
            # Any other payload is a datastream, indexed under the
            # composite id "<object>/<payload>".
            self.oid += "/" + self.pid
            self.itemType = "datastream"
            self.utils.add(self.index, "identifier", self.pid)

        self.utils.add(self.index, "id", self.oid)
        self.utils.add(self.index, "item_type", self.itemType)
        self.utils.add(self.index, "last_modified", self.last_modified)
        self.utils.add(self.index, "harvest_config", self.params.getProperty("jsonConfigOid"))
        self.utils.add(self.index, "harvest_rules",  self.params.getProperty("rulesOid"))

        self.item_security = []
        self.owner = self.params.getProperty("owner", "guest")
        # Stamp the params with both timestamp styles: compact Java format
        # for "last_modified", ISO-8601 via time.strftime for the others.
        formatter = SimpleDateFormat('yyyyMMddHHmmss')
        self.params.setProperty("last_modified", formatter.format(Date()))
        self.utils.add(self.index, "date_object_created", self.params.getProperty("date_object_created"))
        self.params.setProperty("date_object_modified", time.strftime("%Y-%m-%dT%H:%M:%SZ", time.localtime()) )
        self.utils.add(self.index, "date_object_modified",  self.params.getProperty("date_object_modified"))
Beispiel #3
0
 def change_view(self):
     """Replace the layout contents with the result text view showing today's date."""
     layout = self.vlayout
     layout.removeAllViews()
     layout.addView(self.text_result)
     today = SimpleDateFormat('yyyy / MM / dd').format(Calendar.getInstance().getTime())
     self.text_result.setText(today)
Beispiel #4
0
    def __newDoc(self):
        """Seed the index with identity and housekeeping fields for this payload."""
        self.oid = self.object.getId()
        self.pid = self.payload.getId()
        metadataPid = self.params.getProperty("metaPid", "DC")

        add = self.utils.add  # hoist the bound method; every field goes through it
        add(self.index, "storage_id", self.oid)
        if self.pid != metadataPid:
            # Non-metadata payloads are datastreams indexed under the
            # composite id "<object>/<payload>".
            self.oid += "/" + self.pid
            self.itemType = "datastream"
            add(self.index, "identifier", self.pid)
        else:
            self.itemType = "object"

        add(self.index, "id", self.oid)
        add(self.index, "item_type", self.itemType)
        add(self.index, "last_modified", self.last_modified)
        add(self.index, "harvest_config", self.params.getProperty("jsonConfigOid"))
        add(self.index, "harvest_rules", self.params.getProperty("rulesOid"))

        self.item_security = []
        self.owner = self.params.getProperty("owner", "guest")
        # Compact Java-style stamp for "last_modified", ISO-8601 for the rest.
        stamp = SimpleDateFormat('yyyyMMddHHmmss')
        self.params.setProperty("last_modified", stamp.format(Date()))
        add(self.index, "date_object_created",
            self.params.getProperty("date_object_created"))
        self.params.setProperty(
            "date_object_modified",
            time.strftime("%Y-%m-%dT%H:%M:%SZ", time.localtime()))
        add(self.index, "date_object_modified",
            self.params.getProperty("date_object_modified"))
Beispiel #5
0
    def process_record(self):
        """Build a ServiceNow record payload from task variables and create it via the SN client.

        Returns the create_record response.
        """
        content = {}
        # Copy each relevant task variable straight into the payload.
        for field in ('u_request', 'u_application_name', 'cmdb_ci', 'priority',
                      'state', 'assignment_group', 'assigned_to', 'due_date',
                      'short_description', 'description'):
            self.set_from_task_vars(field, content)

        # Dates need to be converted
        content['due_date'] = SimpleDateFormat("MM-dd-yyyy HH:mm:ss").format(
            self.task_vars['due_date'])

        # Also sending release info.
        content['x_xlbv_xl_release_identifier'] = str(release.id)
        content['x_xlbv_xl_release_state'] = str(release.status)

        logger.debug('process_record : send create_record request...')
        logger.debug('process_record : table name: ' + self.table_name)
        logger.debug('process_record : content... ')
        logger.debug(content)
        response = self.sn_client.create_record(self.table_name, content,
                                                getCurrentTask().getId())
        logger.debug('process_record : response...')
        logger.debug(response)

        return response
def flowFileDates(hmsRunName):
    #will not be accurate if there is more than one set of data--specify start date in config file?
    """Find the dates of any FLOW files in the DSS catalog.

    Scans the cataloged pathnames of the global `dss` file for FLOW records
    belonging to the given HMS run and returns their unique D-part dates,
    sorted ascending, formatted as upper-case 'ddMMMyyyy' strings.
    """
    from java.text import SimpleDateFormat
    dateFormat = SimpleDateFormat("ddMMMyyyy")
    print("Getting dates from " + hmsRunName + "...")

    dates = []
    # Keep only /.../FLOW/.../RUN:<name>/ pathnames.
    flowFiles = filter(
        lambda f:
        ((f.split('/')[3] == 'FLOW') and (f.split('/')[6] ==
                                          ('RUN:' + hmsRunName.upper()))),
        dss.getCatalogedPathnames())
    candidateDates = map(lambda x: x.split('/')[4], flowFiles)

    for d in candidateDates:
        # Only accept well-formed ddMMMyyyy D-parts.
        if d[0:2].isdigit() and d[2:5].isalpha() and d[5:9].isdigit():
            date = dateFormat.parse(d)
            # BUG FIX: original called any(lambda, dates); any() takes a
            # single iterable, so that raised TypeError. Use a generator
            # expression with java.util.Date.equals for deduplication.
            dateAlreadyFound = any(x.equals(date) for x in dates)
            if not dateAlreadyFound:
                dates.append(date)

    dates.sort(lambda a, b: a.compareTo(b))
    return map(lambda d: dateFormat.format(d).upper(), dates)
Beispiel #7
0
    def __init__(self, selectFields):
        """Initialise the prepare-course query: read request filter params and echo them to the view."""
        BaseQuery.__init__(self, selectFields)
        self.params = ParamUtil(request)
        self.prepareCourseId = None
        self.createUserId = None
        self.k = self.params.getStringParam("k")  # search keyword
        self.ktype = self.params.getStringParam("ktype")  # search type (which field the keyword applies to)
        if self.ktype == None:
            self.ktype = "1"  # default: keyword searches the title
        self.unit = self.params.getStringParam("unit")  # organisation the lesson-preparation leader belongs to
        self.course_BeginDate = self.params.getStringParam("course_BeginDate")
        self.course_EndDate = self.params.getStringParam("course_EndDate")
        self.subjectId = self.params.getIntParamZeroAsNull("subjectId")
        self.gradeId = self.params.getIntParamZeroAsNull("gradeId")
        # Echo all filter values back onto the request for the template layer.
        request.setAttribute("subjectId", self.subjectId)
        request.setAttribute("gradeId", self.gradeId)
        request.setAttribute("k", self.k)
        request.setAttribute("ktype", self.ktype)
        request.setAttribute("unit", self.unit)
        request.setAttribute("course_BeginDate", self.course_BeginDate)
        request.setAttribute("course_EndDate", self.course_EndDate)
        self.orderType = 0
        self.status = None
        self.stage = None  # preparation stage: "running" in progress; "finishaed" [sic] completed; "will" not started; "recommend" recommended
        self.containChild = None  # exact-subject query flag
        self.prepareCoursePlanId = None
        self.prepareCourseGenerated = True
        self.custormAndWhere = None  # custom WHERE-condition fragment

        sft = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
        self.nowDate = sft.format(Date())
Beispiel #8
0
    def create_sale(self, sale):
        """Insert (or replace) a sale record and decrement product stock.

        `sale` is a dict with keys 'person', 'value', 'description'
        (newline-separated "name:quantity" lines) and 'paid'.
        """
        values = ContentValues()
        values.put("person", sale['person'].lower())
        values.put("total", sale['value'])
        values.put("description", sale['description'].replace('\n',
                                                              ' ').lower())
        values.put("paid", sale['paid'])

        # Timestamp the sale with the current local time.
        calendar = Calendar.getInstance()
        dateformat = SimpleDateFormat('yyyy/MM/dd HH:mm')
        now = dateformat.format(calendar.getTime())
        values.put("date", now)

        db = self.getWritableDatabase()
        db.insertWithOnConflict("sale", None, values,
                                SQLiteDatabase.CONFLICT_REPLACE)

        #remove quantity from products
        products = sale['description'].split('\n')

        for product in products:
            name, quantity = product.split(':')
            quantity = int(quantity)
            # SECURITY FIX: the original interpolated `name` straight into
            # the SQL string, allowing injection via the description text.
            # Use execSQL's bind arguments instead.
            db.execSQL(
                "UPDATE product SET quantity = quantity - ? WHERE name=?",
                [quantity, name])
        db.close()
Beispiel #9
0
def getVigentStretchesQuery(store, fecha):
    """Build a feature query selecting the stretches in force at date *fecha*."""
    stamp = SimpleDateFormat("dd/MM/yyyy").format(fecha)
    # A stretch is in force when it has already entered (or has no entry
    # date) and has not yet exited (or has no exit date).
    condition = ("( fecha_entrada <= '%s' OR fecha_entrada IS NULL) AND "
                 "('%s' <= fecha_salida OR fecha_salida IS NULL)") % (stamp, stamp)
    query = store.createFeatureQuery()
    query.addFilter(condition)
    return query
Beispiel #10
0
def getDate(self):
    """Return the current date as a zero-padded 'yyyyMMdd' string.

    BUG FIX: the original re-interpolated the intermediate string TEMP1
    with a "%d" conversion, which raises TypeError ("%d format: a number
    is required, not str"); explicit zero-padded numeric formatting
    produces the intended value directly.
    """
    calendar = GregorianCalendar()
    date = calendar.getTime()
    dayFormat = SimpleDateFormat("dd")
    monthFormat = SimpleDateFormat("MM")
    yearFormat = SimpleDateFormat("yyyy")
    DAY = int(dayFormat.format(date))
    MONTH = int(monthFormat.format(date))
    YEAR = int(yearFormat.format(date))
    return "%04d%02d%02d" % (YEAR, MONTH, DAY)
Beispiel #11
0
def getDate(self):
    """Return today's date as a 'yyyyMMdd' string (e.g. "20240507").

    BUG FIX: the original built the result with "%d" applied to TEMP1,
    which is already a string, so every call raised TypeError. Zero-padded
    numeric fields give the intended value.
    """
    calendar = GregorianCalendar()
    date = calendar.getTime()
    dayFormat = SimpleDateFormat("dd")
    monthFormat = SimpleDateFormat("MM")
    yearFormat = SimpleDateFormat("yyyy")
    DAY = int(dayFormat.format(date))
    MONTH = int(monthFormat.format(date))
    YEAR = int(yearFormat.format(date))
    CURRENTDATE = "%04d%02d%02d" % (YEAR, MONTH, DAY)
    return CURRENTDATE
 def __init__(self, selectFields):
     """Initialise the personal-case query with default filters and the current timestamp."""
     BaseQuery.__init__(self, selectFields)
     self.params = ParamUtil(request)
     self.prepareCourseId = None
     self.userId = None
     self.privateContentExist = None  # show only cases with content? None (default) = all; True = only with content; False = only without
     self.stage = None  # preparation stage: "running" in progress; "finishaed" [sic] completed; "will" not started
     sft = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
     self.nowDate = sft.format(Date())
Beispiel #13
0
def getSqlDateInGMT(date):
    """Format a millisecond timestamp in GMT using Java's default Date.toString() layout.

        @type: long->str
        @param: date - timestamp
    """
    formatter = SimpleDateFormat()
    # Default Java Date.toString() format
    formatter.applyPattern('EEE MMM dd HH:mm:ss zzz yyyy')
    formatter.setTimeZone(TimeZone.getTimeZone("GMT"))
    return formatter.format(Date(date))
def setupjob(job, args):
    """
    Set up a job to run on a date range of directories.

    Jobs expect two arguments, startdate and enddate, both in yyyy-MM-dd format.
    """

    import java.text.SimpleDateFormat as SimpleDateFormat
    import java.util.Date as Date
    import java.util.Calendar as Calendar
    import com.mozilla.util.DateUtil as DateUtil
    import com.mozilla.util.DateIterator as DateIterator
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat as FileInputFormat
    import org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat as MyInputFormat

    if len(args) != 3:
        raise Exception(
            "Usage: <testpilot_study> <startdate-YYYY-MM-DD> <enddate-YYYY-MM-DD>"
        )

    # use to collect up each date in the given range
    class MyDateIterator(DateIterator):
        def __init__(self):
            self._list = []

        def get(self):
            return self._list

        def see(self, aTime):
            # DateUtil calls this once per day with an epoch-millis value.
            self._list.append(aTime)

    # NOTE(review): `dateformat` and `pathformat` are module-level globals
    # defined elsewhere in this file — confirm before reuse.
    sdf = SimpleDateFormat(dateformat)
    study = args[0]
    startdate = Calendar.getInstance()
    startdate.setTime(sdf.parse(args[1]))

    enddate = Calendar.getInstance()
    enddate.setTime(sdf.parse(args[2]))

    dates = MyDateIterator()

    # Collect every day between start and end into `dates`.
    DateUtil.iterateByDay(startdate.getTimeInMillis(),
                          enddate.getTimeInMillis(), dates)

    # One input path per day, then hand them all to the Hadoop job.
    paths = []
    for d in dates.get():
        paths.append(pathformat % (study, sdf.format(Date(d))))

    job.setInputFormatClass(MyInputFormat)
    FileInputFormat.setInputPaths(job, ",".join(paths))
    job.getConfiguration().set("org.mozilla.jydoop.mappertype", "TEXT")
Beispiel #15
0
    def __init__(self, selectFields):
        """Initialise query defaults, capture request parameters and stamp the current time."""
        BaseQuery.__init__(self, selectFields)
        self.params = ParamUtil(request)
        # Filter state — everything starts unset.
        self.orderType = 0
        self.createUserId = None
        self.ownerType = None
        self.ownerId = None
        self.status = None
        self.qryDate = None
        self.k = self.params.getStringParam("k")
        self.filter = self.params.getStringParam("filter")

        stamper = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
        self.nowDate = stamper.format(Date())
	def actionPerformed(self,actionEvent):
		"""Save the SCL tune-up data (quad fields, cavity amp/phase) into a date-stamped .xdxf file chosen via a file dialog."""
		self.scl_long_tuneup_controller.getMessageTextField().setText("")
		rightNow = Calendar.getInstance()
		date_format = SimpleDateFormat("MM.dd.yyyy")
		time_str = date_format.format(rightNow.getTime())
		fc = JFileChooser(constants_lib.const_path_dict["XAL_XML_ACC_FILES_DIRS_PATH"])
		fc.setDialogTitle("Save SCL data into the SCL_new.xdxf file")
		fc.setApproveButtonText("Save")
		fl_filter = FileNameExtensionFilter("SCL Acc File",["xdxf",])
		fc.setFileFilter(fl_filter)
		fc.setSelectedFile(File("SCL_"+time_str+".xdxf"))
		returnVal = fc.showOpenDialog(self.scl_long_tuneup_controller.linac_wizard_document.linac_wizard_window.frame)
		if(returnVal == JFileChooser.APPROVE_OPTION):
			fl_out = fc.getSelectedFile()
			fl_path = fl_out.getPath()
			# Append the .xdxf extension if the user did not type one.
			if(fl_path.rfind(".xdxf") != (len(fl_path) - 5)):
				fl_out = File(fl_out.getPath()+".xdxf")
			#---------prepare the XmlDataAdaptor 
			root_DA = XmlDataAdaptor.newEmptyDocumentAdaptor()
			scl_DA = root_DA.createChild("xdxf")
			scl_DA.setValue("date",time_str)
			scl_DA.setValue("system","sns")
			scl_DA.setValue("version","2.0")
			#---- SCLMed	
			seq_name_arr = ["SCLMed","SCLHigh","HEBT1"]
			for seq_name in seq_name_arr:
				accl = self.scl_long_tuneup_controller.linac_wizard_document.accl
				seq = accl.findSequence(seq_name)
				# NOTE: `.and(` / `.or(` are Java method calls on the type
				# qualifiers; Jython-specific (Python keywords as attribute names).
				cavs = seq.getAllNodesWithQualifier(AndTypeQualifier().and((OrTypeQualifier()).or(SCLCavity.s_strType)))
				quads = seq.getAllNodesWithQualifier(AndTypeQualifier().and((OrTypeQualifier()).or(Quadrupole.s_strType)))
				scl_seq_DA = scl_DA.createChild("sequence")
				scl_seq_DA.setValue("id",seq.getId())
				# One <node> child per quad, carrying its default magnetic field.
				for quad in quads:
					node_DA = scl_seq_DA.createChild("node")
					node_DA.setValue("id",quad.getId())
					attr_DA = node_DA.createChild("attributes")
					field_DA = attr_DA.createChild("magnet")
					scl_quad_fields_dict_holder = self.scl_long_tuneup_controller.scl_long_tuneup_init_controller.scl_quad_fields_dict_holder
					field_DA.setValue("dfltMagFld",str(scl_quad_fields_dict_holder.quad_field_dict[quad]))
				# One <sequence> child per cavity, carrying design amp/phase.
				for cav in cavs:
					node_DA = scl_seq_DA.createChild("sequence")
					node_DA.setValue("id",cav.getId())
					attr_DA = node_DA.createChild("attributes")
					rf_cav_DA = attr_DA.createChild("rfcavity")
					cav_wrappper = self.scl_long_tuneup_controller.getCav_WrapperForCavId(cav.getId())
					(amp,phase) =  (cav_wrappper.designAmp,cav_wrappper.designPhase)
					rf_cav_DA.setValue("amp",float("%8.5f"%amp))
					rf_cav_DA.setValue("phase",float("%8.3f"%phase))
			root_DA.writeTo(fl_out)
Beispiel #17
0
 def execute(self):
     """Load calendar entries for the requested (or current) year/month and render the FTL view."""
     self.parentGuid=self.params.safeGetStringParam("parentGuid")
     self.parentType=self.params.safeGetStringParam("parentType")
     self.year = self.params.safeGetIntParam("year")
     self.month = self.params.safeGetIntParam("month")
     self.day = self.params.safeGetIntParam("day")
     if self.year==0:
         # No explicit year requested: default to today.
         # NOTE(review): these defaults are strings (SimpleDateFormat.format
         # returns str) while request-supplied values are ints — the int()
         # casts below cover both cases, but the "year"/"month"/"day"
         # request attributes end up with mixed types.
         now=Date()
         sdf = SimpleDateFormat("yyyy")
         self.year= sdf.format(now)
         sdf = SimpleDateFormat("M")
         self.month= sdf.format(now)
         sdf = SimpleDateFormat("d")
         self.day= sdf.format(now)
     
     self.calendars = self.calendar_svc.getCalendars(self.parentGuid,self.parentType,int(self.year),int(self.month))
     
     request.setAttribute("parentGuid",self.parentGuid)
     request.setAttribute("parentType",self.parentType)
     request.setAttribute("calendars",self.calendars)
     request.setAttribute("year",self.year)
     request.setAttribute("month",self.month)
     request.setAttribute("day",self.day)
     return "/WEB-INF/mod/calendarevent/GetCalendarMsg.ftl"
Beispiel #18
0
def dateformat(t, format, language=None):
    """
    Format python date to string using Java SimpleDateFormat.
    
    :param t: Python date.
    :param language: optional language code used to build a Java Locale
        for locale-sensitive pattern letters.
    
    :returns: Format string of the date
    """
    if language is None:
        formatter = SimpleDateFormat(format)
    else:
        formatter = SimpleDateFormat(format, Locale(language))
    return formatter.format(jdate(t))
Beispiel #19
0
 def __init__(self, imp):
     '''Get the metadata from the given dm3 image.
     '''
     meta = GatanMetadataExtractor(imp)
     self.exposure = meta.getExposure()
     self.magnification = meta.getMagnification()
     self.mag_factor = meta.getActualMagnification() / self.magnification
     self.mag_unit = 'x'
     # Energy loss is NaN when the image carries no EELS metadata.
     if Double.isNaN(meta.getEnergyloss()):
         self.energyloss = 0
     else:
         self.energyloss = meta.getEnergyloss()
     self.date = meta.getDateAndTime()
     self.date_string = SimpleDateFormat('yyyyMMdd').format(self.date)
     self.name = meta.getName()
     self.prop_dict = {}
Beispiel #20
0
def setupjob(job, args):
    """
    Set up a job to run on a date range of directories.

    Jobs expect two arguments, startdate and enddate, both in yyyy-MM-dd format.
    """

    import java.text.SimpleDateFormat as SimpleDateFormat
    import java.util.Date as Date
    import java.util.Calendar as Calendar
    import com.mozilla.util.DateUtil as DateUtil
    import com.mozilla.util.DateIterator as DateIterator
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat as FileInputFormat
    import org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat as MyInputFormat

    if len(args) != 3:
        raise Exception("Usage: <testpilot_study> <startdate-YYYY-MM-DD> <enddate-YYYY-MM-DD>")

    # use to collect up each date in the given range
    class MyDateIterator(DateIterator):
       def __init__(self):
          self._list = []
       def get(self):
          return self._list
       def see(self, aTime):
          # DateUtil calls this once per day with an epoch-millis value.
          self._list.append(aTime)

    # NOTE(review): `dateformat` and `pathformat` are module-level globals
    # defined elsewhere in this file — confirm before reuse.
    sdf = SimpleDateFormat(dateformat)
    study = args[0]
    startdate = Calendar.getInstance()
    startdate.setTime(sdf.parse(args[1]))

    enddate = Calendar.getInstance()
    enddate.setTime(sdf.parse(args[2]))

    dates = MyDateIterator()

    # Collect every day between start and end into `dates`.
    DateUtil.iterateByDay(startdate.getTimeInMillis(), enddate.getTimeInMillis(), dates)

    # One input path per day, then hand them all to the Hadoop job.
    paths = []
    for d in dates.get():
       paths.append(pathformat % (study, sdf.format(Date(d))))

    job.setInputFormatClass(MyInputFormat)
    FileInputFormat.setInputPaths(job, ",".join(paths));
    job.getConfiguration().set("org.mozilla.jydoop.mappertype", "TEXT")
Beispiel #21
0
 def createFile(self):
     """Create the EXAFS data file with its header block, unless it already exists."""
     if (os.path.exists(self.datafilename) == 0):
         fid = open(self.datafilename, 'w')
         # NOTE(review): 'hh' is the 12-hour clock in SimpleDateFormat;
         # 'HH' (24-hour) may have been intended — confirm.
         df = SimpleDateFormat('hh.mm.dd.MM.yyyy')
         today = df.format(Date())
         print "Writing data to file:" + self.datafilename
         print "Writing mca file to:" + self.mcadir
         # write datetime
         line = " I18_EXAFS_RUN=" + str(self.fileno) + " " + today
         print >> fid, line
         # Header: title, three condition lines, sample position, column names.
         print >> fid, self.title
         print >> fid, self.condition1
         print >> fid, self.condition2
         print >> fid, self.condition3
         print >> fid, 'Sample X=', MicroFocusSampleX.getPosition(
         ), 'Sample Y=', MicroFocusSampleY.getPosition()
         print >> fid, 'comboDCM energy time I0 It drain flu1 flu2 flu3 flu4 flu5 flu6 flu7 flu8 flu9 flutot'
         fid.close()
Beispiel #22
0
def savePreviousArguments(managedServerName):
    from java.io import File
    from java.io import FileOutputStream
    from java.util import Properties
    from java.util import Date
    from java.text import SimpleDateFormat

    import string
    startToEdit()
    # parameter on the wsdl ant task call
    fileLocation = sys.argv[1].replace("\\", "/")
    print "The backup file location is"
    print fileLocation
    try:
        dateFormat = SimpleDateFormat('_d_MMM_yyyy_HH_mm_ss')
        date = Date()
        formattedDate = dateFormat.format(date)
        print formattedDate
    except:
        print "The date cannot be created/formatted"

    try:
        propsFile = File(fileLocation + managedServerName + formattedDate +
                         "_config.bkp")
        print propsFile.exists()
        if (propsFile.exists() == 0):
            propsFile.createNewFile()
    except:
        print "The file cannot be created on:"
        print propsFile.getAbsoluteFile()
        dumpStack()

    previousProperties = Properties()
    print '===> Saving the  previous arguments - ' + managedServerName
    cd('/Servers/' + managedServerName)
    print "Getting the Classpath"
    classPath = cmo.getServerStart().getClassPath()
    print classPath
    if classPath == None:
        classPath = ""
    previousProperties.setProperty("classPath", classPath)
    print "Saving Arguments to file"
    previousProperties.store(FileOutputStream(propsFile), None)
    print '===> Saved arguments! Please verify the file on:' + fileLocation + "in" + managedServerName
def savePreviousArguments(managedServerName):
    """Back up a managed server's current JVM arguments into a date-stamped .bkp properties file.

    Runs inside WLST: relies on `sys.argv`, `startToEdit`, `cd`, `cmo` and
    `dumpStack` supplied by the scripting environment. sys.argv[1] is the
    backup directory.
    """
    from java.io import File
    from java.io import FileOutputStream
    from java.util import Properties
    from java.util import Date
    from java.text import SimpleDateFormat
    
    import string
    startToEdit()
    # parameter on the wsdl ant task call
    fileLocation = sys.argv[1].replace("\\","/")
    print "The backup file location is"
    print fileLocation
    try:
        dateFormat = SimpleDateFormat('_d_MMM_yyyy_HH_mm_ss')
        date = Date()
        formattedDate = dateFormat.format(date)
        print formattedDate
    # NOTE(review): if this bare except fires, `formattedDate` is never
    # bound and the File() call below raises NameError.
    except:
        print "The date cannot be created/formatted"
        
    try:    
        propsFile = File(fileLocation+ managedServerName + formattedDate+"_config.bkp");
        print propsFile.exists()
        if(propsFile.exists() == 0):
            propsFile.createNewFile()
    # NOTE(review): `propsFile` may itself be unbound here if File() failed.
    except:
        print "The file cannot be created on:"
        print propsFile.getAbsoluteFile()
        dumpStack()     
        
    previousProperties = Properties()
    print '===> Saving the  previous arguments - ' + managedServerName
    cd('/Servers/'+managedServerName)
    print "Getting the VMArgs"
    vmArgs = cmo.getServerStart().getArguments()
    print vmArgs
    if vmArgs == None:
        vmArgs = ""
    previousProperties.setProperty("vmArgs", vmArgs)
    print "Saving Arguments to file"
    previousProperties.store(FileOutputStream(propsFile),None)
    print '===> Saved arguments! Please verify the file on:'+ fileLocation + "in" + managedServerName 
def makeTimeSeriesContainer(station, interval, tz, records, decodeInfo):
    """Build a HEC TimeSeriesContainer from (epoch-millis, value) record pairs.

    Values are scaled by the decode info's DSS_FACTOR; unparseable values
    become Constants.UNDEFINED. Times are rendered in the DSS time zone if
    the global `dssTimezone` is set, else the USGS zone.
    """
    global timezones
    sdf = SimpleDateFormat("ddMMMyyyy, HH:mm")
    if dssTimezone:
        # Lazily cache the configured DSS zone on first use.
        if not timezones["DSS"]:
            timezones["DSS"] = TimeZone.getTimeZone(
                tzInfo[dssTimezone]["JAVA"])
        sdf.setTimeZone(timezones["DSS"])
    else:
        sdf.setTimeZone(timezones["USGS"])
    dd, decodeInfo = decodeInfo
    cal = Calendar.getInstance()
    t = HecTime()
    tsc = TimeSeriesContainer()
    tsc.interval = interval
    times = []
    values = []
    tsc.quality = None
    factor = decodeInfo["DSS_FACTOR"]
    for j in range(len(records)):
        millis, value = records[j]
        # Convert epoch millis -> formatted string -> HecTime integer.
        cal.setTimeInMillis(millis)
        t.set(sdf.format(cal.getTime()))
        times.append(t.value())
        try:
            values.append(float(value) * factor)
        except:
            # Non-numeric value: store the DSS missing-value marker.
            values.append(Constants.UNDEFINED)
    tsc.times = times
    tsc.values = values
    tsc.startTime = times[0]
    tsc.endTime = times[-1]
    tsc.numberValues = len(values)
    tsc.timeZoneID = sdf.getTimeZone().getID()
    tsc.timeZoneRawOffset = sdf.getTimeZone().getRawOffset()
    return tsc
def flowFileDates(hmsRunName):
    #will not be accurate if there is more than one set of data--specify start date in config file?
    """Find the dates of any FLOW files in the DSS catalog.

    Returns the unique D-part dates of FLOW records for the given HMS run,
    sorted ascending, as upper-case 'ddMMMyyyy' strings.
    """
    from java.text import SimpleDateFormat
    dateFormat = SimpleDateFormat("ddMMMyyyy")
    print("Getting dates from " + hmsRunName + "...")

    dates = []
    # Keep only /.../FLOW/.../RUN:<name>/ pathnames.
    flowFiles = filter(lambda f:((f.split('/')[3] == 'FLOW') and (f.split('/')[6] == ('RUN:'+hmsRunName.upper()))),
                       dss.getCatalogedPathnames())
    candidateDates = map(lambda x:x.split('/')[4], flowFiles)

    for d in candidateDates:
        # Only accept well-formed ddMMMyyyy D-parts.
        if d[0:2].isdigit() and d[2:5].isalpha() and d[5:9].isdigit():
            date = dateFormat.parse(d)
            # BUG FIX: original called any(lambda, dates); any() takes a
            # single iterable, so that raised TypeError. Use a generator
            # expression with java.util.Date.equals for deduplication.
            dateAlreadyFound = any(x.equals(date) for x in dates)
            if not dateAlreadyFound:
                dates.append(date)

    dates.sort(lambda a,b:a.compareTo(b))
    return map(lambda d: dateFormat.format(d).upper(), dates)
Beispiel #26
0
def formatDate(date, sfmt="yyyy-MM-dd'T'HH:mm:ss", tfmt="dd/MM/yyyy"):
    """Re-format a date string: parse it with pattern *sfmt*, render it with *tfmt*."""
    parsed = SimpleDateFormat(sfmt).parse(date)
    return SimpleDateFormat(tfmt).format(parsed)
Beispiel #27
0
formatter.setTimeZone(TimeZone.getTimeZone(snTimeZone))

startDate = snData[startField]
print "------"
try:
    print "Schedule Task=> date = %s" % (startDate)
    date = formatter.parse(startDate)
    print "Schedule Task=> date = %s" % (date)
    release = getCurrentRelease()
    releaseID = release.id
    phaseTitle=targetPhase
    taskTitle=targetTask
    print "Schedule Task=> Phase / Task = %s / %s" % ( phaseTitle, taskTitle )
    phase = phaseApi.searchPhasesByTitle( phaseTitle, releaseID )
    print "Schedule Task=> phase = %s" % ( phase )
    phaseID = phase[0].id
    task = taskApi.searchTasksByTitle( taskTitle, phaseTitle, releaseID)
    print "Schedule Task=> task = %s" % ( task )
    taskID = task[0].id
    myTask = taskApi.getTask( taskID )
    myTask.waitForScheduledStartDate = True
    myTask.scheduledStartDate = xlrFormat.format(date)
    print "Schedule Task=> Task = %s" % ( myTask )
    print "Schedule Task=> Task.scheduledStartDate = %s" % ( myTask.scheduledStartDate )
    taskApi.updateTask( myTask )

except ParseException :
    print "exception
# End try
print "------"
Beispiel #28
0
def hdfs_setupjob(job, args):
    """
    Similar to the above, but run telemetry data that's already been exported
    to HDFS.

    Jobs expect two arguments, startdate and enddate, both in yyyyMMdd format.
    """

    import java.text.SimpleDateFormat as SimpleDateFormat
    import java.util.Date as Date
    import java.util.Calendar as Calendar
    import java.util.concurrent.TimeUnit as TimeUnit
    import com.mozilla.util.DateUtil as DateUtil
    import com.mozilla.util.DateIterator as DateIterator
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat as FileInputFormat
    import org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat as MyInputFormat

    if len(args) != 2:
        raise Exception("Usage: <startdate-YYYYMMDD> <enddate-YYYYMMDD>")

    # use to collect up each date in the given range
    class MyDateIterator(DateIterator):
       def __init__(self):
          self._list = []
       def get(self):
          return self._list
       def see(self, aTime):
          # DateUtil calls this once per day with an epoch-millis value.
          self._list.append(aTime)

    # NOTE(review): `dateformat`, `hdfs_dateformat` and `hdfs_pathformat`
    # are module-level globals defined elsewhere in this file.
    sdf = SimpleDateFormat(dateformat)
    sdf_hdfs = SimpleDateFormat(hdfs_dateformat)
    startdate = Calendar.getInstance()
    startdate.setTime(sdf.parse(args[0]))

    enddate = Calendar.getInstance()
    enddate.setTime(sdf.parse(args[1]))

    nowdate = Calendar.getInstance()

    # HDFS only contains the last 2 weeks of data (up to yesterday)
    startMillis = startdate.getTimeInMillis()
    endMillis = enddate.getTimeInMillis()
    nowMillis = nowdate.getTimeInMillis()

    startDiff = nowMillis - startMillis
    if TimeUnit.DAYS.convert(startDiff, TimeUnit.MILLISECONDS) > 14:
        raise Exception("HDFS Data only includes the past 14 days of history. Try again with more recent dates or use the HBase data directly.")

    endDiff = nowMillis - endMillis
    if TimeUnit.DAYS.convert(endDiff, TimeUnit.MILLISECONDS) < 1:
        raise Exception("HDFS Data only includes data up to yesterday. For (partial) data for today, use the HBase data directly.")

    dates = MyDateIterator()

    # Collect every day between start and end into `dates`.
    DateUtil.iterateByDay(startMillis, endMillis, dates)

    # One HDFS input path per day, then hand them all to the Hadoop job.
    paths = []
    for d in dates.get():
       paths.append(hdfs_pathformat % (sdf_hdfs.format(Date(d))))

    job.setInputFormatClass(MyInputFormat)
    FileInputFormat.setInputPaths(job, ",".join(paths));
Beispiel #29
0
    def evaluate(self, times, max_time, origins_csv, destinations_csv, csv_writer, split=500, do_merge=False):
        '''
        evaluate the shortest paths between origins and destinations
        uses the routing options set in setup() (run it first!)

        Parameters
        ----------
        times: list of date times, the desired start/arrival times for evaluation
        origins_csv: file with origin points
        destinations_csv: file with destination points
        csv_writer: CSVWriter, configured writer to write results
        split: batch size; sources are processed in slices of this many points
        do_merge: merge the results over time, only keeping the best connections
        max_time: maximum travel-time in seconds (the smaller this value, the smaller the shortest path tree, that has to be created; saves processing time)
        '''

        origins = self.otp.loadCSVPopulation(origins_csv, LATITUDE_COLUMN, LONGITUDE_COLUMN)
        destinations = self.otp.loadCSVPopulation(destinations_csv, LATITUDE_COLUMN, LONGITUDE_COLUMN)

        # When routing "arrive by", the batch is sliced over destinations
        # instead of origins.
        sources = origins if not self.arrive_by else destinations
        n_slices = (sources.size() / split) + 1

        if n_slices > 1:
            print 'Splitting sources into {} part(s) with {} points each part'.format(n_slices, split)

        from_index = 0;
        to_index = 0;
        i = 1

        # Process the sources slice by slice to bound memory use.
        while True:
            if to_index >= sources.size():
                break
            from_index = to_index
            to_index += split
            if to_index >= sources.size():
                to_index = sources.size()
            sliced_sources = sources.get_slice(from_index, to_index)
            if n_slices > 1:
                print('calculating part {}/{}'.format(i, n_slices))
            i += 1

            if not self.arrive_by:
                origins = sliced_sources
            else:
                destinations = sliced_sources
            self.request.setOrigins(origins)
            self.request.setDestinations(destinations)
            self.request.setLogProgress(self.print_every_n_lines)

            if self.arrive_by:
                time_note = ' arrival time '
            else:
                time_note = 'start time '

    #         # if evaluation is performed in a time window, routes exceeding the window will be ignored
    #         # (worstTime already takes care of this, but the time needed to reach the snapped the OSM point is also taken into account here)
    #         if len(times) > 1:
    #             print 'Cutoff set: routes with {}s exceeding the time window ({}) will be ignored (incl. time to reach OSM-net)'.format(time_note, times[-1])
    #             cutoff = times[-1]
    #             self.request.setCutoffTime(cutoff.year, cutoff.month, cutoff.day, cutoff.hour, cutoff.minute, cutoff.second)

            # iterate all times
            results = [] # dimension (if not merged): times x targets (origins resp. destinations)
            sdf = SimpleDateFormat('HH:mm:ss')
            sdf.setTimeZone(TimeZone.getTimeZone("GMT +2"))
            for t, date_time in enumerate(times):
                # compare seconds since epoch (different ways to get it from java/python date)
                epoch = datetime.utcfromtimestamp(0)
                time_since_epoch = (date_time - epoch).total_seconds()
                self.request.setDateTime(date_time.year, date_time.month, date_time.day, date_time.hour, date_time.minute, date_time.second)
                # has to be set every time after setting datetime (and also AFTER setting arriveby)
                self.request.setMaxTimeSec(max_time)

                msg = 'Starting evaluation of routes with ' + time_note + date_time.strftime(DATETIME_FORMAT)
                print msg

                results_dt = self.batch_processor.evaluate(self.request)

                # if there already was a calculation: merge it with new results
                if do_merge and len(results) > 0:
                    for i, prev_result in enumerate(results[0]):
                        if prev_result is not None:
                            prev_result.merge(results_dt[i])
                #write and append if no merging is needed (saves memory)
                else:
                    search_time = sdf.format(date_time)
                    csv_writer.write(results_dt, additional_columns={'search_time': search_time}, append=True)
                    for r in results_dt:
                        del(r)

            if do_merge:
                # flatten the results
                results = [r for res in results for r in res]
                csv_writer.write(results, append=False)
Beispiel #30
0
#		F:servletSessionsModule=
#		F:systemModule=
#		F:threadPoolModule=
#		F:transactionModule=
#		F:webAppModule=F'
#-----------------------------------------------------------------

if (len(sys.argv) == 2):
    # Directory where the collected metrics are written
    global OUTPUT_PATH
    global ERROR_FILE

    modules = sys.argv[0]
    OUTPUT_PATH = sys.argv[1]

    # NOTE(review): DAY_FORMAT, String and processServerMetrics are defined
    # elsewhere in this script.
    ERROR_FILE  =  os.path.join(OUTPUT_PATH, DAY_FORMAT.format(Date()) + "_errorLog.log")

    # First argument is a comma-separated list of WebSphere PMI modules.
    splitModules = modules.split(',')
    for aModule in splitModules:
        # Strip any embedded whitespace from the module name
        aModule = String(aModule).replace(String(" "),String(""))
        processServerMetrics(aModule)

else:
    print "Este script requer dois parametros para sua execucao:"
    print "1 - Lista de modulos para os quais serao extraidas as metricas."
    print "Segue a lista de modules disponiveis:"
    print "   - beanModule" 
    print "   - connectionPoolModule"
    print "   - hamanagerModule"
    print "   - objectPoolModule "
Beispiel #31
0
class EsLogger(WorkerThread):
    """Asynchronous Elasticsearch logger built on WorkerThread.

    Two-instance pattern, selected by ``threadName``:
    - ``threadName is None``: the *controller* instance. It holds the work
      queue, spawns a background worker (``startEsLoggerThread``) and feeds
      it via ``logToEs``/``enqueueWork``.
    - ``threadName`` set: the *worker* instance. It owns the ES client,
      drains the queue in ``run`` and writes documents to ES (optionally
      via bulk requests).
    """

    def __init__(self, threadName, workerConfig):
        """Initialize as controller (threadName None) or worker (named)."""
        WorkerThread.__init__(self, threadName, workerConfig)

        self.logger = Logger.getLogger("ElasticSearch.EsLogger")
        # self.logger.setLevel(Level.DEBUG)

        self.workerConfig = workerConfig

        # Default Bulk Request Settings for ES Logging.
        # - Set to True to use bulk requests for logs.
        self.useBulkReq = False
        self.bulkReqCounter = 0
        self.bulkReqExecCountTrigger = 1000
        self.lastBulkReqFlush = datetime.now()

        if (threadName is None):
            # ==== 1st Instance (threadName is None) ====
            # Get the EsLogger queue.
            # This object will feed the queue through this reference.
            self.wq = workerConfig.wq
            self.esNode = self.workerConfig.esNode

        else:
            # ==== 2nd Instance (threadName is not None) ====
            self.esNode = self.workerConfig.esNode
            self.esClient = self.esNode.getClient()
            self.esBulkReq = EsBulkReq(self.esClient, None)

            self.indexName = workerConfig.indexName
            
            # If bulkReq config are set in the workerConfig object, use them.
            if workerConfig.useBulkReq is not None:
                self.useBulkReq = workerConfig.useBulkReq
            if workerConfig.bulkReqExecCountTrigger is not None:
                self.bulkReqExecCountTrigger = workerConfig.bulkReqExecCountTrigger

        # Json SerDe objects
        self.boon = BoonJson()

        self.esLoggerWorker = None
        self.esLoggerThread = None
        self.stopThread = False
        self.threaded = False

        # Timestamp format written into every log document (logDateTime).
        self.dtfmt = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ")

        if threadName is None:
            self.threadName = "EsLoggerController"
            # Startup the Background thread.
            self.startEsLoggerThread()
        else:
            self.threadName = threadName

    def startEsLoggerThread(self):
        """Spawn the single background worker instance and its thread (controller side)."""
        if (     (self.esLoggerWorker is None)
             and (self.esLoggerThread is None) ):
            # Create the workers config object.
            # Place the esLogger worker queue into the config object.
            # Place the index name into the config object.
            wconfig = EsLoggerConfig(self.esNode, self.workerConfig.esHosts,
                                     self.workerConfig.indexName, self.wq)
            
            wconfig.useBulkReq = self.workerConfig.useBulkReq
            wconfig.bulkReqExecCountTrigger = self.workerConfig.bulkReqExecCountTrigger

            if self.workerConfig.esNode is not None:
                esNode = self.workerConfig.esNode
            else:
                esNode = self.esNode

            if esNode == 0:
                self.logger.error("An esNode is required to create the EsLogger WorkerThread.")
                sys.exit(0)

            wconfig.esNode = esNode
            wconfig.esHosts = self.workerConfig.esHosts

            # Create the esLogger object that will run on a thread.
            # Give the object a thread name and the workerConfig object.
            # This esLogger object will run on a thread and feed off of the
            # worker queue in the worker config object.
            self.esLoggerWorker = EsLogger("EsLoggerThread", wconfig)
            # Create the thread.
            self.esLoggerThread = Thread(name=self.esLoggerWorker.threadName,
                                         target=self.esLoggerWorker.start, args=())
            # Start the thread.
            self.esLoggerThread.start()
        else:
            self.logger.warn("The EsLoggerThread is already running!")

    
    def stopEsLoggerThread(self):
        """Flush pending bulk inserts, signal the worker to stop, then interrupt it."""
        if self.esLoggerWorker.useBulkReq:
            self.logger.debug("Flush Bulk Inserts...")
            self.esLoggerWorker.flushBulkInserts()

        # Give 7 seconds to flush any pending inserts.
        # NOTE(review): the log message says "Sleep 2" but the sleep is 7s.
        self.logger.debug("-- Sleep 2...")
        pytime.sleep(7)

        if self.esLoggerWorker:
            self.logger.debug("Stop Thread...")
            self.esLoggerWorker.stopThread = True

            # Allow 2 seconds for thread to die
            self.logger.debug("-- Sleep 2...")
            pytime.sleep(2)

            # Use interrupts to stop threads.
            # Interrupt is needed because the thread may be waiting on the queue.
            self.logger.debug("Interrupt Thread...")
            tries = 5
            while self.esLoggerWorker.isAlive() and tries > 0:
                self.esLoggerWorker.interrupt()
                pytime.sleep(.002)
                tries = tries - 1


    def stop(self):
        """Public stop hook; delegates to stopEsLoggerThread."""
        self.stopEsLoggerThread()

    def run(self):
        """Worker loop: drain the queue and index each item; flush bulk batches every ~5s when idle."""
        self.threaded = True
        self.wq = self.workerConfig.wq

        while not self.stopThread:
            # Read from workerQueue.
            wrk = self.dequeueWork()

            if wrk is not None:
                # Log to ES
                logLevel = wrk.get('logLevel')
                self._logToEs(logLevel, wrk)
            else:
                # Flush bulk inserts if there are any in the queue,
                # Every 5 seconds.
                unFlushedSecs = (datetime.now() - self.lastBulkReqFlush).seconds
                if (    (unFlushedSecs >= 5)
                    and (self.bulkReqCounter > 0)):
                    self.logger.debug("...EsLogs Bulk Inserts... Flushing after 5 seconds...")
                    self.flushBulkInserts()

    # Place work into the work queue.
    def enqueueWork(self, workObj):
        method = "enqueueWork"
        self.logger.debug("[{}.{}] Place workObj into workerQueue".format(self.threadName,
                                                                          method) )
        self.wq.put(workObj)


    # Pulls a work item from the queue; returns None on timeout.
    def dequeueWork(self):
        method = "dequeueWork"
        # True: blocks if there are no items in the queue.
        #    1: blocking on the queue releases after 1 second.
        try:
            wrk = None
            wrk = self.wq.get(True,1)
        except BaseException as ex:
            # When we hit a BaseException while waiting on workQue, it is prob a timeout error on an empty queue.
            if ('Empty' not in type(ex).__name__):
                self.logger.error("BaseException in [{}.{}] - [{}: {}]".format(self.threadName, method, type(ex), ex))
            if wrk is None:
                self.logger.debug("--> Getting wrk, Queue.get() - timed out.")
        except JException as ex:
            self.logger.info("JException in [{}.{}] - [{}]".format(self.threadName, method, ex))
            # If we get an interrupt while waiting for work, we need to stop the thread.
            self.stopThread = True
        return wrk


    def logToEs(self, logLevel, jMap):
        """Controller-side API: stamp the event with time/level and enqueue it."""
        jMap.put('logDateTime', self.dtfmt.format(datetime.now()))
        jMap.put('logLevel', logLevel)

        # Push data into queue and return
        self.enqueueWork(jMap)

        
    # Accepts a Java Map object which we can serialize a json string via Boon.
    # Then use the json string to store to ES.
    def _logToEs(self, logLevel, jMap):
        # *** Interactive Debug seems to mess with multi-threading
        # pdb.set_trace()

        # Convert the Java Map to a JSON string
        jsonEvent = self.boon.serToJson(jMap)

        try:
            # Get the date from the logDateTime
            edt = jMap.get('logDateTime')
            # Pull the yyyyMMdd date out of the timestamp for a daily index suffix.
            pat = r'^(\d+)\-(\d+)\-(\d+)T.*' 
            match = re.search(pat, edt) 
            if match: 
                edt = match.group(1) +  match.group(2) +  match.group(3) 
            else:
                edt = 'unknown'

            # Add the date to the index name.
            indexName = self.indexName + '-' + edt

            # Prep to Index document.
            self.logger.debug("\n==index==> index[{}] logLevel[{}]\n".format(indexName,logLevel))
            i1 = self.esClient.prepareIndex(indexName, logLevel, None)
            i2 = i1.setSource(jsonEvent)

            self.doEsInsert(i2, indexName, '', jsonEvent)

        except JException as ex:
            # ex: g.elasticsearch.index.mapper.MapperParsingException:...
            #     failed to parse [Data.Return.Value.Death.DateNormalized]
            # Change the name of the bad field to s_<fieldName> in the jsonStr and reinsert the object.
            # - Call on self.indexEvent(newJson) recursively as there may be multiple failures.
            self.logger.error("EsLogger Exception Caught [{}] [{}]".format(ex, jsonEvent))

    def doEsInsert(self, i2, indexName, typeMap, jsonEvent):
        """Execute one index request immediately, or add it to the pending bulk batch."""
        #=================================================================================
        # Single index request operation...
        # Submits one index operation at a time.
        #=================================================================================
        if not self.useBulkReq:
            self.logger.debug("\n==single==> {}\n".format(jsonEvent))
            # Execution a single index request.
            indxResp = i2.execute().actionGet()
        #=================================================================================

        #=================================================================================
        # Bulk Index Operation...
        # Keep adding to the bulk index request until we reach N index operations,
        # then submit the request.
        #=================================================================================
        else:
            self.logger.debug("\n==bulk==> {}\n".format(jsonEvent))
            self.esBulkReq.add(i2, indexName, typeMap, jsonEvent)
            self.bulkReqCounter = self.bulkReqCounter + 1
            if self.bulkReqCounter >= self.bulkReqExecCountTrigger:
                self.esBulkReq.execute()
                self.bulkReqCounter = 0
                self.lastBulkReqFlush = datetime.now()
        #=================================================================================

    # If Bulk Request mode is in use, one should call on this function to flush
    # any pending bulk requests at the end of bulk operation cycles.
    def flushBulkInserts(self):
        if self.useBulkReq and (self.bulkReqCounter > 0):
            self.logger.debug("\n==> BulkReq Flush!\n")
            self.esBulkReq.execute()
            self.lastBulkReqFlush = datetime.now()


    def close(self):
        """Alias for stop(); shuts the background worker down."""
        self.stopEsLoggerThread()
Beispiel #32
0
def currentDate(format = 'yyyy.MM.dd'):
  """Return today's date rendered with the given Java SimpleDateFormat pattern."""
  return SimpleDateFormat(format).format(Date())
#!/usr/bin/env python
from time import sleep
from java.util import Date
from java.text import SimpleDateFormat

# WLST monitoring loop: connects to the admin server and appends thread-pool
# statistics for each managed server to threads_mon.log every 15 seconds.
# Credentials/URL are placeholders (scrubbed) -- fill in before running.
username='******'
password='******'
adminurl='t3://host:port'
connect(username,password,adminurl)
domainRuntime()
servers=['server1','server2']
df=SimpleDateFormat('yyyy.MM.dd HH:mm:ss S')
fh = open('threads_mon.log', 'w')
# NOTE(review): 'while true:' never terminates, so disconnect() below is
# unreachable and fh is never closed -- presumably intentional for a
# kill-to-stop monitor, but confirm.
while true:
	now=df.format(Date())
	for server in servers:
		# Navigate to the server's thread-pool runtime MBean.
		cd('ServerRuntimes/'+server+'/ThreadPoolRuntime/ThreadPoolRuntime')
		compReq = cmo.getCompletedRequestCount()
		status = cmo.getHealthState()
		hoggingThreads = cmo.getHoggingThreadCount()
		totalThreads = cmo.getExecuteThreadTotalCount()
		idleThrds = cmo.getExecuteThreadIdleCount()
		pending = cmo.getPendingUserRequestCount()
		qLen = cmo.getQueueLength()
		thruput = cmo.getThroughput()
		# NOTE(review): compReq, status and hoggingThreads are collected but
		# not included in the output line below.
		outline="%s %s %3d %3d %3d %3d %6.2f\n" % (now,server,totalThreads,idleThrds,pending,qLen,thruput)
		#print '%s %s %d %d %d %d %f' % (now,server,totalThreads,idleThrds,pending,qLen,thruput)
		fh.write(outline)
	sleep(15)
	fh.flush()
disconnect()
Beispiel #34
0
	def getActiveTransactions(self):
		"""Collect per-transaction SAP workload statistics for the configured day.

		Picks whichever aggregation RFC exists (SAPWL_TCODE_AGGREGATION or
		its _COPY variant), optionally maps transactions to the users who ran
		them, aggregates counters across all servers of the first site, and
		returns a list of java.util.Properties -- one per active transaction.
		"""
		activeTransactions = []
		whereClauses = ArrayList()
		whereClauses.add("FUNCNAME IN ('SAPWL_TCODE_AGGREGATION','SAPWL_TCODE_AGGREGATION_COPY')");
		result = self.executeQuery("TFDIR", whereClauses, "FUNCNAME")#@@CMD_PERMISION sap protocol execution
		
		functionName = None
		if result.next():
			functionName = result.getString("FUNCNAME")

		if functionName == None:
			logger.warn('getActiveTransactions: active transaction function is not found')
			return activeTransactions
		
		# Resolve the statistics day: explicit 'from_date' property (either
		# already yyyyMMdd or MM/dd/yyyy, converted below) or today's date.
		day = self.__client.getProperty('from_date')
		if day == None:
			today = Date()
			sfDate = SimpleDateFormat("yyyyMMdd")
			day = sfDate.format(today)
		elif day.find('/') != -1:
			try:
				sfDate = SimpleDateFormat("MM/dd/yyyy")
				parsedDate = sfDate.parse(day)
				sfDate = SimpleDateFormat("yyyyMMdd")
				day = sfDate.format(parsedDate)
			except:
				logger.reportWarning('Failed to parse date ', day)
				
		logger.debug('Parsed start date:', day)
			

		logger.debug('Active transactions from data:', day)
		# Optionally build transaction -> users map from the raw stat records.
		mapTransactionToUsers = None
		getUsers = Boolean.parseBoolean(self.__client.getProperty("get_users"))
		if getUsers:
			mapTransactionToUsers = HashMap()
			
			funcParams = HashMap()
			funcParams.put('READ_START_DATE', day)
			funcParams.put('READ_START_TIME', '000000')
			funcParams.put('READ_END_DATE', day)
			funcParams.put('READ_END_TIME', '235959')
			funcParams.put('READ_ONLY_MAINRECORDS', 'X')
			
			logger.debug('executing func:SAPWL_STATREC_FROM_REMOTE_SYS(', str(funcParams),')')
			
			fields = ArrayList()
			fields.add('TCODE')
			fields.add('ACCOUNT')
			usersResult = self.__client.executeFunction('SAPWL_STATREC_FROM_REMOTE_SYS', funcParams, 'NORMAL_RECORDS', fields)
			while usersResult.next():
				transaction = usersResult.getString('TCODE')
				if len(transaction) > 0:
					user = usersResult.getString("ACCOUNT");
					users = mapTransactionToUsers.get(transaction)
					if users == None:
						users = HashMap()
						mapTransactionToUsers.put(transaction,users)
					users.put(user,users);

		# Aggregate statistics per transaction across every server instance.
		self.getSites()
		site = self.getSites().getCell(0,0)
		servers = self.getServers(site)
		numServers = servers.getRowCount()
		transactionToStats = HashMap()
		for j in range(numServers):
			try:
				instance = servers.getCell(j,0);
				logger.debug('getActiveTransactions:executing function[' + functionName + '] for instance [' + instance + ']')
				if functionName == 'SAPWL_TCODE_AGGREGATION_COPY':
					records = self.callSapwlTcodeAggregationCopy(instance,day)
	
					# The _COPY result apparently delivers one value per row:
					# each records.next() advances to the next field of the
					# same transaction (steps, response, cpu, db, gui,
					# round trips, text) -- TODO confirm record layout.
					while records.next():
						transaction = (str(records.getString(0))).strip()
						mapUsers = None
						if mapTransactionToUsers != None:
							mapUsers = mapTransactionToUsers.get(transaction)
						if (transaction != None) and (len(transaction) > 0):
							stats = transactionToStats.get(transaction)
							if stats == None:
								stats = TransactionStatistics(transaction)
								transactionToStats.put(transaction,stats)
	
							if mapUsers != None:
								stats.users = ArrayList(mapUsers.keySet())
							if records.next():
								stats.steps = stats.steps + int(float(records.getString(0)))
							if records.next():
								stats.responseTime = stats.responseTime + int(float(records.getString(0)))
							if records.next():
								stats.cpuTime = stats.cpuTime + int(float(records.getString(0)))
							if records.next():
								stats.dbTime = stats.dbTime + int(float(records.getString(0)))
							if records.next():
								stats.guiTime = stats.guiTime + int(float(records.getString(0)))
							if records.next():
								stats.roundTrips = stats.roundTrips + int(float(records.getString(0)))
							if records.next():
								stats.text = (str(records.getString(0))).strip()
				else:
					fields = ArrayList()
					fields.add('ENTRY_ID')
					fields.add('COUNT')
					fields.add('RESPTI')
					fields.add('CPUTI')
					fields.add('DBTIME')
					fields.add('GUITIME')
					fields.add('GUICNT')
					fields.add('TEXT')
					records = self.getApplicationStatistics(functionName, instance, day, fields)

					while records.next():
						entryID = records.getString("ENTRY_ID");
						transaction = self.getTransactionFromEntryID(entryID);
						mapUsers = None
						if mapTransactionToUsers != None:
							mapUsers = mapTransactionToUsers.get(transaction)
						if (transaction != None) and (len(transaction) > 0):
							stats = transactionToStats.get(transaction)
							if(stats == None):
								stats = TransactionStatistics(transaction)
								transactionToStats.put(transaction,stats)

							if(mapUsers != None):
								stats.users = ArrayList(mapUsers.keySet())
							count = records.getString("COUNT")
							stats.steps = stats.steps + int(count)
							stats.responseTime = stats.responseTime + int(records.getString("RESPTI"))
							stats.cpuTime = stats.cpuTime + int(records.getString("CPUTI"))
							stats.dbTime = stats.dbTime + int(records.getString("DBTIME"))
							stats.guiTime = stats.guiTime + int(records.getString("GUITIME"))
							stats.roundTrips = stats.roundTrips + int(records.getString("GUICNT"))
							stats.text = records.getString("TEXT")
			except:
				msg = sys.exc_info()[1]
				strmsg = '%s' % msg
				if strmsg.find('NO_DATA_FOUND') != -1:
					logger.debug(strmsg)
					logger.reportWarning('No data found in the given time range')
				else:
					logger.debugException('Unexpected error getting transactions for function:' + str(functionName))
					logger.reportWarning('Unexpected error getting transactions for function:' + str(functionName) + ':' + strmsg)

		transactions = ArrayList(transactionToStats.keySet())
		logger.debug("getActiveTransactions: Found [" + str(transactions.size()) + "] active transactions")
		if logger.isDebugEnabled():
			logger.debug("getActiveTransactions: transactions = " + str(transactions))
		transactionsInfo = self.getTransactionsInfo(transactions)

		# Flatten the aggregated stats into Properties objects.
		it = transactionToStats.values()
		for stats in it:
			prop = Properties()
			prop.setProperty('data_name', str(stats.transaction))
			prop.setProperty('dialog_steps', str(stats.steps))
			prop.setProperty('total_response_time', str(stats.responseTime))
			# NOTE(review): average_response_time is filled from
			# getAverageCPUTime() -- looks like it should be an
			# average-response accessor; confirm against TransactionStatistics.
			prop.setProperty('average_response_time', str(stats.getAverageCPUTime()))
			prop.setProperty('total_cpu_time', str(stats.cpuTime))
			prop.setProperty('average_cpu_time', str(stats.getAverageCPUTime()))
			prop.setProperty('round_trips', str(stats.roundTrips))
			prop.setProperty('total_db_time', str(stats.dbTime))
			prop.setProperty('average_db_time', str(stats.getAverageDBTime()))
			prop.setProperty('total_gui_time', str(stats.guiTime))
			prop.setProperty('average_gui_time', str(stats.getAverageGUITime()))
			prop.setProperty('text', stats.text)
			# NOTE(review): key says 'averagedbtime' but the value is the
			# user count -- confirm the intended property name.
			prop.setProperty('saptransaction_averagedbtime', str(stats.users.size()))

			info = transactionsInfo.get(stats.transaction)
			if info != None:
				prop.setProperty('devclass', info.devclass)
				prop.setProperty('program', info.program)
				prop.setProperty('screen', info.screen)
				# NOTE(review): empty property key; the else-branch sets
				# 'version' here -- probably meant 'version'.
				prop.setProperty('', info.screen)
			else:
				prop.setProperty('devclass', "")
				prop.setProperty('program', "")
				prop.setProperty('screen', "")
				prop.setProperty('version', "")
				
			activeTransactions.append(prop)
		
		return activeTransactions
Beispiel #35
0
 def getCurrentDate(self, format):
     """Return the current date/time rendered with the given Java date pattern."""
     return SimpleDateFormat(format).format(Date())
    def getStdout(self):
        """Return the collected standard output (delegates to self.stdout.getOutput())."""
        return self.stdout.getOutput()
    def getStdoutLines(self):
        """Return the collected standard output split into lines (self.stdout.getOutputLines())."""
        return self.stdout.getOutputLines()
    def getStderr(self):
        """Return the collected standard error (delegates to self.stderr.getOutput())."""
        return self.stderr.getOutput()
    def getStderrLines(self):
        """Return the collected standard error split into lines (self.stderr.getOutputLines())."""
        return self.stderr.getOutputLines()
# Build a timestamp suffix for the results filename, then render the demo
# parameters file with the (escape-adjusted) test path and timeout settings.
myDate = Date(System.currentTimeMillis())
# BUGFIX: pattern was 'yyyyMMddyyHHMMss' -- 'yy' was duplicated and 'MM'
# (month) sat in the minutes slot; 'mm' is minutes in SimpleDateFormat.
df = Sdf('yyyyMMddHHmmss')
dt = df.format(myDate)
# Double up backslashes and escape colons so the path survives the Java
# properties-file format below.
testPath = testPath.replace( "\\", "\\\\")
testPath = testPath.replace( "\:", "\\\:")
paramfile = """
# Demo Parameters file
#
Test1=%s
resultsFilename=Results%s.xml
runType=FileSystem
PerScenarioTimeOut=%s
controllerPollingInterval=%s
fsTimeout=%s
""" % (testPath, dt, PerScenarioTimeout, controllerPolliingInterval, fsTimeout)


Beispiel #37
0
 def formatVersion(self, dString):
     """Re-render a compact yyyyMMddHHmmss stamp as dd/MM/yyyy HH:mm:ss."""
     parsed = SimpleDateFormat("yyyyMMddHHmmss").parse(dString)
     return SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(parsed)
Beispiel #38
0
def mustReader(paramMnemonics, startDate, endDate):
    #
    driverName = "com.mysql.jdbc.Driver"
    #
    #Class.forName(driverName)
    #
    url = "jdbc:mysql://" + Configuration.getProperty(
        "vega.must.server.ip") + "/" + Configuration.getProperty(
            "vega.must.server.repository"
        ) + "?user="******"vega.must.server.user"
        ) + "&password="******"vega.must.server.password")
    #
    loader = Thread.currentThread().getContextClassLoader()
    c = loader.loadClass("com.mysql.jdbc.Driver")
    #print c
    driver = c.newInstance()
    #print driver
    con = driver.connect(url, java.util.Properties())
    #con = DriverManager.getConnection(url)

    stmt = con.createStatement()
    formatter = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")

    date1 = formatter.parse(endDate)
    date2 = formatter.parse(startDate)
    stringDate1 = formatter.format(date1)
    stringDate2 = formatter.format(date2)
    #
    p = Product()
    #ah=TableDataset()

    p.meta["type"] = StringParameter(value="MUST Parameter Product")
    p.meta["creator"] = StringParameter(value="MustClient")
    p.meta["description"] = StringParameter(value="MUST")
    p.meta["startDate"] = DateParameter(value=FineTime(date2))
    p.meta["endDate"] = DateParameter(value=FineTime(date1))
    #
    for pars in range(len(paramMnemonics)):
        #
        ah = TableDataset()
        parameterName = paramMnemonics[pars]
        p.meta[parameterName] = BooleanParameter(value=Boolean(True))
        #
        sql = "select PID,DBTYPE from parameter where (PNAME='" + parameterName + "')"
        rs = stmt.executeQuery(sql)

        while (rs.next()):
            pid = rs.getInt(1)
            tableName = rs.getString(2) + "paramvalues"

        sql = "select datetime,value from %s where (pid=%s and datetime<'%s' and datetime>'%s')" % (
            tableName, pid, date1.time, date2.time)

        rs = stmt.executeQuery(sql)

        list = []
        dates = Long1d()
        values = Float1d()

        while (rs.next()):
            dates.append(rs.getLong(1))
            values.append(rs.getFloat(2))

        print "Retrieved parameter from MUST: ", parameterName

        if (pars == 0):
            #ah["Time"]=Column(data=dates,description="Time")
            ah.meta["creator"] = StringParameter(value="MustClient")
            ah.meta["description"] = StringParameter(value="MUST")
            ah.meta["startDate"] = DateParameter(value=FineTime(date2))
            ah.meta["endDate"] = DateParameter(value=FineTime(date1))
        #
        ah["Time"] = Column(data=dates, description="Time")
        ah[parameterName] = Column(data=values, description=parameterName)
        #
        p[parameterName] = ah
    rs.close()
    stmt.close()
    con.close()
    #
    return p
def date_as_string(date):
    """Format a java.util.Date as 'yyyy-MM-dd HH:mm:ss' in UTC."""
    formatter = SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    formatter.setTimeZone(TimeZone.getTimeZone("UTC"))
    return formatter.format(date)
Beispiel #40
0
def mustReader(paramMnemonics,startDate,endDate) :
        """Read MUST parameter time series from MySQL into a Product.

        paramMnemonics -- list of parameter mnemonics (PNAME values).
        startDate / endDate -- 'yyyy-MM-dd HH:mm:ss' window bounds.
        Returns a Product with one TableDataset per parameter.
        """
        #
        driverName="com.mysql.jdbc.Driver"
        #
        #Class.forName(driverName)
        #
        # NOTE(review): the '******' fragments are credentials scrubbed from
        # the published source; this expression is not valid as-is and must
        # be restored (likely '+ Configuration.getProperty(...)') before use.
        url = "jdbc:mysql://"+Configuration.getProperty("vega.must.server.ip")+"/"+Configuration.getProperty("vega.must.server.repository")+"?user="******"vega.must.server.user")+"&password="******"vega.must.server.password")
        #
        # Load the JDBC driver via the context classloader (Jython-friendly).
        loader = Thread.currentThread().getContextClassLoader()
        c = loader.loadClass("com.mysql.jdbc.Driver")
        #print c
        driver = c.newInstance()
        #print driver
        con = driver.connect(url, java.util.Properties())
        #con = DriverManager.getConnection(url)
        
        stmt = con.createStatement()
        formatter = SimpleDateFormat ("yyyy-MM-dd HH:mm:ss")
        
        date1 =  formatter.parse(endDate)
        date2 =  formatter.parse(startDate)
        # NOTE(review): stringDate1/stringDate2 (and 'list' below) are unused.
        stringDate1 = formatter.format(date1)
        stringDate2 = formatter.format(date2)
        #
        p = Product()
        #ah=TableDataset()
        
        p.meta["type"]=StringParameter(value="MUST Parameter Product")
        p.meta["creator"]=StringParameter(value="MustClient")
        p.meta["description"]=StringParameter(value="MUST")
        p.meta["startDate"]=DateParameter(value=FineTime(date2))
        p.meta["endDate"]=DateParameter(value=FineTime(date1))
        #
        for pars in range(len(paramMnemonics)) :
                #
                ah=TableDataset()
                parameterName = paramMnemonics[pars]
                p.meta[parameterName]=BooleanParameter(value=Boolean(True))
                #
                # Resolve parameter id and per-type value table.
                # NOTE(review): 'pid'/'tableName' stay unbound if the query
                # returns no rows (NameError below) -- mnemonics assumed valid.
                sql = "select PID,DBTYPE from parameter where (PNAME='"+parameterName+"')"
                rs = stmt.executeQuery(sql)
                
                while (rs.next()):
                        pid=rs.getInt(1)
                        tableName = rs.getString(2)+"paramvalues"
                
                sql = "select datetime,value from %s where (pid=%s and datetime<'%s' and datetime>'%s')"%(tableName,pid,date1.time,date2.time)
                
                rs = stmt.executeQuery(sql)
                
                list = []
                dates=Long1d()
                values=Float1d()
                
                while (rs.next()):
                        dates.append(rs.getLong(1))
                        values.append(rs.getFloat(2))
                
                print "Retrieved parameter from MUST: ", parameterName
                
                if (pars == 0) :
                        # Mirror the product-level metadata on the first dataset.
                        #ah["Time"]=Column(data=dates,description="Time")
                        ah.meta["creator"]=StringParameter(value="MustClient")
                        ah.meta["description"]=StringParameter(value="MUST")
                        ah.meta["startDate"]=DateParameter(value=FineTime(date2))
                        ah.meta["endDate"]=DateParameter(value=FineTime(date1))
                #
                ah["Time"]=Column(data=dates,description="Time")
                ah[parameterName]=Column(data=values,description=parameterName)
                #
                p[parameterName]=ah
        rs.close()
        stmt.close()
        con.close()
        #
        return p
Beispiel #41
0
def cardData(context,
             main=None,
             add=None,
             filterinfo=None,
             session=None,
             elementId=None):
    u'''Data function for the card that edits the contents of the number-series
    lines table. ``add`` selects the mode: 'add' returns an empty record
    (pre-filled with the selected series id), 'edit' loads the currently
    selected line from the cursor.'''

    # Extract the grid context(s) from the session and remember the current
    # record id of each grid (keyed by grid @id).
    gridContext = json.loads(
        session)['sessioncontext']['related']['gridContext']
    gridContext = gridContext if isinstance(gridContext,
                                            list) else [gridContext]
    currentId = {}
    for gc in gridContext:
        if "currentRecordId" in gc.keys():
            currentId[gc["@id"]] = gc["currentRecordId"]

    # NOTE(review): xformsdata is only assigned for add in ('add', 'edit');
    # any other value raises NameError at the return below -- confirm callers
    # never pass anything else.
    if add == 'add':
        if "numbersSeriesGrid" in currentId.keys():
            seriesId = currentId["numbersSeriesGrid"]
        else:
            seriesId = ''
        # Blank record template for a new series line.
        xformsdata = {
            "schema": {
                "numberSeries": {
                    "@seriesId": seriesId,
                    "@numberOfLine": "",
                    "@startingDate": "",
                    "@startingNumber": "",
                    "@endingNumber": "",
                    "@incrimentByNumber": "",
                    "@lastUsedNumber": "",
                    "@isOpened": "",
                    "@lastUsedDate": "",
                    "@prefix": "",
                    "@postfix": "",
                    "@isFixedLength": ""
                }
            }
        }
    elif add == 'edit':
        # Dates are rendered in the XForms-expected ISO millisecond format.
        sdf = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS")
        linesOfNumbersSeries = linesOfNumbersSeriesCursor(context)
        linesOfNumbersSeries.get(currentId["numbersSeriesGrid"],
                                 int(currentId["linesNumbersSeriesGrid"]))
        xformsdata = {
            "schema": {
                "numberSeries": {
                    "@seriesId":
                    linesOfNumbersSeries.seriesId,
                    "@numberOfLine":
                    linesOfNumbersSeries.numberOfLine,
                    "@startingDate":
                    unicode(sdf.format(linesOfNumbersSeries.startingDate)),
                    "@startingNumber":
                    linesOfNumbersSeries.startingNumber,
                    "@endingNumber":
                    linesOfNumbersSeries.endingNumber,
                    "@incrimentByNumber":
                    linesOfNumbersSeries.incrimentByNumber,
                    "@lastUsedNumber":
                    linesOfNumbersSeries.lastUsedNumber,
                    "@isOpened":
                    unicode(linesOfNumbersSeries.isOpened).lower(),
                    "@lastUsedDate":
                    unicode(sdf.format(linesOfNumbersSeries.lastUsedDate)),
                    "@prefix":
                    linesOfNumbersSeries.prefix,
                    "@postfix":
                    linesOfNumbersSeries.postfix,
                    "@isFixedLength":
                    unicode(linesOfNumbersSeries.isFixedLength).lower()
                }
            }
        }

    # print xformsdata
    # UI wiring: a single click refreshes the linesNumbersSeriesGrid element.
    xformssettings = {
        "properties": {
            "event": {
                "@name": "single_click",
                "@linkId": "1",
                "action": {
                    "#sorted": [{
                        "main_context": "current"
                    }, {
                        "datapanel": {
                            "@type": "current",
                            "@tab": "current",
                            "element": {
                                "@id": "linesNumbersSeriesGrid",
                                "add_context": ""
                            }
                        }
                    }]
                }
            }
        }
    }

    return JythonDTO(XMLJSONConverter.jsonToXml(json.dumps(xformsdata)),
                     XMLJSONConverter.jsonToXml(json.dumps(xformssettings)))
Beispiel #42
0
 def formatDate(self, date):
     """Convert a yyyy-MM-dd'T'HH:mm:ss timestamp string to dd/MM/yyyy."""
     return SimpleDateFormat("dd/MM/yyyy").format(
         SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss").parse(date))
 def format_not_after(self):
     """Format the certificate's expiration (not-after) timestamp,
     rendered as ISO-8601 with a zone offset."""
     return SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX").format(self.__not_after)
uv = DataMath.magnitude(uwnd, vwnd)
#uv = DataMath.Magnitude(uwnd, vwnd)
uvq = uv.mul(qv).div(9.8*1000)
#uvq = uv*qv/(9.8*1000)

#---- Create data layer
print 'Create data layer...'
dataLayer = DrawMeteoData.createShadedLayer(qhdivg, "WaterVaporFlux", "Flux", False)

#---- Add layer
print 'Add layers...'
mapFrame.addLayer(dataLayer)
mapFrame.moveLayer(dataLayer, 0)
#---- Zoom data
mapLayout.getActiveLayoutMap().zoomToExtentLonLatEx(Extent(0,360,-90.1,90.1))
#---- Set MapLayout
format = SimpleDateFormat('yyyy-MM-dd')
aTime = dataAir.getDataInfo().getTimes().get(tIdx)
mapLayout.addText('Water Vapor Flux Divergence - ' + format.format(aTime), 320, 30, 'Arial', 16)
aLegend = mapLayout.addLegend(650, 100)
aLegend.setLegendStyle(LegendStyles.Bar_Vertical)
aLegend.setLegendLayer(dataLayer)
layoutMap.setGridXDelt(60)
layoutMap.setGridYDelt(30)
layoutMap.setDrawGridLine(False)
mapLayout.paintGraphics()

frame = JFrame('MeteoInfo Script Sample', size = (750, 530))
frame.add(mapLayout)
frame.visible = True
print 'Finished!'
 def formatDate(self, date):
     """Convert a yyyy-MM-dd'T'HH:mm:ss timestamp string to dd/MM/yyyy."""
     parsed = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss").parse(date)
     return SimpleDateFormat("dd/MM/yyyy").format(parsed)
Beispiel #46
0
 def formatVersion(self, dString):
     """Re-render a yyyyMMddHHmmss version stamp as dd/MM/yyyy HH:mm:ss."""
     return SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(
         SimpleDateFormat("yyyyMMddHHmmss").parse(dString))
def setupjob(job, args):
    """
    Similar to the above, but run telemetry data that's already been exported
    to HDFS.

    Jobs expect two arguments, startdate and enddate, both in yyyyMMdd format.

    Raises Exception when the argument count is wrong, or when the requested
    range falls outside the ~2 weeks of history kept in HDFS.

    NOTE(review): relies on module-level ``dateformat``, ``hdfs_dateformat``
    and ``hdfs_pathformat`` that are defined elsewhere in this file.
    """

    # Jython: pull in the Java/Hadoop classes needed to build the input paths.
    import java.text.SimpleDateFormat as SimpleDateFormat
    import java.util.Date as Date
    import java.util.Calendar as Calendar
    import java.util.concurrent.TimeUnit as TimeUnit
    import com.mozilla.util.DateUtil as DateUtil
    import com.mozilla.util.DateIterator as DateIterator
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat as FileInputFormat
    import org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat as MyInputFormat

    if len(args) != 2:
        raise Exception("Usage: <startdate-YYYYMMDD> <enddate-YYYYMMDD>")

    # use to collect up each date in the given range
    class MyDateIterator(DateIterator):
        def __init__(self):
            self._list = []

        def get(self):
            # Epoch-millisecond timestamps collected so far.
            return self._list

        def see(self, aTime):
            # Callback invoked by DateUtil.iterateByDay once per day.
            self._list.append(aTime)

    sdf = SimpleDateFormat(dateformat)
    sdf_hdfs = SimpleDateFormat(hdfs_dateformat)
    startdate = Calendar.getInstance()
    startdate.setTime(sdf.parse(args[0]))

    enddate = Calendar.getInstance()
    enddate.setTime(sdf.parse(args[1]))

    nowdate = Calendar.getInstance()

    # HDFS only contains the last 2 weeks of data (up to yesterday)
    startMillis = startdate.getTimeInMillis()
    endMillis = enddate.getTimeInMillis()
    nowMillis = nowdate.getTimeInMillis()

    # Reject start dates older than the retention window.
    startDiff = nowMillis - startMillis
    if TimeUnit.DAYS.convert(startDiff, TimeUnit.MILLISECONDS) > 14:
        raise Exception(
            "HDFS Data only includes the past 14 days of history. Try again with more recent dates or use the HBase data directly."
        )

    # Reject end dates newer than yesterday (today's data is incomplete).
    endDiff = nowMillis - endMillis
    if TimeUnit.DAYS.convert(endDiff, TimeUnit.MILLISECONDS) < 1:
        raise Exception(
            "HDFS Data only includes data up to yesterday. For (partial) data for today, use the HBase data directly."
        )

    dates = MyDateIterator()

    DateUtil.iterateByDay(startMillis, endMillis, dates)

    # One HDFS input path per day in [startdate, enddate].
    paths = []
    for d in dates.get():
        paths.append(hdfs_pathformat % (sdf_hdfs.format(Date(d))))

    job.setInputFormatClass(MyInputFormat)
    FileInputFormat.setInputPaths(job, ",".join(paths))
    job.getConfiguration().set("org.mozilla.jydoop.mappertype", "TEXT")
Beispiel #48
0
def makeTimeSeriesContainer(tsData, timeZone, pathname=None):
    '''
    Construct a TimeSeriesContainer object from a python dictionary that was
    created from a single "time-series" returned from the CWMS RADAR web
    service.

    Args:
        tsData:   dict holding either "regular-interval-values" or
                  "irregular-interval-values", plus a dotted CWMS "name"
                  (loc.param.paramType.intv.dur.ver).
        timeZone: Java time-zone ID applied to all parsed/formatted times.
        pathname: optional "/A/B/C/D/E/F" HEC-DSS pathname; when given it
                  overrides the CWMS id for naming the container (D part is
                  blanked).

    Returns:
        A populated TimeSeriesContainer, or None if anything failed (the
        traceback is written via output()).
    '''
    #---------------#
    # initial setup #
    #---------------#
    tsc = None
    try:
        tz = TimeZone.getTimeZone(timeZone)
        # ISO-8601 (with offset) for parsing RADAR times; HecTime's own
        # "ddMMMyyyy, HH:mm" layout for feeding ht.set().
        sdf8601 = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX")
        sdfHecTime = SimpleDateFormat("ddMMMyyyy, HH:mm")
        cal = Calendar.getInstance()
        for obj in sdf8601, sdfHecTime, cal:
            obj.setTimeZone(tz)
        ht = HecTime()
        times, values, qualities = [], [], []
        #------------------#
        # process the data #
        #------------------#
        if tsData.has_key("regular-interval-values"):
            #----------------------------------------#
            # regular time series (a lot to process) #
            #----------------------------------------#
            rts = tsData["regular-interval-values"]
            intvlStr = rts["interval"]
            unit = rts["unit"].split()[0]
            if intvlStr.startswith("PT"):
                # ISO-8601 duration with a time part, e.g. PT15M / PT1H.
                intvlNum, intvlUnit = int(intvlStr[2:-1]), intvlStr[-1]
                try:
                    factor, field = {
                        "M": (1, Calendar.MINUTE),
                        "H": (60, Calendar.HOUR_OF_DAY)
                    }[intvlUnit]
                except KeyError:
                    raise Exception("Unexpected interval: %s" % intvlStr)
            else:
                # Date-part duration, e.g. P1D / P1M / P1Y; factor converts
                # the nominal interval to minutes for tsc.interval.
                intvlNum, intvlUnit = int(intvlStr[1:-1]), intvlStr[-1]
                try:
                    factor, field = {
                        "Y": (1440 * 365, Calendar.YEAR),
                        "M": (1440 * 30, Calendar.MONTH),
                        "D": (1440, Calendar.DATE)
                    }[intvlUnit]
                except KeyError:
                    raise Exception("Unexpected interval: %s" % intvlStr)
            intvl = intvlNum * factor
            segmentCount = rts["segment-count"]
            # Anchor the calendar at the first segment's start time.
            cal.setTimeInMillis(
                sdf8601.parse(rts["segments"][0]["first-time"]).getTime())
            for i in range(segmentCount):
                for j in range(rts["segments"][i]["value-count"]):
                    ht.set(sdfHecTime.format(cal.getTimeInMillis()))
                    v, q = rts["segments"][i]["values"][j]
                    times.append(ht.value())
                    values.append(v)
                    qualities.append(q)
                    cal.add(field, intvlNum)
                if i < segmentCount - 1:
                    # Pad the gap up to the next segment with UNDEFINED
                    # values so the series stays on a regular interval.
                    nextBegin = sdf8601.parse(
                        rts["segments"][i + 1]["first-time"]).getTime()
                    time = cal.getTimeInMillis()
                    while time < nextBegin:
                        ht.set(sdfHecTime.format(time))
                        times.append(ht.value())
                        values.append(Constants.UNDEFINED)
                        qualities.append(0)
                        cal.add(field, intvlNum)
                        time = cal.getTimeInMillis()
        elif tsData.has_key("irregular-interval-values"):
            #------------------------------#
            # irregular time series (easy) #
            #------------------------------#
            its = tsData["irregular-interval-values"]
            unit = its["unit"].split()[0]
            intvl = 0
            for t, v, q in its["values"]:
                ht.set(sdfHecTime.format(sdf8601.parse(t)))
                times.append(ht.value())
                values.append(v)
                qualities.append(q)
        else:
            raise Exception("Time series has no values")
        #--------------------------------------------------#
        # code common to regular and irregular time series #
        #--------------------------------------------------#
        tsc = TimeSeriesContainer()
        tsc.times = times
        tsc.values = values
        tsc.quality = qualities
        tsc.numberValues = len(times)
        tsc.startTime = times[0]
        tsc.endTime = times[-1]
        tsc.interval = intvl
        tsc.units = unit
        tsc.timeZoneID = timeZone
        tsc.timeZoneRawOffset = tz.getRawOffset()

        # Six-part CWMS time-series identifier.
        name = tsData["name"]
        loc, param, paramType, intv, dur, ver = name.split(".")
        if pathname:
            #---------------------------#
            # use pathname if specified #
            #---------------------------#
            A, B, C, D, E, F = 1, 2, 3, 4, 5, 6
            parts = pathname.split("/")
            parts[D] = ''
            tsc.fullName = "/".join(parts)
            tsc.watershed = parts[A]
            # bare except: a part with no "-" simply has no sub-part
            try:
                tsc.location, tsc.subLocation = parts[B].split("-", 1)
            except:
                tsc.location = parts[B]
            try:
                tsc.parameter, tsc.subParameter = parts[C].split("-", 1)
            except:
                tsc.parameter = parts[C]
            try:
                tsc.version, tsc.subVersion = parts[F].split("-", 1)
            except:
                tsc.version = parts[F]
        else:
            #--------------------------------------#
            # no pathname, use CWMS time series id #
            #--------------------------------------#
            try:
                tsc.location, tsc.subLocation = loc.split("-", 1)
            except:
                tsc.location = loc
            try:
                tsc.parameter, tsc.subParameter = param.split("-", 1)
            except:
                tsc.parameter = param
            try:
                tsc.version, tsc.subVersion = ver.split("-", 1)
            except:
                tsc.version = ver
        # Map the CWMS parameter type to the HEC-DSS data type; "Inst"
        # precipitation is treated as cumulative rather than instantaneous.
        tsc.type = {
            "Total": "PER-CUM",
            "Max": "PER-MAX",
            "Min": "PER-MIN",
            "Const": "INST-VAL",
            "Ave": "PER-AVER",
            "Inst": ("INST-VAL", "INST-CUM")[param.startswith("Precip")]
        }[paramType]
    except:
        # Best-effort: log the traceback and fall through to return None.
        output(traceback.format_exc())
    return tsc
def to_hectime(datestr, pattern="MM/dd/yyyy HH:mm"):
    """Parse *datestr* with *pattern* and wrap it in a HecTime.

    The parsed instant is first rendered in HEC's "ddMMMyyyy HHmm"
    layout, which is what the HecTime constructor expects.
    """
    parsed = SimpleDateFormat(pattern).parse(datestr)
    hec_text = SimpleDateFormat("ddMMMyyyy HHmm").format(parsed)
    return HecTime(hec_text)
Beispiel #50
0
def _getTimeStamp():
    """Return a bracketed WAS-style timestamp, e.g. "[21/10/05 13:30:05.123 PDT]".

    SimpleDateFormat is not thread safe, so a fresh instance is created on
    every call rather than being cached at module level.
    """
    # 'SSS' is the SimpleDateFormat pattern letter for milliseconds; the
    # previous lowercase 'sss' (seconds-in-minute, padded to 3 digits)
    # repeated the seconds instead of printing fractional seconds.
    sdf = SimpleDateFormat("yy/MM/dd HH:mm:ss.SSS z")
    return '[' + sdf.format(Date()) + ']'
Beispiel #51
0
def listADDEImages(server, dataset, descriptor,
    accounting=DEFAULT_ACCOUNTING,
    location=None,
    coordinateSystem=CoordinateSystems.LATLON,
    place=None,
    mag=None,
    position=0,
    unit=None,
    day=None,
    time=None,
    debug=False,
    band=None,
    size=None):
    """Creates a list of ADDE images.
    
    Args:
        localEntry: Local ADDE dataset.
        server: ADDE server.
        dataset: ADDE dataset group name.
        descriptor: ADDE dataset descriptor.
        day: Day range. ('begin date', 'end date')
        time: ('begin time', 'end time')
        position: Position number. Values may be integers or the string "ALL". (default=0)
        band: McIDAS band number; only images that have matching band number will be returned.
        accounting: ('user', 'project number') User and project number required by servers using McIDAS accounting. default = ('idv','0')
        
    Returns:
        ADDE image matching the given criteria, if any.
    """
    
    user = accounting[0]
    proj = accounting[1]
    debug = str(debug).lower()
    
    if mag:
        mag = '&MAG=%s %s' % (mag[0], mag[1])
    else:
        mag = ''
        
    if unit:
        unit = '&UNIT=%s' % (unit)
    else:
        unit = ''
        
    if place is Places.CENTER:
        place = '&PLACE=CENTER'
    elif place is Places.ULEFT:
        place = '&PLACE=ULEFT'
    else:
        # raise ValueError()
        place = ''
        
    if coordinateSystem is CoordinateSystems.LATLON:
        coordSys = 'LATLON'
    elif coordinateSystem is CoordinateSystems.AREA or coordinateSystem is CoordinateSystems.IMAGE:
        coordSys = 'LINELE'
    else:
        raise ValueError()
        
    if location:
        location = '%s=%s %s' % (coordSys, location[0], location[1])
    else:
        location = ''
        
    if size:
        if size == 'ALL':
            size = '&SIZE=99999 99999'
        else:
            size = '&SIZE=%s %s' % (size[0], size[1])
    else:
        size = ''
        
    if time:
        time = '&TIME=%s %s I' % (time[0], time[1])
    else:
        time = ''
        
    if band:
        band = '&BAND=%s' % (str(band))
    else:
        band = '&BAND=ALL'
        
    tz = TimeZone.getTimeZone("Z")
    
    dateFormat = SimpleDateFormat()
    dateFormat.setTimeZone(tz)
    dateFormat.applyPattern('yyyyDDD')
    
    timeFormat = SimpleDateFormat();
    timeFormat.setTimeZone(tz)
    timeFormat.applyPattern('HH:mm:ss')
    
    addeUrlFormat = "adde://%(server)s/imagedirectory?&PORT=112&COMPRESS=gzip&USER=%(user)s&PROJ=%(proj)s&VERSION=1&DEBUG=%(debug)s&TRACE=0&GROUP=%(dataset)s&DESCRIPTOR=%(descriptor)s%(band)s%(location)s%(place)s%(size)s%(unit)s%(mag)s%(day)s%(time)s&POS=%(position)s"
    
    areaDirectories = []
    
    dates = _normalizeDates(day)
    if not dates:
        formatValues = {
            'server': server,
            'user': user,
            'proj': proj,
            'debug': debug,
            'dataset': dataset,
            'descriptor': descriptor,
            'band': band,
            'location': location,
            'place': place,
            'size': size,
            'unit': unit,
            'mag': mag,
            'day': '',
            'time': time,
            'position': position,
        }
        url = addeUrlFormat % formatValues
        print url
        adl = AreaDirectoryList(url)
        dirs = adl.getSortedDirs()
        for areaDirectory in dirs[0]:
            areaDirectories.append(areaDirectory)
    else:
        for date in dates:
            urlDate = '&DAY=%s' % (date)
            formatValues = {
                'server': server,
                'user': user,
                'proj': proj,
                'debug': debug,
                'dataset': dataset,
                'descriptor': descriptor,
                'band': band,
                'location': location,
                'place': place,
                'size': size,
                'unit': unit,
                'mag': mag,
                'day': urlDate,
                'time': time,
                'position': position,
            }
            url = addeUrlFormat % formatValues
            print url
            adl = AreaDirectoryList(url)
            dirs = adl.getSortedDirs()
            for areaDirectory in dirs[0]:
                areaDirectories.append(areaDirectory)
            
    temp = _AreaDirectoryList()
    for i, d in enumerate(areaDirectories):
        # print i, d.getBands(), d.getSensorType(), d.getCenterLatitude(), d.getCenterLongitude()
        print i, d
        nominalTime = d.getNominalTime()
        tempDay = dateFormat.format(nominalTime, StringBuffer(), FieldPosition(0)).toString()
        tempTime = timeFormat.format(nominalTime, StringBuffer(), FieldPosition(0)).toString()
        
        tempBand = list(d.getBands())
        if len(tempBand) == 1:
            tempBand = tempBand[0]
        else:
            # raise Exception
            pass
            
        dt = {
            'server': server,
            'dataset': dataset,
            'descriptor': descriptor,
            'band': tempBand,
            'debug': debug,
            'accounting': accounting,
            'day': tempDay,
            'time': (tempTime, tempTime),
            'coordinateSystem': CoordinateSystems.AREA,
            'location': (d.getLines(), d.getElements()),
            
            
        }
        temp.append(dt)
    return temp
Beispiel #52
0
def listADDEImages(localEntry=None,
    server=None, dataset=None, descriptor=None,
    accounting=DEFAULT_ACCOUNTING,
    location=None,
    coordinateSystem=CoordinateSystems.LATLON,
    place=None,
    mag=None,
    position=None,
    unit=None,
    day=None,
    time=None,
    debug=False,
    band=None,
    size=None,
    showUrls=True):
    """Creates a list of ADDE image directory entries.
    
    Args:
        localEntry: Local ADDE dataset; when given, its address/group/
            descriptor override the server/dataset/descriptor arguments.
        server: ADDE server.
        dataset: ADDE dataset group name.
        descriptor: ADDE dataset descriptor.
        day: Day range. ('begin date', 'end date')
        time: ('begin time', 'end time')
        position: Position number. Values may be integers, a (begin, end)
            tuple, or the string "ALL". (default=0)
        band: McIDAS band number; only images that have matching band number will be returned.
        accounting: ('user', 'project number') User and project number required by servers using McIDAS accounting. default = ('idv','0')
        showUrls: print each generated ADDE URL when True. (default=True)
        
    Returns:
        List of dictionaries describing the ADDE images matching the given
        criteria, if any.

    Raises:
        TypeError: when neither localEntry nor the server/dataset/descriptor
            triple is supplied.
        ValueError: on an unrecognized coordinateSystem or a malformed
            position range.
    """
    if localEntry:
        server = localEntry.getAddress()
        dataset = localEntry.getGroup()
        descriptor = localEntry.getDescriptor().upper()
    elif (server is None) or (dataset is None) or (descriptor is None):
        raise TypeError("must provide localEntry or server, dataset, and descriptor values.")
        
    # Local servers run on a dynamically-assigned port; remote ADDE uses 112.
    if server == "localhost" or server == "127.0.0.1":
        port = EntryStore.getLocalPort()
    else:
        port = "112"
        
    # server = '%s:%s' % (server, port)
    
    user = accounting[0]
    proj = accounting[1]
    debug = str(debug).lower()
    
    # Each optional criterion becomes either an empty string or a
    # '&KEY=value' fragment spliced into the ADDE URL below.
    if mag:
        mag = '&MAG=%s %s' % (mag[0], mag[1])
    else:
        mag = ''
        
    if unit:
        # Remember the caller-requested unit; used below to restrict the
        # per-image unit list.
        origUnit = unit
        unit = '&UNIT=%s' % (unit)
    else:
        # origUnit = None
        unit = ''
        
    if place is Places.CENTER:
        place = '&PLACE=CENTER'
    elif place is Places.ULEFT:
        place = '&PLACE=ULEFT'
    else:
        # raise ValueError()
        place = ''
        
    if coordinateSystem is CoordinateSystems.LATLON:
        coordSys = 'LATLON'
    elif coordinateSystem is CoordinateSystems.AREA or coordinateSystem is CoordinateSystems.IMAGE:
        coordSys = 'LINELE'
    else:
        raise ValueError()
        
    if location:
        location = '&%s=%s %s' % (coordSys, location[0], location[1])
    else:
        location = ''
        
    if size:
        if size == 'ALL':
            size = '&SIZE=99999 99999'
        else:
            size = '&SIZE=%s %s' % (size[0], size[1])
    else:
        size = ''
        
    if time:
        time = '&TIME=%s %s I' % (time[0], time[1])
    else:
        time = ''
        
    if band:
        band = '&BAND=%s' % (str(band))
    else:
        band = '&BAND=ALL'
        
    # POS accepts a single position, a (begin, end) range, or "ALL".
    if position is not None:
        if isinstance(position, int):
            position = '&POS=%s' % (position)
        elif isinstance(position, tuple):
            if len(position) != 2:
                raise ValueError('position range may only contain values for the beginning and end of a range.')
            position = '&POS=%s %s' % (str(position[0]), str(position[1]))
        else:
            position = '&POS=%s' % (str(position).upper())
    else:
        position = '&POS=0'
        
    # Formatters for the day (yyyyDDD) and time (HH:mm:ss) fields of each
    # returned directory entry; everything is interpreted as UTC ("Z").
    tz = TimeZone.getTimeZone('Z')
    
    dateFormat = SimpleDateFormat()
    dateFormat.setTimeZone(tz)
    dateFormat.applyPattern('yyyyDDD')
    
    timeFormat = SimpleDateFormat();
    timeFormat.setTimeZone(tz)
    timeFormat.applyPattern('HH:mm:ss')
    
    addeUrlFormat = "adde://%(server)s/imagedirectory?&PORT=%(port)s&COMPRESS=gzip&USER=%(user)s&PROJ=%(proj)s&VERSION=1&DEBUG=%(debug)s&TRACE=0&GROUP=%(dataset)s&DESCRIPTOR=%(descriptor)s%(band)s%(location)s%(place)s%(size)s%(unit)s%(mag)s%(day)s%(time)s%(position)s"
    
    urls = []
    areaDirectories = []
    
    # One query per normalized day; urls[i] tracks the source URL for
    # areaDirectories[i].
    dates = _normalizeDates(day)
    for date in dates:
        formatValues = {
            'server': server,
            'port': port,
            'user': user,
            'proj': proj,
            'debug': debug,
            'dataset': dataset,
            'descriptor': descriptor,
            'band': band,
            'location': location,
            'place': place,
            'size': size,
            'unit': unit,
            'mag': mag,
            'day': date,
            'time': time,
            'position': position,
        }
        url = addeUrlFormat % formatValues
        if showUrls:
            print url
        adl = AreaDirectoryList(url)
        results = adl.getSortedDirs()
        for imageTimes in results:
            for areaDirectory in imageTimes:
                urls.append(url)
                areaDirectories.append(areaDirectory)
                
    # Convert each AreaDirectory into a plain dictionary of its metadata.
    temp = _AreaDirectoryList()
    for i, d in enumerate(areaDirectories):
        nominalTime = d.getNominalTime()
        tempDay = str(dateFormat.format(nominalTime, StringBuffer(), FieldPosition(0)))
        tempTime = str(timeFormat.format(nominalTime, StringBuffer(), FieldPosition(0)))
        
        bandList = list(d.getBands())
        # tempUnitList = list(d.getCalInfo()[0])
        # unitList = tempUnitList[::2]
        # unitDescList = tempUnitList[1::2]
        # calInfo = dict(zip(unitList, unitDescList))
        # If the caller asked for a unit, report just that one; otherwise
        # take every calibration unit the directory advertises.
        if unit:
            unitList = [origUnit]
        else:
            unitList = map(str, list(d.getCalInfo()[0])[::2])
            
        for band in bandList:
            for calUnit in unitList:
                dt = {
                    'server': server,
                    'dataset': dataset,
                    'descriptor': descriptor,
                    'bandNumber': band,
                    'bandList': bandList,
                    'debug': debug,
                    'accounting': accounting,
                    'day': tempDay,
                    'time': tempTime,
                    'imageSize': (d.getLines(), d.getElements()),
                    'centerLocation': (d.getCenterLatitude(), d.getCenterLongitude()),
                    'resolution': (d.getCenterLatitudeResolution(), d.getCenterLongitudeResolution()),
                    'unitList': unitList,
                    'unitType': calUnit,
                    'bands': bandList,
                    'band-count': d.getNumberOfBands(),
                    'calinfo': map(str, list(d.getCalInfo()[0])),
                    'calibration-scale-factor': d.getCalibrationScaleFactor(),
                    'calibration-type': str(d.getCalibrationType()),
                    'calibration-unit-name': d.getCalibrationUnitName(),
                    'center-latitude': d.getCenterLatitude(),
                    'center-latitude-resolution': d.getCenterLatitudeResolution(),
                    'center-longitude': d.getCenterLongitude(),
                    'center-longitude-resolution': d.getCenterLongitudeResolution(),
                    'directory-block': list(d.getDirectoryBlock()),
                    'elements': d.getElements(),
                    'lines': d.getLines(),
                    'memo-field': str(d.getMemoField()),
                    'nominal-time': DateTime(d.getNominalTime()),
                    'sensor-id': d.getSensorID(),
                    'sensor-type': str(d.getSensorType()),
                    'source-type': str(d.getSourceType()),
                    'start-time': DateTime(d.getStartTime()),
                    'url': urls[i],
                }
            # NOTE(review): this append sits one level outside the calUnit
            # loop, so only the LAST calUnit's dict is appended per band --
            # looks like it was meant to be inside the inner loop; confirm
            # intended behavior before changing.
            temp.append(dt)
    return temp