Example no. 1
def init(self, cfg=None):
    """ This method will be called twice, once with a cfg parameter
        and once without """
    if cfg is None:
        HttpServlet.init(self)
    else:
        HttpServlet.init(self, cfg)
    # These are the things we only do once
    if WMS.timer is None:
        WMS.timer = Timer(1) # timer is a daemon
        # Load the Log4j configuration file
        file = self.getInitParameter("log4j-init-file")
        if file is not None:
            prefix = self.getServletContext().getRealPath("/")
            PropertyConfigurator.configure(prefix + file)
        WMS.logger.debug("Initialized logging system")
        # Initialize the cache of datasets
        DatasetCache.init()
        WMS.logger.debug("Initialized DatasetCache")
        # Start a timer that will clear the cache at regular intervals
        # so that NcML aggregations are reloaded
        # TODO: get the interval value from a config file
        intervalInMs = int(60 * 1000) # Runs once a minute
        WMS.cacheWiper = CacheWiper()
        WMS.timer.scheduleAtFixedRate(WMS.cacheWiper, intervalInMs, intervalInMs)
        WMS.logger.debug("Initialized NetcdfDatasetCache refresher")
        WMS.logger.debug("ncWMS Servlet initialized")
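The init() above resolves the log4j-init-file servlet init parameter against the webapp root and hands the resulting path to PropertyConfigurator.configure. A minimal standalone sketch of the same idea, with an illustrative properties path and logger name that are not taken from ncWMS:

# Minimal sketch: configure log4j 1.x from a properties file, then log.
# The file path and logger name below are assumptions for illustration only.
from org.apache.log4j import PropertyConfigurator, Logger

PropertyConfigurator.configure('conf/log4j.properties')  # hypothetical path
logger = Logger.getLogger('example')
logger.debug('Initialized logging system')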
Example no. 2
                updateTestDataTable += "</table>"
                testDataSection += updateTestDataTable
        else:
            updateTestDataTable = '<table border="1" cellSpacing="0" cellPadding="2">'
            rowId = 0
            for rowMatch in TABLE_ROW_PATTERN.finditer(testDataTable):
                if rowId == 0 or rowId in selectedRows:
                    updateTestDataTable += rowMatch.group(0)
                rowId += 1
            updateTestDataTable += "</table>"
            testDataSection = updateTestDataTable

    return testDataSection


# main
if __name__ == "__main__":
    if len(sys.argv) != 2:
        print "Usage: jython tools/TestProcedureDoc/generateTestCampaignDoc.py <test_campaign_file.xml>"
        print "Output: test_campaign_file-doc.html, in same directory as test_campaign_file.xml"
        print "Precondition: must be run from ATE root directory and ATE.jar must be in CLASSPATH"
        sys.exit(1)
    else:
        campaignFileName = sys.argv[1]
        print "Generation of aggregated test cases documentation for test campaign", campaignFileName
        print
        PropertyConfigurator.configure(StaticConfiguration.CONFIG_DIRECTORY + "/log4j.properties")
        aggregatedDocFileName = generateTestCasesDoc(campaignFileName)
        print
        print "Generated", aggregatedDocFileName, "successfully."
Example no. 3
# along with Grinder Analyzer; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA

import os

from org.jtmb.grinderAnalyzer import Configuration
from org.jtmb.grinderAnalyzer import MDC
from org.jtmb.velocityMerger import VelocityMerger
from org.apache.log4j import PropertyConfigurator
from java.util import Properties


def _getProperties_():
    # vPlugin.analyzer=org.jtmb.grinderAnalyzer.HTTPTestPlugin
    props = CONFIG.startupProperties
    props.put("vPlugin.analyzer", "org.jtmb.grinderAnalyzer.GAVelocityPlugin")
    return props

# If the user has provided the path to their own analyzer.properties file, use it.
# Otherwise, use the default provided by this project.
if MDC.get('user.analyzer.config') is not None:
	CONFIG_FILE=MDC.get('user.analyzer.config')
else:
	CONFIG_FILE=MDC.get('current.dir') + os.sep + "conf/analyzer.properties"

# Log4j must be configured before the VelocityMerger classes
# are instantiated.
PropertyConfigurator.configure(CONFIG_FILE)
CONFIG=Configuration(CONFIG_FILE)
VORPAL=VelocityMerger(_getProperties_())
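The comment above notes that log4j must be configured before the VelocityMerger classes are instantiated. PropertyConfigurator.configure also accepts a java.util.Properties object, so the same early-configuration step can be done without a file on disk; a minimal sketch, with illustrative appender settings that are not taken from Grinder Analyzer:

# Sketch: PropertyConfigurator also takes a java.util.Properties object.
# The root logger and console appender settings below are illustrative.
from java.util import Properties
from org.apache.log4j import PropertyConfigurator

logProps = Properties()
logProps.setProperty('log4j.rootLogger', 'INFO, console')
logProps.setProperty('log4j.appender.console', 'org.apache.log4j.ConsoleAppender')
logProps.setProperty('log4j.appender.console.layout', 'org.apache.log4j.SimpleLayout')
PropertyConfigurator.configure(logProps)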
Example no. 4
                testDataSection += updateTestDataTable
        else:
            updateTestDataTable = '<table border="1" cellSpacing="0" cellPadding="2">'
            rowId = 0
            for rowMatch in TABLE_ROW_PATTERN.finditer(testDataTable):
                if rowId == 0 or rowId in selectedRows:
                    updateTestDataTable += rowMatch.group(0)
                rowId += 1
            updateTestDataTable += '</table>'
            testDataSection = updateTestDataTable

    return testDataSection


# main
if __name__ == '__main__':
    if len(sys.argv) != 2:
        print 'Usage: jython tools/TestProcedureDoc/generateTestCampaignDoc.py <test_campaign_file.xml>'
        print 'Output: test_campaign_file-doc.html, in same directory as test_campaign_file.xml'
        print 'Precondition: must be run from ATE root directory and ATE.jar must be in CLASSPATH'
        sys.exit(1)
    else:
        campaignFileName = sys.argv[1]
        print 'Generation of aggregated test cases documentation for test campaign', campaignFileName
        print
        PropertyConfigurator.configure(StaticConfiguration.CONFIG_DIRECTORY +
                                       "/log4j.properties")
        aggregatedDocFileName = generateTestCasesDoc(campaignFileName)
        print
        print 'Generated', aggregatedDocFileName, 'successfully.'
Example no. 5
#
# Welcome
#

print 'Prudence %s for %s.' % (prudence_version, prudence_flavor)

#
# Logging
#

# log4j: This is our actual logging engine
try:
	from org.apache.log4j import PropertyConfigurator
	System.setProperty('prudence.logs', document.source.basePath.path + '/logs')
	PropertyConfigurator.configure(document.source.basePath.path + '/configuration/logging.conf')
except:
	raise

# JULI: Remove any pre-existing configuration
LogManager.getLogManager().reset()

# JULI: Bridge to SLF4J, which will use log4j as its engine 
try:
	from org.slf4j.bridge import SLF4JBridgeHandler
	SLF4JBridgeHandler.install()
except:
	raise

# Set Restlet to use SLF4J, which will use log4j as its engine
System.setProperty('org.restlet.engine.loggerFacadeClass', 'org.restlet.ext.slf4j.Slf4jLoggerFacade')
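Once the SLF4J bridge above is installed and log4j is configured, records emitted through java.util.logging should end up in log4j; a tiny sketch of that routing (logger name and message are illustrative):

# Sketch: with SLF4JBridgeHandler installed, JUL records flow to SLF4J,
# which in turn uses log4j as its engine. Names below are illustrative.
from java.util.logging import Logger as JulLogger

JulLogger.getLogger('example').info('routed through SLF4J to log4j')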
Example no. 6
def setup_main_logger():
    PropertyConfigurator.configure('log4j.properties')
    return Logger.getLogger('ConfigNOW')
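setup_main_logger() configures log4j from a log4j.properties file in the working directory and returns a named logger; a usage sketch, assuming that file exists and that Logger is imported from org.apache.log4j as the fragment implies:

# Usage sketch for the helper above (assumes log4j.properties is present
# in the current working directory).
main_logger = setup_main_logger()
main_logger.info('ConfigNOW logging initialised')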
Example no. 7
@author: Vladimir Kolesnik
'''
import os, sys, datetime

#Local libs
from src.main.jython.CMULogger import log4j

#Java libs
from java.lang import System, Thread
from org.apache.log4j import PropertyConfigurator

currentDate = datetime.datetime.now()
jobName = currentDate.strftime("%Y-%m-%d_%H-%M-%S")

classLoader = Thread.currentThread().getContextClassLoader()
PropertyConfigurator.configure(
    classLoader.getResource("src/main/resources/log4j.properties"))

runLogger = log4j("runLogger")

if len(sys.argv) > 1:
    pathToJSON = sys.argv[1]
    runLogger.infoMessage('main',
                          'Using %s as a configuration file...' % pathToJSON)
else:
    runLogger.infoMessage(
        'main', 'JAR requires at least 1 argument: path to JSON config file.')
    System.exit(1)

from src.main.jython.CMUConfigParser import CMUConfigParser

doCMUExport = CMUConfigParser(pathToJSON).returnObject("CMUOperations",
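The snippet above hands PropertyConfigurator.configure the java.net.URL returned by the context class loader's getResource. Stripped of the CMU specifics, the pattern looks roughly like this (the resource path is illustrative):

# Sketch: configure log4j from a resource on the classpath; configure()
# accepts the java.net.URL returned by getResource(). Path is illustrative.
from java.lang import Thread
from org.apache.log4j import PropertyConfigurator

url = Thread.currentThread().getContextClassLoader().getResource('log4j.properties')
if url is not None:
    PropertyConfigurator.configure(url)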
Example no. 8
        run_validators()
    else:
        main_logger.warn('Ignoring validation due to property validation=off')
    
try:
    customLog = rb_config.getProperty('confignow.commandlog')
    # @todo - consider moving off to separate function 
    if customLog is not None and len(customLog) > 0:
        if customLog == 'true':
            customLog = 'ConfigNOW_' + command_name + "_" + datetime.now().strftime("%Y%m%d_%H%M%S.log")
        main_logger.info('Setting custom log file for command: "' + customLog + '"')
        inStream = FileInputStream('log4j.properties')
        logProperties = Properties()
        logProperties.load(inStream)
        logProperties.setProperty('log4j.appender.filelog.File', customLog)
        PropertyConfigurator.configure(logProperties)

    log = setup_command_logger()
	

    main_logger.info('WLST support: ' + bool_str(wlst_support))

    command_file = find_command_file()
    if command_file:
        validate_plugins()
        run_command_plugins('pre')
        if is_jython(command_file):
            main_logger.info('Running command from jython file: ' + command_file)
            # todo: investigate changing this to use call_extension(), done like this to work with wlst
            execfile(command_file)
            if 'run' in dir():
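The block above re-points the 'filelog' appender to a per-command log file by loading log4j.properties into a java.util.Properties object, overriding log4j.appender.filelog.File, and re-running PropertyConfigurator. A self-contained sketch of the same pattern (the appender name is carried over from the snippet; the output file name is illustrative):

# Sketch: redirect a file appender at runtime by editing the loaded
# properties and re-running PropertyConfigurator. The 'filelog' appender
# name comes from the snippet above; the log file name is illustrative.
from java.io import FileInputStream
from java.util import Properties
from org.apache.log4j import PropertyConfigurator

inStream = FileInputStream('log4j.properties')
logProperties = Properties()
logProperties.load(inStream)
inStream.close()
logProperties.setProperty('log4j.appender.filelog.File', 'ConfigNOW_custom.log')
PropertyConfigurator.configure(logProperties)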
Example no. 9
def main_queue(config_file, afile, parliament_cache_info):
    """
    
    Entry Point for Queue invocation, processes one file at a time
    
    Serially processes XML/ZIP files from the message queue and 
    uploads to XML repository. Returns True/False to consumer
        True = Remove from queue
        False = Retain in queue for whatever reason
    
    @param config_file  configuration file
    @param afile        path to the serialized file
    @param parliament_cache_info object of type gen_utils.ParliamentCacheInfo
    @return Boolean 
    """
    print "[checkpoint] got file " + afile
    script_path = os.path.dirname(os.path.realpath(__file__))
    PropertyConfigurator.configure(script_path + File.separator + "log4j.properties")
    # comment above lines to run in emotional mode
    cfg = TransformerConfig(config_file)
    # create the output folders
    __setup_output_dirs__(cfg)
    wd_cfg = WebDavConfig(config_file)
    in_queue = False
    transformer = Transformer(cfg)
    input_map = {
        "parliament-info" : param_parl_info(cfg, parliament_cache_info.parl_info),
        "type-mappings" : param_type_mappings()         
        }
    print "INFOINFO : parliament-info ", input_map["parliament-info"]
    transformer.set_params(input_map)
    cfgs = {
        "main_config":cfg, 
        "transformer":transformer, 
        "webdav_config" : wd_cfg
    }
    pxf = ProcessXmlFilesWalker(cfgs)
    """
    Do Unzipping and Transformations
    """
    import fnmatch
    if os.path.isfile(afile):
        """
        Copy afile to temp_files_folder as working-copy(wc_afile)
        """
        temp_dir = cfg.get_temp_files_folder()
        wc_afile = temp_dir + os.path.basename(afile)
        shutil.copyfile(afile, wc_afile)
        print "[checkpoint] copied working-copy to temp folder"
        
        if fnmatch.fnmatch(afile, "*.zip") and os.path.isfile(wc_afile):
            print "[checkpoint] unzipping archive files"
            unzip = GenericDirWalkerUNZIP()
            temp_dir = cfg.get_temp_files_folder()
            unzip.extractor(afile, temp_dir)
            xml_basename = os.path.basename(wc_afile)
            xml_name = os.path.splitext(xml_basename)[0]
            new_afile = temp_dir + xml_name + ".xml"
            if os.path.isfile(new_afile):
                print "[checkpoint] found the unzipped XML file"
                # if there is an XML file inside then we have to process its atts
                # descending upon the extracted folder
                bunparse = ParseBungeniXML(new_afile)
                parse_ok = bunparse.doc_parse()
                if parse_ok == False:
                    # Parsing error return to queue
                    return False
                print "[checkpoint] unzipped file parsed"
                sba = SeekBindAttachmentsWalker(cfgs)
                image_node = bunparse.get_image_file()
                votes_node = bunparse.get_votes_file()
                if (image_node is not None):
                    print "[checkpoint] entered user/group doc path"
                    local_dir = os.path.dirname(afile)
                    print "[checkpoint] processing image/log_data file"
                    origi_name = sba.image_seek_rename(bunparse, temp_dir, True)
                elif (votes_node is not None):
                    print "[checkpoint] we have votes"
                    local_dir = os.path.dirname(afile)
                    print "[checkpoint] processing file with roll_call node"
                    origi_name = sba.votes_seek_rename(bunparse, temp_dir, True)
                else:
                    print "[checkpoint] entered attachments doc path"
                    sba.attachments_seek_rename(bunparse)

                print "[checkpoint] transforming the xml with zipped files"
                info_object = pxf.process_file(new_afile)
                # remove unzipped new_afile & wc_afile from temp_files_folder
                os.remove(new_afile)
                os.remove(wc_afile)
                if info_object[1] == True:
                    in_queue = True
                elif info_object[1] == False:
                    in_queue = False
                    return in_queue
                else:
                    print COLOR.WARNING, "No pipeline defined here ", COLOR.ENDC
                    in_queue = False
                    return in_queue
            else:
                print "[checkpoint] extracted " + new_afile + "] but not found :-J"
                in_queue = True
                return in_queue
        elif fnmatch.fnmatch(afile, "*.xml") and os.path.isfile(wc_afile):
            print "[checkpoint] transforming the xml"
            info_object = pxf.process_file(wc_afile)
            # remove wc_afile from temp_files_folder
            os.remove(wc_afile)
            if info_object[1] == True:
                print "[checkpoint] transformed the xml"
                in_queue = True
            elif info_object[1] == False:
                in_queue = False
                return in_queue
            elif info_object[1] == None:
                # mark parl-information document for removal from message-queue
                in_queue = True
                return in_queue
            else:
                print COLOR.WARNING, "No pipeline defined here ", COLOR.ENDC
                in_queue = False
                return in_queue
        else:
            # ignore any other file type, not interested with them currently...
            print "[" + afile + "] ignoring unprocessable filetype"
            in_queue = True
            return in_queue
    else:
        print "[" + afile + "] not found in filesystem"
        in_queue = True
        return in_queue

    """
    Do sync step
    """
    print "[checkpoint] entering sync"
    
    sxw = SyncXmlFilesWalker(cfgs)
    if not os.path.isdir(cfg.get_temp_files_folder()):
        mkdir_p(cfg.get_temp_files_folder())
    sxw.create_sync_file()
    # reaching here means the file was processed successfully
    sync_stat_obj = sxw.sync_file(info_object[0])
    sxw.close_sync_file()
    sxw = None

    print "[checkpoint] exiting sync"
    if sync_stat_obj[0] == True and sync_stat_obj[1] == None:
        # ignore upload -remove from queue
        in_queue = True
        return in_queue
    elif sync_stat_obj[0] == True:
        in_queue = True
    else:
        # eXist not responding?!
        # requeue and try later
        in_queue = False
        return in_queue

    """
    Do uploading to eXist
    """
    print COLOR.OKGREEN + "Uploading XML file(s) to eXist via WebDav..." + COLOR.ENDC
    print "[checkpoint] at", time.localtime(time.time())
    # first reset bungeni xmls folder
    webdav_reset_folder(wd_cfg, wd_cfg.get_bungeni_xml_folder())
    #webdaver = WebDavClient(wd_cfg.get_username(), wd_cfg.get_password())
    #webdaver.reset_remote_folder(wd_cfg.get_http_server_port()+wd_cfg.get_bungeni_xml_folder())
    #webdaver.shutdown()
    rsu = RepoSyncUploader({"main_config":cfg, "webdav_config" : wd_cfg})
    print "[checkpoint] uploading XML file"
    if in_queue == True:
        upload_stat = rsu.upload_file(info_object[0])
        rsu = None
    else:
        rsu = None
        in_queue = False
        return in_queue

    print COLOR.OKGREEN + "Uploading ATTACHMENT file(s) to eXist via WebDav..." + COLOR.ENDC
    
    webdav_reset_folder(wd_cfg, wd_cfg.get_bungeni_atts_folder())
    #webdaver = WebDavClient(wd_cfg.get_username(), wd_cfg.get_password())
    #webdaver.reset_remote_folder(wd_cfg.get_http_server_port()+wd_cfg.get_bungeni_atts_folder())
    #webdaver.shutdown()
    
    # upload attachments at this juncture
    pafw = ProcessedAttsFilesWalker({"main_config":cfg, "webdav_config" : wd_cfg})
    info_obj = pafw.process_atts(cfg.get_attachments_output_folder())
    
    if info_obj == True:
        pafw = None
        in_queue = True
    else:
        pafw = None
        return False
    print COLOR.OKGREEN + "Completed upload to eXist!" + COLOR.ENDC
    # do post-transform
    """
    !+FIX_THIS (ao, 8th Aug 2012) PostTransform degenerates and becomes an expensive process
    over time; temporarily disabled.
    """
    pt = PostTransform({"webdav_config": wd_cfg})
    print "Initiating PostTransform request on eXist-db for URI =>", sync_stat_obj[1]
    info_object = pt.update(str(sync_stat_obj[1]))
    
    if info_object == True:
        in_queue = True
    else:
        in_queue = False
    return in_queue
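The docstring of main_queue defines its contract with the consumer: True means the message can be acknowledged and removed from the queue, False means it should be retained and retried later. A hypothetical consumer-side sketch of that contract (the queue here is just a list of paths and the names are made up; only the return-value handling mirrors the function above):

# Hypothetical consumer loop illustrating the True/False contract of
# main_queue(); 'pending_files' and 'retained' are illustrative names.
def drain_queue(config_file, pending_files, parliament_cache_info):
    retained = []
    for afile in pending_files:
        done = main_queue(config_file, afile, parliament_cache_info)
        if done:
            print "[consumer] removing %s from queue" % afile
        else:
            print "[consumer] retaining %s for retry" % afile
            retained.append(afile)
    return retained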
Example no. 10
class Command:
    PropertyConfigurator.configure(
        "/opt/zimbra/conf/zmconfigd.log4j.properties")
    P = Provisioning.getInstance(Provisioning.CacheMode.OFF)

    @classmethod
    def resetProvisioning(cls, type):
        if type == "local":
            LC.reload()
        else:
            try:
                cls.P.flushCache(Provisioning.CacheEntryType.fromString(type),
                                 None)
            except:
                pass  # mailboxd is down, or not running here, either way we don't care.

    def __init__(self,
                 desc,
                 name,
                 cmd=None,
                 func=None,
                 args=None,
                 base="/opt/zimbra"):
        self.desc = desc
        self.name = name
        self.cmd = None
        if cmd:
            self.cmd = '/'.join((base, cmd))
        self.func = func
        self.args = args
        self.resetState()

    def __str__(self):
        if self.cmd:
            return "%s %s %s %s" % (self.name, self.cmd, self.status,
                                    self.error)
        else:
            return "%s %s(%s) %s %s" % (self.name, self.func, self.args,
                                        self.status, self.error)

    def resetState(self):
        self.status = None
        self.output = None
        self.error = None

    def execute(self, a=None):
        Log.logMsg(5, "Executing: %s" % (str(self), ))
        self.resetState()
        self.lastChecked = time.clock()

        output = error = ""
        t1 = time.clock()
        st = ""
        if self.cmd:
            cm = self.cmd
            st = cm
            (rc, output, error) = self.runCmd(a)
            if a:
                st = cm % a
        else:
            cm = self.func
            st = cm
            (rc, output, error) = cm(self.args, a)

        dt = time.clock() - t1
        self.status = rc
        if (rc < 0):
            self.error = "UNKNOWN: %s died with signal %s " % (self.name, rc)
            Log.logMsg(2, self.error)
            raise Exception, self.error
        else:
            if (not output):
                output = "UNKNOWN OUTPUT"
            if (not error):
                if rc == 0:
                    error = "OK"
                else:
                    error = "UNKNOWN ERROR"

            self.output = output
            self.error = error
            if rc:
                Log.logMsg(
                    4,
                    "Executed: %s returned %d (%d - %d) (%.2f sec): output='%s'"
                    % (st, rc, len(output), len(error), dt, output))
            else:
                Log.logMsg(
                    4, "Executed: %s returned %d (%d - %d) (%.2f sec)" %
                    (st, rc, len(output), len(error), dt))

        return rc

    def runCmd(self, a=None):
        if (a):
            cmd = self.cmd % a
        else:
            cmd = self.cmd
        args = shlex.split(cmd)
        Log.logMsg(4, "Executing %s" % (cmd, ))
        p = subprocess.Popen(args,
                             stdout=subprocess.PIPE,
                             stdin=subprocess.PIPE,
                             stderr=subprocess.PIPE)

        rc = output = error = None
        while rc is None:
            (output, error) = p.communicate()
            rc = p.wait()

        Log.logMsg(
            4,
            "runCmd: cmd='%s', args='%s', rc='%s', output='%s', error='%s'" %
            (cmd, args, rc, output, error))
        return (rc, output, error)

    def runFunc(self, a=None):
        if (a):
            return self.func(a)
        else:
            return self.func()
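Command wraps either an external executable (cmd, run through runCmd/subprocess) or a Python callable (func). A construction and usage sketch based only on the constructor and execute() shown above, assuming the module's Log helper is available; the command string is illustrative:

# Usage sketch based on the constructor and execute() shown above;
# the command string is illustrative only.
status_check = Command(desc="check services", name="status",
                       cmd="bin/zmcontrol status")
rc = status_check.execute()
print "status command returned", rc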
Example no. 11
if __name__ == "__main_test__":
    l = get_legislature_info("/opt/bungeni/bungeni_apps/config/glue.ini")
    p = get_parl_info("/opt/bungeni/bungeni_apps/config/glue.ini")
    print p


if __name__ == "__main__":
    """
    Five command line parameters are supported
    
      --config=config_file_name - specifies the config file name
      --po2xml - translates po files to xml for i18n in eXist-db
      --transform - runs a transform
      --synchronize - synchronizes with an xml db
      --upload - uploads to an xml db
    """
    script_path = os.path.dirname(os.path.realpath(__file__))
    if (len(sys.argv) > 1):
        #from org.apache.log4j import PropertyConfigurator
        PropertyConfigurator.configure(script_path + File.separator + "log4j.properties")
        # process input command line options
        options, remainder = getopt.getopt(sys.argv[1:], 
          "c:ptsu",
          ["config=", "po2xml","transform","synchronize","upload"]
        )
        # call main
        main(options)
    else:
        print COLOR.FAIL , " config.ini file must be an input parameter " , COLOR.ENDC
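getopt.getopt above returns (options, remainder), where options is a list of (flag, value) pairs for the five long options listed in the docstring. A small standalone sketch of what a typical invocation produces (the argument vector is illustrative):

# Sketch: what the getopt call above yields for an illustrative argv[1:].
import getopt

options, remainder = getopt.getopt(
    ['--config=config.ini', '--transform'],
    "c:ptsu",
    ["config=", "po2xml", "transform", "synchronize", "upload"])
print options    # -> [('--config', 'config.ini'), ('--transform', '')]
print remainder  # -> []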
Example no. 12
def setup_main_logger():
    PropertyConfigurator.configure('log4j.properties')
    return Logger.getLogger('ConfigNOW')
Example no. 13
        main_logger.warn('Ignoring validation due to property validation=off')

try:
    customLog = rb_config.getProperty('confignow.commandlog')
    # @todo - consider moving off to separate function
    if customLog is not None and len(customLog) > 0:
        if customLog == 'true':
            customLog = 'ConfigNOW_' + command_name + "_" + datetime.now(
            ).strftime("%Y%m%d_%H%M%S.log")
        main_logger.info('Setting custom log file for command: "' + customLog +
                         '"')
        inStream = FileInputStream('log4j.properties')
        logProperties = Properties()
        logProperties.load(inStream)
        logProperties.setProperty('log4j.appender.filelog.File', customLog)
        PropertyConfigurator.configure(logProperties)

    log = setup_command_logger()

    main_logger.info('WLST support: ' + bool_str(wlst_support))

    command_file = find_command_file()
    if command_file:
        validate_plugins()
        run_command_plugins('pre')
        if is_jython(command_file):
            main_logger.info('Running command from jython file: ' +
                             command_file)
            # todo: investigate changing this to use call_extension(), done like this to work with wlst
            execfile(command_file)
            if 'run' in dir():
Example no. 14
#
# Welcome
#

print 'Prudence %s for %s.' % (prudence_version, prudence_flavor)

#
# Logging
#

# log4j: This is our actual logging engine
try:
    from org.apache.log4j import PropertyConfigurator
    System.setProperty('prudence.logs',
                       document.source.basePath.path + '/logs')
    PropertyConfigurator.configure(document.source.basePath.path +
                                   '/configuration/logging.conf')
except:
    raise

# JULI: Remove any pre-existing configuration
LogManager.getLogManager().reset()

# JULI: Bridge to SLF4J, which will use log4j as its engine
try:
    from org.slf4j.bridge import SLF4JBridgeHandler
    SLF4JBridgeHandler.install()
except:
    raise

# Set Restlet to use SLF4J, which will use log4j as its engine
System.setProperty('org.restlet.engine.loggerFacadeClass',
Example no. 15
        safe_base = random.choice(self.all_bases.values())
        for base in self.all_bases.values():
            if plane.position().squareDistanceTo(base.position()) < safe_base.position().squareDistanceTo(plane.position()):
                safe_base = base
        return safe_base

    def save_planes(self):
        for plane in self.my_planes.values():
            print(plane)
            print(plane.fuelInTank() < 0.5 * plane.type.tankCapacity)
            if plane.fuelInTank() < 0.5 * plane.type.tankCapacity:
                safe_base = self.nearest_base(plane)
                print(safe_base)
                self.game.sendCommand(MoveCommand(plane,safe_base.position()))
                self.game.sendCommand(LandCommand(plane,safe_base))


if __name__ == "__main__":
    # Usage
    if len(sys.argv) != 3:
        print "Usage : ./AI.py ip port"
        sys.exit()

    from org.apache.log4j import PropertyConfigurator

    PropertyConfigurator.configure('log4j.properties')
    print 'Hello'
    ai = AviationAI(sys.argv[1], int(sys.argv[2]))
    print 'Toto'
    ai.think()