def main(options, args):
    """Compile skeletons for the given OBE id/mode (args[0], args[1]).

    Remaining args are command strings; each compiled result is printed.
    Returns 0 on success; exits(1) if no skeleton directory can be found.
    """
    # Top-level logger for this tool.
    logger = ssdlog.make_logger('sk_skcompile', options)

    skdir = options.skdir
    if not skdir:
        # Fall back to the standard location under $PYHOME.
        try:
            skdir = os.path.join(os.environ['PYHOME'], 'SOSS', 'SkPara', 'sk')
        except KeyError:
            print("Please set your PYHOME environment variable")
            print("or specify the --skdir option")
            sys.exit(1)

    sk_bank = sk_interp.skBank(skdir, logger=logger)
    compiler = SkCompiler(sk_bank, logger, append_mode=options.appendmode)

    obe_id = args[0].upper()
    obe_mode = args[1].upper()

    for cmd in args[2:]:
        print(compiler.compile_skeleton(obe_id, obe_mode, cmd))
        # After the first command, subsequent compiles run in append mode.
        compiler.append_mode = True

    return 0
def main(options, args):
    """Entry point for the name service tool.

    Raises nameServiceError if the local hostname cannot be determined.
    """
    # Create top level logger.
    logger = ssdlog.make_logger('names', options)

    try:
        myhost = ro.get_myhost()
    # BUGFIX: was py2 `except Exception, e:` — a SyntaxError under
    # Python 3; the rest of this file already uses `as e`.
    except Exception as e:
        raise nameServiceError("Can't get my own hostname: %s" % str(e))
def main(options, args, ui):
    """Start the g2disp front end for this host."""
    hostname = ro.get_myhost(short=False)
    # Log file / service name includes the (dot-escaped) hostname.
    basename = 'g2disp-%s' % (hostname.replace('.', '_'))
    logger = ssdlog.make_logger(basename, options)

    # Build the callback object and hand it to the UI driver.
    mobj = g2Disp(logger=logger, basename=basename)
    ui.ui(mobj)
def main(options, args):
    """Entry point: set up logging and the remote objects subsystem.

    Exits with status 1 if remote objects initialization fails.
    """
    # Create top level logger.
    logger = ssdlog.make_logger(options.svcname, options)

    # Initialize remote objects subsystem.
    try:
        ro.init()
    # BUGFIX: was py2 `except ro.remoteObjectError, e:` — a SyntaxError
    # under Python 3; the rest of this file already uses `as e`.
    except ro.remoteObjectError as e:
        logger.error("Error initializing remote objects subsystem: %s" % str(e))
        sys.exit(1)
def main(options, args):
    """Entry point for the manager service.

    Raises managerSvcError if the local hostname cannot be determined.
    """
    # Create top level logger.
    logger = ssdlog.make_logger("mgrsvc", options)

    # Get the names of the nodes in this cluster and remove our name. The
    # result is the list of hosts running name servers that we need to
    # synchronize with.
    try:
        myhost = ro.get_myhost(short=True)
    # BUGFIX: was py2 `except Exception, e:` — a SyntaxError under
    # Python 3; the rest of this file already uses `as e`.
    except Exception as e:
        raise managerSvcError("Can't get my own hostname: %s" % str(e))
def main(options, args):
    """Launch the ANA menu GUI for the host named in args[0].

    Logs to ~/.ana_logs/anamenu_<host>.log unless --logstderr is given.
    """
    hostname = args[0]

    # Per-user log directory; created on first run.
    logdir = os.path.join(paths.home, '.ana_logs')
    if not os.path.isdir(logdir):
        os.mkdir(logdir)
    if not options.logstderr:
        options.logfile = os.path.join(logdir,
                                       'anamenu_{}.log'.format(hostname))
    logger = ssdlog.make_logger(hostname, options)

    rohost = options.rohost

    def SigHandler(signum, frame):
        """Signal handler for all unexpected conditions."""
        logger.debug('signal handling. %s' % str(signum))

    # Set signal handler for signals. Add any other signals you want to
    # handle or terminate here.
    for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGHUP]:
        signal.signal(sig, SigHandler)

    app = Widgets.Application(logger=logger)
    root = app.make_window(title='ANA Menu')

    # BUGFIX: `ana` was referenced in the KeyboardInterrupt handler even
    # if AnaMenu(...) itself was interrupted before assignment, which
    # would raise NameError instead of shutting down cleanly.
    ana = None
    try:
        ana = AnaMenu(root, logger, rohost, hostname)
        ana.setup_ui()
        root.show()
        ana.set_geometry(options.geometry)
        app.mainloop()
    except KeyboardInterrupt:
        print('interrupted by keyboard....')
        logger.debug('Keyboard Interrupt...')
        if ana is not None:
            ana.quit('quit')
        app.quit()
    except Exception as e:
        logger.error('error: starting anamenu. %s' % e)
        raise
def main(options, args):
    """Run the FITS cleanup pass once, or repeatedly in daemon mode.

    In daemon mode the check runs every options.interval seconds until
    interrupted by Ctrl-C.
    """
    logger = ssdlog.make_logger('cleanup_fits', options)

    try:
        # Always run at least one pass; daemon mode keeps looping.
        check_usage(options, args, logger)
        while options.daemon:
            logger.debug("Sleeping for %3.2f secs" % options.interval)
            time.sleep(options.interval)
            check_usage(options, args, logger)
    except KeyboardInterrupt:
        logger.error("Caught keyboard interrupt!")

    logger.info("Cleanup terminating.")
def main(options, args):
    """Decode skeleton commands from --cmdstr, from files, or from stdin."""
    # Create top level logger.
    logger = ssdlog.make_logger('sk_decode', options)

    # Direct command-string mode: decode one command and exit.
    if options.cmdstr:
        sk_bank = skBank(options.skdir, logger=logger)
        decode_abscmd(options.cmdstr, options.envstr, sk_bank, logger)
        sys.exit(0)

    if not args:
        # No files given: interpret the program read from stdin.
        interp(options, '<stdin>', sys.stdin.read())
        return

    for filename in args:
        try:
            with open(filename, 'r') as in_f:
                buf = in_f.read()
        except IOError:
            print("Error opening file '%s'" % filename)
            sys.exit(1)
        interp(options, filename, buf)
def decode_abscmd(cmdstr, envstr, sk_bank, logger):
    """Decode a single abstract command string into a skeleton AST.

    Parameters:
      cmdstr  -- the abstract command to decode
      envstr  -- default parameter list ("KEY=VAL ..."), may be empty
      sk_bank -- skeleton bank providing the parsers
      logger  -- logger to use throughout

    Raises DecodeError on any parse/validation failure.  Returns 0.
    """
    import remoteObjects as ro
    ro.init()

    # Only used by the (commented-out) MockStatusResolver below.
    fakeStatus = {
        'STATL.TSC_F_SELECT': 'CS_IR',
    }

    # BUGFIX: the original re-created `logger` and `sk_bank` here from a
    # module-global `options`, shadowing and discarding the objects that
    # were passed in as parameters.  Use the parameters directly.

    variable_resolver = VariableResolver({})
    register_resolver = RegisterResolver()
    #status_resolver = MockStatusResolver(fakeStatus)
    status_resolver = StatusResolver(ro.remoteObjectProxy('status'))
    frame_source = MockFrameSource()
    # Renamed from `eval` to avoid shadowing the builtin.
    evaluator = Evaluator(variable_resolver, register_resolver,
                          status_resolver, frame_source, logger)

    # Parse environment string into an AST, raising parse error if
    # necessary
    envstr = envstr.strip()
    if len(envstr) > 0:
        res = sk_bank.param_parser.parse_params(envstr)
        if res[0]:
            raise DecodeError("Error parsing default parameters '%s': %s" % (
                envstr, res[2]))
        try:
            ast_global_params = res[1]
            assert ast_global_params.tag == 'param_list', ASTerr(
                ast_global_params)
        except AssertionError:
            raise DecodeError("Malformed default parameter list '%s': AST=%s" % (
                envstr, str(ast_global_params)))
    else:
        ast_global_params = None

    # Set global env, if any
    if ast_global_params:
        evaluator.set_params(ast_global_params)

    # Parse command string into an AST, raising parse error if
    # necessary
    cmdstr = cmdstr.strip()
    res = sk_bank.ope_parser.parse_opecmd(cmdstr)
    if res[0]:
        raise DecodeError("Error parsing command '%s': %s" % (cmdstr, res[2]))

    ast = res[1]
    assert ast.tag == 'cmdlist', ASTerr(ast)
    ast = ast.items[0]
    assert ast.tag == 'abscmd', ASTerr(ast)
    assert len(ast.items) == 2, ASTerr(ast)
    (ast_cmd_exp, ast_params) = ast.items

    # Make a *SUB ast and decode it
    ast = ASTNode('star_sub', ast_cmd_exp, ast_params)
    decoder = Decoder(evaluator, sk_bank, logger)
    newast = decoder.decode(ast, evaluator)

    print(newast.AST2str())
    # NOTE(review): `options` is not a parameter of this function — this
    # relies on a module-global `options`; confirm it exists at module level.
    if options.verbose:
        print(newast.printAST())

    return 0
def main(options, args):
    """Run the sound source or sound sink service.

    Starts a local monitor plus a remote object server, wires the
    callback object to the configured channels, and serves until
    interrupted; everything is shut down in reverse order on exit.
    """
    basename = options.svcname
    logger = ssdlog.make_logger(basename, options)

    # Initialize remote objects subsystem
    ro_hosts = (['localhost'] if options.rohosts is None
                else options.rohosts.split(','))
    try:
        ro.init(ro_hosts)
    except ro.remoteObjectError as e:
        logger.error("Error initializing remote objects subsystem: %s" % \
                     str(e))
        sys.exit(1)

    ev_quit = threading.Event()

    # Create a local pub sub instance
    monname = '%s.mon' % basename
    minimon = Monitor.Monitor(monname, logger,
                              numthreads=options.numthreads)
    threadPool = minimon.get_threadPool()

    queue = Queue.Queue()
    channels = options.channels.split(',')

    # Make our callback object/remote object: sink receives, source sends.
    if options.soundsink:
        mobj = SoundSink(monitor=minimon, logger=logger, queue=queue,
                         channels=channels, ev_quit=ev_quit,
                         dst=options.destination)
    else:
        mobj = SoundSource(monitor=minimon, logger=logger, queue=queue,
                           channels=channels, ev_quit=ev_quit,
                           compress=options.compress)

    svc = ro.remoteObjectServer(svcname=basename, obj=mobj, logger=logger,
                                port=options.port, ev_quit=ev_quit,
                                usethread=True, threadPool=threadPool)

    # Track what actually started so the finally-block only stops those.
    mon_server_started = False
    ro_server_started = False
    try:
        # Startup monitor threadpool
        minimon.start(wait=True)
        minimon.start_server(wait=True, port=options.monport)
        mon_server_started = True

        if options.soundsink:
            # Subscribe our callback functions to the local monitor
            minimon.subscribe_cb(mobj.anon_arr, channels)
            minimon.subscribe_remote(options.monitor, channels, {})
        else:
            # publish our channels to the specified monitor
            minimon.publish_to(options.monitor, channels, {})

        svc.ro_start(wait=True)
        ro_server_started = True

        try:
            mobj.server_loop()
        except KeyboardInterrupt:
            logger.error("Received keyboard interrupt!")
    finally:
        ev_quit.set()
        if mon_server_started:
            minimon.stop_server(wait=True)
        if ro_server_started:
            svc.ro_stop(wait=True)
        minimon.stop(wait=True)

    logger.info("%s exiting..." % basename)
# Command-line setup and test selection for the skeleton test driver.
optparser.add_option("--outdir", dest="outdir", default=None,
                     help="Specify output directory for results")
optparser.add_option("--test", dest="test", default='parse',
                     help="Specify scan|parse|decode")
ssdlog.addlogopts(optparser)

(options, args) = optparser.parse_args(sys.argv[1:])

# Create top level logger.
logger = ssdlog.make_logger('testlogger', options)

# Dispatch table: test name -> (banner, test class).
_test_table = {
    'scan': ("TEST IS SCANNING", TestSkLexer),
    'parse': ("TEST IS SCANNING/PARSING", TestSkParser),
    'decode': ("TEST IS SCANNING/PARSING/DECODING", TestSkDecoder),
}
try:
    banner, test_class = _test_table[options.test]
except KeyError:
    print("Unknown test: '%s'" % options.test)
    sys.exit(1)

print(banner)
test = test_class(logger=logger, skbase=options.skbase)

test.setUp()
def server(options, config):
    """Run the datasink server: receive files and store/unpack/move them.

    `config` is a dict from the configuration file; the 'key' directive
    is required.  Serves jobs until quit, then exits the process.
    """
    # Create top level logger.
    logger = ssdlog.make_logger('datasink', options)

    key = config.get('key', None)
    if key is None:
        # BUGFIX: was `self.logger.error(...)` inside a plain function,
        # which raised NameError instead of reporting the config problem.
        logger.error("Configuration file contains no 'key' directive")
        # NOTE(review): exits 0 even though this is an error — confirm
        # whether a non-zero status is wanted here.
        sys.exit(0)

    datadir = config.get('datadir', None)
    if datadir is None:
        datadir = os.getcwd()
        logger.warning("Storing files in {}".format(datadir))
        logger.info("To change this, add 'datadir' directive to config")
    else:
        logger.info("Storing files in {}".format(datadir))

    # if this is set, file will be moved here after transfer
    movedir = config.get('movedir', None)

    unpack_tarfiles = config.get('unpack_tarfiles', False)

    # if this is set, only instruments matching this instrument
    # will be transferred
    insfilter = config.get('insfilter', None)

    # this datasink's name
    name = key.split('-')[0]
    queue_names = [name]
    config['queue_names'] = queue_names

    # takes care of transfers into datadir
    xfer = transfer.Transfer(logger, datadir,
                             storeby=config.get('storeby', None),
                             md5check=config.get('md5check', False))

    def xfer_file(work_unit, fn_ack):
        """Worker action: transfer one file, then optionally unpack/move it."""
        job = work_unit['job']
        info, res = {}, {}

        if insfilter is not None:
            if job['insname'] not in insfilter:
                # ACK allows another job to be released to us
                fn_ack(True, '', {})
                return

        # get particulars of transfer method; fill defaults from config
        if 'host' not in job:
            job['host'] = config['transfer_host']
        if 'transfermethod' not in job:
            job['transfermethod'] = config['transfer_method']
        if 'username' not in job:
            job['username'] = config['transfer_username']
        job['direction'] = config.get('transfer_direction', 'from')

        xfer.transfer(job, info, res)
        # ACK allows another job to be released to us
        fn_ack(True, '', {})

        # After the transfer, dictionary `res` should contain a result code.
        if 'xfer_code' not in res:
            logger.error("No result code after transfer: %s" % (str(res)))
            return

        if res['xfer_code'] == 0:
            dst_path = res['dst_path']
            dst_dir, filename = os.path.split(dst_path)
            # BUGFIX: os.path.splitext() yields only the last suffix
            # ('.gz' for 'x.tar.gz'), so the '.tar.gz' comparison could
            # never match and gzipped tarballs were not unpacked.
            # Match against the full (lowercased) filename instead.
            is_tarball = filename.lower().endswith(('.tar', '.tgz', '.tar.gz'))
            try:
                if unpack_tarfiles and is_tarball:
                    if movedir is not None:
                        extract_dir = movedir
                    else:
                        extract_dir = dst_dir
                    # unpack tar file
                    with tarfile.open(dst_path, 'r') as tar_f:
                        tar_f.extractall(path=extract_dir)
                    # & remove tarball
                    os.remove(dst_path)
                else:
                    if movedir is not None:
                        move_path = os.path.join(movedir, filename)
                        shutil.move(res['dst_path'], move_path)
                logger.info("unpack/move completed")
            except Exception as e:
                logger.error(
                    "Error unpacking/moving file after transfer: {}".format(e),
                    exc_info=True)

    ev_quit = threading.Event()
    jobsink = worker.JobSink(logger, name)
    jobsink.config = config
    jobsink.add_action('transfer', xfer_file)
    jobsink.start_workers(ev_quit)

    jobsink.serve(ev_quit)

    logger.info("Exiting program.")
    sys.exit(0)