def main():
    """Configure logging, capture the build's javac invocations, and run the tools."""
    cli_args, build_cmd, capture_module = arg.parse_args()
    log.configure_logging(cli_args.output_directory, cli_args.log_to_stderr)
    log.log_header()
    # The capturer wraps the build command and records every javac call it makes.
    compile_cmds, jar_files = capture_module.gen_instance(build_cmd).capture()
    log.info('Results: %s', pprint.pformat(compile_cmds))
    tools.run(cli_args, compile_cmds, jar_files)
def run(manifest, config, logging_level=logging.INFO):
    """Configure logging, unzip the resource's new zip files, and record the result.

    manifest: mapping providing the resource 'name'.
    config: mapping providing 'working_dir', 'source_dir' and 'state_file' paths.
    logging_level: level passed to log.configure_logging (default INFO).
    """
    log.configure_logging(logging_level)
    resource = manifest['name']
    # Extract the newly-arrived zips for this resource, then persist the
    # outcome into the state file.
    extracted = unzip(resource, new_zip_files(), config['source_dir'], config['working_dir'])
    state.update(extracted, config['state_file'])
def main():
    """Capture a build run and optionally dispatch the results to a tool."""
    args, cmd, imported_module = arg.parse_args()
    log.configure_logging(args.output_directory, args.incremental, args.log_to_stderr)
    log_header()
    # Run the capturer produced for this build command.
    capture_results = imported_module.gen_instance(cmd).capture()
    logging.info('Results: %s', pprint.pformat(capture_results))
    # Tool name -> handler; an unknown --tool value raises KeyError here.
    dispatch = {
        'print': print_tool,
        'randoop': randoop_tool,
        'graphtool': graph_tool,
    }
    if args.tool:
        dispatch[args.tool](capture_results, args)
def main():
    """Capture javac commands and jars from a build, then invoke the chosen tool."""
    cli_args, build_cmd, capture_module = arg.parse_args()
    log.configure_logging(cli_args.output_directory, cli_args.incremental, cli_args.log_to_stderr)
    log_header()
    compile_commands, jar_files = capture_module.gen_instance(build_cmd).capture()
    logging.info('Results: %s', pprint.pformat(compile_commands))
    # Dispatch table mapping tool names to their handler functions.
    tool_handlers = {
        'soot': soot_tool,
        'checker': checker_tool,
        'inference': inference_tool,
        'print': print_tool,
        'randoop': randoop_tool,
        'graphtool': graph_tool,
    }
    if cli_args.tool:
        tool_handlers[cli_args.tool](compile_commands, jar_files, cli_args)
def main():
    """Capture a build's javac commands (via the cache), dump them as JSON, and run tools.

    Exits with status 1 if the build command itself failed.
    """
    args, cmd, capturer = arg.parse_args()
    log.configure_logging(args.output_directory, args.log_to_stderr)
    log.log_header()
    result = cache.retrieve(cmd, args, capturer)
    if not result:
        # Parenthesized call form so this line is valid under both Python 2
        # and Python 3 (the original used a Python-2-only print statement).
        # NOTE(review): this diagnostic goes to stdout; consider stderr.
        print("DLJC: Build command failed.")
        sys.exit(1)
    javac_commands, jars, stats = result
    log.info('Results: %s', pprint.pformat(javac_commands))
    # Persist each artifact of the capture next to the logs.
    output_json(os.path.join(args.output_directory, 'javac.json'), javac_commands)
    output_json(os.path.join(args.output_directory, 'jars.json'), jars)
    output_json(os.path.join(args.output_directory, 'stats.json'), stats)
    tools.run(args, javac_commands, jars)
def main():
    """Capture javac commands from a build and hand them to the selected tool."""
    args, cmd, imported_module = arg.parse_args()
    log.configure_logging(args.output_directory, args.incremental, args.log_to_stderr)
    log_header()
    javac_commands, jars = imported_module.gen_instance(cmd).capture()
    logging.info('Results: %s', pprint.pformat(javac_commands))
    # Nothing more to do unless a tool was requested on the command line.
    if not args.tool:
        return
    handlers = {
        'soot': soot_tool,
        'checker': checker_tool,
        'inference': inference_tool,
        'print': print_tool,
        'randoop': randoop_tool,
        'graphtool': graph_tool,
    }
    handlers[args.tool](javac_commands, jars, args)
# NOTE(review): the statements before the "Main" banner appear to be the tail
# of a function whose `def` line precedes this chunk — confirm against the
# full file before moving them.
logged_samples, normalized_logged_samples = matrix_processor.apply_options_to_general_feature_matrix(
    options, gen_samples, mean, std
)
logger.info("Saving samples and positions...")
numpy.savetxt(options.output_folder_path + "/normalized_samples.csv",
              normalized_logged_samples, delimiter=",")
# numpy.savetxt(options.output_folder_path + "/samples.csv", logged_samples, delimiter=",")

# visualize feature matrix using pca
plots.pca_and_plot(
    options.output_folder_path + "/normalized_samples_pca.png",
    normalized_logged_samples,
    show_plot=False
)


# #######################################################################################
# # Main
# #######################################################################################

# Module-level logger; assigned for real inside the __main__ block below.
logger = None

if __name__ == "__main__":
    # Build the run configuration from the command line, then set up logging
    # before any work is done.
    config = config.get_config(sys.argv)
    log.configure_logging(config["log_file_path"])
    logger = log.get_logger("feature_extraction")
    logger.debug("*** Starting ***")
    main(config)
    logger.debug("*** Finished ***")
import log
from config.config_parser import from_json
from job.rds_job_runner import run_rds_job
from job.s3_job_runner import run_s3_job
from job.sql_job_runner import run_sql_job

logger = log.configure_logging('root')


def run():
    """Run all configured ETL jobs in order: RDS, S3, then SQL jobs.

    Job definitions are read from configs/etl_config.json.
    """
    job_configs = from_json("configs/etl_config.json")

    logger.info("Running RDS to Redshift Jobs")
    for job_config in job_configs["rds_jobs"]:
        run_rds_job(job_config)

    logger.info("Running S3 to Redshift Jobs")
    for job_config in job_configs["s3_jobs"]:
        run_s3_job(job_config)

    logger.info("Running SQL Jobs")
    for job_config in job_configs["destination_sql_jobs"]:
        run_sql_job(job_config)

    logger.info(
        "################################### JOB COMPLETE ###################################"
    )


# The original called run() unconditionally at import time, which would fire
# the whole ETL pipeline whenever this module was imported; guard it so the
# jobs only run when the file is executed as a script.
if __name__ == "__main__":
    run()
import sys
import six

# NOTE(review): module-level alias — presumably intended to be assigned to a
# DB connection's text_factory (e.g. sqlite3); confirm it is actually used.
text_factory = str

import os
import time
import json

from files import Files
from models import File, Symbol, Definitions, Ref, init_db, create_session
from tags.base import find_tags

import logging
from log import configure_logging

configure_logging()
logger = logging.getLogger('fordeploy')


class Genxref(object):
    """Cross-reference generator for one project checkout (name-based reading)."""

    def __init__(self, project_name, project_path, start_path='/'):
        # Filesystem helper rooted at the project checkout.
        self.files = Files(project_path)
        # Per-file type information; empty here — presumably filled during
        # indexing (not visible in this chunk).
        self.filestype = {}
        self.project_name = project_name
        self.project_path = project_path
        # Counter plus threshold — looks like commit batching; confirm how
        # MAX_COMMIT is consumed elsewhere.
        self.commit_cnt = 0
        self.MAX_COMMIT = 1000
        # Sub-path of the project where processing starts.
        self.start_path = start_path
import gobject
import logging
import sys

from testers import Unwrapped, Wrapped
from factory import DBusFactory
from wrappers import ConfigDefaultsWrapper
import log

logger = None

# Well-known D-Bus bus name this service publishes under.
BUS_NAME = "org.ovirt.node"

# NOTE(review): DBusGMainLoop is not imported anywhere in this chunk — it
# presumably comes from dbus.mainloop.glib; confirm the import exists in the
# full file.
DBusGMainLoop(set_as_default=True)

if __name__ == "__main__":
    # NOTE(review): a conditional *expression* used purely for its side
    # effect; equivalent to log.configure_logging('--debug' in sys.argv).
    log.configure_logging(True) if '--debug' in sys.argv else \
        log.configure_logging(False)
    logger = logging.getLogger(__name__)

    if "-d" in sys.argv:
        DBusGMainLoop(set_as_default=True)

    loop = gobject.MainLoop()
    logger.info("listening ...")

    if "--test" in sys.argv:
        # Test mode: publish both the wrapped and unwrapped tester services.
        c = ConfigDefaultsWrapper(Wrapped)
        d = DBusFactory(BUS_NAME, c, instance=c.instance)
        d.service_factory()
        p = DBusFactory(BUS_NAME, Unwrapped)
        p.service_factory()
    else:
        try:
            from ovirt.node.config import defaults
        except ImportError as e:
            # (chunk ends here; the handler body continues beyond this view)
def main():
    """Wire up the video effect chain, OSC mappings and asyncio servers, then run.

    Effect enable flags and the blend operator come from the persisted config;
    the config is saved back on shutdown.
    """
    args = parse_args()
    # --verbose selects DEBUG, otherwise INFO (old-style and/or idiom).
    level = args.verbose and logging.DEBUG or logging.INFO
    config = load_config()
    video_buffer.operator = config.get('operator', 'add')

    # Register effects; each `enabled` default reflects whether the effect is
    # on when the config has no entry for it.
    video_buffer.add_effect('clear', fx.Clear, enabled=config.get('clear', True))
    video_buffer.add_effect('background', fx.BackGround, color=[0, 0, 0], enabled=config.get('background', False))
    video_buffer.add_effect('fade', fx.FadeBackGround, enabled=config.get('fade', False))
    # video_buffer.add_effect('midi_note_spark_1', fx.MidiNoteSpark, nrange=(300,420), enabled=config.get('midi_note_spark_1', False))
    # video_buffer.add_effect('midi_note_spark_2', fx.MidiNoteSpark, nrange=(0,150), enabled=config.get('midi_note_spark_2', False))
    # video_buffer.add_effect('midi_note_spark_3', fx.MidiNoteSpark, nrange=(150,300), enabled=config.get('midi_note_spark_3', False))
    # video_buffer.add_effect('strobe', fx.Strobe, enabled=config.get('strobe', False))
    video_buffer.add_effect('noise', fx.Noise, enabled=config.get('noise', False))
    video_buffer.add_effect('wave', fx.Wave, enabled=config.get('wave', False))
    video_buffer.add_effect('creamsicle', fx.Creamsicle, enabled=config.get('creamsicle', False))
    # note_ranges = ((260,320), (300,340), (340,380), (380,420), (0,40),(40,80),(80,120),(120,160),)
    # note_ranges = ((340, 380), (340,420), (340,380), (380,420), (0,40),(40,80),(80,120),(120,160),)
    # note_ranges = ((0,0), (300,420),)
    # for i, note_range in enumerate(note_ranges):
    #     name = 'midi_note'+str(i)
    #     video_buffer.add_effect(name, fx.MidiNote, nrange=note_range, enabled=config.get(name, False))
    video_buffer.add_effect('midi_note', fx.MidiNote, nrange=(.5,0.8), enabled=config.get('midi_note', False))
    # video_buffer.add_effect('pointX', fx.PointFx, nrange=(360,420), enabled=True)
    # video_buffer.add_effect('pointY', fx.PointFx, nrange=(360,420), enabled=True)
    # video_buffer.add_effect('pointZ', fx.PointFx, nrange=(360,420), enabled=True)
    video_buffer.add_effect('scanner', fx.LarsonScanner, enabled=config.get('scanner', True), scanners=[
        {'p1': .9,'p2': .98, 'width': .025, 'color': (.8,.1,0.05)},
        {'p1': .6,'p2': .65, 'width': .015, 'color': (.1,.8,.05)},
        {'p1': .3,'p2': .4, 'width': .015, 'color': (.04,.2,1)},
        {'p1': .02,'p2': .1, 'width': .015, 'color': (1,.8,0)},
    ])
    # video_buffer.add_effect('peak_meter', fx.PeakMeter, enabled=config.get('peak_meter', False), meters=(
    #     {'n1': 60, 'n2': 120, 'reverse': True, 'color': (1,.5,0)},
    #     {'n1': 120, 'n2': 160, 'reverse': False, 'color': (1,.5,0)},
    #     {'n1': 160, 'n2': 214, 'reverse': True, 'color': (1,.5,0)},
    #     {'n1': 214, 'n2': 260, 'reverse': False, 'color': (1,.5,0)},
    #     {'n1': 260, 'n2': 332, 'reverse': True, 'color': (1,.5,0)},
    #     {'n1': 332, 'n2': 380, 'reverse': False, 'color': (1,.5,0)},
    #     {'n1': 320, 'n2': 420, 'reverse': True, 'color': (1,.5,0)},
    #     {'n1': 0, 'n2': 100, 'reverse': False, 'color': (1,.5,0)},
    # ))
    # video_buffer.add_effect('convolution', fx.Convolution, enabled=config.get('convolution', False))
    video_buffer.add_effect('yb&rgp', fx.YellowBlackAndRedGreenPurple, enabled=config.get('yb&rgp', False))
    video_buffer.add_effect('matrix', fx.Matrix, enabled=config.get('matrix', False))
    video_buffer.add_effect('camera_rot', fx.CameraRot, enabled=config.get('camera_rot', False))

    def toggle_fx(addr, state):
        # Toggle the effect addressed by an OSC path like /fx/<col>/<row>;
        # the 7 suggests a 7-wide control grid — confirm against the OSC
        # controller layout.
        logging.info(f"toggling : {addr} : {state}")
        x,y = map(lambda x: int(x)-1, addr.split('/')[2:])
        i = x + 7*y
        fx = list(video_buffer.effects.values())[i]
        logging.info(fx)
        fx.toggle()

    # OSC address -> callback mappings served by OSCServer below.
    osc_maps = [
        ('/metronome', video_buffer.effects['scanner'].metronome),
        # ('/audio/envelope', video_buffer.effects['peak_meter'].envelope),
        ('/midi/note', video_buffer.effects['midi_note'].set),
        ('/q', video_buffer.effects['fade'].set), # /fade or /fader cause bugs in touchosc, awesome
        ('/color/r', functools.partial(video_buffer.effects['background'].set, color='r')),
        ('/color/g', functools.partial(video_buffer.effects['background'].set, color='g')),
        ('/color/b', functools.partial(video_buffer.effects['background'].set, color='b')),
        ('/brightness', video_buffer.set_brightness),
        ('/gamma', video_buffer.set_gamma),
        ('/operator/*', video_buffer.set_operator, True),
        ('/fx/*', toggle_fx),
        ('/*', osc_logger)
    ]
    console_coros = ()
    if args.no_console:
        # Headless mode: plain logging and the faster uvloop event loop.
        log.configure_logging(level=level)
        import uvloop
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
    else:
        # Console mode: echo OSC traffic to the console UI and route logging
        # through a queue handler so it doesn't fight the console output.
        osc_maps.append(('/*', console.osc_recv))
        log.configure_logging(level=level, queue_handler=True)
        console_coros = console.init(video_buffer)
    osc_server = OSCServer(
        maps = osc_maps,
        server_address = (args.ip, args.port)
    )
    serial_comms.init(video_buffer)
    # Everything that runs concurrently under the main loop.
    coros = (
        osc_server.serve(),
        idle(),
        *console_coros,
        websocket_server.serve(video_buffer)
        # input_audio_stream(functools.partial(callback_video_buffer, video_buffer=video_buffer))
    )
    try:
        loop = asyncio.get_event_loop()
        loop.set_exception_handler(exception_handler)
        loop.run_until_complete(main_loop(coros))
    except (KeyboardInterrupt, CancelledError, UserQuit) as e:
        print("have a great day")
    except MainLoopError as e:
        print("whooops")
    finally:
        # Always close the loop and persist the current effect configuration.
        loop.close()
        save_config()
os.open(redirect, os.O_RDWR) # standard input # Duplicate standard os.dup2(0, 1) # standard output (1) os.dup2(0, 2) # standard error (2) return os.getpid() def is_running(process): s = subprocess.Popen(["ps", "axw"], stdout=subprocess.PIPE) for x in s.stdout: if re.search(process, x): return True return False if __name__ == "__main__": import log log.configure_logging('process_test') COMMAND = """ifconfig {}""".format("-a -v") def show_address(line): if "inet " in line: addr = line.strip().split(' ')[1] print("### ADDRESS FOUND : {} ###".format(addr)) result = execute(COMMAND, line_function=show_address) #print(result)
masks = np.array(masks) # logical or to combine into one. if a pixel is greater than the threshold on any of the wavebands files then it # it will be allowed final_mask = masks[0] #np.logical_or(masks[0], masks[1]) #final_mask = np.logical_or(final_mask, masks[2]) if len(masks) > 1: print "error {0}".format(len(masks)) np.savetxt(output_folder_path + "/sigma{0}_positions_mask.txt".format( int(sigma_multiplier)), final_mask, delimiter=",", fmt="%i") # ####################################################################################### # # Main # ####################################################################################### logger = None if __name__ == "__main__": config = config.get_config(sys.argv) log.configure_logging(config['log_file_path']) logger = log.get_logger("object_detection_masks") logger.debug("*** Starting ***") main(config) logger.debug("*** Finished ***")
from cli import parse_args
from log import configure_logging
from log import get_logger

logger = get_logger(__name__)


def _main(args):
    """Parse the file named in *args* and print the rendered specification."""
    logger.info("Parsing file %s", args.file)
    spec_parser = args.parser(args.file)
    spec_parser.parse()
    rendered = args.output_format(spec_parser.specification)
    print(rendered)


if __name__ == "__main__":
    cli_args = parse_args()
    configure_logging(cli_args.verbose)
    logger.debug("Arguments: %s", vars(cli_args))
    _main(cli_args)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Fixed: the original shebang had a space after '#' ("# !/usr/bin/env"),
# which defeats the shebang mechanism entirely.
from flask_script import Manager, Server
from log import configure_logging
from app.app import create_app
from register import db

configure_logging()
app = create_app('default')
manager = Manager(app)
manager.add_command("runserver", Server(host="0.0.0.0", port=8000))


@app.before_request
def connect_db():
    """Open the database connection before each request, if it is closed."""
    if db.is_closed():
        db.connect()


@app.teardown_request
def close_db(exc):
    """Close the database connection after each request, if it is open."""
    if not db.is_closed():
        db.close()


@app.route('/')
@app.route('/index')
def index():
    """Health-check endpoint."""
    return 'It works!'
parser = argparse.ArgumentParser()
parser.add_argument("--train", help="path to training ratings file (to fit)")
parser.add_argument("--requests", help="path to the input requests (to predict)")
parser.add_argument('--silent', action='store_true', help="deactivate debug output")
parser.add_argument("outputfile", nargs=1, help="output file (where predictions are stored)")
args = parser.parse_args()

# Silent mode keeps the log at INFO; otherwise include debug output.
configure_logging('INFO' if args.silent else 'DEBUG')
logger = logging.getLogger('reco-cs')

# Fall back to the bundled sample files when paths were not supplied.
path_train_ = args.train or "data/training.csv"
logger.debug("using training ratings from {}".format(path_train_))
path_requests_ = args.requests or "data/requests.csv"
logger.debug("using requests from {}".format(path_requests_))
logger.debug("using output as {}".format(args.outputfile[0]))

# Reading REQUEST SET from input file into pandas
request_data = pd.read_csv(path_requests_)
# request_data = spark.read.csv(path_requests_, header=True)
def init(sysPath):
    """One-time process initialization: load the YOLO model under a file lock
    and bind the native decoder/socket libraries via ctypes.

    sysPath: comma-separated list of paths to prepend to sys.path.
    Sets the module-level globals declared below as a side effect.
    """
    global decLib, decodeProc, osLib, mInference, isInit, encLib, setRgbData, encodeProc, sockLib, sockInit, sockAppend, sockAppendMeta
    sys.path.append('/home/Streaming_Server_new/yolo2')
    sys.path.append('/home/Streaming_Server_new')
    logger = logging.getLogger(__name__)
    configure_logging('test.log', 'CRITICAL')
    # lock acquire for init yolo model on specific gpu
    for item in sysPath.split(","):
        sys.path.insert(0, item)
    file_path = sys.path[0] + '/foo.lock'
    lock_path = file_path + '.lock'
    lock = FileLock(lock_path, timeout=-1)
    # NOTE(review): the lock is entered once just to touch the lock file,
    # then re-acquired around the model import — confirm this double
    # acquire/release sequencing is intentional.
    with lock:
        open(file_path, 'a')
    lock.acquire()
    try:
        # Serialized so only one process initializes the model at a time.
        import yolov2 as infer
    finally:
        lock.release()
    # init C++ libraries for image processing
    prop = configparser.RawConfigParser()
    prop.read(sys.path[0]+'/SparkConfig.properties')
    brokerList = prop.get('KafkaConfig', 'metadata.broker.list').split(",")
    hAddr = prop.get('HbaseConfig', 'hbase.address')
    hTableName = prop.get('HbaseConfig', 'hbase.table')
    osLib = ctypes.cdll.LoadLibrary('libc.so.6')
    decLib = ctypes.cdll.LoadLibrary(prop.get('CdllConfig', 'decoder.path'))
    decLib.init()
    # decodeProc(buffer, length) -> decoded byte buffer.
    decodeProc = decLib.decodeProc
    decodeProc.argtypes = [ctypes.POINTER(ctypes.c_uint8), ctypes.c_int]
    decodeProc.restype = ctypes.POINTER(ctypes.c_uint8)
    sockLib = ctypes.cdll.LoadLibrary(
        prop.get('CdllConfig', 'sockclient.path'))
    # Declare argument signatures for the native socket client entry points.
    sockInit = sockLib.init
    sockInit.argtypes = [ctypes.c_char_p, ctypes.c_int]
    sockAppend = sockLib.append
    sockAppend.argtypes = [ctypes.POINTER(ctypes.c_uint8), ctypes.c_int]
    sockAppendMeta = sockLib.appendMeta
    sockAppendMeta.argtypes = [ctypes.POINTER(
        ctypes.c_char), ctypes.c_uint8, ctypes.c_int, ctypes.c_int, ctypes.c_ulonglong, ctypes.c_uint, ctypes.c_int, ctypes.POINTER(ctypes.c_char)]
    # NOTE(review): sockAppendInfo/sockSend/sockClose are local names here
    # (not in the `global` list above) — confirm they are not needed later.
    sockAppendInfo = sockLib.appendInfo
    sockAppendInfo.argtypes = [ctypes.POINTER(
        ctypes.c_char), ctypes.c_float, ctypes.c_float, ctypes.c_float, ctypes.c_float]
    sockSend = sockLib.sockSend
    sockClose = sockLib.sockClose
    # get yolo v2 instance
    mInference = infer.YOLOV2_XIILAB()
    isInit = True
from bottle import template, redirect, request
from oauth2client.contrib.multistore_file import get_credential_storage
from beaker.middleware import SessionMiddleware
from google_api import flow, scope
from apiclient.discovery import build
from database import Document, sql_alchemy_plugin, User, create_db
import httplib2
import bottle
import arrow
import logging
import funcy
import log as log_module

# Configure logging once at import, then bind the module logger.
log_module.configure_logging()
log = logging.getLogger("server")

# Bottle application with the SQLAlchemy plugin installed so route handlers
# receive a database session.
app = bottle.Bottle()
app.install(sql_alchemy_plugin)

# Configure the SessionMiddleware
# Sessions are stored in a local SQLite database via beaker.
session_opts = {
    'session.type': 'ext:database',
    'session.url': 'sqlite:///session.db',
}
app_with_session = SessionMiddleware(app, session_opts)

db = create_db()
# First step of the OAuth2 flow: the URL users are sent to for consent.
auth_uri = flow.step1_get_authorize_url()