def Trash(filename, called):
    """Move `filename` into the configured trash directory, preserving the
    sub-directory structure it had under its watch directory.

    `called` is the builtin argument string passed by the dispatcher (unused).
    Logs an error and returns if no trash dir is configured.
    """
    cfg = Config()
    logger = logging.getLogger('Media_Manager.builtin')
    tmp_path, tmp_filename = os.path.split(filename)
    watched_dirs = cfg.read_config('Watch_dirs')
    try:
        trash_dir = cfg.get_config_value('Trash', 'trash_dir')
    except Exception:  # was a bare except; without a trash dir we cannot proceed
        logger.error("Trash Dir not set please check config")
        return
    for key in watched_dirs:
        # Resolve the item's dir structure relative to its watch dir, if any.
        watch_dir = watched_dirs[key]
        if tmp_path[:len(watch_dir)] == watch_dir:
            trash_dest = trash_dir + tmp_path[len(watch_dir):] + '/'
            # Create the destination if needed, then move (was duplicated in
            # both branches of the original).
            if not os.path.exists(trash_dest):
                os.makedirs(trash_dest)
            shutil.move(filename, trash_dest)
            logger.info("%s moved to Trash" % filename)
def move(self, direction, sensors, config):
    # Drive the robot in `direction` ('forwards'/'backwards'/'left'/'right'/
    # 'stopped'), honouring the front ('F') and rear ('R') distance sensors.
    # While in the 'evading' state, caller-requested directions are ignored.
    # NOTE(review): sensor values appear to be distances in cm — confirm.
    if self.state == 'evading':
        direction = 'stopped'
    allowed = {'F': True, 'R': True}
    # Below 25: refuse to drive towards the obstacle.
    if sensors['F'] < 25:
        print 'Not allowed to drive forwards'
        allowed['F'] = False
    if sensors['R'] < 25:
        print 'Not allowed to drive backwards'
        allowed['R'] = False
    if sensors['F'] < 25 or sensors['R'] < 25:
        direction = 'stopped'
    # Below 10: actively back away from the obstacle.
    if sensors['F'] < 10:
        direction = 'backwards'
    if sensors['R'] < 10:
        direction = 'forwards'
    if sensors['F'] < 10 and sensors['R'] < 10:
        # Boxed in at both ends: turn a random way.
        direction = random.choice(['left', 'right'])
    if sensors['F'] < 10 or sensors['R'] < 10:
        self.state = 'evading'
        print 'Evading', direction
    if direction != self.state:
        # 'evading' is sticky: it is not overwritten by the new direction.
        if self.state != 'evading':
            self.state = direction
        Config.update_config(config, 'Body', 'direction', direction)
        # GPIO pins 35-38 select the motor directions.
        if direction == 'forwards' and allowed['F']:
            GPIO.output(35, False)
            GPIO.output(36, False)
            GPIO.output(37, True)
            GPIO.output(38, True)
        elif direction == 'backwards' and allowed['R']:
            GPIO.output(35, True)
            GPIO.output(36, True)
            GPIO.output(37, False)
            GPIO.output(38, False)
        elif direction == 'left':
            GPIO.output(35, False)
            GPIO.output(36, True)
            GPIO.output(37, True)
            GPIO.output(38, False)
        elif direction == 'right':
            GPIO.output(35, True)
            GPIO.output(36, False)
            GPIO.output(37, False)
            GPIO.output(38, True)
        else:
            # Requested direction not drivable: stop the motors.
            self.stop(config)
        print direction
def __create_server_cfg(self, server_id, dest, name, track, layout, carlist,
                        session, dyntrack, weathers, adminpw, max_clients,
                        description):
    """Render server_cfg.ini for one server instance from the base template.

    Returns True on success, False when the template cannot be copied or any
    requested setting is invalid.
    """
    dest += '/server_cfg.ini'
    if not self.__copy_base_config(dest):
        return False
    with open(dest, 'r') as file:
        content = file.read()
    config = Config()
    # Each server instance gets its own port pair, offset by its id.
    port = config.get_udptcp_port() + server_id
    http_port = config.get_http_port() + server_id
    content = content.replace('PLACEHOLDER_UDP_TCP_PORT', str(port))
    content = content.replace('PLACEHOLDER_HTTP_PORT', str(http_port))
    if not self.tracks.exists(track, layout):
        return False
    if layout == 'default':
        layout = ''  # 'default' means no explicit track configuration
    content = content.replace('PLACEHOLDER_TRACK_CONFIG', layout)
    content = content.replace('PLACEHOLDER_TRACK', track)
    max_clients = self.__get_max_clients(max_clients, carlist, track, layout)
    if max_clients < 1:
        return False
    content = content.replace('PLACEHOLDER_MAX_CLIENTS', str(max_clients))
    # Apply each sub-config section in order; abort on the first failure.
    for setter, arg in ((self.__set_carlist_of, carlist),
                        (self.__set_sessions_of, session),
                        (self.__set_dyntrack_of, dyntrack),
                        (self.__set_weather_of, weathers)):
        (content, success) = setter(content, arg)
        if not success:
            return False
    # Reuse the fetched prefix (was fetched twice) and use `is None`.
    name_prefix = config.get_name_prefix()
    if name_prefix is None:
        server_name = name
    else:
        server_name = name_prefix + ' - ' + name
    content = content.replace('PLACEHOLDER_NAME', server_name)
    content = content.replace('PLACEHOLDER_ADMIN_PW', adminpw)
    if description is not None:
        content = content.replace('PLACEHOLDER_DESC', description)
    with open(dest, 'w') as file:
        file.write(content)
    return True
def config_plugin(self, plugin, pluginname):
    """Apply every configured option for `pluginname` as an attribute on the
    given `plugin` instance."""
    plugin_cfg = Config().check_plugin_config(pluginname)
    for option in plugin_cfg:
        # The original stored setattr's None return in a variable; dropped.
        setattr(plugin, option, plugin_cfg[option])
    return ()
def main():
    """Render every invoice in the configured database to per-invoice files."""
    locale.setlocale(locale.LC_ALL, "")
    config = Config("./data/config.json")
    company = get_data_from_json("./data/company.json")
    config.set("payment_paypal", "PayPal address: " + company["paypal"])
    # The wire-transfer payment blurb is a pre-rendered HTML snippet.
    with codecs.open("template/bank-details.html", "r", encoding="utf-8") as html_doc:
        config.set("payment_wire", html_doc.read())
    template = InvoiceTemplate(config.get("html_template_path"), company)
    if template.is_invalid():
        return
    invoice_list = InvoiceList(config.get("database_path"))
    invoice_list.parse_csv(config)
    # Lazily build one HTML document per invoice in the database.
    htmls = map(
        template.get_invoices_as_html, invoice_list.db, itertools.repeat(config)
    )
    filenames = (invoice.get_filename() for invoice in invoice_list.db)
    db_file_path = config.get("database_path")
    assert os.path.isfile(db_file_path)
    # Output directory is <output_path>/<database file basename>/.
    db_file_name = os.path.splitext(os.path.basename(db_file_path))[0]
    dir_out = os.path.join(config.get("output_path"), db_file_name)
    set_up_output_directory(dir_out)
    save_html_files(dir_out, htmls, filenames)
    render(dir_out, as_png=False)
def stop(self, config):
    """Cut power to all motor pins and record the stopped state."""
    if self.state == 'stopped':
        return  # already stopped; nothing to do
    for pin in (35, 36, 37, 38):
        GPIO.output(pin, False)
    self.state = 'stopped'
    Config.update_config(config, 'Body', 'direction', 'stopped')
def __init__(self):
    """Load the user's database connection settings from the configuration."""
    params = Config().get_config_db_info()
    self.db_host = params['host']
    self.db_port = params['port']
    self.db_user = params['user']
    self.db_pass = params['pass']
def __init__(self, host):
    """Bind to the switch at `host`, loading the 'switch' config section.

    self.config is None when the section is missing from the config file.
    """
    self.host = host
    self.log = Log()
    cfg = Config.read()  # read the config file once (was read twice)
    if 'switch' in cfg:
        self.config = cfg['switch']
    else:
        self.log.critical(
            'Отсутствует запись с настройками Switch в файле конфига!')
        self.config = None
def start(config_path):
    """Execute one scheduled daily run against the given config file."""
    try:
        util.seperator("Starting Daily Run")
        Config(default_dir, config_path).update_libraries()
    except Exception as e:
        # Log the failure but still print the closing separator below.
        util.print_stacktrace()
        logger.critical(e)
    logger.info("")
    util.seperator("Finished Daily Run")
def __init__(self):
    """Resolve the registrar switch's IP address from the config and connect."""
    cfg = Config.read()
    if 'registrar' not in cfg:
        self.log.critical(
            'Отсутствует IP адрес регистратора в файле конфига!')
        # BUG FIX: the original set None here but then unconditionally read
        # cfg['registrar']['ip'], raising KeyError; keep None instead.
        self.REGISTRAR_IP_ADDRESS = None
    else:
        self.REGISTRAR_IP_ADDRESS = cfg['registrar']['ip']
    # The registrar is the switch the device being configured is attached to.
    self.registrar = Switch(host=self.REGISTRAR_IP_ADDRESS)
def main():
    """Build the Tornado application, bind the configured address, start the
    IO loop."""
    # settings of tornado application
    settings = {
        'root_path': root_path,
        'data_path': os.path.join(root_path, 'data'),
        'conf_path': os.path.join(root_path, 'data', 'config.ini'),
        'index_path': os.path.join(root_path, 'static', 'index.html'),
        'static_path': os.path.join(root_path, 'static'),
        'xsrf_cookies': True,
        'cookie_secret': make_cookie_secret(),
    }
    # Route table: most specific first; the trailing catch-all maps to 404.
    application = web.Application([
        (r'/xsrf', web.XsrfHandler),
        (r'/authstatus', web.AuthStatusHandler),
        (r'/login', web.LoginHandler),
        (r'/logout', web.LogoutHandler),
        (r'/query/(.+)', web.QueryHandler),
        (r'/utils/network/(.+?)(?:/(.+))?', web.UtilsNetworkHandler),
        (r'/utils/process/(.+?)(?:/(.+))?', web.UtilsProcessHandler),
        (r'/utils/time/(.+?)(?:/(.+))?', web.UtilsTimeHandler),
        (r'/utils/ssl/(.+?)(?:/(.+))?', web.UtilsSSLHandler),
        (r'/setting/(.+)', web.SettingHandler),
        (r'/operation/(.+)', web.OperationHandler),
        (r'/page/(.+)/(.+)', web.PageHandler),
        (r'/backend/(.+)', web.BackendHandler),
        (r'/sitepackage/(.+)', web.SitePackageHandler),
        (r'/client/(.+)', web.ClientHandler),
        (r'/((?:css|js|js.min|lib|partials|images|favicon\.ico|robots\.txt)(?:\/.*)?)',
         web.StaticFileHandler, {'path': settings['static_path']}),
        (r'/($)', web.StaticFileHandler, {'path': settings['index_path']}),
        (r'/file/(.+)', web.FileDownloadHandler, {'path': '/'}),
        (r'/fileupload', web.FileUploadHandler),
        (r'/version', web.VersionHandler),
        (r'/.*', web.ErrorHandler, {'status_code': 404}),
    ], **settings)
    # read configuration from config.ini
    cfg = Config(settings['conf_path'])
    server_ip = cfg.get('server', 'ip')
    server_port = cfg.get('server', 'port')
    server = tornado.httpserver.HTTPServer(application)
    server.listen(server_port, address=server_ip)
    # Record our PID so a service manager can find us.
    write_pid()
    tornado.ioloop.IOLoop.instance().start()
def __init__(self):
    """Load the 'redis' config section and expose its connection fields."""
    self.log = Log()
    cfg = Config.read()  # read the config file once (was read twice)
    if 'redis' in cfg:
        self.config = cfg['redis']
    else:
        self.log.critical('Отсутствует запись с настройками Redis в файле конфига!')
        self.config = None
    # NOTE(review): when the section is missing these lookups raise a
    # TypeError on None, exactly as the original code did.
    self.host = self.config['host']
    self.password = self.config['password']
    self.port = self.config['port']
    self.db = self.config['db']
def __init__(self):
    """Load the 'postgresql' config section and expose its connection fields."""
    self.log = Log()
    cfg = Config.read()  # read the config file once (was read twice)
    if 'postgresql' in cfg:
        self.config = cfg['postgresql']
    else:
        self.log.critical('Отсутствует запись с настройками БД в файле конфига!')
        self.config = None
    # NOTE(review): when the section is missing these lookups raise a
    # TypeError on None, exactly as the original code did.
    self.host = self.config['host']
    self.port = self.config['port']
    self.name = self.config['name']
    self.username = self.config['username']
    self.password = self.config['password']
def start(config_path, test, daily, collections):
    """Run the library update once, labelling the run by how it was invoked."""
    # Renamed from `type`, which shadowed the builtin.
    if daily:
        run_type = "Daily "
    elif test:
        run_type = "Test "
    elif collections:
        run_type = "Collections "
    else:
        run_type = ""
    util.seperator("Starting {}Run".format(run_type))
    try:
        config = Config(default_dir, config_path)
        config.update_libraries(test, collections)
    except Exception as e:
        util.print_stacktrace()
        logger.critical(e)
    logger.info("")
    util.seperator("Finished {}Run".format(run_type))
def __init__(self):
    """Load DB credentials, create every application database (if missing),
    and set up the table creator."""
    # get the user configuration of db info:
    user_db_param = Config().get_config_db_info()
    self.db_host = user_db_param['host']
    self.db_port = user_db_param['port']
    self.db_user = user_db_param['user']
    self.db_pass = user_db_param['pass']
    # Database names are kept on self because other methods reference them
    # individually; creation is done in one loop instead of ten copies.
    self.str_db_k_data = 'db_k_data'                                      # k_data
    self.str_db_history_data = 'db_history_data'                          # history data
    self.str_db_investment_data = 'db_investment_data'                    # investment
    self.str_db_stock_classification = 'db_stock_class'                   # stock classification
    self.str_db_consolidated_bs_year = 'db_consolidated_bs_year'          # balance sheet (year)
    self.str_db_consolidated_bs_season = 'db_consolidated_bs_season'      # balance sheet (season)
    self.str_db_consolidated_pl_year = 'db_consolidated_pl_year'          # profit & loss (year)
    self.str_db_consolidated_pl_season = 'db_consolidated_pl_season'      # profit & loss (season)
    self.str_db_consolidated_cash_year = 'db_consolidated_cash_year'      # cash flow (year)
    self.str_db_consolidated_cash_season = 'db_consolidated_cash_season'  # cash flow (season)
    for db_name in (self.str_db_k_data,
                    self.str_db_history_data,
                    self.str_db_investment_data,
                    self.str_db_stock_classification,
                    self.str_db_consolidated_bs_year,
                    self.str_db_consolidated_bs_season,
                    self.str_db_consolidated_pl_year,
                    self.str_db_consolidated_pl_season,
                    self.str_db_consolidated_cash_year,
                    self.str_db_consolidated_cash_season):
        self.create_db(db_name)
    # Helper that creates the tables inside the databases.
    self.table_creator = Table_creator()
def start(config_path, is_test, daily, collections_to_run, libraries_to_run, resume_from):
    """Kick off one run, labelled by its trigger, and report its duration."""
    if daily:
        label = "Daily "
    elif is_test:
        label = "Test "
    elif collections_to_run:
        label = "Collections "
    elif libraries_to_run:
        label = "Libraries "
    else:
        label = ""
    begun = datetime.now()
    util.separator(f"Starting {label}Run")
    try:
        cfg = Config(default_dir, config_path, libraries_to_run)
        cfg.update_libraries(is_test, collections_to_run, resume_from)
    except Exception as e:
        util.print_stacktrace()
        logger.critical(e)
    logger.info("")
    # Duration without the sub-second fraction.
    elapsed = str(datetime.now() - begun).split('.')[0]
    util.separator(f"Finished {label}Run\nRun Time: {elapsed}")
def __init__(self, **kwargs):
    """Configure gateway intents, prefix and ownership, then wire up the
    bot's runtime services."""
    self.config = Config()
    # Subscribe only to the gateway events we actually consume.
    intents = discord.Intents.none()
    for flag in ("guilds", "messages", "reactions", "message_content"):
        setattr(intents, flag, True)
    super().__init__(
        help_command=helpcommand.EmbedHelpCommand(),
        case_insensitive=True,
        command_prefix=self.config.prefix,
        owner_id=int(self.config.owner_id),
        intents=intents,
        description="Bot for following twitter users on discord",
        allowed_mentions=discord.AllowedMentions(everyone=False),
        **kwargs,
    )
    self.logger = log.get_logger("Siniara")
    self.start_time = time()
    self.twitter_blue = int("1da1f2", 16)  # twitter brand colour
    self.db = maria.MariaDB(self)
    self.cogs_to_load = [
        "cogs.commands",
        "cogs.errorhandler",
        "cogs.asyncstreamer",
        "cogs.twitter",
    ]
def __init__(self):
    """Locate the plugin directory from the config and enumerate its plugins."""
    self.logger = logging.getLogger('Media_Manager.plugin')
    self.logger.debug("Init Plugin Sub System")
    cfg = Config()
    try:
        self.path = cfg.get_config_value('Default', 'plugin_path')
    except Exception:  # was a bare except; any config failure means no plugins
        self.logger.warn("No Plugin path found")
        self.path = None
    if self.path is None:
        self.pluglist = None
    else:
        # Plugins are imported by name, so their directory must be importable.
        sys.path.append(self.path)
        self.pluglist = self.find_plugins(self.path)
def set_config_parameter(self, name, value):
    """Update one config parameter in the store and the cache, then emit it."""
    from modules.config import Config
    with self.app.app_context():
        changes = {"name": name, "value": value}
        # Keep the cached entry in sync with what we persist.
        cached_entry = self.cache.get("config")[name]
        cached_entry.__dict__.update(**changes)
        updated = Config.update(**changes)
        self.emit("UPDATE_CONFIG", updated)
def __init__(self, config_file='config.txt'):
    """Build model, device, optimizer, loss, metric and augmentation from the
    given configuration file."""
    cfg = Config(config_file)
    # Model
    self.model = self.init_model(
        cfg.CHANNELS_IN, cfg.CHANNELS_OUT, cfg.LOAD_MODEL,
        cfg.MODEL_LOAD_PATH, cfg.MODEL_NAME, cfg.MODEL_SUFFIX,
        cfg.USE_DECONV_LAYERS)
    # Device
    self.device = self.init_device(cfg.DEVICE)
    # Optimizer
    self.optimizer = self.init_optimizer(cfg.ADAM_LR, self.model.parameters())
    # Loss function
    self.criterion = self.init_criterion(
        cfg.CLASS_WEIGHT_0, cfg.CLASS_WEIGHT_1, cfg.LOSS_MODIFIER, self.device)
    # Metric function
    self.metric = self.init_metric()
    # Augmentation function
    self.aug = self.init_augmentation(cfg)
    self.CONFIGURATION = cfg
def getContext(args, withCreds=True, withConfig=True, failOnDiscoveryError=True):
    """Get context: a namespace bundling args, constants, and (optionally)
    credentials and configuration."""
    setupLogging(args)
    ctx = types.SimpleNamespace()
    ctx.ar = args          # command-line arguments
    ctx.cr = None          # credentials, filled below if requested
    ctx.cf = None          # configuration, filled below if requested
    ctx.cs = getConstants()
    if withCreds:
        ctx.creds = Creds(ctx)
        ctx.cr = ctx.creds.get()
        # Configuration discovery requires credentials, hence it nests here.
        if withConfig:
            ctx.config = Config(ctx, failOnDiscoveryError=failOnDiscoveryError)
            ctx.cf = ctx.config.getFull()
    elif withConfig:
        fail("Can't get configuration without credentials")
    if args.dump_context:
        # Debug aid: dump the entire context as YAML and exit immediately.
        _printHeader('COMMAND LINE ARGUMENTS')
        print(yaml.dump({'ar': nestedNsToObj(ctx.ar)}))
        _printHeader('CREDENTIALS')
        print(yaml.dump({'cr': nestedNsToObj(ctx.cr)}))
        _printHeader('CONFIGURATION')
        print(yaml.dump({'cf': nestedNsToObj(ctx.cf)}))
        sys.exit(0)
    return ctx
def runWithConfig(self, cnf):
    """Run the engine until its tick budget is spent; return the detector's
    final pressure reading."""
    engine = Engine(Config(cnf))
    # ticks_left is decremented here rather than assumed constant, in case
    # tick() itself adjusts the remaining budget.
    while engine.ticks_left > 0:
        engine.tick()
        engine.ticks_left -= 1
    return engine.detector.getPressure()
def __init__(self, conf_fname):
    """Load LDA model configuration, data set, and vectorizer from a JSON
    config file.

    Raises ValueError when data_type or vectorizer_type is not recognised.
    """
    self._log = logging.getLogger(self.__class__.__name__)
    self._categories = None
    self._ldamodel = None
    self._token_set = None
    self._conf = Config.from_json(conf_fname, LDA_model.CONF_KEYS)
    if self._conf.data_type not in _CONF_TYPES:
        raise ValueError(
            "configuration.data_type: '{}' not in '{}'".format(
                self._conf.data_type, _CONF_TYPES.keys()))
    self._data_set = _CONF_TYPES[self._conf.data_type](self._conf.data_src)
    if self._conf.vectorizer_type not in VECTORIZER_TYPES:
        # BUG FIX: referenced self.conf (missing underscore), which raised
        # AttributeError instead of the intended ValueError.
        raise ValueError(
            "configuration.vectorizer_type: {} not in '{}'".format(
                self._conf.vectorizer_type, VECTORIZER_TYPES))
    # Tokens: words of 3+ letters/hyphens. Raw string avoids the invalid
    # escape-sequence warning; the pattern value is identical.
    token_pattern = r'[a-zA-Z\-][a-zA-Z\-]{2,}'
    if self._conf.vectorizer_type == 'count':
        self._vectorizer = CountVectorizer(
            min_df=2, max_df=1.0, stop_words='english', lowercase=True,
            token_pattern=token_pattern)
    else:
        # 'tfidf' enables idf weighting; any other accepted type disables it.
        use_idf = self._conf.vectorizer_type == 'tfidf'
        self._vectorizer = TfidfVectorizer(
            min_df=2, max_df=1.0, stop_words='english', use_idf=use_idf,
            lowercase=True, token_pattern=token_pattern)
    self._max_iter = int(self._conf.train_iter)
def index():
    """ Posts """
    conf = Config()
    per_page = conf.PER_PAGE
    page, per_page, offset = get_page_args()
    # First query: only used to obtain the total row count for pagination.
    with MySQL() as c:
        c.execute("SELECT * FROM posts")
        c.fetchall()
        num_rows = c.rowcount
    pagination = Pagination(page=page, per_page=per_page, total=num_rows,
                            bs_version=4, alignment="center")
    # Second query: fetch the current page of posts, newest first.
    # NOTE(review): offset/per_page are interpolated into the SQL string;
    # they come from get_page_args(), but a parameterized query would be
    # safer — confirm both are always integers.
    with MySQL() as c:
        c.execute(
            f"SELECT post_id, post_title, post_content, DATE_FORMAT(post_date, '%d, %M, %Y at %h:%i %p') as post_date, author_id FROM posts ORDER BY post_id DESC LIMIT {offset}, {per_page}"
        )
        result_post = c.fetchall()
    """ Account """
    # if user has ticked remember_me before, we set its session login to true and stop executing the code below.
    if (session.get("remember_me")):
        setUserLoggedIn(True)
        return render_template("index.html", active='home',
                               pagination=pagination, news=result_post,
                               admins=retrieveAdmins())
    # if the method we get is not post, we send the user back to index.html
    if (request.method == "POST"):
        # set username variable to form input.
        # set password variable to password input.
        username = request.form.get("username")
        password = request.form.get("password")
        ret = loginUser(username, password)
        # loginUser: 0 = unknown user, 1 = bad password, 2 = success.
        if (ret == 0):
            return jsonify(success=False, error_msg="Invalid username, please try again.")
        elif (ret == 1):
            return jsonify(success=False, error_msg="Wrong password, please try again.")
        if (ret == 2):
            flash("You have successfully logged in", "success")
            return jsonify(success=True)
    return render_template("index.html", active='home', pagination=pagination,
                           news=result_post, admins=retrieveAdmins())
def run_tests(default_dir):
    """Exercise every external-service mapping against the given config dir."""
    try:
        config = Config(default_dir)
        logger.info("")
        util.separator("Mapping Tests")
        for library in config.libraries:
            config.map_guids(library)
        # One test battery per supported metadata service, in the same order.
        for battery in (anidb_tests, imdb_tests, mal_tests, tautulli_tests,
                        tmdb_tests, trakt_tests, tvdb_tests):
            battery(config)
        util.separator("Finished All Plex Meta Manager Tests")
    except KeyboardInterrupt:
        util.separator("Canceled Plex Meta Manager Tests")
def timezone(self, inifile, timezone=None):
    """Get or set system timezone.

    Pass None to parameter config (as default) to get timezone,
    or pass timezone full name like 'Asia/Shanghai' to set timezone.
    """
    tzpath = '/etc/localtime'
    zonepath = '/usr/share/zoneinfo'
    config = Config(inifile)
    if not config.has_section('time'):
        config.add_section('time')
    if timezone == None:
        # --- get mode ---
        # firstly read from config file
        timezone = ''
        if config.has_option('time', 'timezone'):
            timezone = config.get('time', 'timezone')
        if timezone:
            return timezone
        # or else check the system config file
        dist = ServerInfo.dist()
        if dist['name'] in ('centos', 'redhat'):
            clockinfo = raw_loadconfig('/etc/sysconfig/clock')
            if clockinfo and 'ZONE' in clockinfo:
                timezone = clockinfo['ZONE']
                return timezone
        else:
            pass
        # or else find the zoneinfo file whose content matches /etc/localtime
        with open(tzpath) as f:
            tzdata = f.read()
        regions = ServerSet.timezone_regions()
        for region in regions:
            regionpath = os.path.join(zonepath, region)
            for zonefile in os.listdir(regionpath):
                if not os.path.isfile(os.path.join(regionpath, zonefile)):
                    continue
                with open(os.path.join(regionpath, zonefile)) as f:
                    if f.read() == tzdata:
                        # got it!
                        return '%s/%s' % (region, zonefile)
    else:
        # --- set mode --- check and set the timezone
        timezonefile = os.path.join(zonepath, timezone)
        if not os.path.exists(timezonefile):
            return False
        try:
            shutil.copyfile(timezonefile, tzpath)
        except:
            return False
        # write timezone setting to config file
        return config.set('time', 'timezone', timezone)
def predict_and_eval_score(configfile, data_path='data/valid/', seismic_name='seismic.npy', val_mask='horizons_1.npy'):
    """Predict on the validation volume and return the IoU against the mask.

    BUG FIX: the ground-truth mask is now loaded from `data_path` — the
    original hard-coded 'data/valid/' and silently ignored the parameter.
    The default value is unchanged, so existing callers see the same paths.
    """
    TrainRunner(configfile).predict(data_path, seismic_name, suffix='valid')
    config = Config(configfile)
    mask = np.load(data_path + val_mask).astype(np.uint8)
    predicted_mask = np.load('output/predictions/' + config.MODEL_NAME + '-valid.npy')
    # Binarise at 0.5 before computing the intersection-over-union.
    return iou_score(mask, predicted_mask, 0.5)
def read_logs(cfn):
    """Read the training loss and metric histories for the configured model.

    Returns (loss, metrics) as lists of floats.
    """
    cf = Config(cfn)
    metricsfile = cf.LOGDIR + cf.MODEL_NAME + '-metrics-train.txt'
    lossfile = cf.LOGDIR + cf.MODEL_NAME + '-loss-train.txt'
    # BUG FIX: the original called f.read() twice; the second call (on the
    # exhausted stream) returned '', making the slice bound len('') - 1 = -1,
    # which drops a real value whenever the file has no trailing space.
    # The intent was simply to discard empty fields from the split.
    with open(metricsfile) as f:
        metrics = [float(v) for v in f.read().split(' ') if v]
    with open(lossfile) as f:
        loss = [float(v) for v in f.read().split(' ') if v]
    return loss, metrics
def __init__(self, **kwargs):
    """Load the bot configuration and initialise its runtime services."""
    self.config = Config("config.toml")
    super().__init__(
        command_prefix=self.config.prefix,
        owner_id=self.config.owner_id,
        **kwargs,
    )
    self.logger = logger
    self.start_time = time()
    # Global rate limit: 10 commands per 60 seconds, tracked per member.
    rate, per = 10.0, 60.0
    self.global_cd = commands.CooldownMapping.from_cooldown(
        rate, per, commands.BucketType.member
    )
    self.db = maria.MariaDB(self)
def __init__(self):
    """Initialise the bot with a dated log file and default runtime flags."""
    super(CXBot, self).__init__()
    self.c = Config()
    self.max_messages = 20000
    self.ready = False
    # One log file per calendar day.
    log_name = 'logs/%s.log' % datetime.now().date().isoformat()
    self.logger = Logger(log_name)
    self.busy = False
def Move(filename, called):
    """Move `filename` to the destination directory given in `called`,
    preserving the sub-directory structure it had under its watch dir.

    Logs a warning and returns if no destination was supplied.
    """
    logger = logging.getLogger('Media_Manager.builtin')
    cfg = Config()
    if called == '':
        logger.warn("Builtin_move no destination specified")
        return
    src_path, src_filename = os.path.split(filename)
    watched_dirs = cfg.read_config('Watch_dirs')
    for key in watched_dirs:
        watch_dir = watched_dirs[key]
        if src_path[:len(watch_dir)] == watch_dir:
            dest_path = called + src_path[len(watch_dir):]
            # Create the destination if needed, then move (was duplicated in
            # both branches of the original).
            if not os.path.exists(dest_path):
                os.makedirs(dest_path)
            shutil.move(filename, dest_path)
            logger.info("%s moved to %s" % (filename, dest_path))
def __init__(self):
    """Load the 'rabbit' config section and open a blocking RabbitMQ
    connection."""
    self.log = Log()
    cfg = Config.read()  # read the config file once (was read twice)
    if 'rabbit' in cfg:
        self.config = cfg['rabbit']
    else:
        self.log.critical(
            'Отсутствует запись с настройками RabbitMQ в файле конфига!')
        self.config = None
    # NOTE(review): when the section is missing these lookups raise a
    # TypeError on None, exactly as the original code did.
    self.host = self.config['host']
    self.port = self.config['port']
    self.username = self.config['username']
    self.password = self.config['password']
    self.exchange = self.config['exchange']
    self.exchange_type = self.config['exchange_type']
    cred = pika.PlainCredentials(self.username, self.password)
    params = pika.ConnectionParameters(host=self.host, port=self.port,
                                       virtual_host='/', credentials=cred)
    self.connection = pika.BlockingConnection(params)
def __enter__(self):
    """Open a MySQL connection and return a dict cursor.

    BUG FIX: the original swallowed connection errors and then crashed in a
    `finally` block dereferencing the never-set self.conn; the error is now
    logged and re-raised so the caller sees the real cause.
    """
    conf = Config()
    try:
        self.conn = pymysql.connect(host=conf.MYSQL_HOST,
                                    user=conf.MYSQL_USERNAME,
                                    passwd=conf.MYSQL_PASSWORD,
                                    db=conf.MYSQL_DATABASE,
                                    autocommit=True,
                                    cursorclass=pymysql.cursors.DictCursor)
    except pymysql.ProgrammingError as err:
        print(f"ERROR: Caught an Error: {err}")
        raise
    self.cursor = self.conn.cursor()
    return self.cursor
def configure(self, args):
    """Load the config file named on the command line and decide where to log.

    Exits the process when no config file was given.
    """
    if args['config'] is None:
        print("Please point me at a config file")
        sys.exit()
    Config.config_file = args['config']
    self.cfg = Config()
    # Log file: the command-line option wins; otherwise fall back to the
    # value from the config file.
    if args['log file'] is None:
        self.log_filename = self.cfg.get_config_value('logging', 'logfile')
    else:
        self.log_filename = args['log file']
def process_newitem(self, filename):
    """Dispatch a newly-seen file to every plugin/builtin configured for its
    extension; files with no configured extension are ignored."""
    self.cfg = Config()
    # BUG FIX: `ext` was unbound in the except path when get_ext itself
    # raised, turning the log line into a NameError.
    ext = None
    try:
        ext = Tools.get_ext(filename)
        self.logger.debug("Found %s" % ext)
        section = self.cfg.read_config(ext)
    except Exception:  # was a bare except; unconfigured extensions are skipped
        self.logger.info("No Config Found for %s" % ext)
        return
    for key in section:
        action = section[key]
        # Values look like 'plugin_<name> [args]' or 'builtin_<name> [args]'.
        if action.startswith('plugin'):
            self.launch_plugin(action, filename)
        elif action.startswith('builtin'):
            self.launch_builtin(action, filename)
def add_config_parameter(self, name, value, type, description, options=None):
    """Persist a new config parameter and mirror it into the cache if the
    cache is warm."""
    from modules.config import Config
    with self.app.app_context():
        record = Config.insert(
            name=name,
            value=value,
            type=type,
            description=description,
            options=options,
        )
        cached = self.cache.get("config")
        if cached is not None:
            cached[record.name] = record
def main():
    """Ensure the required NLTK/spaCy language resources are installed, then
    parse the dataset and write its summary information."""
    # Check for the NLTK language add-ons we need; download them if missing.
    try:
        nltk.tokenize.word_tokenize('Existe nltk punkt')
    except LookupError:
        nltk.download('punkt')
    # Likewise for the spaCy Portuguese model.
    try:
        spacy.load('pt')
    except IOError:
        download('pt')
    config = Config('data/configuration/', 'config.json')
    # Run the main function of each class: read the input files and build the model.
    parser = run_data_parse(config)
    #model = run_model(config)
    # Save the dataset's key information.
    create_dataset_info(parser)
class Main():
    """Entry point: parse the command line, load the config, set up logging
    and start watching the configured media directories."""

    def __init__(self):
        opts = self.cmdline()      # Parse command line options
        self.configure(opts)       # parse config
        self.setup_logging()
        self.watch_media()

    def cmdline(self):
        """Parse command-line options and return them as a dict."""
        parser = argparse.ArgumentParser(description='Automatily Manage Media items')
        parser.add_argument('-c', '--config', help="Specify a config file to use", required=False)
        parser.add_argument('-l', '--log file', help='Specify the Log file to use if blank stdout is used', required=False)
        parser.add_argument('-p', '--plugin_dir', help='Path to plugins', required=False)
        # TODO fix Debug
        parser.add_argument('-d', '--debug', help='Debug Mode lots of junk in the logfile', required=False)
        return vars(parser.parse_args())

    def configure(self, args):
        """Load the config file and decide where to log; exits when no
        config file was given."""
        if args['config'] is None:
            print("Please point me at a config file")
            sys.exit()
        Config.config_file = args['config']
        self.cfg = Config()
        # Command-line log file wins; otherwise read it from the config.
        if args['log file'] is None:
            self.log_filename = self.cfg.get_config_value('logging', 'logfile')
        else:
            self.log_filename = args['log file']

    def setup_logging(self):
        """Attach a DEBUG-level file handler when a log file is configured."""
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        self.logger = logging.getLogger('Media_Manager')
        self.logger.setLevel(logging.DEBUG)
        if self.log_filename is None:
            self.logger.info("No Logfile found")
        else:
            self.logger.info("Logging to %s" % self.log_filename)
            fh = logging.FileHandler(self.log_filename)
            fh.setLevel(logging.DEBUG)
            fh.setFormatter(formatter)
            self.logger.addHandler(fh)

    def watch_media(self):
        """Register every configured watch directory and block watching them;
        exits when none are configured."""
        wd = self.cfg.read_config('Watch_dirs')
        if wd is None:
            self.logger.warn("No directorys to watch nothing for me todo")
            sys.exit()
        Mm = Media()
        for key in wd:
            Mm.add_dir(wd[key])
        Mm.start_watching()
class MediaHandler(pyinotify.ProcessEvent):
    """inotify event handler: dispatches new and updated media files to the
    plugins and builtins configured for their extension."""

    logger = logging.getLogger('Media_Manager.media')

    def process_IN_MOVED_TO(self, event):
        # A move into a watch dir is treated like a completed write.
        self.logger.warn("file was moved %s" % (os.path.join(event.path, event.name)))
        self.process_IN_CLOSE_WRITE(event)

    def process_IN_CREATE(self, event):
        # Only directory creation is logged; files are handled on close-write.
        if event.dir:
            self.logger.debug("Detected Dir creation %s" % event.name)
            return ()

    def process_IN_CLOSE_WRITE(self, event):
        self.logger.debug("Detected File Creation: %s" % os.path.join(event.path, event.name))
        self.process_newitem(os.path.join(event.path, event.name))

    def process_IN_DELETE(self, event):
        if event.dir:
            self.logger.debug("Detected Dir Deleted %s" % event.name)
        else:
            self.logger.debug("File removed %s" % os.path.join(event.path, event.name))

    def process_newitem(self, filename):
        """Dispatch `filename` to every configured handler for its extension;
        files with no configured extension are ignored."""
        self.cfg = Config()
        # BUG FIX: `ext` was unbound in the except path when get_ext itself
        # raised, turning the log line into a NameError.
        ext = None
        try:
            ext = Tools.get_ext(filename)
            self.logger.debug("Found %s" % ext)
            section = self.cfg.read_config(ext)
        except Exception:  # was a bare except; unconfigured extensions are skipped
            self.logger.info("No Config Found for %s" % ext)
            return
        for key in section:
            action = section[key]
            # Values look like 'plugin_<name> [args]' or 'builtin_<name> [args]'.
            if action.startswith('plugin'):
                self.launch_plugin(action, filename)
            elif action.startswith('builtin'):
                self.launch_builtin(action, filename)

    def launch_builtin(self, plugin, filename):
        """Run a builtin named 'builtin_<name> [opts]' against `filename`."""
        if self.still_exists(filename):
            try:
                # Anything attached after the name (e.g. a destination)
                # becomes the builtin's opts string.
                builtin, opts = plugin.split()
            except ValueError:  # nothing attached, or malformed extra fields
                opts = ''
                builtin = plugin
            func = getattr(Builtin, builtin[8:])  # strip the 'builtin_' prefix
            func(filename, opts)

    def launch_plugin(self, pluginname, filename):
        """Hand `filename` to the named plugin, if the file still exists."""
        if self.still_exists(filename):
            plug = Plugins()
            plug.load_plugin(pluginname, filename)

    def still_exists(self, filename):
        """True while `filename` is still on disk (it may have been moved or
        deleted by an earlier handler)."""
        if os.path.isfile(filename):
            return True
        self.logger.debug("file no longer exists %s" % filename)
        return False