def setup_logging(default_path='logging.yaml', default_level=logging.CRITICAL, env_key='LOG_CFG'):
    """Configure program-wide logging from a YAML configuration file.

    Args:
        default_path: Fallback path to the YAML logging config file.
        default_level: Level passed to logging.basicConfig() when the
            YAML config cannot be found or fails to parse.
        env_key: Environment variable that, when set, overrides
            default_path.

    Side effects:
        Binds the module-global ``log`` logger and sets its level from
        the 'log_level' value returned by read_logging_config(). When
        the 'system_logging' flag from the config is falsy, the logger
        is disabled and no handler configuration is attempted.
    """
    global log
    log_level = read_logging_config('logging', 'log_level')
    log = logging.getLogger(__name__)
    # Logger.setLevel() accepts a level name (e.g. 'DEBUG') directly and
    # validates it, so there is no need for the private logging._checkLevel().
    log.setLevel(log_level)

    system_logging = read_logging_config('logging', 'system_logging')
    if not system_logging:
        # Logging explicitly turned off in the config.
        log.disabled = True
        return

    # The environment variable takes precedence over the default path.
    path = os.getenv(env_key) or default_path
    if not os.path.exists(path):
        logging.basicConfig(level=default_level)
        print('Failed to load configuration file. Using default configs')
        return

    with open(path, 'rt') as f:
        try:
            config = yaml.safe_load(f.read())
            logging.config.dictConfig(config)
        except Exception as e:
            # Fall back to a basic configuration rather than crash the
            # program on a malformed logging config.
            print(e)
            print('Error in Logging Configuration. Using default configs')
            logging.basicConfig(level=default_level)
# Twilio (SMS) # This script is not intended to be run manually, rather it is called by other modules. import time import logging import subprocess from pushbullet import Pushbullet from twilio.rest import Client import system_info import mysql.connector from mysql.connector import Error from system_logging import read_logging_config # Setup module Logging. Main setup is done in tank_control_master.py level = read_logging_config('logging', 'log_level') level = logging._checkLevel(level) log = logging.getLogger(__name__) log.setLevel(level) ## We need to have our database functions here instead of calling use_database.py ## This resolves an circular import issue. def read_mysql_database(table, column): try: connection = mysql.connector.connect( user=system_info.mysql_username, password=system_info.mysql_password, host=system_info.mysql_servername, database=system_info.mysql_database)
if testing:
    # Test-mode fixtures: separate directory, tiny plot size, and distinct
    # status file so test runs cannot collide with production transfers.
    plot_dir = '/root/plot_manager/test_plots/'
    plot_size = 10000000
    status_file = '/root/plot_manager/local_transfer_job_running_testing'
    drive_activity_test = '/root/plot_manager/check_drive_activity.sh'
    drive_activity_log = '/root/plot_manager/drive_monitor.iostat'
else:
    # Production paths and the expected on-disk size of a finished plot.
    plot_dir = '/mnt/enclosure1/front/column1/drive43'
    plot_size = 108644374730  # Based on K32 plot size
    status_file = '/root/plot_manager/local_transfer_job_running'
    drive_activity_test = '/root/plot_manager/check_drive_activity.sh'
    drive_activity_log = '/root/plot_manager/drive_monitor.iostat'

# Setup Module logging. Main logging is configured in system_logging.py
setup_logging()
level = read_logging_config('plot_manager_config', 'system_logging', 'log_level')
# NOTE(review): logging._checkLevel is a private CPython helper; Logger.setLevel()
# accepts the level name directly — confirm before switching.
level = logging._checkLevel(level)
log = logging.getLogger('move_local_plots')
log.setLevel(level)

# Let's Get Started

# Setup to read and write to our config file.
# If we are expecting a boolean back pass True/1 for bool,
# otherwise False/0
config = configparser.ConfigParser()


# NOTE(review): the parameter named `bool` shadows the builtin; renaming would
# break keyword callers, so it is left as-is and only flagged here.
def read_config_data(file, section, item, bool):
    # Config files live under the plot_manager root. (The rest of this
    # function continues beyond this view of the file.)
    pathname = '/root/plot_manager/' + file
    config.read(pathname)
import mmap

# Housekeeping: capture the current day name, wall-clock time, and epoch
# timestamp once at import.
today = datetime.today().strftime('%A').lower()
current_military_time = datetime.now().strftime('%H:%M:%S')
current_timestamp = int(time.time())

# Location of the Chia daemon's debug log that we scan for new coins.
chia_log = '/home/chia/.chia/mainnet/log/debug.log'

# Ledger of coins we have already reported, so none are announced twice.
new_coin_log = '/root/coin_monitor/logs/new_coins.log'

# Setup Module logging. Main logging is configured in system_logging.py
setup_logging()
level = read_logging_config('coin_monitor_config', 'system_logging', 'log_level')
level = logging._checkLevel(level)
log = logging.getLogger(__name__)
log.setLevel(level)


# Setup to read and write to our config file.
# If we are expecting a boolean back pass True/1 for bool,
# otherwise False/0
def read_config_data(file, section, item, bool):
    """Read one value from a coin_monitor config file.

    `bool` (shadows the builtin; name kept for caller compatibility) selects
    getboolean() when truthy, otherwise a plain string get().
    """
    pathname = '/root/coin_monitor/' + file
    config.read(pathname)
    return config.getboolean(section, item) if bool else config.get(section, item)