def __init__(self):
    self.oP = OutPipe("InputReceiver", 0)
    self.cR = ConfigReader(GAME_PATH + "controls")
    self.sE = ScriptExecuter()
    self.eI = EngineInterface(objectMode=False)
    self.keyboard = self.eI.getKeyboard()
    self.mouse = self.eI.getMouse()
    self.tH = TypingHandler(self.keyboard)
    self.pairs = {}
    self.responses = {}
    self.oldKeyboard = self.keyboard.events
    self.oldMouse = self.mouse.events
    self.sE.addContext("Input", self)
    self.sE.execute(INPUT_PATH + "csp")
    self.sE.execute(INPUT_PATH + "input")
    self.keyEvents = []
    self.readControls()
    self.locked = False
    self.xsens = 50
    self.ysens = 50
    self.inverted = 0
    self.predict = False
    self.recent = {}
    self.oP("Initialized.")
def configure_log(logdir=logdir, level=logging.WARNING, name=None):
    logger = logging.getLogger(name)
    for hdlr in logger.handlers[:]:
        logger.removeHandler(hdlr)
    logger.setLevel(level)
    logger.propagate = False
    logging.getLogger("requests").setLevel(logging.CRITICAL)
    filename = "oftmatrix.log"
    if name is not None:
        filename = "oftmatrix.{}.log".format(name)
    fileHandler = logging.FileHandler(filename=os.path.join(logdir, filename))
    fileHandler.setFormatter(mainLogFormatter)
    logger.addHandler(fileHandler)
    logger.addHandler(mainConsoleHandler)
    config = ConfigReader()
    alertemails = config.get('logging', 'error_alertemails')
    if len(alertemails) > 0:
        smtpHandler = BufferingMailgunHandler(
            toaddrs=alertemails,
            fromaddr=config.get('logging', 'error_fromaddr'),
            mailgunconfig=config.get('logging', 'mailgunconfig'),
            subject="Error in OFTMatrix",
            capacity=10)
        smtpHandler.setFormatter(mainLogFormatter)
        smtpHandler.setLevel(logging.ERROR)
        logger.addHandler(smtpHandler)
def main():
    reader = ConfigReader("config.cfg", "postgres")
    db_config = reader.get_config()
    tracker = Tracker("Test job", db_config)
    reporter = Reporter(db_config, [
        "b0ca6902-8d7b-49a2-9a17-b94e42e839fc",
        "c3a2320c-19f1-4051-aa5f-85fc58d39ac9",
        "e74252e6-558c-49c1-aa13-99be2cc59585"
    ])
    reporter.get_report()
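# Hedged sketch (not from the source): a plausible layout for the config.cfg read by
# ConfigReader("config.cfg", "postgres") above. The option names and values are
# assumptions chosen to match a typical Postgres connection, not confirmed fields.
#
# [postgres]
# host = localhost
# port = 5432
# dbname = pipeline
# user = pipeline_user
# password = secret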
def __init__(self):
    self._configreader = ConfigReader()
    self._addressesprovider = AddressesProvider()
    self._messageprovider = MessageProvider(
        self._configreader.GetConfiguration("known_files"),
        self._configreader.GetConfiguration("signature"))
    self._messagelogger = MessageLogger()
    self._mailsender = SendMail(
        self._configreader.GetConfiguration("sender_email"))
    self._previewmail = MailPreview()
    self._previewmail.Preview(self._messageprovider.GetMessage()[1])
def _load_configuration(self):
    """Try and load configuration based on the predefined precedence."""
    self.config_reader = ConfigReader(self._config_path)
    self.credentials_reader = CredentialsReader(
        self._get_credentials_path())
    self.metadata_reader = MetadataReader(self._metadata_server)
    self._load_credentials()
    self._load_region()
    self._load_hostname()
    # TODO: implement Auto Scaling Group
    self._set_endpoint()
    self.debug = self.config_reader.debug
    self.pass_through = self.config_reader.pass_through
    self._check_configuration_integrity()
def main():
    config = ConfigReader()
    loglevel = logging.INFO
    if config.get("logging", "loglevel"):
        numeric_level = getattr(logging,
                                config.get("logging", "loglevel").upper(), None)
        if not isinstance(numeric_level, int):
            raise ValueError('Invalid log level: %s' %
                             config.get("logging", "loglevel"))
        loglevel = numeric_level
    configure_log(level=loglevel)
    configure_log(name="werkzeug", level=logging.INFO)
    installThreadExcepthook()

    def uncaught_exception_handler(exc_type, exc_value, exc_traceback):
        logging.error("Uncaught exception",
                      exc_info=(exc_type, exc_value, exc_traceback))

    sys.excepthook = uncaught_exception_handler

    wsthread = WebServerThread(app)
    wsthread.start()

    global CONFIG
    CONFIG = load_config()
    logging.info('config loaded {}'.format(CONFIG))

    oftmatrix.run()
    try:
        wd.run()
    except Exception:
        pass
    initialize_matrix()

    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        logging.info("shutdown has been initiated from the console...")
        oftmatrix.stop()
        try:
            wd.stop()
        except Exception:
            pass
        wsthread.stop_server()
def __init__(self, config_file):
    if path.exists(config_file) and path.isfile(config_file):
        conf = ConfigReader(config_file)
        self.output = conf.get("output")
        self.output_file = conf.get("output_file")
        self.level = conf.get("level")
        self.__log_levels = {
            "debug": 10,
            "info": 20,
            "warning": 30,
            "error": 40,
            "critical": 50,
        }
        self.message_format = conf.get("message_format")
        self.date_format = conf.get("date_format")
        self.formatter = Formatter(self.level, self.message_format,
                                   self.date_format)
    else:
        raise IOError("Can't find configuration file.")
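# Hedged sketch (not from the source): a configuration file that would satisfy the
# conf.get(...) calls above. The option names come from the snippet; the values are
# placeholders only.
#
# output = file
# output_file = app.log
# level = info
# message_format = {asctime} [{levelname}] {message}
# date_format = %Y-%m-%d %H:%M:%S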
def _load_configuration(self):
    """Try and load configuration based on the predefined precedence."""
    self.config_reader = ConfigReader(self._config_path)
    self.credentials_reader = CredentialsReader(
        self._get_credentials_path())
    self.metadata_reader = MetadataReader(self._metadata_server)
    self._load_credentials()
    self._load_region()
    self._load_hostname()
    self._load_proxy_server_name()
    self._load_proxy_server_port()
    self._set_endpoint()
    self._set_ec2_endpoint()
    self._load_autoscaling_group()
    self.debug = self.config_reader.debug
    self.pass_through = self.config_reader.pass_through
    self.push_asg = self.config_reader.push_asg
    self.push_constant = self.config_reader.push_constant
    self.constant_dimension_value = self.config_reader.constant_dimension_value
    self._check_configuration_integrity()
def __init__(self):
    # Remove engine log
    try:
        os.remove(ENGINE_PATH + "engine_log" + TEXT_EXT)
    except OSError:
        pass
    # Remove net logs
    try:
        for i in os.listdir(NET_PATH):
            os.remove(NET_PATH + i)
    except OSError:
        pass
    # Create net path
    try:
        os.mkdir(NET_PATH)
    except OSError:
        pass
    self.oP = OutPipe("Launcher", 0)
    for line in self.getSystemInfo():
        self.oP(line)
    self.cR = ConfigReader(ENGINE_PATH + "engine")
    self.sE = ScriptExecuter()
    self.eI = EngineInterface()
    self.eI.preLoad()
    self.eI.preLoadEpi()
    # Sloppy: removes the blacker that covers up starting glitches
    self.eI.getOverlayScene().objects["BlackScreen"].endObject()
    self.sound = SoundEngine()
    self.sound.preLoadAudio()
    self.s = None
    self.c = None
    self.output = True
    self.oP("Initialized.")
from cellgrid import CellGrid
from cell import Cell
from coord import Coord
from configreader import ConfigReader
import route
import json
import datetime

config = ConfigReader("settings.conf")
api_key = config.apikey

grid = CellGrid(Coord(39.0095, -77.16796),
                Coord(38.8044, -76.89331),
                grid_width=5,
                grid_height=5)

with open('gridfeaturecounts.json', 'r') as featurecounts_per_cell:
    javascript_analysis_results_json = featurecounts_per_cell.read().replace(
        '\n', '')

grid.load_javascript_analysis_json(javascript_analysis_results_json)
grid.generate_scaled_scores_zero_to_one()
print(grid.to_geojson())
from pyspark.sql import SparkSession
from pyspark.sql.types import StructType, StructField, DateType, \
    StringType, TimestampType, DecimalType, IntegerType
from azure.storage.blob import BlobServiceClient

from configreader import ConfigReader
from parsers import parse_line

spark = SparkSession.builder.getOrCreate()
spark.sparkContext.addPyFile("parsers.py")

reader = ConfigReader("config.cfg", "azure-storage")
config = reader.get_config()

# Get Azure storage info from config
storage_acct_name = config["account_name"]
storage_acct_access_key = config["access_key"]
storage_container = config["container_name"]
mount_root = config["mount_root"]

# Set Spark Azure storage account and key
storage_acct_key_str = f"fs.azure.account.key.{storage_acct_name}.blob.core.windows.net"
spark.conf.set(storage_acct_key_str, storage_acct_access_key)

# Set base Spark filepath for container
container_base_path = f"wasbs://{storage_container}@{storage_acct_name}.blob.core.windows.net"
mount_base_path = f"{mount_root}/{storage_container}"

# Set up container client
blob_service_client = BlobServiceClient(
    account_url=f"https://{storage_acct_name}.blob.core.windows.net",
    credential=storage_acct_access_key)
# coding=utf-8
from selenium import webdriver
from configreader import ConfigReader

# gc = webdriver.Chrome()
cr = ConfigReader()
cr1 = ConfigReader()
cr2 = ConfigReader()
host = ConfigReader().get_project("host")
port = ConfigReader().get_project("port")
path = ConfigReader().get_project("path")
print(f"{host}:{port}/{path}")
print(id(cr.cf))
print(id(cr1.cf))
print(id(cr2.cf))
# gc.get(f"{host}:{port}/{path}")
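# Hedged sketch, not the project's actual ConfigReader: one way the class could share
# a single parsed config object across instances, which would explain why the
# id(cr.cf), id(cr1.cf), and id(cr2.cf) calls above print the same value. The file
# name, section, and option names here are placeholders.
import configparser


class SharedConfigReader:
    cf = None  # class-level cache shared by every instance

    def __init__(self, path="project.ini"):
        if SharedConfigReader.cf is None:
            SharedConfigReader.cf = configparser.ConfigParser()
            SharedConfigReader.cf.read(path)

    def get_project(self, option):
        return SharedConfigReader.cf.get("project", option)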
import os
import sys
from os.path import expanduser

from configreader import ConfigReader
import getpass
import psycopg2
from prettytable import PrettyTable
from prettytable import from_db_cursor


def get_script_path():
    return os.path.dirname(os.path.realpath(sys.argv[0]))


configfile = '/etc/pgsnapman/pgsnapman.config'
if not os.path.exists(configfile):
    configfile = expanduser("~") + '/.pgsnapman.config'
print(configfile)
if not os.path.exists(configfile):
    configfile = get_script_path() + '/../bin/pgsnapman.config'
config = ConfigReader(configfile)
PGSCHOST = config.getval('PGSCHOST')
PGSCPORT = config.getval('PGSCPORT')
PGSCUSER = config.getval('PGSCUSER')
PGSCDB = config.getval('PGSCDB')
PGSCPASSWORD = config.getval('PGSCPASSWORD')

print('')
print('+-----------------------------+')
print('| pgsnapman script uploader |')
print('+-----------------------------+')
print('')
print('Verifying database connection...')
if PGSCPASSWORD == '':
    PGSCPASSWORD = getpass.getpass('password: ')
try:
def initialize_upload(protocol):
    config_reader = ConfigReader(CONFIG_FILE_PATH)
    uploader = Uploader(60, AVRO_FILE_ROOT_DIR,
                        config_reader.get_connection_string())
    uploader.run_schedule()
for pattern in sortDict.keys():
    if re.search(pattern, filename):
        logging.info(" Regex matched: " + pattern + " with Filename: " + filename)
        src = folder_to_track + "/" + filename
        to = sortDict.get(pattern) + "/" + filename
        try:
            os.rename(src, to)
            logging.info(" Moved " + src + " to " + to)
        except FileNotFoundError as Identifier:
            logging.warning(Identifier)
            logging.warning(" Please look at your destination path and whether the path exists. ")

# Config
StartLogging()
config = ConfigReader()
sortDict = config.getParams()
folder_to_track = config.getTrackFolder()

# Handling
eventHandler = DownloadFileHandler()
observer = Observer()
observer.schedule(eventHandler, folder_to_track, recursive=True)
observer.start()

try:
    while True:
        time.sleep(10)
        logging.info(" Running, timestamp: [" + str(time.asctime()) + "]")
except KeyboardInterrupt:
    observer.stop()
import os
from datetime import datetime

from daterelate.daterelate import relate
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException, TimeoutException
from selenium.webdriver.chrome.options import Options as ChromeOptions
from selenium.webdriver.firefox.options import Options as FirefoxOptions
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as conditions
from selenium.webdriver.support import ui
from configreader import ConfigReader

os.environ['DISPLAY'] = ':0'  # Set the display if set to run as cronjob

HOMEPAGE = 'http://myaccount.telkom.co.ke'
TODAY = datetime.now()

config = ConfigReader('defaults.ini')
TITLE = config.get('notificationtitle', default='Telkom Balance')
NUMBER = config.get('number', section='credentials', default='')
PASSWD = config.get('pass', section='credentials', default='')
s = os.path.join(os.path.expanduser('~'), 'bin')
driver_path = config.get('driverspath', default=s)
chrome_driver_name = config.get('chromedrivername', section='Chrome',
                                default='chromedriver')
firefox_driver_name = config.get('firefoxdrivername', section='Firefox',
                                 default='geckodriver')
headless = config.get('headless', default=True)

chrome_options = ChromeOptions()
firefox_options = FirefoxOptions()
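# Hedged sketch (not from the source): a defaults.ini layout consistent with the
# config.get(...) calls above. Section and option names come from the snippet; the
# values are placeholders.
#
# [DEFAULT]
# notificationtitle = Telkom Balance
# driverspath = /home/user/bin
# headless = True
#
# [credentials]
# number = 07XXXXXXXX
# pass = secret
#
# [Chrome]
# chromedrivername = chromedriver
#
# [Firefox]
# firefoxdrivername = geckodriver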
from configreader import ConfigReader

config = ConfigReader("../../res/test.cfg")
print(config.get("name"))
print(config.get_string("name"))
print(config.get_int("some_int"))
print(config.get_float("some_float"))
print(config.get_long("some_long"))
print(config.get_boolean("some_bool"))
print(config.get_boolean("some_int"))
print(config.get_boolean("some_nonexistent_property"))
def __init__(self, lat, lon):
    self.lon = lon
    self.lat = lat
    self.config = ConfigReader("settings.conf")
    self.api_key = self.config.apikey
    self.client = googlemaps.Client(key=self.api_key)
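# Hedged usage sketch: how the googlemaps client created above is typically used for
# a reverse-geocode lookup. The standalone client, API key, and coordinates here are
# illustrative and not part of the original class.
import googlemaps

client = googlemaps.Client(key="YOUR_API_KEY")
results = client.reverse_geocode((38.9072, -77.0369))
if results:
    print(results[0]["formatted_address"])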
def main():
    ### GET LIST OF INPUT FILES FROM STORAGE CONTAINER FOR SPARK TO READ
    # Read from config files
    storage_config = ConfigReader("config.cfg", "azure-storage").get_config()
    db_config = ConfigReader("config.cfg", "postgres").get_config()

    # Get Azure storage info from config
    storage_acct_name = storage_config["account_name"]
    storage_acct_access_key = storage_config["access_key"]
    storage_container = storage_config["container_name"]
    mount_root = storage_config["mount_root"]

    # Set Spark Azure storage account and key
    storage_acct_key_str = f"fs.azure.account.key.{storage_acct_name}.blob.core.windows.net"

    # Strings for accessing filenames from storage
    file_type = "txt"
    input_dir = "data"
    file_suffix = f".{file_type}"
    suffix_len = len(file_suffix)
    mount_base_path = f"{mount_root}/{storage_container}"

    # Set up container client
    blob_service_client = BlobServiceClient(
        account_url=f"https://{storage_acct_name}.blob.core.windows.net",
        credential=storage_acct_access_key)
    container_client = blob_service_client.get_container_client(
        storage_container)

    # Get list of file names
    blob_list = container_client.list_blobs(name_starts_with=input_dir)
    txtfile_paths = [
        blob.name for blob in blob_list
        if blob.name[-suffix_len:] == file_suffix
    ]
    txtfile_full_paths = [f"{mount_base_path}/{file}" for file in txtfile_paths]

    ### SET UP SPARK SESSION AND RUN THROUGH STEPS
    # Start Spark session and set up storage account key
    spark = SparkSession.builder.getOrCreate()
    spark.conf.set(storage_acct_key_str, storage_acct_access_key)

    steps = []
    step_1 = PipelineStep1("Step 1: Ingest",
                           spark=spark,
                           mount_base_path=mount_base_path,
                           input_path=",".join(txtfile_full_paths),
                           output_path=f"{mount_base_path}/ingested-data")
    steps.append(step_1)
    step_2 = PipelineStep2("Step 2: Preprocess",
                           spark=spark,
                           mount_base_path=mount_base_path,
                           input_path=step_1.output_path,
                           output_path=f"{mount_base_path}/preprocessed-data")
    steps.append(step_2)
    step_3 = PipelineStep3("Step 3: ETL",
                           spark=spark,
                           mount_base_path=mount_base_path,
                           input_path=step_2.output_path,
                           output_path=f"{mount_base_path}/ETL-output")
    steps.append(step_3)

    # Attempt each step and insert results into db table
    job_ids = []
    for step in steps:
        tracker = Tracker(job_name=step.name, db_config=db_config)
        job_ids.append(tracker.job_id)
        try:
            step.run()
            tracker.update_job_status("Success")
        except Exception as e:
            print(e)
            tracker.update_job_status("Failed")
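# Hedged sketch, not the project's actual classes: an interface that PipelineStep1/2/3
# above appear to share, inferred from the constructor arguments and the step.name,
# step.run(), and step.output_path usages in main(). Everything here is an assumption.
class PipelineStepBase:
    def __init__(self, name, spark, mount_base_path, input_path, output_path):
        self.name = name
        self.spark = spark
        self.mount_base_path = mount_base_path
        self.input_path = input_path
        self.output_path = output_path

    def run(self):
        # Concrete steps read from input_path, transform with Spark,
        # and write to output_path.
        raise NotImplementedError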
def __init__(self, config, files=[None], host='0.0.0.0', port=27017,
             is_json=False):
    """Importer will set up config and perform parallel import of tsv files into mongo."""
    if type(files) is not list or len(files) == 0:
        raise ValueError(
            "List of one or more tsv files required for import.")
    self.config = ConfigReader(config)
    init_file = files[0]
    tsv_files = files[1:]
    self.conn = Connector(self.config.db_name, host=host, port=port)
    if self.config.axes is not None:
        self.conn.registerAxes(self.config.axes)
    if self.config.ksf_map is not None:
        self.conn.registerKeyspaceFile(self.config.ksf_file,
                                       self.config.ksf_name,
                                       self.config.ksf_keys,
                                       self.config.ksf_axis)
    # Register the first file to establish the minor keyspace;
    # allow registration of keyspaces only.
    if init_file is not None:
        # Replace minor keyspace in tsv identifier with ksminor name.
        # FIXME: clean this up later.
        self.rename = {self.config.ksemb_id: 'key'}
        if is_json:
            with open(init_file) as j:
                import json
                import re
                dfs = [
                    json.loads(
                        re.sub(
                            self.config.ksemb_id, 'contents',
                            re.sub('barcode',
                                   self.rename[self.config.ksemb_id],
                                   str(line), 1), 1))
                    for line in j
                ]
                init_df = dfs[0]
                tsv_files = dfs[1:]
                del dfs
        else:
            init_df = getDataFrame(init_file,
                                   ksminor_filter=self.config.ksemb_filter,
                                   ksminor_id=self.config.ksemb_id,
                                   rename=self.rename,
                                   transpose=self.config.transpose)
        ksmin_keys = None
        if self.config.infer_units:
            ksmin_keys = list(init_df.index)
            units = [{"name": i, "description": ""} for i in ksmin_keys]
            self.config.units = units
        units = self.conn.registerUnits(self.config.units)
        self.minor_keyspaceId = self.conn.registerKeyspaceEmbedded(
            init_df,
            self.config.ksemb_id,
            self.config.ksemb_name,
            self.config.ksemb_axis,
            rename=self.rename,
            keys=ksmin_keys,
            is_json=is_json)
        # Construct the first dataframe and add it to the list to be registered.
        try:
            self.conn.registerDataFrame(init_df, self.minor_keyspaceId,
                                        units, is_json=is_json)
            print("Completed: {0}".format(init_file))
        except Exception as e:
            traceback.print_exc(file=sys.stdout)
            print(str(e))
        # Work on the rest at once.
        if len(tsv_files) > 0:
            parallelGen(tsv_files, self.minor_keyspaceId, units,
                        self.config.db_name, self.config.ksemb_filter,
                        self.config.ksemb_id, self.rename, host, port,
                        is_json=is_json)
from sys import exit

from globals import __MAIN_LOOP_DELAY__
import loggingwrapper as log
from aterror import ATConnectionLostError
from configreader import ConfigReader
from atprotocol import ATProtocol
from operatordispatcher import OperatorDispatcher

if __name__ == '__main__':
    # preparation
    __log = log.create_log("moop.log", log.DEBUG)

    # reading config
    try:
        __config = ConfigReader("moop.cfg")
    except Exception as e:
        __log.critical("config not loaded: {}".format(e))
        exit(1)
    if not __config.is_readed():
        __log.critical("no config file was found")
        exit(1)

    # check the serial connection parameters for the device
    if not __config.is_params_exists(("com_port", "com_baudrate")):
        __log.critical(
            "serial communication parameters not completely defined in config file '{}'"
            .format(__config.get_file_name()))
        exit(1)

    # check the DBMS connection parameters
    if not __config.is_params_exists(