# Excerpt from the pipe/orchestrator class; data_source_controller,
# destination_controller, descriptor_constants and log are module-level
# imports elsewhere in this file.
def __init__(self, pipe, config):
    self.pipe = pipe
    self.data_source = data_source_controller.DataSource(
        self.pipe[descriptor_constants.DATA_SOURCE], config['s3'])
    self.data_destination = destination_controller.DataDestination(
        self.pipe[descriptor_constants.NAME], config['s3'])
    self.logger = log.get_logger("{} DATA SOURCE".format(
        self.pipe[descriptor_constants.NAME]))
    self.MAX_FILES_PER_RUN = 5
import datetime

import Infrastructure.s3 as s3
import Infrastructure.thread_runner as thread_runner
import Infrastructure.log as log
import printer
import procedures

logger = log.get_logger("process")


class GcodeProcessor:
    def __init__(self):
        self.config = {}
        self.config['accessKey'] = ''
        self.config['secretKey'] = ''
        self.config['bucketName'] = 'printer.candylero.com'
        self.thread_runner = thread_runner.Runner()

    def run(self):
        descriptor = {
            "name": "chappie",
            "seconds": 10,
            "is_running": False,
            "log": [],
            "error": ""
        }
        self.thread_runner.set_interval(self.run_printer, 10, "chappie",
                                        descriptor)

    def run_printer(self, descriptor, sec, t1):
        if not descriptor["is_running"]:
            # Body truncated in the source; presumably flips is_running and
            # dispatches the next print job.
            pass
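# Hypothetical entry point showing how GcodeProcessor is driven; the real
# wiring is an assumption, not part of the original file.
if __name__ == "__main__":
    processor = GcodeProcessor()
    processor.run()  # schedules run_printer on a 10-second interval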
def __init__(self):
    super().__init__()
    self.logger = log.get_logger('DB')
import sys
import threading
import psutil
import os

from Infrastructure import log

process = psutil.Process(os.getpid())
logger = log.get_logger("THREAD_RUNNER")


class Runner:
    def __init__(self):
        self.logger = logger

    def set_interval(self, func, sec, name, descriptor):
        # Each interval run happens in a different thread: the wrapper calls
        # func, schedules the next run, then kills its own thread. Note that
        # nothing here actually sleeps for `sec`; the called func is expected
        # to pace itself.
        def func_wrapper():
            func(descriptor, sec, t1)
            self.set_interval(func, sec, name, descriptor)
            t1.kill()
            if not t1.is_alive():
                self.logger.info("Thread killed {}".format(name))

        t1 = ThreadWithTrace(target=func_wrapper)
        t1.start()
        return t1


class ThreadWithTrace(threading.Thread):
    def __init__(self, *args, **keywords):
        threading.Thread.__init__(self, *args, **keywords)
        self.killed = False  # flag checked by the trace hook (sketch below)
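    # The rest of ThreadWithTrace is truncated in the source. A minimal
    # sketch of the usual sys.settrace-based killable-thread recipe that the
    # class name and the t1.kill() call imply; this is an assumption, not
    # necessarily the original implementation.
    def start(self):
        self.__run_backup = self.run
        self.run = self.__run  # install the traced run() before starting
        threading.Thread.start(self)

    def __run(self):
        sys.settrace(self.globaltrace)
        self.__run_backup()
        self.run = self.__run_backup

    def globaltrace(self, frame, event, arg):
        # Return the local tracer for every new call frame.
        return self.localtrace if event == 'call' else None

    def localtrace(self, frame, event, arg):
        if self.killed and event == 'line':
            raise SystemExit()  # unwind the target thread at the next line
        return self.localtrace

    def kill(self):
        self.killed = True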
# Excerpt from the DataSource controller; log, s3, mysql and postgresql are
# module-level imports elsewhere in this file.
def __init__(self, name, s3_config):
    self.logger = log.get_logger("{} DATA SOURCE".format(name))
    self.s3_client = s3.S3Client(s3_config)
    self.Mysql = mysql.DB()
    self.Postgresql = postgresql.DB()
import logging
import sys

from Infrastructure import config, log
import DataFlow.orchestrator as orchestrator

LOG_FORMAT = ('%(levelname) -10s %(asctime)s %(name) -30s %(funcName) '
              '-35s %(lineno) -5d: %(message)s')
LOGGER = logging.getLogger(__name__)

# Track all `logging` instances.
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
logger = log.get_logger("THOR")


def get_config():
    config_path = sys.argv[1]  # path to the config file, see sketch below
    return config.get_config(config_path)


def THOR_dataflow():
    cfg = get_config()  # renamed from `config` to avoid shadowing the module
    data_flow = orchestrator.Flow(cfg)
    return data_flow.run()


if __name__ == "__main__":
    # Load configuration and start the flow.
    logger.info(
        "------------------------------------------------------------------")
    logger.info("starting THOR")
    THOR_dataflow()  # the file is truncated here; this call is assumed
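# A hedged sketch of the config file passed as sys.argv[1]. The key names are
# inferred from how config['s3'] and the DB connectors consume them; the exact
# file layout and top-level keys are assumptions.
#
#   {
#     "s3":       {"accessKey": "...", "secretKey": "...", "bucketName": "..."},
#     "mysql":    {"hostname": "...", "username": "...", "password": "..."},
#     "postgres": {"hostname": "...", "database": "...",
#                  "username": "...", "password": "..."}
#   }
#
# Invocation (script name assumed):
#   python thor.py config.json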
import psycopg2
import psycopg2.extras

from Infrastructure import log

logger = log.get_logger("Postgres")


class Connector:
    def __init__(self, config):
        self.host = config['hostname']
        self.database = config['database']
        self.user = config['username']
        self.password = config['password']
        self.connection = None

    def connect(self):
        # Retry the connection up to 10 times; note there is no sleep
        # between attempts, so failures retry in a tight loop.
        i = 1
        while not self.connection:
            try:
                self.connection = psycopg2.connect(host=self.host,
                                                   database=self.database,
                                                   user=self.user,
                                                   password=self.password)
            except Exception as e:
                i += 1
                logger.error("Error postgres connection " + str(e))
                logger.info("Connect postgres " + str(i))
                if i > 10:
                    break
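# Hypothetical usage sketch; Connector only exposes connect() above, so the
# cursor handling, credentials and query here are assumptions for illustration.
if __name__ == "__main__":
    conn = Connector({'hostname': 'localhost', 'database': 'thor',
                      'username': 'thor', 'password': 'secret'})
    conn.connect()
    if conn.connection:
        with conn.connection.cursor(
                cursor_factory=psycopg2.extras.RealDictCursor) as cur:
            cur.execute("SELECT 1 AS ok")
            print(cur.fetchone())  # {'ok': 1}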
import pymysql

from Infrastructure import log

logger = log.get_logger("MySQL")


class Connector:
    def __init__(self, config, db):
        self.host = config['hostname']
        self.user = config['username']
        self.password = config['password']
        self.db = db
        self.connection = None
        # Format string for rendering datetimes the way MySQL expects.
        self.pattern_mysql_pattern = "{:%Y-%m-%d %H:%M:%S}"

    def connect(self):
        # Retry the connection up to 10 times; as with the Postgres
        # connector, there is no backoff between attempts.
        i = 1
        while not self.connection:
            try:
                self.connection = pymysql.connect(host=self.host,
                                                  user=self.user,
                                                  passwd=self.password,
                                                  db=self.db)
                cursor = self.connection.cursor()
                cursor.execute('SET autocommit = 0;')
            except Exception as e:
                i += 1
                logger.error("Error mysql connection " + str(e))
                logger.info("Connect Mysql " + str(i))
                if i > 10:
                    break
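# Hypothetical usage sketch showing pattern_mysql_pattern in action; the
# credentials, table and column names below are assumptions.
if __name__ == "__main__":
    import datetime

    conn = Connector({'hostname': 'localhost', 'username': 'thor',
                      'password': 'secret'}, db='thor')
    conn.connect()
    if conn.connection:
        since = conn.pattern_mysql_pattern.format(datetime.datetime(2020, 1, 1))
        with conn.connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM jobs WHERE created_at > %s",
                           (since,))
            print(cursor.fetchone()[0])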