def connect_to_db():
    """Create a persistence client used to validate owner/developer info.

    Returns
    -------
    MongoPersistenceManager
        A pymongo-backed persistence client built from the ``mongodb``
        section of the default configuration.
    """
    mongo_section = "mongodb"
    config = XprConfigParser(XprConfigParser.DEFAULT_CONFIG_PATH)
    mongo_config = config[mongo_section]
    # TODO(review): original carried an "add exception" note — connection
    # errors here are still unhandled; confirm desired handling.
    db_client = MongoPersistenceManager(url=mongo_config["mongo_url"],
                                        db=mongo_config["database"],
                                        uid=mongo_config["mongo_uid"],
                                        pwd=mongo_config["mongo_pwd"],
                                        w=mongo_config['w'])
    return db_client
def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH):
    """Build the package dependency graph from the dependency config file.

    Parameters
    ----------
    config_path : str
        Path to the xpresso configuration file.

    Raises
    ------
    PackageFailedException
        If the dependency config file is missing, unreadable, not valid
        JSON, or if the declared dependencies contain a cycle.
    """
    super().__init__()
    self.config = XprConfigParser(config_path)["packages_setup"]
    self.logger = XprLogger()
    dependency_config_file = self.config[self.DEPENDENCY_SECTION][
        self.DEPENDENCY_CONFIG_FILE]
    if not os.path.exists(dependency_config_file):
        self.logger.error(("Unable to find the dependency js"
                           "file at the mentioned path"))
        raise PackageFailedException("Invalid dependency config file")
    try:
        with open(dependency_config_file) as config_fs:
            dependency_config = json.load(config_fs)
    except (EnvironmentError, json.JSONDecodeError) as err:
        # Fix: malformed JSON raises JSONDecodeError (a ValueError), which
        # previously escaped the EnvironmentError-only handler uncaught.
        self.logger.fatal(err)
        raise PackageFailedException("Invalid config file")
    self.graph = nx.DiGraph()
    # Flatten {package: [dependencies, ...]} into (package, dependency)
    # edges of a directed graph.
    edges = [(key, value)
             for key, values in dependency_config.items()
             for value in values]
    self.graph.add_edges_from(edges)
    if not nx.is_directed_acyclic_graph(self.graph):
        self.logger.fatal(("Unable to handle dependencies due to cyclic "
                           "loop"))
        self.graph = None
        raise PackageFailedException("Cyclic Dependency Found")
def createcomponents(components, reponame, newrepourl, projectname):
    """Materialise project components locally from the skeleton build.

    For each component dict (keys: ``name``, ``type``, ``flavor``) the
    matching skeleton flavor directory is copied under ``/tmp/<reponame>``,
    python services get their placeholder directory/strings renamed, and a
    Jenkins pipeline named ``<projectname>__<component_name>`` is created.

    :param components: list of component description dicts
    :param reponame: name of the local repository directory under /tmp
    :param newrepourl: repository URL handed to the Jenkins pipeline
    :param projectname: project name used to prefix pipeline names
    """
    logger.debug("Creating components locally.")
    for component in components:
        component_name = component['name']
        component_type = component['type']
        flavor = component['flavor']
        logger.info(f"\n Adding new component : {component}\n")
        src_flavor_dir = f"/tmp/skeleton-build/{component_type}/{flavor}"
        dst_flavor_dir = f"/tmp/{reponame}/{component_name}"
        copytree(src_flavor_dir, dst_flavor_dir)
        if flavor.lower() == 'python' and component_type == 'service':
            # Rename the templated "{{XPRESSO_PROJECT_NAME}}" directory and
            # substitute the placeholder inside the copied files.
            default_dir = f"/tmp/{reponame}/{component_name}/"\
                          f"{{{{XPRESSO_PROJECT_NAME}}}}"
            new_dir = f"/tmp/{reponame}/{component_name}/{component_name}"
            move(default_dir, new_dir)
            replace_string(
                "{{XPRESSO_PROJECT_NAME}}",
                component_name,
                f"/tmp/{reponame}/{component_name}"
            )
        # create pipeline
        try:
            config = XprConfigParser()
            jenkins_manager = JenkinsManager(config)
            jenkins_manager.create_pipeline(
                f'{projectname}__{component_name}', newrepourl)
            print("pipeline created")
        except Exception as err:
            # Fix: the original bare "except:" also swallowed SystemExit and
            # KeyboardInterrupt and hid the failure cause entirely.
            logger.error(f"error in creating pipeline: {err}")
            print("error in creating pipeline")
def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH):
    """Load the pachyderm config section and connect a pachyderm client.

    :param config_path: path to the xpresso configuration file
    :raises ValueError: if the pachyderm connection cannot be established
    """
    self.logger = XprLogger()
    self.config = XprConfigParser(config_path)[self.PACHYDERM_CONFIG]
    try:
        self.pachyderm_client = self.connect_to_pachyderm()
    except PachydermOperationException as err:
        # Fix: chain the original exception so the root cause is preserved
        # in tracebacks instead of being reported as "during handling of".
        raise ValueError(err.message) from err
def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH,
             executor=None):
    """Initialise the packages-setup handler.

    Uses a LocalShellExecutor when no executor is supplied, then loads the
    "packages_setup" section of the configuration.
    """
    executor = executor if executor else LocalShellExecutor()
    super().__init__(executor)
    self.config = XprConfigParser(config_path)["packages_setup"]
def __init__(self):
    """Set up the Mongo-backed utility helper and a logger."""
    # Parse configuration from the class-level config path attribute.
    self.config = XprConfigParser(self.config_path)
    # Database helper configured from the section named by MONGO_SECTION
    # (url / db / uid / pwd / write-concern keys).
    self.db_utils = Utils(url=self.config[self.MONGO_SECTION][self.URL],
                          db=self.config[self.MONGO_SECTION][self.DB],
                          uid=self.config[self.MONGO_SECTION][self.UID],
                          pwd=self.config[self.MONGO_SECTION][self.PWD],
                          w=self.config[self.MONGO_SECTION][self.W])
    self.logger = XprLogger()
def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH,
             executor=None):
    """Initialise with an optional shell executor and load configuration.

    Falls back to a LocalShellExecutor when no executor is given.
    """
    executor = executor if executor else LocalShellExecutor()
    super().__init__(executor)
    self.config = XprConfigParser(config_path)
    # Default location under which project packages are written.
    self.default_project_path = self.config["general"]["package_path"]
def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH):
    """Load client-side controller settings and token file locations."""
    self.logger = XprLogger()
    self.config = XprConfigParser(config_path)
    controller_config = self.config[self.CONTROLLER_SECTION]
    # Per-user client directory, rooted at the user's home folder.
    self.path = os.path.join(os.path.expanduser('~'),
                             controller_config[self.CLIENT_PATH])
    # File holding the currently active auth token.
    self.token_file = '{}.current'.format(self.path)
    self.server_path = controller_config[self.SERVER_URL]
def __init__(self, token):
    """Store the session token and reset all login/expiry state."""
    self.logger = XprLogger()
    self.config = XprConfigParser(XprConfigParser.DEFAULT_CONFIG_PATH)
    self.token = token
    # Expiry timestamps are unknown until the token is validated.
    self.token_expiry = None
    self.login_expiry = None
    self.login_status = False
def __init__(self, persistence_manager):
    """Initialise Kubeflow helpers and locate the declarative pipeline folder.

    :param persistence_manager: persistence backend handed to KubeflowUtils
    """
    self.kubeflow_utils = KubeflowUtils(persistence_manager)
    self.logger = XprLogger()
    self.executor = LocalShellExecutor()
    config_path = XprConfigParser.DEFAULT_CONFIG_PATH
    self.config = XprConfigParser(config_path)
    # PROJECTS_SECTION / DECLARATIVE_PIPELINE_FOLDER are module-level
    # config-key constants defined outside this block.
    self.declarative_pipeline_folder = self.config[PROJECTS_SECTION][
        DECLARATIVE_PIPELINE_FOLDER]
    # Validates the folder and loads its content; see the helper method.
    self.content = self.declarative_pipeline_folder_check()
def load_config(self, config_log_type) -> XprConfigParser:
    """Load the logging configuration for the given log type.

    Args:
        config_log_type (str): Type of the config file, i.e. "xpr" or
            "setup".

    Returns:
        XprConfigParser: parsed configuration for the requested type
        (fix: the old docstring wrongly said ``str``).

    Raises:
        ValueError: if ``config_log_type`` is neither "setup" nor "xpr".
        FileNotFoundError: if the fallback default config is also missing.
        InvalidConfigException: if the fallback default config is invalid.
    """
    if config_log_type == "setup":
        config_log_filename = "setup_log.json"
    elif config_log_type == "xpr":
        config_log_filename = "xpr_log.json"
    else:
        raise ValueError("Invalid parameter passed to load_config")
    config = None
    try:
        config = XprConfigParser(
            os.path.join(self.find_config(config_log_filename)))
    except FileNotFoundError:
        # This is intended: fall through to the default config below.
        pass
    except InvalidConfigException as err:
        print(
            "Invalid config Found. Loading from the config from default path. \n{}".format(
                str(err)))
    finally:
        if config is None:
            try:
                config = XprConfigParser(
                    XprConfigParser.DEFAULT_CONFIG_PATH_XPR_LOG)
            except FileNotFoundError as err:
                # Fix: message said "Unable to file the config file".
                print(
                    "Unable to find the config file in base directory. Loading from the config "
                    "from default path. \n{}".format(str(err)))
                raise err
            except InvalidConfigException as err:
                print("Invalid config Found. \n{}".format(str(err)))
                raise err
    return config
def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH):
    """Initialise the API gateway from the gateway section of the config.

    :param config_path: path to the xpresso configuration file
    """
    self.config = XprConfigParser(config_file_path=config_path)
    # Gateway handle is populated by initialize_gateway below.
    self.api_gateway = None
    self.initialize_gateway(
        gateway_provider=self.config[GatewayManager.CONFIG_GATEWAY_KEY][
            GatewayManager.CONFIG_GATEWAY_PROVIDER],
        admin_url=self.config[GatewayManager.CONFIG_GATEWAY_KEY][
            GatewayManager.CONFIG_GATEWAY_ADMIN],
        proxy_url=self.config[GatewayManager.CONFIG_GATEWAY_KEY][
            GatewayManager.CONFIG_GATEWAY_PROXY],
        config_path=config_path)
    # NOTE(review): logger is created only after initialize_gateway, so the
    # gateway setup itself cannot use self.logger — confirm intentional.
    self.logger = XprLogger()
def __init__(self, persistence_manager):
    """Remember the persistence manager and ensure the deployment folder exists."""
    self.persistence_manager = persistence_manager
    self.logger = XprLogger()
    self.config = XprConfigParser(XprConfigParser.DEFAULT_CONFIG_PATH)
    # Folder (from the projects section) where deployment files are kept.
    self.deployment_files_folder = \
        self.config['projects']['deployment_files_folder']
    if not os.path.isdir(self.deployment_files_folder):
        os.makedirs(self.deployment_files_folder, 0o755)
def __init__(self): self.logger = XprLogger() # script is supposed to be run on the VM itself, so host is localhost client = MongoClient('localhost', replicaset='rs0') self.db = client.xprdb self.db.authenticate('xprdb_admin', 'xprdb@Abz00ba') config_path = XprConfigParser.DEFAULT_CONFIG_PATH config = XprConfigParser(config_path) MONGO = 'mongodb' FILEPATH = 'formats_file' self.path = config[MONGO][FILEPATH] with open(self.path, 'r') as file: self.data = json.loads(file.read())
def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH,
             executor=None):
    """Initialise with an optional executor and a Mongo persistence manager."""
    executor = executor if executor else LocalShellExecutor()
    super().__init__(executor)
    self.config = XprConfigParser(config_path)
    # Build the persistence manager from the mongo section of the config.
    mongo_config = self.config[self.MONGO_SECTION]
    self.persistence_manager = MongoPersistenceManager(
        url=mongo_config[self.URL],
        db=mongo_config[self.DB],
        uid=mongo_config[self.UID],
        pwd=mongo_config[self.PWD],
        w=mongo_config[self.W])
def __init__(self):
    """Load bitbucket credentials and REST API request body templates."""
    self.logger = XprLogger()
    self.config = XprConfigParser(self.config_path)
    bitbucket_config = self.config['bitbucket']
    self.defaulturl = bitbucket_config['restapi']
    self.teamname = bitbucket_config['teamname']
    self.username = bitbucket_config['username']
    self.password = bitbucket_config['password']
    # Project body template expected by the bitbucket REST API.
    self.defaultprojectbody = {
        "name": "",
        "description": "",
        "key": "",
        "is_private": False
    }
    # Repository body template expected by the bitbucket REST API.
    self.defaultrepobody = {"scm": "git", "project": {"key": ""}}
def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH,
             executor=None):
    """Set up the apt package handler: keys, hosted folder and executor.

    :param config_path: path to the xpresso configuration file
    :param executor: shell executor; defaults to LocalShellExecutor
    """
    if not executor:
        executor = LocalShellExecutor()
    super().__init__(executor=executor)
    self.config = XprConfigParser(config_path)["packages_setup"]
    self.logger = XprLogger()
    # Keys and paths come from the apt section of the packages_setup config.
    self.apt_config = self.config[self.APT_SECTION]
    self.public_key = self.apt_config[self.APT_PUBLIC_KEY]
    self.private_key = self.apt_config[self.APT_PRIVATE_KEY]
    self.hosted_package_folder = self.apt_config[
        self.APT_HOSTED_PACKGE_KEY]
    # Signing paraphrase and key id are resolved later, not at construction.
    self.sign_paraphrase = None
    self.sign_key_id = None
    # NOTE(review): captures cwd at construction time as "home" — confirm
    # callers construct this object from the intended working directory.
    self.home_folder = os.getcwd()
def __init__(self, dataset_name: str = "default",
             description: str = "This is a dataset",
             config_path: str = XprConfigParser.DEFAULT_CONFIG_PATH):
    """Create an empty structured dataset with default metadata.

    :param dataset_name: display name of the dataset
    :param description: free-text description
    :param config_path: path to the xpresso configuration file
    """
    self.config = XprConfigParser(config_file_path=config_path)
    # Backing data starts empty; callers populate it later.
    self.data = pd.DataFrame()
    self.name = dataset_name
    self.type = DatasetType.STRUCTURED
    self.description = description
    # 0 for a fresh dataset since self.data is empty at this point.
    self.num_records = len(self.data)
    self.creation_date = datetime.datetime.now()
    self.creation_by = "default"
    self.project = "default"
    self.repo = "default"
    self.branch = "master"
    self.version = 1
    self.tag = "1.0.0"
    self.info = DatasetInfo()
    self.local_storage_required = False
    # Percentage of records to sample; 100 means use all records.
    self.sample_percentage = 100.00
def __init__(self,
             config_path=XprConfigParser.DEFAULT_CONFIG_PATH_SETUP_LOG):
    """Initialise configuration, logging, LDAP manager and docker client."""
    self.logger = XprLogger()
    self.config = XprConfigParser(config_path)
    self.ldapmanager = LdapManager()
    # Docker client configured from the environment (DOCKER_HOST etc.).
    self.client = docker.from_env()
import os import urllib import subprocess from copy import deepcopy from shutil import copy2, copytree, rmtree, move from xpresso.ai.admin.controller.external import bitbucketapi from xpresso.ai.admin.controller.utils import error_codes from xpresso.ai.admin.controller.external.jenkins_manager import JenkinsManager from xpresso.ai.core.utils.xpr_config_parser import XprConfigParser from xpresso.ai.core.logging.xpr_log import XprLogger from xpresso.ai.admin.controller.exceptions.xpr_exceptions import * config_path = XprConfigParser.DEFAULT_CONFIG_PATH config = XprConfigParser(config_path) username = config['bitbucket']['username'] password = config['bitbucket']['password'] escape_password = urllib.parse.quote(password) skeletonpath = f"http://{username}:{escape_password}@bitbucket.org/"\ f"abzooba-screpo/skeleton-build.git" bitbucket = bitbucketapi.bitbucketapi() logger = XprLogger() def replace_string(key, replacement, direc): print(key) print(replacement) print(direc) for dname, dirs, files in os.walk(direc):
def __init__(self,
             config_path=XprConfigParser.DEFAULT_CONFIG_PATH_SETUP_LOG):
    """Parse the setup-log configuration and connect to local docker."""
    # The two initialisations are independent; docker client first.
    self.client = docker.from_env()
    self.config = XprConfigParser(config_path)
def __init__(self):
    """Load the default xpresso configuration and create a logger."""
    self.logger = XprLogger()
    self.xpr_config = XprConfigParser(
        config_file_path=XprConfigParser.DEFAULT_CONFIG_PATH)
class MongoPersistenceManager:
    # code for singleton class
    '''
    class __Singleton:
        def __init__(self):
            pass

        def __str__(self):
            return repr(self) + self.val
    '''
    """
    This class performs various database (CRUD) operations on a Mongo DB
    database.

    Attributes:
        url (str) - URL of the server
        persistence (str) - name of the database
        uid (str) - user id
        pwd (str) - password
        w (int) - write concern (default = 0)
    """
    config_path = XprConfigParser.DEFAULT_CONFIG_PATH
    config = XprConfigParser(config_path)
    # Retry policy for connections/operations, read once at class load time.
    INTERVAL_BETWEEN_RETRIES = int(
        config["mongodb"]["interval_between_retries"])
    MAX_RETRIES = int(config["mongodb"]["max_retries"])
    logger = XprLogger()
    # singleton instance
    # instance = None
    # (static) database connection — shared across ALL instances of this
    # class; connect() populates it once and reuses it afterwards.
    mongo_db = None

    # def __getattr__(self, name):
    #     return getattr(self.instance, name)

    def __init__(self, url: str, db: str, uid: str, pwd: str, w: int = 1):
        """
        Constructor.

        :param url (str): Mongo DB server URL
        :param persistence (str): name of database to perform operations on
        """
        self.logger.info(
            "Entering MongoPersistenceManager constructor with parameters url %s, persistence %s, uid %s, pwd %s, w %s" % (
                url, db, uid, pwd, w))
        self.url = url
        self.db = db
        self.uid = uid
        # An empty user id means "no authentication".
        if len(self.uid) == 0:
            self.uid = None
        self.pwd = pwd
        self.w = w
        self.logger.debug(
            "Created MongoPersistenceManager object successfully")
        self.logger.info("Exiting constructor")

    def connect(self) -> Database:
        """
        Connects to a specific database of a server.

        :return: Mongo Client object and database connection
        """
        self.logger.info("Entering connect method")
        self.logger.debug("Checking if connection already active")
        if self.mongo_db is None:
            self.logger.debug(
                "Attempting connection to database %s on server %s" % (
                    self.db, self.url))
            # connect to server
            mongo_client = MongoClient(host=self.url, w=self.w)
            self.logger.debug("Created Mongo client object")
            # Note Starting with version 3.0 the MongoClient constructor no
            # longer blocks while connecting to the server or servers, and it
            # no longer raises ConnectionFailure if they are unavailable, nor
            # ConfigurationError if the user's credentials are wrong.
            # Instead, the constructor returns immediately and launches the
            # connection process on background threads.
            # make sure connection has been established
            connected = False
            attempt = 1
            while not connected and attempt <= self.MAX_RETRIES:
                try:
                    # The ismaster command is cheap and does not require auth.
                    self.logger.debug("Checking connection to server")
                    mongo_client.admin.command('ismaster')
                    connected = True
                    self.logger.debug("Connected to server successfully")
                except ConnectionFailure:
                    # TBD: LOG
                    self.logger.debug(
                        "Server not available: waiting for connection. Attempt %s of %s" % (
                            attempt, self.MAX_RETRIES))
                    # wait INTERVAL_BETWEEN_RETRIES seconds and retry
                    # MAX_RETRIES times
                    time.sleep(self.INTERVAL_BETWEEN_RETRIES)
                    attempt += 1
            if not connected:
                self.logger.error(
                    "Unable to connect to database after %s attempts" %
                    self.MAX_RETRIES)
                raise UnsuccessfulConnectionException(
                    "Unable to connect to database")
            self.logger.debug("Connected to server successfully")
            # get database pointer
            self.logger.debug("Connecting to database %s" % self.db)
            MongoPersistenceManager.mongo_db = mongo_client[self.db]
            if MongoPersistenceManager.mongo_db is None:
                # TBD: LOG
                raise UnsuccessfulConnectionException("Unknown database %s" %
                                                      self.db)
            self.logger.debug(
                "Connected to database successfully. Authenticating user")
            # authenticate user
            try:
                if (self.uid is not None) and (self.pwd is not None):
                    MongoPersistenceManager.mongo_db.authenticate(
                        self.uid, self.pwd)
            except PyMongoError:
                self.logger.debug(
                    ("Invalid user ID %s or password" % self.uid))
                raise UnsuccessfulConnectionException(
                    "Invalid user ID %s or password" % self.uid)
            # return database pointer
            self.logger.debug("Authentication successful")
        else:
            self.logger.debug("Connection already active")
        self.logger.info("Exiting connect method with return value %s" %
                         MongoPersistenceManager.mongo_db)
        return MongoPersistenceManager.mongo_db

    def disconnect(self, mongo_client: MongoClient):
        """ disconnects from the database """
        self.logger.info("Entering disconnect method with parameters %s" %
                         mongo_client)
        try:
            # close connection
            # TBD: LOG
            mongo_client.close()
        except ConnectionFailure:
            # do nothing - no point throwing exception if problem closing
            # connection
            self.logger.error(
                "Connection failure while trying to disconnect from %s, %s" % (
                    self.url, self.db))
            pass
        self.logger.debug("Disconnected sucessfully")
        self.logger.info("Exiting disconnect method")

    def insert(self, collection: str, obj, duplicate_ok: bool) -> str:
        """
        Inserts a document into a collection.

        :param collection: (string) collection to insert into
        :param obj: (dict) object to be inserted
        :param duplicate_ok: (bool) true if object can be inserted even if
            it already exists
        :return: ID of the inserted / updated object
        """
        self.logger.info(
            "Entering insert method with parameters collection: %s, obj: %s, duplicate_ok: %s" % (
                collection, obj, duplicate_ok))
        doc_id = None
        # if duplicate_ok == false, check if the object exists
        # assumption: unique index exists in collection, hence no need to
        # check for duplicates — duplicate_ok is ignored
        # add_object = duplicate_ok
        '''
        if not duplicate_ok:
            self.logger.debug(
                "Duplicates not allowed. Checking if object exists already")
            kwargs = {}
            kwargs["filter"] = obj
            self.logger.debug("Calling perform_db_operation for find operation")
            obj_found = self.perform_db_operation(collection, DBOperation.FIND,
                                                  **kwargs)
            doc_id = -1
            if obj_found is not None:
                try:
                    doc_id = obj_found[0]["_id"]
                    doc_id = -1
                    self.logger.debug("Object exists already. Not inserting")
                except IndexError:
                    add_object = True
                    self.logger.debug("Object does not exist. Inserting")
        else:
            self.logger.debug("Duplicates allowed. Inserting object")
            add_object = True
        obj_found.close()'''
        # if add_object:
        kwargs = obj
        self.logger.debug("Calling perform_db_operation for insert operation")
        result = self.perform_db_operation(collection, DBOperation.INSERT,
                                           **kwargs)
        doc_id = result.inserted_id
        self.logger.debug("Insert successful. New document ID is %s" % doc_id)
        self.logger.info("Exiting insert method with return value %s" % doc_id)
        return doc_id

    def update(self, collection: str, doc_filter, obj, upsert: bool = False):
        """
        Updates matching documents with the given attributes ($set).

        :param collection: (string) name of collection to update
        :param doc_filter: filter for the object to be updated
        :param obj: new attributes of the object to be updated
        :param upsert: (bool) true if object to be inserted if not found
        :return: ID of the object updated
        """
        self.logger.info(
            "Entering update method with parameters collection: %s, doc_filter: %s, obj: %s, upsert: %s" % (
                collection, doc_filter, obj, upsert))
        kwargs = {}
        kwargs["filter"] = doc_filter
        update = {}
        update["$set"] = obj
        kwargs["update"] = update
        kwargs["upsert"] = upsert
        self.logger.debug("Calling perform_db_operation for update operation")
        result = self.perform_db_operation(collection, DBOperation.UPDATE,
                                           **kwargs)
        doc_id = result.upserted_id
        self.logger.debug("Update successful. Document ID: %s" % doc_id)
        self.logger.info("Exiting update method with return value %s" % doc_id)
        return doc_id

    def replace(self, collection: str, doc_filter, obj, upsert: bool = False):
        """
        Replaces a whole document (not a partial $set update).

        :param collection: (str) name of collection in which to replace
            document
        :param doc_filter: filter for the object to be replaced
        :param obj: new attributes of the object to be replaced
        :param upsert: (bool) true if object to be inserted if not found
        :return: ID of the object replaced
        """
        self.logger.info(
            "Entering replace method with parameters collection: %s, doc_filter: %s, obj: %s, upsert: %s" % (
                collection, doc_filter, obj, upsert))
        kwargs = {}
        kwargs["filter"] = doc_filter
        kwargs["replacement"] = obj
        kwargs["upsert"] = upsert
        self.logger.debug("Calling perform_db_operation for replace operation")
        result = self.perform_db_operation(collection, DBOperation.REPLACE,
                                           **kwargs)
        doc_id = result.upserted_id
        self.logger.debug("Replace successful. Document ID: %s" % doc_id)
        self.logger.info("Exiting replace method with return value %s" %
                         doc_id)
        return doc_id

    def find(self, collection: str, doc_filter):
        """
        finds one or more documents in the collection matching the specified
        filter

        :param collection: (str) to be searched
        :param doc_filter: (dict) query to be applied
        :return: (array of dict) document(s) found, or None
        """
        self.logger.info(
            "Entering fnd method with parameters collection: %s, doc_filter: %s" % (
                collection, doc_filter))
        kwargs = {"filter": doc_filter}
        self.logger.debug("Calling perform_db_operation for find operation")
        result = self.perform_db_operation(collection, DBOperation.FIND,
                                           **kwargs)
        self.logger.debug(
            "Operation completed. Results: %s. Converting to object array" %
            result)
        # convert result from cursor to array of dict
        final_res = []
        for record in result:
            final_res.append(record)
        result.close()
        self.logger.debug("Conversion complete. Results: %s" % final_res)
        self.logger.info("Exiting find method with return value %s" %
                         final_res)
        return final_res

    def delete(self, collection: str, doc_filter):
        """
        deletes documents from a collection that match the specified filter

        :param collection: (str) collection from which document(s) is/are to
            be deleted
        :param doc_filter: query to be applied to find documents
        :return: number of documents deleted
        """
        self.logger.info(
            "Entering delete method with parameters collection: %s, doc_filter: %s" % (
                collection, doc_filter))
        kwargs = {}
        kwargs["filter"] = doc_filter
        self.logger.debug("Calling perform_db_operation for delete operation")
        result = self.perform_db_operation(collection, DBOperation.DELETE,
                                           **kwargs)
        self.logger.debug("Operation successful. %s records deleted" %
                          result.deleted_count)
        self.logger.info("Exiting delete method with return value %s" %
                         result.deleted_count)
        return result.deleted_count

    def perform_db_operation(self, collection: str, operation: str, **kwargs):
        """
        performs a database operation - guarantees success within N retries
        (throws UnsuccessfulOperationException if unsuccessful after N
        retries)

        :param collection: (str) name of collection to be operated on
        :param operation: (str) name of operation to be performed
        :param kwargs: arguments for operation
        :return: operation-specific pymongo result object
        """
        self.logger.info(
            "Entering perform_db_operation method with parameters collection: %s, operation: %s, "
            "arguments: %s" % (collection, operation, kwargs))
        # connect to the database
        self.logger.debug("Connecting to database")
        mongo_db = self.connect()
        mongo_client = mongo_db.client
        self.logger.debug("Connected to database")
        # get the collection pointer - throw exception if not found
        mongo_collection = mongo_db[collection]
        if mongo_collection is None:
            raise UnsuccessfulOperationException(operation, **kwargs)
        self.logger.debug("Got collection %s" % collection)
        # try the operation - repeat MAX_RETRIES times
        operation_successful = False
        attempt = 1
        result = None
        while not operation_successful and attempt <= self.MAX_RETRIES:
            self.logger.debug("Attempting %s operation. Attempt %s of %s" % (
                operation, attempt, self.MAX_RETRIES))
            try:
                # perform operation — dispatch on the DBOperation enum value
                if operation is DBOperation.INSERT:
                    self.logger.debug("attempting mongo_collection.insert_one")
                    try:
                        result = mongo_collection.insert_one(kwargs)
                        operation_successful = result.acknowledged
                        self.logger.debug("Attempt complete. Result = %s" %
                                          operation_successful)
                    except DuplicateKeyError:
                        print('Duplicate Key Error')
                        operation_successful = False
                        raise UnsuccessfulOperationException(
                            "Insert unsuccessful due to primary key violation")
                elif operation is DBOperation.FIND:
                    self.logger.debug("attempting mongo_collection.find")
                    result = mongo_collection.find(**kwargs)
                    operation_successful = True
                    self.logger.debug("Attempt complete. Result = %s" %
                                      operation_successful)
                elif operation is DBOperation.UPDATE:
                    self.logger.debug("attempting mongo_collection.update_one")
                    result = mongo_collection.update_one(**kwargs)
                    operation_successful = result.acknowledged
                    self.logger.debug("Attempt complete. Result = %s" %
                                      operation_successful)
                elif operation is DBOperation.REPLACE:
                    self.logger.debug(
                        "attempting mongo_collection.replace_one")
                    result = mongo_collection.replace_one(**kwargs)
                    operation_successful = result.acknowledged
                    self.logger.debug("Attempt complete. Result = %s" %
                                      operation_successful)
                elif operation is DBOperation.DELETE:
                    self.logger.debug(
                        "attempting mongo_collection.delete_many")
                    result = mongo_collection.delete_many(**kwargs)
                    operation_successful = result.acknowledged
                    self.logger.debug("Attempt complete. Result = %s" %
                                      operation_successful)
            except ConnectionFailure:
                # try again after INTERVAL_BETWEEN_RETRIES seconds
                self.logger.debug(
                    "Connection Failure. Trying again after %s seconds" %
                    self.INTERVAL_BETWEEN_RETRIES)
                time.sleep(self.INTERVAL_BETWEEN_RETRIES)
                attempt += 1
        # disconnect from database
        self.logger.debug("Disconnecting from database")
        # self.disconnect(mongo_client)
        self.logger.debug("Disconnected successfully")
        # operation may not have succeeded even after MAX_RETRIES attempts
        if not operation_successful:
            self.logger.error("Operation unsuccessful even after %s attempts" %
                              self.MAX_RETRIES)
            raise UnsuccessfulOperationException(operation, **kwargs)
        self.logger.info(
            "Exiting from perform_db_operation method with return value %s" %
            result)
        return result
def __init__(self):
    """Load default configuration, a logger and masked admin credentials."""
    self.logger = XprLogger()
    self.config = XprConfigParser(XprConfigParser.DEFAULT_CONFIG_PATH)
    # NOTE(review): masked literal credentials — confirm where the real
    # values are supplied at runtime.
    self.adminuser = "******"
    self.adminpassword = "******"
def __init__(self):
    """Create a logger and load configuration from the default path."""
    # The two initialisations are independent of each other.
    self.logger = XprLogger()
    self.config = XprConfigParser()
def __init__(self, persistence_manager):
    """Keep the persistence manager and set up config plus logging."""
    self.persistence_manager = persistence_manager
    self.logger = XprLogger()
    self.config = XprConfigParser(self.config_path)
def __init__(self, persistence_manager):
    """Store the persistence manager and load the default configuration."""
    self.config = XprConfigParser(XprConfigParser.DEFAULT_CONFIG_PATH)
    self.logger = XprLogger()
    self.persistence_manager = persistence_manager
def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH):
    """Load the pachyderm config section and connect a pachyderm client."""
    self.logger = XprLogger()
    pachyderm_section = XprConfigParser(config_path)["pachyderm"]
    self.config = pachyderm_section
    self.pachyderm_client = self.connect_to_pachyderm()
def __init__(self, level=logging.DEBUG):
    """Configure the xpresso logger: file handlers and a logstash handler.

    :param level: minimum logging level for this logger (default DEBUG)
    :raises Exception: re-raised if any handler cannot be attached
    """
    self.xpr_config = XprConfigParser(
        config_file_path=XprConfigParser.DEFAULT_CONFIG_PATH_XPR_LOG)
    # Optionally re-resolve the config via load_config when the config
    # asks for a recursive search.
    if self.xpr_config[self.LOGGING_SECTION][self.FIND_CONFIG_RECURSIVE]:
        self.xpr_config = self.load_config("xpr")
    self.name = self.xpr_config[self.PROJECT_NAME]
    super(XprLogger, self).__init__(self.name)
    self.setLevel(level)
    logger_formatter = XprCustomFormatter(
        self.xpr_config[self.LOGGING_SECTION][self.FORMATTER])
    logstash_formatter = XprLogstashCustomFormatter(
        self.xpr_config[self.LOGGING_SECTION][self.FORMATTER])
    log_folder = os.path.expanduser(
        self.xpr_config[self.LOGGING_SECTION][self.LOGS_FOLDER_PATH])
    if not os.path.exists(log_folder):
        try:
            os.makedirs(log_folder, 0o755)
        except IOError as err:
            # Folder creation failure is non-fatal; file handlers below
            # fall back to the home directory.
            print(
                "Permission Denied to create logs folder at the specidied directory. \n{}".format(
                    str(err)))
    # Adding file handler for levels below warning
    try:
        if self.xpr_config[self.LOGGING_SECTION][self.LOGGING_FILE_BOOL]:
            try:
                wfh = logging.FileHandler(os.path.join(
                    log_folder,
                    '.'.join((self.xpr_config[self.PROJECT_NAME], "log"))),
                    'w')
            except IOError as err:
                # Fallback: write the log file into the home directory.
                print("Permission denied to create log files. "
                      "Saving log files in base directory . \n{}".format(
                          str(err)))
                wfh = logging.FileHandler(
                    os.path.join(os.path.expanduser("~"),
                                 '.'.join((self.xpr_config[
                                     self.PROJECT_NAME], "log"))), 'w')
            wfh.setFormatter(logger_formatter)
            wfh.setLevel(logging.DEBUG)
            self.addHandler(wfh)
    except Exception as err:
        print("Unable to add file handler to logger. \n{}".format(str(err)))
        raise err
    # Adding file handler for levels more critical than warning
    try:
        if self.xpr_config[self.LOGGING_SECTION][self.LOGGING_FILE_BOOL]:
            try:
                efh = logging.FileHandler(os.path.join(
                    log_folder,
                    '.'.join((self.xpr_config[self.PROJECT_NAME], "err"))),
                    'w')
            except IOError as err:
                # Fallback: write the error file into the home directory.
                print("Permission denied to create log files. "
                      "Saving log files in base directory . \n{}".format(
                          str(err)))
                efh = logging.FileHandler(
                    os.path.join(os.path.expanduser("~"),
                                 '.'.join((self.xpr_config[
                                     self.PROJECT_NAME], "err"))), 'w')
            efh.setFormatter(logger_formatter)
            efh.setLevel(logging.ERROR)
            self.addHandler(efh)
    except Exception as err:
        print(
            "Unable to add file handler to logger . \n{}".format(str(err)))
        raise err
    # Adding logstash logging handler
    try:
        if self.xpr_config[self.LOGGING_SECTION][
                self.LOGGING_LOGSTASH_BOOL]:
            cache_filename = ""
            # Optional on-disk cache so log events survive logstash outages.
            if self.xpr_config[self.LOGGING_SECTION][
                    self.LOGSTASH_CACHE_BOOL]:
                cache_filename = os.path.join(
                    log_folder, "cache.persistence")
            lh = AsynchronousLogstashHandler(
                host=self.xpr_config[self.LOGGING_SECTION][
                    self.LOGSTASH_HOST],
                port=self.xpr_config[self.LOGGING_SECTION][
                    self.LOGSTASH_PORT],
                database_path=cache_filename)
            lh.setFormatter(logstash_formatter)
            self.addHandler(lh)
    except Exception as err:
        print("Unable to add logstash handler to logger. \n{}".format(
            str(err)))
        raise err
def __init__(self, config_path):
    """Parse configuration; metrics and persistence start unset."""
    self.config = XprConfigParser(config_file_path=config_path)
    self.logger = XprLogger()
    # Both are populated later by the relevant setup methods.
    self.metrics_list = None
    self.persistence_manager = None