def __init__(self, config=None, log_name=__name__):
    """Prepare an HTTP request helper.

    Custom request arguments may be supplied through ``config``; at
    present the configuration is only consulted for a ``timeout``
    value, extra ``headers`` and the ``debug`` toggle.

    :param config: ``dict``
    :param log_name: ``str`` This is used to log against an existing
                     log handler.
    """
    if config is None:
        config = {}
    self.config = config
    self.log = logger.getLogger(log_name)

    # Default request timeout is 60 seconds unless overridden.
    self.request_kwargs = {'timeout': self.config.get('timeout', 60)}
    self.headers = {'User-Agent': 'cloudlib'}

    if isinstance(self.config, dict):
        if 'headers' in self.config:
            # Merge user supplied headers over the defaults.
            self.headers.update(self.config.get('headers'))
        if self.config.get('debug', False):
            # Enable verbose wire-level output from httplib.
            httplib.HTTPConnection.debuglevel = 1
def __init__(self, packages_dict, log_name=__name__):
    """Install packages on a local Linux Operating System.

    :param packages_dict: ``dict`` Mapping describing the packages to
                          install per distro family.
    :param log_name: ``str`` This is used to log against an existing
                     log handler.
    """
    self.log = logger.getLogger(log_name)
    self.shell = shell.ShellCommands(log_name=log_name)
    self.distro = None
    self.packages_dict = packages_dict
    # Shell command templates per package manager; ``%s`` is replaced
    # with the space-joined package list at install time.
    self.install_process = {
        'apt': "apt-get update && apt-get"
               " -o Dpkg::Options:='--force-confold'"
               " -o Dpkg::Options:='--force-confdef'"
               " -y install %s",
        # BUGFIX: was "instaall", which produced an invalid yum
        # command ("No such command") on RHEL-family systems.
        'yum': "yum -y install %s",
        'zypper': "zypper -n install %s"
    }
    self.install_string = None
def __init__(self, config=None, log_name=__name__):
    """Make an HTTP request.

    This class allows you to create custom request args and or enable
    debug mode. Presently the ``config`` argument would only be used
    to enable debug.

    :param config: ``dict``
    :param log_name: ``str`` This is used to log against an existing
                     log handler.
    """
    self.config = {} if config is None else config
    self.log = logger.getLogger(log_name)

    # Honour a user supplied timeout, falling back to 60 seconds.
    timeout = self.config.get('timeout', 60)
    self.request_kwargs = {'timeout': timeout}
    self.headers = {'User-Agent': 'cloudlib'}

    if isinstance(self.config, dict):
        if 'headers' in self.config:
            self.headers.update(self.config.get('headers'))
        if self.config.get('debug', False):
            httplib.HTTPConnection.debuglevel = 1
def main(): """Run the main app. This application will create all Python wheel files from within an environment. The purpose is to create pre-compiled python wheels from the RPC playbooks. """ # Parse input arguments user_args = _user_args() # Load the logging _logging = logger.LogSetup(debug_logging=user_args['debug']) if user_args['quiet'] is True or user_args['debug'] is False: stream = False else: stream = True _logging.default_logger(name='rpc_wheel_builder', enable_stream=stream) global LOG LOG = logger.getLogger(name='rpc_wheel_builder') # Create the output path output_path = _get_abs_path(path=user_args['output']) LOG.info('Getting output path') _mkdirs(path=output_path) # Create the build path LOG.info('Getting build path') if user_args['build_dir'] is not None: build_path = _get_abs_path(path=user_args['build_dir']) _mkdirs(path=build_path) else: build_path = tempfile.mkdtemp(prefix='rpc_wheels_build_') pre_input = user_args['pre_input'] if pre_input: pre_input_path = _get_abs_path(path=user_args['pre_input']) with open(pre_input_path, 'rb') as f: global PYTHON_PACKAGES PYTHON_PACKAGES = json.loads(f.read()) else: # Get the input path LOG.info('Getting input path') input_path = _get_abs_path(path=user_args['input']) new_setup(user_args=user_args, input_path=input_path, output_path=output_path, quiet=stream) # Create all of the python package wheels make_wheels(wheel_dir=output_path, build_dir=build_path, quiet=stream) # if git_repos was defined save all of the sources to the defined location git_repos_path = user_args.get('git_repos') if git_repos_path: _store_git_repos(git_repos_path, quiet=stream)
def __init__(self, log_name=__name__, debug=False):
    """Run a shell command on a local Linux Operating System.

    :param log_name: ``str`` This is used to log against an existing
                     log handler.
    :param debug: ``bool`` Debug flag stored for use by the command
                  running methods (exact effect not visible here).
    """
    self.log = logger.getLogger(log_name)
    self.debug = debug
def __init__(self, log_name=__name__):
    """Parse values in a given configuration file.

    :param log_name: ``str`` This is used to log against an existing
                     log handler.
    """
    self.log = logger.getLogger(log_name)
    # Populated later by the parsing methods.
    self.name = self.config_file = self.filename = None
def __init__(self, config, log_name=__name__):
    """General purpose Email Message Sender.

    This module is used to send messages based on some set values.

    :param config: ``dict``
    :param log_name: ``str`` This is used to log against an existing
                     log handler.
    """
    # Set logger
    self.log = logger.getLogger(log_name)

    # Guard: a dict configuration is mandatory.
    if not isinstance(config, dict):
        msg = 'No Configuration Provided'
        self.log.fatal(msg)
        raise cloudlib.MissingConfig(msg)
    self.config = config

    # Both the SMTP endpoint and port must be present before a
    # connection is attempted.
    mail_url = self.config.get('mail_url')
    mail_port = self.config.get('mail_port')
    if mail_port is None:
        raise cloudlib.MissingConfigValue('Missing "mail_port" in config')
    if mail_url is None:
        raise cloudlib.MissingConfigValue('Missing "mail_url" in config')
    self.smtp = smtplib.SMTP(mail_url, mail_port)

    # enable Debug
    if self.config.get('debug', False) is True:
        self.smtp.set_debuglevel(True)

    # Start TLS, with a key/cert pair when both are configured.
    key = self.config.get('mail_key')
    cert = self.config.get('mail_cert')
    if key is not None and cert is not None:
        self.smtp.starttls(key, cert)
    else:
        self.smtp.starttls()

    # Authenticate only when full credentials were provided.
    username = self.config.get('mail_username')
    password = self.config.get('mail_password')
    if username is not None and password is not None:
        self.smtp.login(username, password)
def __init__(self, load_app, app_name=__name__, default_cfg=None,
             network_cfg=None, ssl_cfg=None, protocol='HTTP/1.1'):
    """Loads the flask application.

    :param load_app: ``object``
    :param app_name: ``str``
    :param default_cfg: ``dict``
    :param network_cfg: ``dict``
    :param ssl_cfg: ``dict``
    :param protocol: ``str`` HTTP protocol version string applied to
                     the WSGI request handler.
    """
    # Set the app used within this WSGI server
    self.app = load_app
    self.app_name = app_name

    # Get configuration dictionaries
    self.net_cfg = self._empty_config(network_cfg)
    self.ssl_cfg = self._empty_config(ssl_cfg)
    self.def_cfg = self._empty_config(default_cfg)

    # Set the logger.
    # BUGFIX: removed a leftover Python 2 debug ``print`` of the
    # application name that wrote to stdout on every instantiation.
    self.log = logger.getLogger(self.def_cfg.get('appname', app_name))
    self.debug = self.def_cfg.get('debug_mode', False)

    self.server_socket = self._socket_bind()

    # Pin the request protocol version on the eventlet WSGI handler.
    wsgi.HttpProtocol.default_request_version = protocol
    self.protocol = wsgi.HttpProtocol

    # Green-thread pool used to service connections; defaults to 1000.
    pool_size = int(self.net_cfg.get('connection_pool', 1000))
    self.spawn_pool = eventlet.GreenPool(size=pool_size)

    self.active = True
    self.worker = None

    eventlet.patcher.monkey_patch()
"""Utilities used throughout the project.""" import base64 import functools import hashlib import json import os import time from cloudlib import logger from cloudlib import shell LOG = logger.getLogger('repo_builder') def retry(exception, tries=3, delay=1, backoff=1): """Retry calling the decorated function using an exponential backoff. original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry :param exception: the exception to check. may be a tuple of exceptions to check :type exception: ``Exception`` or ``tuple` :param tries: number of times to try (not retry) before giving up :type tries: ``int`` :param delay: initial delay between retries in seconds :type delay: ``int`` :param backoff: backoff multiplier e.g. value of 2 will double the delay
# This software has no warranty, it is provided 'as is'. It is your # responsibility to validate the behavior of the routines and its accuracy # using the code provided. Consult the GNU General Public license for further # details (see GNU General Public License). # http://www.gnu.org/licenses/gpl.html # ============================================================================= import hashlib from cloudlib import logger from cloudlib import indicator from turbolift import methods LOG = logger.getLogger('turbolift') class ListRunMethod(methods.BaseMethod): """Setup and run the list Method.""" def __init__(self, job_args): super(ListRunMethod, self).__init__(job_args) def start(self): """Return a list of objects from the API for a container.""" LOG.info('Listing options...') with indicator.Spinner(**self.indicator_options): objects_list = self._list_contents() if not objects_list: return
# This software has no warranty, it is provided 'as is'. It is your # responsibility to validate the behavior of the routines and its accuracy # using the code provided. Consult the GNU General Public license for further # details (see GNU General Public License). # http://www.gnu.org/licenses/gpl.html # ============================================================================= import os from cloudlib import logger from cloudlib import indicator from turbolift import methods LOG = logger.getLogger("turbolift") class ArchiveRunMethod(methods.BaseMethod): """Setup and run the list Method.""" def __init__(self, job_args): super(ArchiveRunMethod, self).__init__(job_args) def start(self): LOG.info("Archiving...") with indicator.Spinner(**self.indicator_options): archive = self._compressor(file_list=self._index_fs()) LOG.info("Ensuring Container...") with indicator.Spinner(**self.indicator_options):
def test_getlogger_new_logger(self):
    """Verify the first handler of a new logger carries its name."""
    log = logger.getLogger(name='testLogger')
    handlers = log.handlers
    if not handlers:
        self.fail('The log handler name was not set')
    # Only the first handler is examined, matching the original
    # for/return behaviour.
    return self.assertTrue(handlers[0].name == 'testLogger')
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import sys possible_topdir = os.path.normpath( os.path.join(os.path.abspath(os.getcwd()), os.pardir) ) if os.path.exists(os.path.join(possible_topdir, 'cloudlib', '__init__.py')): sys.path.insert(0, possible_topdir) from cloudlib import logger LOG = logger.getLogger(name='testLogger') print type(LOG) print LOG.handlers for handler in LOG.handlers: print handler.name print dir(handler)
def main(): """Run the main app. This application will create all Python wheel files from within an environment. The purpose is to create pre-compiled python wheels from the RPC playbooks. """ # Parse input arguments user_args = _user_args() # Load the logging _logging = logger.LogSetup(debug_logging=user_args['debug']) if user_args['quiet'] is True or user_args['debug'] is False: stream = False else: stream = True _logging.default_logger( name='rpc_wheel_builder', enable_stream=stream ) global LOG LOG = logger.getLogger(name='rpc_wheel_builder') # Create the output path output_path = _get_abs_path(path=user_args['output']) LOG.info('Getting output path') _mkdirs(path=output_path) # Create the build path LOG.info('Getting build path') if user_args['build_dir'] is not None: build_path = _get_abs_path(path=user_args['build_dir']) _mkdirs(path=build_path) else: build_path = tempfile.mkdtemp(prefix='rpc_wheels_build_') pre_input = user_args['pre_input'] if pre_input: pre_input_path = _get_abs_path(path=user_args['pre_input']) with open(pre_input_path, 'rb') as f: global PYTHON_PACKAGES PYTHON_PACKAGES = json.loads(f.read()) else: # Get the input path LOG.info('Getting input path') input_path = _get_abs_path(path=user_args['input']) new_setup( user_args=user_args, input_path=input_path, output_path=output_path, quiet=stream ) # Create all of the python package wheels make_wheels( wheel_dir=output_path, build_dir=build_path, quiet=stream ) # if git_repos was defined save all of the sources to the defined location git_repos_path = user_args.get('git_repos') if git_repos_path: _store_git_repos(git_repos_path, quiet=stream)
"""Perform Openstack Authentication.""" import json from turbolift import exceptions from turbolift.authentication import utils from cloudlib import logger LOG = logger.getLogger('turbolift') def authenticate(job_args): """Authentication For Openstack API. Pulls the full Openstack Service Catalog Credentials are the Users API Username and Key/Password. Set a DC Endpoint and Authentication URL for the OpenStack environment """ # Load any authentication plugins as needed job_args = utils.check_auth_plugin(job_args) # Set the auth version auth_version = utils.get_authversion(job_args=job_args) # Define the base headers that are used in all authentications auth_headers = { 'Content-Type': 'application/json', 'Accept': 'application/json'
# Store network Configuration network_config = CONFIG.config_args(section="network") # Store SSL configuration ssl_config = CONFIG.config_args(section="ssl") # Enable or disable DEBUG mode DEBUG = default_config.get("debug", False) except IOError: # If the configuration file is not present, set the two bits we need DEBUG = True APPNAME = "example" # Load Logging LOG = logger.getLogger(APPNAME) # Load the flask APP APP = flask.Flask(APPNAME) # Enable general debugging if DEBUG is True: APP.debug = True LOG.debug(APP.logger) # Enable Application Threading APP.threaded = True # Enforce strict slashes in URI's APP.url_map.strict_slashes = False
# Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # (c) 2015, Kevin Carter <*****@*****.**> import os from cloudlib import logger from yaprt import utils LOG = logger.getLogger("data_processing") def package_processing(args, repo_data): user_packages = repo_data["__user__"] = dict() requirements = user_packages["requirements"] = list() if args["packages"] or args["packages_file"]: if args["packages_file"]: requirements.extend(utils.get_items_from_file(file_name=args["packages_file"])) if args["packages"]: requirements.extend(args["packages"]) user_packages["requirements"] = list(set(requirements)) git_repos = list()
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import sys possible_topdir = os.path.normpath( os.path.join(os.path.abspath(os.getcwd()), os.pardir)) if os.path.exists(os.path.join(possible_topdir, 'cloudlib', '__init__.py')): sys.path.insert(0, possible_topdir) from cloudlib import logger LOG = logger.getLogger(name='testLogger') print type(LOG) print LOG.handlers for handler in LOG.handlers: print handler.name print dir(handler)
def test_getlogger_new_logger(self):
    """Check that a freshly created logger names its handler."""
    log = logger.getLogger(name="testLogger")
    for log_handler in log.handlers:
        # Assert on the first handler only, then stop — mirrors the
        # original early-return behaviour.
        self.assertTrue(log_handler.name == "testLogger")
        return
    self.fail("The log handler name was not set")
def main():
    """Run the main app.

    This application will create all Python wheel files from within an
    environment. The purpose is to create pre-compiled python wheels from
    the RPC playbooks.
    """
    # Parse input arguments
    user_args = _user_args()

    # Load the logging
    _logging = logger.LogSetup(debug_logging=user_args['debug'])
    # Console streaming is on only when debug is enabled and quiet is
    # not requested.
    if user_args['quiet'] is True or user_args['debug'] is False:
        stream = False
    else:
        stream = True
    _logging.default_logger(
        name='rpc_wheel_builder',
        enable_stream=stream
    )
    # NOTE(review): LOG is a module-level global rebound here so the
    # rest of the module logs through the configured logger.
    global LOG
    LOG = logger.getLogger(name='rpc_wheel_builder')

    # Create the output path
    output_path = _get_abs_path(path=user_args['output'])
    LOG.info('Getting output path')
    _mkdirs(path=output_path)

    # Create the build path
    LOG.info('Getting build path')
    # Arguments reused for every progress-indicator context below; only
    # the 'note' text changes between phases.
    indicator_kwargs = {
        'debug': user_args['debug'],
        'quiet': user_args['quiet'],
        'note': 'Gather dependencies... '
    }
    with IndicatorThread(**indicator_kwargs):
        if user_args['build_dir'] is not None:
            build_path = _get_abs_path(path=user_args['build_dir'])
            _mkdirs(path=build_path)
        else:
            # No build dir requested: build inside a temp directory.
            build_path = tempfile.mkdtemp(prefix='rpc_wheels_build_')
        pre_input = user_args['pre_input']
        if pre_input:
            # A pre-generated package report was supplied; load it
            # instead of scanning the input tree.
            pre_input_path = _get_abs_path(path=user_args['pre_input'])
            with open(pre_input_path, 'rb') as f:
                global PYTHON_PACKAGES
                PYTHON_PACKAGES = json.loads(f.read())
        else:
            # Get the input path
            LOG.info('Getting input path')
            new_setup(
                user_args=user_args,
                input_path=_get_abs_path(path=user_args['input'])
            )

    indicator_kwargs['note'] = 'Building wheels... '
    with IndicatorThread(**indicator_kwargs):
        # Create all of the python package wheels
        make_wheels(
            wheel_dir=output_path,
            build_dir=build_path
        )

    indicator_kwargs['note'] = 'Generating build log... '
    with IndicatorThread(**indicator_kwargs):
        # Get a timestamp and create a report file
        utctime = datetime.datetime.utcnow()
        utctime = utctime.strftime("%Y%m%d_%H%M%S")
        backup_name = '%s-build-report-%s.json' % (
            user_args['release'],
            utctime
        )
        output_report_file = os.path.join(
            output_path,
            'json-reports',
            backup_name
        )
        # Make the directory if needed
        _mkdirs(path=os.path.dirname(output_report_file))

        # Generate a timestamped report file
        LOG.info('Generating packaging report [ %s ]', output_report_file)
        with open(output_report_file, 'wb') as f:
            f.write(
                json.dumps(
                    PYTHON_PACKAGES,
                    indent=2,
                    sort_keys=True
                )
            )

    # If link_dir is defined create a link to all built wheels.
    links_path = user_args.get('link_dir')
    if links_path:
        indicator_kwargs['note'] = 'Creating file links... '
        with IndicatorThread(**indicator_kwargs):
            links_path = _get_abs_path(path=links_path)
            LOG.info('Creating Links at [ %s ]', links_path)
            _mkdirs(path=links_path)

            # Change working directory.
            os.chdir(links_path)

            # Create all the links
            for inode in PYTHON_PACKAGES['built_files']:
                try:
                    dest_link = os.path.join(links_path, inode)

                    # Remove the destination inode if it exists
                    if os.path.exists(dest_link):
                        os.remove(dest_link)

                    # Create the link using the relative path
                    os.symlink(os.path.relpath(
                        os.path.join(output_path, inode)), dest_link
                    )
                except OSError as exp:
                    LOG.warn(
                        'Error Creating Link: [ %s ] Error: [ %s ]',
                        inode,
                        exp
                    )
                else:
                    LOG.debug('Link Created: [ %s ]', dest_link)

    # if git_repos was defined save all of the sources to the defined location
    git_repos_path = user_args.get('git_repos')
    if git_repos_path:
        indicator_kwargs['note'] = 'Storing updated git sources...'
        with IndicatorThread(**indicator_kwargs):
            # NOTE(review): this logs links_path although the operation
            # acts on git_repos_path — looks like a copy/paste leftover;
            # confirm before relying on this log line.
            LOG.info('Updating git sources [ %s ]', links_path)
            _store_git_repos(_get_abs_path(path=git_repos_path))
# Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # (c) 2015, Kevin Carter <*****@*****.**> import os from cloudlib import logger from yaprt import utils LOG = logger.getLogger('data_processing') def package_processing(args, repo_data): user_packages = repo_data['__user__'] = dict() requirements = user_packages['requirements'] = list() if args['packages'] or args['packages_file']: if args['packages_file']: requirements.extend( utils.get_items_from_file(file_name=args['packages_file']) ) if args['packages']: requirements.extend(args['packages']) user_packages['requirements'] = list(set(requirements))
# # (c) 2015, Kevin Carter <*****@*****.**> import collections from distutils import version import os import re import tempfile import urlparse from cloudlib import logger from yaprt import utils LOG = logger.getLogger("repo_builder") VERSION_DESCRIPTORS = [">=", "<=", ">", "<", "==", "~=", "!="] def build_wheels(args): """Work through the various wheels based on arguments. :param args: User defined arguments. :type args: ``dict`` """ report = utils.read_report(args=args) wb = WheelBuilder(user_args=args) # Everything is built in order for consistency, even if it's not being # used later. wb.get_requirements(report=report)
"""Create an HTML index at the root of all directories recursively. This module will create an HTML index as ``index.html`` in the root directory of every directory found, recursively, within a given path. """ import base64 import os from cloudlib import logger import html from yaprt import utils LOG = logger.getLogger('repo_builder') def return_hash(src_file): """Return a hash for a given file. :param src_file: Name of the file that will be hashed. :type src_file: ``str`` :returns: ``str`` """ hash_sum = utils.hash_return( local_file=src_file, hash_type='md5' ) if not hash_sum: raise utils.AError('hash failure on "%s"', src_file)
import os import pwd import shutil import tarfile from cloudlib import logger from cloudlib import shell from cloudlib import package_installer import genastack from genastack.common import basic_init from genastack.common import role_loader from genastack.common import utils LOG = logger.getLogger('genastack-engine') class EngineRunner(object): """Base class for the engine parser.""" def __init__(self, args): self.args = args self.run_roles = [] self.install_db = None self.job_dict = collections.defaultdict(list) self.shell = shell.ShellCommands(log_name='genastack-engine') @staticmethod def __set_perms(inode, kwargs): """Set the permissions on a local inode.
def main():
    """Run the main app.

    This application will create all Python wheel files from within an
    environment. The purpose is to create pre-compiled python wheels from
    the RPC playbooks.
    """
    # Parse input arguments
    user_args = _user_args()

    # Load the logging
    _logging = logger.LogSetup(debug_logging=user_args['debug'])
    # Console streaming is on only when debug is enabled and quiet is
    # not requested.
    if user_args['quiet'] is True or user_args['debug'] is False:
        stream = False
    else:
        stream = True
    _logging.default_logger(name='rpc_wheel_builder', enable_stream=stream)
    # NOTE(review): LOG is a module-level global rebound here so the
    # rest of the module logs through the configured logger.
    global LOG
    LOG = logger.getLogger(name='rpc_wheel_builder')

    # Create the output path
    output_path = _get_abs_path(path=user_args['output'])
    LOG.info('Getting output path')
    _mkdirs(path=output_path)

    # Create the build path
    LOG.info('Getting build path')
    # Arguments reused for every progress-indicator context below; only
    # the 'note' text changes between phases.
    indicator_kwargs = {
        'debug': user_args['debug'],
        'quiet': user_args['quiet'],
        'note': 'Gather dependencies... '
    }
    with IndicatorThread(**indicator_kwargs):
        if user_args['build_dir'] is not None:
            build_path = _get_abs_path(path=user_args['build_dir'])
            _mkdirs(path=build_path)
        else:
            # No build dir requested: build inside a temp directory.
            build_path = tempfile.mkdtemp(prefix='rpc_wheels_build_')
        pre_input = user_args['pre_input']
        if pre_input:
            # A pre-generated package report was supplied; load it
            # instead of scanning the input tree.
            pre_input_path = _get_abs_path(path=user_args['pre_input'])
            with open(pre_input_path, 'rb') as f:
                global PYTHON_PACKAGES
                PYTHON_PACKAGES = json.loads(f.read())
        else:
            # Get the input path
            LOG.info('Getting input path')
            new_setup(user_args=user_args,
                      input_path=_get_abs_path(path=user_args['input']))

    indicator_kwargs['note'] = 'Building wheels... '
    with IndicatorThread(**indicator_kwargs):
        # Create all of the python package wheels
        make_wheels(wheel_dir=output_path, build_dir=build_path)

    indicator_kwargs['note'] = 'Generating build log... '
    with IndicatorThread(**indicator_kwargs):
        # Get a timestamp and create a report file
        utctime = datetime.datetime.utcnow()
        utctime = utctime.strftime("%Y%m%d_%H%M%S")
        backup_name = '%s-build-report-%s.json' % (user_args['release'],
                                                   utctime)
        output_report_file = os.path.join(output_path, 'json-reports',
                                          backup_name)
        # Make the directory if needed
        _mkdirs(path=os.path.dirname(output_report_file))

        # Generate a timestamped report file
        LOG.info('Generating packaging report [ %s ]', output_report_file)
        with open(output_report_file, 'wb') as f:
            f.write(json.dumps(PYTHON_PACKAGES, indent=2, sort_keys=True))

    # If link_dir is defined create a link to all built wheels.
    links_path = user_args.get('link_dir')
    if links_path:
        indicator_kwargs['note'] = 'Creating file links... '
        with IndicatorThread(**indicator_kwargs):
            links_path = _get_abs_path(path=links_path)
            LOG.info('Creating Links at [ %s ]', links_path)
            _mkdirs(path=links_path)

            # Change working directory.
            os.chdir(links_path)

            # Create all the links
            for inode in PYTHON_PACKAGES['built_files']:
                try:
                    dest_link = os.path.join(links_path, inode)

                    # Remove the destination inode if it exists
                    if os.path.exists(dest_link):
                        os.remove(dest_link)

                    # Create the link using the relative path
                    os.symlink(
                        os.path.relpath(os.path.join(output_path, inode)),
                        dest_link)
                except OSError as exp:
                    LOG.warn('Error Creating Link: [ %s ] Error: [ %s ]',
                             inode, exp)
                else:
                    LOG.debug('Link Created: [ %s ]', dest_link)

    # if git_repos was defined save all of the sources to the defined location
    git_repos_path = user_args.get('git_repos')
    if git_repos_path:
        indicator_kwargs['note'] = 'Storing updated git sources...'
        with IndicatorThread(**indicator_kwargs):
            # NOTE(review): this logs links_path although the operation
            # acts on git_repos_path — looks like a copy/paste leftover;
            # confirm before relying on this log line.
            LOG.info('Updating git sources [ %s ]', links_path)
            _store_git_repos(_get_abs_path(path=git_repos_path))