def test_ref():
    src_yaml = """
foo: bar
ref_foo: !ref:foo
"""
    src = Configuration.from_string(src_yaml)
    print_yaml("yaml for src", src)
    assert src.ref_foo == src.foo

    missing_ref_yaml = """
foo: bar
ref_foo: !ref:bar
"""
    raised_key_error = True
    with pytest.raises(KeyError):
        Configuration.from_string(missing_ref_yaml)
        raised_key_error = False
    assert raised_key_error
    logger.debug("Raised KeyError")

    # use previous context ...
    ref_src_foo_yaml = """
ref_src_foo: !ref:foo
"""
    ref_src_foo = Configuration.from_string(ref_src_foo_yaml, configure=False)
    merged = deepcopy(src).merge(ref_src_foo)
    merged.configure()
    print_yaml("yaml that refs src", merged)
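# Note: the ksgen-style tests in this collection call print_yaml() and logger,
# which are defined elsewhere in their test module. The sketch below is an
# assumed, minimal reconstruction (names and behaviour guessed, not taken from
# the original) so the snippets can be read in isolation.
import logging

import yaml

logger = logging.getLogger(__name__)


def print_yaml(msg, obj):
    # Dump the object as YAML under a short heading, for debugging.
    data = dict(obj) if hasattr(obj, 'items') else obj
    logger.debug("%s:\n%s", msg, yaml.dump(data, default_flow_style=False))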
def test_overwrite_tag():
    src_yaml = """
foo: bar
"""
    overwrite_fail_yaml = """
foo: [1, 2, 3]
"""
    src = Configuration.from_string(src_yaml)
    print_yaml("Src", src)
    overwrite_fail = Configuration.from_string(overwrite_fail_yaml)
    print_yaml("Overwrite fail", overwrite_fail)

    error_raised = True
    with pytest.raises(ConfigurationError):
        logger.debug("Going to merge configs that should fail")
        merged = deepcopy(src).merge(overwrite_fail)
        logger.critical("You should never see this")
        error_raised = False
    assert error_raised
    logger.debug("Raised ConfigurationError")
    assert src.foo == 'bar'

    # ### use overwrite to overwrite src.foo
    overwrite_yaml = """
foo: !overwrite [1, 2, 3]
"""
    overwrite = Configuration.from_string(overwrite_yaml)
    print_yaml("Overwrite", overwrite)
    merged = deepcopy(src).merge(overwrite)
    print_yaml("Merged", merged)
def test_lookup():
    src_yaml = """
foo: bar
ref_foo: !lookup foo
"""
    src = Configuration.from_string(src_yaml, configure=False)

    from ksgen.yaml_utils import LookupDirective
    LookupDirective.lookup_table = src
    print_yaml("yaml for src", src)

    # use previous context ...
    ref_src_foo_yaml = """
ref_src_foo: !lookup foo
missing_lookup: !lookup non.existent.key
"""
    ref_src_foo = Configuration.from_string(ref_src_foo_yaml, configure=False)
    merged = deepcopy(src).merge(ref_src_foo)
    LookupDirective.lookup_table = merged
    print_yaml("yaml that refs src", merged)

    import yaml
    final_config = Configuration.from_string(yaml.safe_dump(merged))
    print_yaml("yaml that refs src", final_config)

    assert final_config.ref_foo == src.foo
    assert final_config.ref_src_foo == src.foo
    assert final_config.missing_lookup == '{{ non.existent.key }}'
def test_merge():
    d1 = {
        "d1": [1, 2, 3],
        "s": "foo",
        "a": [1, 2, 3],
        "nested_dict": {
            "d1": "ok",
            "d": "ok"
        }
    }
    c1 = Configuration.from_dict(d1)
    print_yaml("d1", d1)

    d2 = {
        "d2": "foobar",
        "s": "foobar",
        "a": [3, 4, 5],
        "nested_dict": {
            "d2": "ok",
            "d": {
                "foo": "bar"
            }
        }
    }
    c2 = Configuration.from_dict(d2)
    print_yaml("d2", d2)

    c3 = c1.merge(c2)
    print_yaml("Merged", dict(c3))
def test_merge_error():
    src_dict = {
        "d1": [1, 2, 3],
        "s": "foo",
        "a": [1, 2, 3],
        "nested_dict": {
            "d1": "ok",
            "d": "ok"
        }
    }
    other_dict = {
        "d2": "foobar",
        "s": "foobar",
        "a": [3, 4, 5],
        "nested_dict": {
            "d2": "ok",
            "d": {      # ### raises ConfigError
                "foo": "bar"
            }
        }
    }
    src = Configuration.from_dict(src_dict)
    print_yaml("src", src)
    other = Configuration.from_dict(other_dict)
    print_yaml("other", other)

    with pytest.raises(ConfigurationError):
        merged = deepcopy(src).merge(other)
        print_yaml("Merged", dict(merged))
    logger.info("Merge raised configuration error")
def test_lookups_in_extends():
    src = Configuration.from_file(TEST_DIR + '/data/extends/extends.yml')
    print_yaml("Extends using !lookup:", src)

    src.configure()
    from ksgen.yaml_utils import LookupDirective
    LookupDirective.lookup_table = src
    print_yaml("Extends using !lookup:", src)
def main():
    """ Run the script """
    os.system('cp ../config.xml config/config.xml')
    os.system('cp ../network.xml config/network.xml')
    config = Configuration('../config.ini')
    results = begin_all_sims(config)
    save_results(results, 'rawResults.pkl')
def load(self):
    if self._loaded:
        return

    self._file_list = []
    self._invalid_paths = []
    self._create_file_list(self._settings, self._file_list)
    logger.info(
        "\nList of files to load :\n - %s",
        '\n - '.join(
            [x[len(self._config_dir) + 1:] for x in self._file_list]))
    if self._invalid_paths:
        logger.info("invalid files :\n %s", '\n'.join(self._invalid_paths))
        raise OptionError(self._invalid_paths)

    all_cfg = Configuration.from_dict({})
    for f in self._file_list:
        cfg = load_configuration(f, self._config_dir)
        try:
            del cfg[DEFAULTS_TAG]
        except KeyError:
            pass
        else:
            logger.debug("Successfully removed default traces from %s" % f)
        all_cfg.merge(cfg)

    self._all_settings.merge(all_cfg)
    self._loaded = True
def load_configuration(environment, config_name, config_dir):
    """
    :param environment: Environment name
    :type environment: str
    :param config_name: Config file prefix
    :type config_name: str
    :param config_dir: Config directory
    :type config_dir: str
    :return: Configuration
    """
    config_filename = '{}_{}.yaml'.format(config_name, environment)
    config_filename = os.path.join(config_dir, config_filename)
    if not os.path.isfile(config_filename):
        config_filename = '{}.yaml'.format(config_name)
        config_filename = os.path.join(config_dir, config_filename)

    config = Configuration.from_file(config_filename)
    config.configure()

    def to_dict(mapping):
        return {
            k: (to_dict(v) if isinstance(v, Mapping) else v)
            for k, v in mapping.items()
        }

    return to_dict(config)
def checkValidRoles(h):
    result = True
    definedRoles = Configuration.from_file('config/roles.yaml').configure()
    for r in h["roles"]:
        if r not in definedRoles:
            print "==> {0} is not a valid role.".format(r)
            result = False
    return result
def test_load_from_file(self):
    filename = path.join(path.dirname(__file__), 'examples', 'example.default.conf')
    c = Configuration.from_file(filename)
    c.configure()
    self.assertEqual(c.a, 1)
    self.assertIsInstance(c.b, timedelta)
    self.assertEqual(c.b, timedelta(days=1))
def load_variables(file_path, namespace):
    file_path = Template(file_path).render(os.environ)
    variables = dict(
        Configuration.from_file(os.path.abspath(file_path)).configure()
    )
    staticconf.DictConfiguration(
        variables, namespace=namespace, flatten=False)
def test_array_extend():
    src_dict = {
        "src": [11, 12, 13],
        "merge": [100, 101, 102],
        'nested_dict': {
            'src': [111, 112, 113],
            'merge': [1000, 1001, 1002]
        }
    }
    other_dict = {
        "other": [22, 22, 23],
        "merge": [200, 202, 202],
        'nested_dict': {
            'other': [222, 222, 223],
            'merge': [2000, 2002, 2002]
        }
    }
    src = Configuration.from_dict(src_dict)
    print_yaml("Src", src)
    other = Configuration.from_dict(other_dict)
    print_yaml("Other", other)

    merged = deepcopy(src).merge(other)
    print_yaml("merged", merged)
    print_yaml("Src after merge", src)
    print_yaml("Other after merge", other)

    assert verify_key_val(src, src_dict, 'merge')
    assert verify_key_val(src, src_dict, 'src')
    assert verify_key_val(src, src_dict, 'nested_dict.merge')
    assert verify_key_val(src, src_dict, 'nested_dict.src')
    with pytest.raises(KeyError):
        verify_key_val(src, src_dict, 'other')

    assert verify_key_val(merged, src_dict, 'src')
    assert verify_key_val(merged, other_dict, 'other')
    assert merged['merge'] == src_dict['merge'] + other_dict['merge']
    assert verify_key_val(merged, src_dict, 'nested_dict.src')
    assert verify_key_val(merged, other_dict, 'nested_dict.other')
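# Note: verify_key_val() is another helper these merge tests rely on but do not
# define here. A plausible sketch (assumed behaviour, not the original code):
# resolve a dotted key in both the Configuration and the plain dict, raise
# KeyError if it is missing anywhere, and compare the two values.
def verify_key_val(config, source_dict, dotted_key):
    config_val = config
    dict_val = source_dict
    for part in dotted_key.split('.'):
        config_val = config_val[part]   # KeyError here if the key is absent
        dict_val = dict_val[part]
    return config_val == dict_val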
def test_simple_merge():
    src_dict = {
        "merge": "src merge",
        "src": 'src only',
        'nested_dict': {
            'merge': 'nested src merge',
            'src': 'nested src only'
        }
    }
    other_dict = {
        "merge": "other merge",
        "other": 'other only',
        'nested_dict': {
            'merge': 'nested other merge',
            'other': 'nested other only'
        }
    }
    src = Configuration.from_dict(src_dict)
    print_yaml("Src", src)
    other = Configuration.from_dict(other_dict)
    print_yaml("Other", other)

    merged = deepcopy(src).merge(other)
    print_yaml("merged", merged)
    print_yaml("Src after merge", src)
    print_yaml("Other after merge", other)

    assert verify_key_val(src, src_dict, 'merge')
    assert verify_key_val(src, src_dict, 'src')
    assert verify_key_val(src, src_dict, 'nested_dict.merge')
    assert verify_key_val(src, src_dict, 'nested_dict.src')
    with pytest.raises(KeyError):
        verify_key_val(src, src_dict, 'other')

    assert verify_key_val(merged, other_dict, 'merge')
    assert verify_key_val(merged, src_dict, 'src')
    assert verify_key_val(merged, other_dict, 'other')
def validate_configuration(options):
    # must have 3 sections common
    pass  # (function body not included in this snippet)


if __name__ == '__main__':
    parser = build_argument_parser()
    options = parser.parse_args()
    config = Configuration.from_file(options.config)
def load_yaml(file_path, namespace):
    file_path = Template(file_path).render(os.environ)
    yaml_dict = Configuration.from_file(
        os.path.abspath(file_path), configure=False
    )
    staticconf.DictConfiguration(
        yaml_dict, namespace=namespace, flatten=False)
def configuration_loader(filename):
    from collections import Mapping
    from configure import Configuration

    config = Configuration.from_file(filename)
    config.configure()

    def to_dict(mapping):
        return {k: (to_dict(v) if isinstance(v, Mapping) else v)
                for k, v in mapping.items()}

    return to_dict(config)
def test_limit_chars():
    src_yaml = """
substr: !limit_chars [ 'abcdefghijklmnopqrstuvwxyz', 7 ]
zero: !limit_chars [ 'abcdefghijklmnopqrstuvwxyz', 0 ]
"""
    src = Configuration.from_string(src_yaml)
    print_yaml("Src", src)
    assert src.substr == 'abcdefg'
    assert src.zero == ''
def main():
    """ Main execution point """
    try:
        source_folder = sys.argv[1]
        destination_folder = sys.argv[2]
    except Exception:
        print('Please provide the path to source and destination folders.')
    else:
        folders = ['packages', 'router', 'routing_algos']
        config = Configuration('../config.ini')
        copy_folders(source_folder, destination_folder, folders)
        setup(destination_folder, folders, config)
def validate(config_path):
    try:
        config = Configuration.configure(config_path)
    except ConfigurationError as ce:
        logger.exception(ce)

    try:
        with open('config.schema.yaml', 'r') as f:
            schema = yaml.load(f)
    except:
        schema = yaml.load('')

    v = DEConfigValidator
def test_random():
    src_yaml = """
random: !random 8
"""
    random_so_far = set()
    for x in range(2 ** 12):
        src = Configuration.from_string(src_yaml)
        if src.random in random_so_far:
            print len(random_so_far)
        assert src.random not in random_so_far
        random_so_far.add(src.random)
def test_merge_lookup():
    lookup_yaml = """
foo: bar
merge: !lookup foo
"""
    lookup = Configuration.from_string(lookup_yaml)
    print_yaml("Lookup yaml", lookup)

    new_yaml = """
merge: baz
"""
    new_values = Configuration.from_string(new_yaml)
    print_yaml("merge", new_values)

    error_raised = True
    with pytest.raises(ConfigurationError):
        logger.debug("Going to merge configs that should fail")
        deepcopy(lookup).merge(new_values)
        logger.critical("You should never see this")
        error_raised = False
    assert error_raised
    logger.debug("Raised ConfigurationError")
def lineReceived(self, line):
    stdout.write("Server%d Received Data: %s\n" % (
        Configuration.getMyID(), line))
    delMsgs = self.algorithm.receiveMsg(line, self.node)
    # return the jsonmsg to various nodes
    # send jsonmsg to nodes to notify them
    # it's for conflicts
    # self.send2Node(0, "fake conflict notification")
    if delMsgs:
        for delMsg in delMsgs:
            # put delete event into the log and execute delete
            e = self.node.createEvent(delMsg)
            for i in range(self.algorithm.n):  # @TODO this line is broadcast
                jsonmsg = self.algorithm.sendMsg2Node(i)
                self.send2Node(i, jsonmsg)
def test_env():
    src_yaml = """
user: !env [USER]
invalid: !env [DOES_NOT_EXIST, ~]
same_user: !env [ DOES_NOT_EXIST, !env [USER, baaaz] ]
default: !env [ DOES_NOT_EXIST, !env [FOOO, default] ]
"""
    src = Configuration.from_string(src_yaml)
    print_yaml("Src", src)
    assert src.user is not None
    assert src.invalid is None
    assert src.same_user == src.user
    assert src.default == 'default'
def test_dict_order():
    yaml = """
too: boo
loo: too
foo: bar
moo:
    - soo
"""
    cfg = Configuration.from_string(yaml).configure()
    assert cfg.keys() == ['too', 'loo', 'foo', 'moo']
    for k in cfg.iterkeys():
        logger.debug('%s', k)
    for k, v in cfg.iteritems():
        logger.debug('%s: %s', k, v)
    print_yaml("src", cfg)
def load_configuration(file_path, rel_dir=None):
    """
    Return the Configuration for file_path.

    Logs an error if the file cannot be parsed. If the optional rel_dir is
    passed, the error message shows file_path relative to rel_dir instead of
    the current directory.
    """
    logger.debug('Loading file: %s', file_path)
    try:
        return Configuration.from_file(file_path).configure()
    except ConfigurationError as e:
        rel_dir = rel_dir or os.curdir
        logger.error("Error loading: %s; reason: %s",
                     os.path.relpath(file_path, rel_dir), e)
        raise
def test_env():
    src_yaml = """
user: !env [USER]
invalid: !env [DOES_NOT_EXIST1, ~]
same_user: !env [ DOES_NOT_EXIST2, !env [USER, baaaz] ]
default: !env [ DOES_NOT_EXIST3, !env [FOOO, default] ]
home_short: !env [ HOMExxx, '/my/home/under/the/bridge/', 7 ]
"""
    src = Configuration.from_string(src_yaml)
    print_yaml("Src", src)
    assert src.user is not None
    assert src.invalid is None
    assert src.same_user == src.user
    assert src.default == 'default'
    assert src.home_short == '/my/hom'
def _load_defaults(self, path, value):
    param = '-'.join(path[len(self.config_dir + os.sep):].split('/')[::2])
    if not self.parsed['--' + param]:
        logging.warning(
            "'--%s' hasn't been provided, using '%s' as default"
            % (param, value))
        self.defaults.append(''.join(['--', param, '=', str(value)]))
    else:
        value = self.parsed['--' + param]

    file_path = path + os.sep + str(value) + '.yml'
    loaded_file = Configuration.from_file(file_path)
    if loaded_file.get(DEFAULTS_TAG):
        path += os.sep + str(value)
        for sub_key, sub_value in loaded_file[DEFAULTS_TAG].iteritems():
            self._load_defaults(path + os.sep + sub_key, sub_value)
def sendMsg2Node(self, nodek):
    # @TODO load log from database
    log = self.dc.getLogs(0, sys.maxint)
    NP = {}  # partial log
    ES = {}  # event lists
    matrix = [[0 for _ in range(self.n)] for _ in range(self.n)]
    for i in range(self.n):
        for j in range(self.n):
            matrix[i][j] = self.dc.getTime(i, j)
    for (id, name, time, content) in log:
        event = Event(id, name, time, content)
        if not self.__hasRec(matrix, event, nodek):
            ES[event.name] = (event.time, event.node, event.content)
            logging.debug((event.time, event.node, event.content))
    NP["matrix"] = matrix
    NP["events"] = ES
    NP["senderID"] = Configuration.getMyID()
    NP["receiverID"] = nodek
    logging.debug(json.dumps(NP))
    return json.dumps(NP)
def getVars(self):
    """ Used at setup of test """
    config = Configuration.from_file('./config.yml').configure()
    self.testbrowser = config['browser']
    self.host = config['sharehost']
    self.port = config['shareport']
    self.sharehost = config['sharehost']
    self.shareport = config['shareport']
    self.repohost = config['repohost']
    self.repoport = config['repoport']
    self.host = config['sharehost']
    if config['https'] is True:
        uri = 'https'
    else:
        uri = 'http'
    if self.shareport is None:
        self.url = uri + '://' + self.sharehost
    else:
        self.url = uri + '://' + self.sharehost + ':' + str(self.shareport)
    if self.repoport is None:
        self.repourl = uri + '://' + self.repohost
    else:
        self.repourl = uri + '://' + self.repohost + ':' + str(self.repoport)
    self.username = config['user']
    self.password = config['passwd']
    self.loginurl = self.url + config['loginurl']
    self.photopath = config['photopath']
    self.cmisurl = config['cmisurl']
    self.cmisatom = self.repourl + config['cmisatom']
    self.ftpurl = config['sharehost']
    self.ftpport = config['ftpport']
    self.imap_host = config['imap_host']
    self.imap_port = config['imap_port']
    return self
def load(self):
    if self._loaded:
        return

    self._file_list = []
    self._invalid_paths = []
    self._create_file_list(self._settings, self._file_list)
    logger.info(
        "\nList of files to load :\n - %s",
        '\n - '.join(
            [x[len(self._config_dir) + 1:] for x in self._file_list]))
    if self._invalid_paths:
        logger.info("invalid files :\n %s", '\n'.join(self._invalid_paths))
        raise OptionError(self._invalid_paths)

    all_cfg = Configuration.from_dict({})
    for f in self._file_list:
        cfg = load_configuration(f, self._config_dir)
        all_cfg.merge(cfg)

    self._all_settings.merge(all_cfg)
    self._loaded = True
def load_yaml_config(filename):
    filename = os.path.abspath(filename)
    config = Configuration.from_file(filename).configure()
    return config
# -*- coding: utf-8 -*-
import os
import warnings

from configure import Configuration

try:
    environment = os.environ['APPLICATION_ENV']
except KeyError:
    warnings.warn('APPLICATION_ENV environment variable is not set, '
                  'falling back to the default "prod"')
    environment = 'prod'

path = os.path.dirname(__file__)
config_file = '{0}/config/config_{1}.yml'.format(path, environment)
settings = Configuration.from_file(config_file).configure()
def __init__(self):
    self.n = Configuration.getN()
    self.C = 0
    self.ID = Configuration.getMyID()
    self.dc = DataConn()
def _load_file(self, f):
    logging.debug('Loading file: %s', f)
    cfg = Configuration.from_file(f).configure()
    self._all_settings.update(cfg)
def config(self, v, ctx=None):
    return Configuration.from_string(v.strip(), ctx=ctx)
        elif options.odma_mode.lower() == "st_1024":
            options.predefined_config = "hdl_unit_sim.odma_st_1024.defconfig"
        else:
            options.predefined_config = "hdl_unit_sim.odma.defconfig"
    else:
        options.predefined_config = "hdl_unit_sim.bridge.defconfig"

if __name__ == '__main__':
    msg.ok_msg_blue("--------> WELCOME to IBM OpenCAPI Acceleration Framework")

    question_and_answer = qa.QuestionAndAnswer(options)
    question_and_answer.ask(qa.ask_clean_str)
    if options.clean:
        env_clean(ocaccel_workflow_log)

    cfg = Configuration(options)
    cfg.log = ocaccel_workflow_log
    question_and_answer.cfg = cfg
    question_and_answer.ask(qa.ask_configure_str)
    if not options.no_configure:
        cfg.configure()

    # In unit sim mode, all configurations are handled automatically,
    # no need to update the cfg
    if not options.unit_sim:
        cfg.update_cfg()

    if not options.no_env_check:
        env_check(options)

    question_and_answer.ask(qa.ask_make_model_str)
    if not options.no_make_model and options.simulator.lower() != "nosim":
# from configure import Include

# Function for programmatically representing includes to the configure module
# def represent_Include(dumper, include):
#     return dumper.represent_scalar(u'!include:%s' % include.filename, '')
# yaml.add_representer(Include, represent_Include)

# Load the base configuration YAML
# with open(appfile, 'r') as stream:
#     app = yaml.load(stream)

# Auto load realms and sites directories
# for stem in ['realm', 'site']:
#     plural = stem + 's'
#     # basedir = os.path.join(rootdir, plural)
#     app[plural] = app.get(plural, {}) or {}
#     for dir in os.listdir(os.path.join(appdir, plural)):
#         relative = os.path.join(plural, dir, stem + '.yaml')
#         if os.path.isfile(os.path.join(appdir, relative)):
#             app[plural][dir] = Include(relative)

# config = Configuration.from_string(yaml.dump(app), pwd=os.path.abspath(appdir))
config = Configuration.from_file(appfile)
print config
def running_config_POST(self, arg):
    try:
        # Expectation:
        # {
        #     'credentials': {'tested-rc': '<STRING>', 'tested-passwd': '<STRING>',
        #                     'testing-rc': '<STRING>', 'testing-passwd': '<STRING>'},
        #     'kb_cfg': {<USER_OVERRIDDEN_CONFIGS>},
        #     'topo_cfg': {<TOPOLOGY_CONFIGS>},
        #     'tenants_cfg': {<TENANT_AND_USER_LISTS_FOR_REUSING>}
        # }
        user_config = json.loads(arg)

        # Parsing credentials from application input
        cred_config = user_config["credentials"]
        cred_tested = Credentials(openrc_contents=cred_config["tested-rc"],
                                  pwd=cred_config["tested-passwd"])
        if "testing-rc" in cred_config and cred_config["testing-rc"] != cred_config["tested-rc"]:
            cred_testing = Credentials(openrc_contents=cred_config["testing-rc"],
                                       pwd=cred_config["testing-passwd"])
        else:
            # Use the same openrc file for both cases
            cred_testing = cred_tested

        session_id = hashlib.md5(str(cred_config)).hexdigest()
        kb_config = KBConfig()

        if KBSessionManager.has(session_id):
            response.status = 403
            response.text = u"Session already exists."
            return response.text

        # Parsing server and client configs from application input
        # Save the public key into a temporary file
        if "public_key" in user_config["kb_cfg"]:
            pubkey_filename = "/tmp/kb_public_key.pub"
            f = open(pubkey_filename, "w")
            f.write(user_config["kb_cfg"]["public_key_file"])
            f.close()
            kb_config.config_scale["public_key_file"] = pubkey_filename

        if "prompt_before_run" in user_config["kb_cfg"]:
            kb_config.config_scale["prompt_before_run"] = False

        if user_config["kb_cfg"]:
            alt_config = Configuration.from_string(user_config["kb_cfg"]).configure()
            kb_config.config_scale = kb_config.config_scale.merge(alt_config)

        # Parsing topology configs from application input
        if "topo_cfg" in user_config:
            topo_cfg = Configuration.from_string(user_config["topo_cfg"]).configure()
        else:
            topo_cfg = None

        # Parsing tenants configs from application input
        if "tenants_list" in user_config:
            tenants_list = Configuration.from_string(user_config["tenants_list"]).configure()
        else:
            tenants_list = None
    except Exception:
        response.status = 400
        response.text = u"Error while parsing configurations: \n%s" % traceback.format_exc()
        return response.text

    logging.setup("kloudbuster", logfile="/tmp/kb_log_%s" % session_id)
    kb_config.init_with_rest_api(
        cred_tested=cred_tested,
        cred_testing=cred_testing,
        topo_cfg=topo_cfg,
        tenants_list=tenants_list)

    kb_session = KBSession()
    kb_session.kb_config = kb_config
    KBSessionManager.add(session_id, kb_session)

    return str(session_id)
import subprocess
from subprocess import Popen, PIPE, STDOUT
import os
import boto.cloudformation
import boto.s3.connection
import gzip
import logging
import datetime
import sys

from configure import Configuration

folderlocation = os.path.dirname(os.path.realpath(__file__))

# Load connection configuration file
ConnectionConfig = Configuration.from_file(
    os.path.join(folderlocation, 'ConnectionConfig.yaml')).configure()

# Load SQL Server queries file
queries = Configuration.from_file(
    os.path.join(folderlocation, 'SQLServerQueries.yaml')).configure()

# Load Redshift configuration file
RedshiftConfig = Configuration.from_file(
    os.path.join(folderlocation, 'RedShiftTablesConfig.yaml')).configure()

# Set the date for logging
processdate = datetime.datetime.today().strftime('%Y%m%d')

# Set the package variables
# Package name
packagename = sys.argv[1]
class CouchbaseServerInstaller:
    __config: Configuration
    __url: str
    __version: str
    __build: str
    __raw_version: str
    __ssh_keyfile: str
    __ssh_keypass: Credential

    @staticmethod
    def version_to_code(version: str):
        try:
            parsed_version = Version(version)
        except InvalidVersion:
            print("Non-numeric version {} received, interpreting as codename...".format(version))
            return version

        if parsed_version >= Version("7.0"):
            return "cheshire-cat"

        print(colored("This script uses features introduced in 6.5, earlier versions not supported...", "red"))
        raise UnsupportedException("Unsupported version of Couchbase Server requested")

    @staticmethod
    def _parse_version(version: str):
        version_build = version.split("-")
        if len(version_build) == 2:
            return (CouchbaseServerInstaller.version_to_code(version_build[0]), version_build[1])

        return (version, None)

    @staticmethod
    def _generate_filename(version: str, build: str):
        if build is None:
            return "couchbase-server-enterprise-{}-centos7.x86_64.rpm".format(version)

        return "couchbase-server-enterprise-{}-{}-centos7.x86_64.rpm".format(version, build)

    def __init__(self, url: str, ssh_keyfile: str, keypass: Credential):
        self.__config = Configuration()
        self.__config.load()
        self.__raw_version = self.__config[SettingKeyNames.CBS_VERSION]
        self.__url = url
        self.__ssh_keyfile = ssh_keyfile
        self.__ssh_keypass = keypass
        (self.__version, self.__build) = CouchbaseServerInstaller._parse_version(self.__raw_version)

    def download(self):
        filename = CouchbaseServerInstaller._generate_filename(self.__version, self.__build)
        if not Path(filename).exists():
            print("Downloading Couchbase Server {}...".format(self.__raw_version))
            url = self._generate_download_url(self.__version, self.__build, filename)
            wget.download(url, filename)

    def install(self):
        filename = CouchbaseServerInstaller._generate_filename(self.__version, self.__build)
        if not Path(filename).exists():
            raise Exception("Unable to find installer, please call download first")

        print("Installing Couchbase Server to {}...".format(self.__url))
        ssh_client = SSHClient()
        ssh_client.load_system_host_keys()
        ssh_client.set_missing_host_key_policy(WarningPolicy())
        ssh_connect(ssh_client, self.__url, self.__ssh_keyfile, str(self.__ssh_keypass))

        (_, stdout, _) = ssh_client.exec_command("test -f {}".format(filename))
        if stdout.channel.recv_exit_status() == 0:
            print("Install file already present on remote host, skipping upload...")
        else:
            print("Uploading file to remote host...")
            sftp = ssh_client.open_sftp()
            sftp_upload(sftp, filename, filename)
            sftp.close()

        ssh_command(ssh_client, self.__url, "sudo yum install -y {}".format(filename))
        print("Install finished!")

    def _generate_download_url(self, version: str, build: str, filename: str):
        # All access via VPN or company network
        if build is not None:
            return "http://latestbuilds.service.couchbase.com/builds/latestbuilds/couchbase-server/{}/{}/{}".format(
                version, build, filename)

        return "http://latestbuilds.service.couchbase.com/builds/releases/{}/{}".format(version, filename)
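# Hypothetical usage sketch for CouchbaseServerInstaller: the host, key file,
# and passphrase below are placeholders, and the Credential constructor is
# assumed to accept a plain string; the real project may differ.
if __name__ == "__main__":
    installer = CouchbaseServerInstaller(
        "cbs.example.com",               # placeholder server address
        "/home/user/.ssh/id_rsa",        # placeholder SSH key file
        Credential("key-passphrase"))    # placeholder passphrase
    installer.download()   # fetch the RPM locally if not already present
    installer.install()    # upload (if needed) and install on the remote host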