def test_deep_structure_is_retrieved_the_same(self):
    from copy import deepcopy
    # Store a nested structure through one client, then read it back
    # through a fresh client backed by the same database file.
    original = {'a': ['b', {'c': 123}]}
    writer = KV(self.tmp / 'kv.sqlite')
    writer['a'] = deepcopy(original)
    reader = KV(self.tmp / 'kv.sqlite')
    self.assertEqual(reader['a'], original)
def test_same_database_can_contain_two_namespaces(self):
    # Two KV handles over one file but distinct tables: writes to the
    # same key must not clash across namespaces.
    default_ns = KV(self.tmp / 'kv.sqlite')
    other_ns = KV(self.tmp / 'kv.sqlite', table='other')
    default_ns['a'] = 'b'
    other_ns['a'] = 'c'
    self.assertEqual(default_ns['a'], 'b')
    self.assertEqual(other_ns['a'], 'c')
def test_lock_fails_if_db_already_locked(self):
    """A second client with a short timeout must see 'database is locked'
    while another thread holds the lock on the same database file."""
    import sqlite3
    from threading import Thread
    from queue import Queue  # Py3 module name (was `from Queue import Queue`)
    db_path = self.tmp / "kv.sqlite"
    q1 = Queue()
    q2 = Queue()
    kv2 = KV(db_path, timeout=0.1)

    def locker():
        # Hold the lock until the main thread signals through q2.
        kv1 = KV(db_path)
        with kv1.lock():
            q1.put(None)
            q2.get()

    th = Thread(target=locker)
    th.start()
    try:
        q1.get()  # wait until the lock is definitely held
        with self.assertRaises(sqlite3.OperationalError) as cm1:
            with kv2.lock():
                pass
        # BaseException.message was removed in Python 3; compare str().
        self.assertEqual(str(cm1.exception), "database is locked")
        with self.assertRaises(sqlite3.OperationalError) as cm2:
            kv2["a"] = "b"
        self.assertEqual(str(cm2.exception), "database is locked")
    finally:
        # Always release the locker thread, even if an assertion fails.
        q2.put(None)
        th.join()
def test_lock_fails_if_db_already_locked(self):
    """A second client with a short timeout must see 'database is locked'
    while another thread holds the lock on the same database file."""
    import sqlite3
    from threading import Thread
    from queue import Queue  # Py3 module name (was `from Queue import Queue`)
    db_path = self.tmp / 'kv.sqlite'
    q1 = Queue()
    q2 = Queue()
    kv2 = KV(db_path, timeout=0.1)

    def locker():
        # Hold the lock until the main thread signals through q2.
        kv1 = KV(db_path)
        with kv1.lock():
            q1.put(None)
            q2.get()

    th = Thread(target=locker)
    th.start()
    try:
        q1.get()  # wait until the lock is definitely held
        with self.assertRaises(sqlite3.OperationalError) as cm1:
            with kv2.lock():
                pass
        # BaseException.message was removed in Python 3; compare str().
        self.assertEqual(str(cm1.exception), 'database is locked')
        with self.assertRaises(sqlite3.OperationalError) as cm2:
            kv2['a'] = 'b'
        self.assertEqual(str(cm2.exception), 'database is locked')
    finally:
        # Always release the locker thread, even if an assertion fails.
        q2.put(None)
        th.join()
def get_emails(filepath, default=''):
    """
    Return the email addresses stored in *filepath*, comma separated.

    On any failure (missing/corrupt database, ...) the error is logged
    and *default* is returned instead of raising.
    """
    try:
        db = KV(filepath, 'emails')
        emails = db.keys()
    except Exception as err:  # Py3-compatible (was `except Exception, err`)
        logger.exception(err)
        return default
    # The original computed `emails` and then unconditionally returned
    # *default*, discarding the result; join the keys as the docstring
    # promises.  NOTE(review): the docstring mentioned an `iterable`
    # flag that is not in the signature — confirm against callers.
    return ','.join(emails)
def __init__(self, config):
    # Resolve the directory layout rooted at the configured home path.
    self.config = config
    self.home_path = config['home']
    self.var_path = self.home_path / 'var'
    self.log_path = self.var_path / 'log'
    self.deploy_path = self.var_path / 'deploy'
    etc = self.home_path / 'etc'
    etc.mkdir_p()
    # Both key-value stores share one database file, split by table.
    self.buckets_db = KV(etc / 'buckets.db', table='bucket')
    self.meta_db = KV(etc / 'buckets.db', table='meta')
    self.daemons = Supervisor(etc)
def migrate():
    """ Migrate from older stats.db """
    # DynamoDB attribute name -> key in the legacy stats.db file.
    field_map = {
        'build_datetime': 'starttime',
        'build_endtime': 'endtime',
        'job_name': 'job-name',
        'build_number': 'build-number',
        'node_name': 'node-name',
        'build_tag': 'build-tag',
        'git_commit': 'git-commit',
        'git_url': 'git-url',
        'git_branch': 'git-branch',
        'test_result': 'test-result',
        'workspace': 'workspace',
    }
    table = dynamodb.Table('CIBuilds')
    for item in OBJECTS:
        if 'stats.db' not in item.key:
            continue
        download_file(item.key, 'stats.db')
        db = KV('stats.db')
        try:
            # Inside the try: a db missing 'build-tag' previously crashed
            # the whole migration here instead of being skipped.
            click.echo(f'Processing {db["build-tag"]}')
            obj = {attr: db[key] for attr, key in field_map.items()}
            table.put_item(Item=obj)
        except Exception as exc:
            # Best-effort migration: report and move on (was a silent
            # bare `except: continue` that hid every failure).
            click.echo(f'Skipping {item.key}: {exc}')
            continue
def update_emails(filepath, email):
    """
    Record *email* in the metadata file at *filepath*.

    Blank input is ignored; storage errors are logged, not raised.
    """
    email = email.strip()
    if not email:
        return
    try:
        db = KV(filepath, 'emails')
        db[email] = True
    except Exception as err:  # Py3-compatible (was `except Exception, err`)
        logger.exception(err)
def setUp(self):
    # Top-level keys the tests in this case expect to find.
    self.expected_keys = ['config', 'provider']
    self.app = AppConfig()
    # Back application state with a throwaway on-disk database.
    self.db_file = tempfile.NamedTemporaryFile()
    self.app.state = KV(self.db_file.name)
    # Fake EC2 provider with placeholder controller/model names.
    provider = AWS()
    provider.controller = "fake-tester-controller"
    provider.model = "fake-tester-model"
    provider.cloud_type = "ec2"
    self.app.provider = provider
    self.app.config = {'spell': 'kubernetes-core'}
    # Stub out juju client and logging so no real calls happen.
    self.app.juju.client = AsyncMock()
    self.app.log = MagicMock()
def test_value_saved_at_null_key_is_retrieved(self):
    # None is a legal key: store under it, then read it back.
    store = KV()
    store[None] = 'a'
    self.assertEqual(store[None], 'a')
def test_value_saved_with_str_key_is_not_retrieved_with_int_key(self):
    # Keys are type-sensitive: the string '13' and the int 13 differ.
    store = KV()
    store['13'] = 'a'
    self.assertIsNone(store.get(13))
def test_lock_during_lock_still_saves_value(self):
    # Re-entrant locking must not lose a write made in the inner scope.
    store = KV()
    with store.lock():
        with store.lock():
            store['a'] = 'b'
    self.assertEqual(store['a'], 'b')
class Airship(object):
    """ The airship object implements most operations performed by
    airship. It acts as container for deployments.
    """

    def __init__(self, config):
        # Directory layout is rooted at the configured home path.
        self.home_path = config['home']
        self.var_path = self.home_path / 'var'
        self.log_path = self.var_path / 'log'
        self.deploy_path = self.var_path / 'deploy'
        self.config = config
        etc = self.home_path / 'etc'
        etc.mkdir_p()
        # Both stores share one database file, split by table.
        self.buckets_db = KV(etc / 'buckets.db', table='bucket')
        self.meta_db = KV(etc / 'buckets.db', table='meta')
        self.daemons = Supervisor(etc)

    @property
    def cfg_links_folder(self):
        # Created lazily on first access.
        folder = self.home_path / CFG_LINKS_FOLDER
        if not folder.isdir():
            folder.makedirs()
        return folder

    def initialize(self):
        # Create the runtime directory tree and supervisor config.
        self.var_path.mkdir_p()
        self.log_path.mkdir_p()
        (self.var_path / 'run').mkdir_p()
        self.deploy_path.mkdir_p()
        self.generate_supervisord_configuration()

    def generate_supervisord_configuration(self):
        self.daemons.configure(self.home_path)

    def _get_bucket_by_id(self, bucket_id):
        # Raises KeyError if the bucket is unknown.
        config = self.buckets_db[bucket_id]
        return Bucket(bucket_id, self, config)

    def get_bucket(self, name=_newest):
        # With no argument, return the most recently created bucket.
        if name is _newest:
            name = max(self.buckets_db)
        return self._get_bucket_by_id(name)

    def _bucket_folder(self, id_):
        return self.deploy_path / id_

    def _generate_bucket_id(self):
        # The counter lives in meta_db; lock so concurrent callers
        # cannot hand out the same id.
        with self.meta_db.lock():
            next_id = self.meta_db.get('next_bucket_id', 1)
            self.meta_db['next_bucket_id'] = next_id + 1
        id_ = 'd%d' % (next_id,)
        self._bucket_folder(id_).mkdir()
        return id_

    def new_bucket(self, config=None):
        # `config=None` replaces the mutable default `config={}`, which
        # is shared across calls.  NOTE(review): the original body never
        # used `config` — an empty dict is stored regardless; confirm
        # whether callers expect their config to be persisted.
        bucket_id = self._generate_bucket_id()
        self.buckets_db[bucket_id] = {}
        bucket = self._get_bucket_by_id(bucket_id)
        return bucket

    def list_buckets(self):
        return {'buckets': [{'id': id_} for id_ in self.buckets_db]}
def test_udpate_with_dictionary_items_retrieved_via_getitem(self):
    # dict-style update() should behave like individual assignments.
    store = KV()
    store.update({"a": "b"})
    self.assertEqual(store["a"], "b")
def test_get_missing_value_raises_key_error(self):
    # Indexing a key that was never set must raise, like a dict.
    empty = KV()
    with self.assertRaises(KeyError):
        empty['missing']
def test_updated_item_is_retrieved_via_getitem(self):
    # The second write wins: reads return the most recent value.
    store = KV()
    store['a'] = 'b'
    store['a'] = 'c'
    self.assertEqual(store['a'], 'c')
def test_udpate_with_dictionary_items_retrieved_via_getitem(self):
    # Items passed to update() are readable via normal indexing.
    store = KV()
    store.update({'a': 'b'})
    self.assertEqual(store['a'], 'b')
def test_contains_existing_value_is_true(self):
    # Membership tests use keys, as with dicts.
    store = KV()
    store['a'] = 'b'
    self.assertIn('a', store)
def test_saved_item_is_retrieved_via_get(self):
    # get() returns the stored value for an existing key.
    store = KV()
    store['a'] = 'b'
    self.assertEqual(store.get('a'), 'b')
def test_contains_missing_value_is_false(self):
    # A fresh store contains nothing.
    fresh = KV()
    self.assertNotIn('missing', fresh)
def test_get_missing_value_with_default_returns_argument(self):
    # get() must hand back the supplied fallback object itself.
    fallback = object()
    result = KV().get('missing', fallback)
    self.assertEqual(result, fallback)
def test_get_missing_value_returns_default(self):
    # With no fallback argument, get() yields None for absent keys.
    store = KV()
    self.assertIsNone(store.get('missing'))
def test_value_saved_with_float_key_is_retrieved_with_float_key(self):
    # Float keys round-trip intact.
    store = KV()
    store[3.14] = 'a'
    self.assertEqual(store[3.14], 'a')
def test_kv_with_two_items_has_size_two(self):
    # len() reflects the number of distinct keys stored.
    store = KV()
    store['a'] = 'x'
    store['b'] = 'x'
    self.assertEqual(len(store), 2)
def main():
    """Entry point for the conjure-up CLI.

    Parses command-line options and the Conjurefile, prepares application
    state (cache dir, KV state database, logging, juju paths), resolves
    and downloads the requested spell or addon, then runs either the
    headless flow or the urwid UI until shutdown.

    NOTE(review): formatting below is reconstructed from a
    whitespace-mangled source — verify block nesting against the
    original file.
    """
    # Refuse to run as root; state lives under the invoking user's home.
    if os.geteuid() == 0:
        print("")
        print(" !! This should _not_ be run as root or with sudo. !!")
        print("")
        sys.exit(1)

    # Verify we can access ~/.local/share/juju if it exists
    juju_dir = pathlib.Path('~/.local/share/juju').expanduser()
    if juju_dir.exists():
        try:
            for f in juju_dir.iterdir():
                if f.is_file():
                    f.read_text()
        except PermissionError:
            print("")
            print(" !! Unable to read from ~/.local/share/juju, please "
                  "double check your permissions on that directory "
                  "and its files. !!")
            print("")
            sys.exit(1)

    utils.set_terminal_title("conjure-up")
    opts = parse_options(sys.argv[1:])
    # Defaults are parsed separately so merge_argv can tell explicit
    # flags apart from default values.
    opt_defaults = parse_options([])

    # Load conjurefile, merge any overridding options from argv
    if not opts.conf_file:
        opts.conf_file = []
        if pathlib.Path('~/.config/conjure-up.conf').expanduser().exists():
            opts.conf_file.insert(
                0, pathlib.Path('~/.config/conjure-up.conf').expanduser())
        if (pathlib.Path('.') / 'Conjurefile').exists():
            opts.conf_file.insert(0, pathlib.Path('.') / 'Conjurefile')
    for conf in opts.conf_file:
        if not conf.exists():
            print("Unable to locate config {} for processing.".format(
                str(conf)))
            sys.exit(1)

    try:
        app.conjurefile = Conjurefile.load(opts.conf_file)
    except ValueError as e:
        print(str(e))
        sys.exit(1)
    app.conjurefile.merge_argv(opts, opt_defaults)

    if app.conjurefile['gen-config']:
        Conjurefile.print_tpl()
        sys.exit(0)

    spell = os.path.basename(os.path.abspath(app.conjurefile['spell']))

    if not os.path.isdir(app.conjurefile['cache-dir']):
        os.makedirs(app.conjurefile['cache-dir'])

    # Application Config: persistent state DB, environment, logging.
    kv_db = os.path.join(app.conjurefile['cache-dir'], '.state.db')
    app.state = KV(kv_db)
    app.env = os.environ.copy()
    app.env['KV_DB'] = kv_db
    app.config = {'metadata': None}

    app.log = setup_logging(app,
                            os.path.join(app.conjurefile['cache-dir'],
                                         'conjure-up.log'),
                            app.conjurefile.get('debug', False))

    # Make sure juju paths are setup
    juju.set_bin_path()
    juju.set_wait_path()

    app.no_track = app.conjurefile['no-track']
    app.no_report = app.conjurefile['no-report']

    # Grab current LXD and Juju versions
    app.log.debug("Juju version: {}, "
                  "conjure-up version: {}".format(
                      utils.juju_version(),
                      VERSION))

    # Setup proxy
    apply_proxy()

    app.session_id = os.getenv('CONJURE_TEST_SESSION_ID',
                               str(uuid.uuid4()))

    spells_dir = app.conjurefile['spells-dir']

    app.config['spells-dir'] = spells_dir
    spells_index_path = os.path.join(app.config['spells-dir'],
                                     'spells-index.yaml')
    spells_registry_branch = os.getenv('CONJUREUP_REGISTRY_BRANCH', 'master')
    if not app.conjurefile['no-sync']:
        if not os.path.exists(spells_dir):
            utils.info("No spells found, syncing from registry, please wait.")
        try:
            download_or_sync_registry(
                app.conjurefile['registry'],
                spells_dir, branch=spells_registry_branch)
        except subprocess.CalledProcessError as e:
            # A failed sync is fatal only when no local copy exists.
            if not os.path.exists(spells_dir):
                utils.error("Could not load from registry")
                sys.exit(1)
            app.log.debug(
                'Could not sync spells from github: {}'.format(e))
    else:
        if not os.path.exists(spells_index_path):
            utils.error(
                "You opted to not sync from the spells registry, however, "
                "we could not find any suitable spells in: "
                "{}".format(spells_dir))
            sys.exit(1)

    with open(spells_index_path) as fp:
        app.spells_index = yaml.safe_load(fp.read())

    addons_aliases_index_path = os.path.join(app.config['spells-dir'],
                                             'addons-aliases.yaml')
    if os.path.exists(addons_aliases_index_path):
        with open(addons_aliases_index_path) as fp:
            app.addons_aliases = yaml.safe_load(fp.read())

    spell_name = spell
    app.endpoint_type = detect_endpoint(app.conjurefile['spell'])

    if app.conjurefile['spell'] != consts.UNSPECIFIED_SPELL:
        app.spell_given = True

    # Check if spell is actually an addon
    addon = utils.find_addons_matching(app.conjurefile['spell'])
    if addon:
        app.log.debug("addon found, setting required spell")
        utils.set_chosen_spell(addon['spell'],
                               os.path.join(app.conjurefile['cache-dir'],
                                            addon['spell']))
        download_local(os.path.join(app.config['spells-dir'],
                                    addon['spell']),
                       app.config['spell-dir'])
        utils.set_spell_metadata()
        StepModel.load_spell_steps()
        AddonModel.load_spell_addons()
        app.selected_addons = addon['addons']
        app.alias_given = True
        controllers.setup_metadata_controller()
        app.endpoint_type = EndpointType.LOCAL_DIR

    elif app.endpoint_type == EndpointType.LOCAL_SEARCH:
        spells = utils.find_spells_matching(app.conjurefile['spell'])

        if len(spells) == 0:
            utils.error("Can't find a spell matching '{}'".format(
                app.conjurefile['spell']))
            sys.exit(1)

        # One result means it was a direct match and we can copy it
        # now. Changing the endpoint type then stops us from showing
        # the picker UI. More than one result means we need to show
        # the picker UI and will defer the copy to
        # SpellPickerController.finish(), so nothing to do here.
        if len(spells) == 1:
            app.log.debug("found spell {}".format(spells[0][1]))
            spell = spells[0][1]
            utils.set_chosen_spell(spell_name,
                                   os.path.join(
                                       app.conjurefile['cache-dir'],
                                       spell['key']))
            download_local(os.path.join(app.config['spells-dir'],
                                        spell['key']),
                           app.config['spell-dir'])
            utils.set_spell_metadata()
            StepModel.load_spell_steps()
            AddonModel.load_spell_addons()
            app.endpoint_type = EndpointType.LOCAL_DIR

    # download spell if necessary
    elif app.endpoint_type == EndpointType.LOCAL_DIR:
        if not os.path.isdir(app.conjurefile['spell']):
            utils.warning("Could not find spell {}".format(
                app.conjurefile['spell']))
            sys.exit(1)

        if not os.path.exists(os.path.join(app.conjurefile['spell'],
                                           "metadata.yaml")):
            utils.warning("'{}' does not appear to be a spell. "
                          "{}/metadata.yaml was not found.".format(
                              app.conjurefile['spell'],
                              app.conjurefile['spell']))
            sys.exit(1)

        spell_name = os.path.basename(os.path.abspath(spell))
        # NOTE(review): `path.join` here (vs os.path.join elsewhere)
        # relies on a `from os import path` import — confirm it exists.
        utils.set_chosen_spell(spell_name,
                               path.join(app.conjurefile['cache-dir'],
                                         spell_name))
        download_local(app.conjurefile['spell'], app.config['spell-dir'])
        utils.set_spell_metadata()
        StepModel.load_spell_steps()
        AddonModel.load_spell_addons()

    elif app.endpoint_type in [EndpointType.VCS, EndpointType.HTTP]:
        utils.set_chosen_spell(spell, path.join(
            app.conjurefile['cache-dir'], spell))
        remote = get_remote_url(app.conjurefile['spell'])

        if remote is None:
            utils.warning("Can't guess URL matching '{}'".format(
                app.conjurefile['spell']))
            sys.exit(1)

        download(remote, app.config['spell-dir'], True)
        utils.set_spell_metadata()
        StepModel.load_spell_steps()
        AddonModel.load_spell_addons()

    app.env['CONJURE_UP_CACHEDIR'] = app.conjurefile['cache-dir']
    app.env['PATH'] = "/snap/bin:{}".format(app.env['PATH'])

    if app.conjurefile['show-env']:
        if app.endpoint_type in [None, EndpointType.LOCAL_SEARCH]:
            utils.error("Please specify a spell for headless mode.")
            sys.exit(1)
        show_env()

    app.sentry = raven.Client(
        dsn=SENTRY_DSN,
        release=VERSION,
        transport=RequestsHTTPTransport,
        processors=(
            'conjureup.utils.SanitizeDataProcessor',
        )
    )

    track_screen("Application Start")
    track_event("OS", platform.platform(), "")

    app.loop = asyncio.get_event_loop()
    app.loop.add_signal_handler(signal.SIGINT, events.Shutdown.set)

    # Enable charmstore querying
    app.juju.charmstore = CharmStore(app.loop)
    try:
        if app.conjurefile.is_valid:
            # Headless mode: a valid Conjurefile drives the run without UI.
            cloud = None
            region = None
            if '/' in app.conjurefile['cloud']:
                # "cloud/region" syntax on the command line.
                parse_cli_cloud = app.conjurefile['cloud'].split('/')
                cloud, region = parse_cli_cloud
                app.log.debug(
                    "Region found {} for cloud {}".format(cloud, region))
            else:
                cloud = app.conjurefile['cloud']

            cloud_types = juju.get_cloud_types_by_name()
            if cloud not in cloud_types:
                utils.error('Unknown cloud: {}'.format(cloud))
                sys.exit(1)

            if app.endpoint_type in [None, EndpointType.LOCAL_SEARCH]:
                utils.error("Please specify a spell for headless mode.")
                sys.exit(1)

            app.provider = load_schema(cloud_types[cloud])

            try:
                app.provider.load(cloud)
            except errors.SchemaCloudError as e:
                utils.error(e)
                sys.exit(1)

            if region:
                app.provider.region = region

            app.headless = True
            app.ui = None
            app.env['CONJURE_UP_HEADLESS'] = "1"
            app.loop.create_task(events.shutdown_watcher())
            app.loop.create_task(_start())
            app.loop.run_forever()

        else:
            # Interactive mode: run the urwid-based UI.
            app.ui = ConjureUI()
            app.ui.set_footer('Press ? for help')

            EventLoop.build_loop(app.ui, STYLES,
                                 unhandled_input=events.unhandled_input,
                                 handle_mouse=False)
            app.loop.create_task(events.shutdown_watcher())
            app.loop.create_task(_start())
            EventLoop.run()
    finally:
        # explicitly close asyncio event loop to avoid hitting the
        # following issue due to signal handlers added by
        # asyncio.create_subprocess_exec being cleaned up during final
        # garbage collection: https://github.com/python/asyncio/issues/396
        app.loop.close()
    sys.exit(app.exit_code)
def test_lock_during_lock_still_saves_value(self):
    # Nested lock() contexts must not discard writes.
    store = KV()
    with store.lock():
        with store.lock():
            store["a"] = "b"
    self.assertEqual(store["a"], "b")
def test_get_deleted_item_raises_key_error(self):
    # After deletion the key behaves as if it never existed.
    store = KV()
    store['a'] = 'b'
    del store['a']
    with self.assertRaises(KeyError):
        store['a']
def test_value_saved_by_one_kv_client_is_read_by_another(self):
    # Two handles on the same file observe each other's writes.
    writer = KV(self.tmp / 'kv.sqlite')
    writer['a'] = 'b'
    reader = KV(self.tmp / 'kv.sqlite')
    self.assertEqual(reader['a'], 'b')
def locker():
    # Acquire the database lock, announce it via q1, then hold it
    # until the main thread signals through q2.
    holder = KV(db_path)
    with holder.lock():
        q1.put(None)
        q2.get()
def test_iter_yields_keys(self):
    """Iterating a KV yields its keys (order not guaranteed)."""
    kv = KV()
    kv['a'] = 'x'
    kv['b'] = 'x'
    kv['c'] = 'x'
    # assertItemsEqual was removed in Python 3; assertCountEqual is the
    # same order-insensitive comparison.
    self.assertCountEqual(kv, ['a', 'b', 'c'])
def test_saved_item_is_retrieved_via_get(self):
    # get() mirrors __getitem__ for keys that exist.
    store = KV()
    store["a"] = "b"
    self.assertEqual(store.get("a"), "b")
def test_value_saved_with_int_key_is_retrieved_with_int_key(self):
    # Integer keys round-trip intact.
    store = KV()
    store[13] = 'a'
    self.assertEqual(store[13], 'a')
def test_value_saved_with_str_key_is_not_retrieved_with_int_key(self):
    # "13" and 13 are distinct keys; no implicit coercion.
    store = KV()
    store["13"] = "a"
    self.assertIsNone(store.get(13))
def test_new_kv_is_empty(self):
    # A freshly created store holds no items.
    fresh = KV()
    self.assertEqual(len(fresh), 0)
def test_delete_missing_item_raises_key_error(self):
    # Deleting an absent key raises, matching dict behaviour.
    store = KV()
    with self.assertRaises(KeyError):
        del store['missing']