def _update_inputs_connections(res_obj, args, old_connections, new_connections):  # NOQA
    """Reconcile a resource's connections and inputs against a new state.

    Disconnects edges present in ``old_connections`` but not in
    ``new_connections``, connects the edges that were added, saves every
    receiver that was touched, and finally applies ``args`` updates.

    :param res_obj: resource object whose inputs are updated
    :param args: mapping of input updates to apply (may be empty/falsy)
    :param old_connections: iterable of (emitter, emitter_input,
        receiver, receiver_input) tuples describing current edges
    :param new_connections: same shape, describing the desired edges
    """
    removed = [item for item in old_connections
               if item not in new_connections]
    added = [item for item in new_connections
             if item not in old_connections]

    # Collect every receiver we modified so each one gets saved.
    # The original code saved only the receiver from the last loop
    # iteration, leaving earlier receivers unsaved.
    touched_receivers = []

    for emitter, _, receiver, _ in removed:
        emitter_obj = resource.load(emitter)
        receiver_obj = resource.load(receiver)
        emitter_obj.disconnect(receiver_obj)
        touched_receivers.append(receiver_obj)

    for emitter, emitter_input, receiver, receiver_input in added:
        emitter_obj = resource.load(emitter)
        receiver_obj = resource.load(receiver)
        emitter_obj.connect(receiver_obj, {emitter_input: receiver_input})
        touched_receivers.append(receiver_obj)

    # TODO without save we will get error
    # that some values can not be updated
    # even if connection was removed
    for receiver_obj in touched_receivers:
        receiver_obj.db_obj.save()

    if args:
        res_obj.update(args)
def create_master():
    """Ensure the 'master' fuel node resource exists.

    Looks up the resource tagged ``nodemaster``; if it is missing,
    creates it from the master node's index and ip.
    """
    info = source.master()
    try:
        resource.load('nodemaster')
    except solar.dblayer.model.DBLayerNotFound:
        cr.create(
            'master', 'f2s/fuel_node',
            {'index': info[0], 'ip': info[1]},
            tags=['nodemaster'])
def backtrack(resource, input, values, real_values):
    """Print, as YAML, the chain of connections feeding a resource's inputs.

    If ``input`` is given only that input is traced; otherwise every input
    of the resource is traced. ``values`` includes leaf values in the
    output; ``real_values`` additionally prints the effective stored value.
    """
    r = sresource.load(resource)
    db_obj = r.db_obj

    def single(resource, name, get_val=False):
        # Recursively walk the single-edge graph feeding this input.
        db_obj = sresource.load(resource).db_obj
        edges = tuple(db_obj.inputs._single_edge(name))
        if not edges:
            leaf = dict(resource=resource, name=name)
            if get_val:
                leaf['value'] = db_obj.inputs[name]
            return leaf
        chain = []
        for (rname, rinput), _, meta in edges:
            chain.append(dict(resource=resource, name=name))
            nested = single(rname, rinput, get_val)
            if meta and isinstance(nested, dict):
                nested["meta"] = meta
            chain.append(nested)
        return chain

    inps = {}
    if input:
        inps[input] = single(resource, input, values)
    else:
        for _inp in db_obj.inputs:
            inps[_inp] = single(resource, _inp, values)

    for name, values in inps.iteritems():
        click.echo(yaml.safe_dump({name: values}, default_flow_style=False))
        if real_values:
            click.echo("! Real value: %r\n"
                       % sresource.load(resource).db_obj.inputs[name])
def test_removal(self):
    """Test that connection removed with resource."""
    sample_meta_dir = self.make_resource_meta("""
id: sample
handler: ansible
version: 1.0.0
input:
  value:
    schema: int
    value: 0
""")
    first = self.create_resource('sample1', sample_meta_dir, {'value': 1})
    second = self.create_resource('sample2', sample_meta_dir, {})
    signals.connect(first, second)
    self.assertEqual(first.args['value'], second.args['value'])

    # Reload both, delete the emitter; the receiver must fall back
    # to its schema default.
    emitter = resource.load('sample1')
    receiver = resource.load('sample2')
    emitter.delete()
    self.assertEqual(receiver.args['value'], 0)
def test_connections_recreated_after_load(self):
    """Test if connections are ok after load

    Create resource in some process. Then in other process load it.
    All connections should remain the same.
    """
    sample_meta_dir = self.make_resource_meta("""
id: sample
handler: ansible
version: 1.0.0
input:
  value:
    schema: int
    value: 0
""")

    def creating_process():
        # Simulates the "other process" that originally created the
        # resources and wired them together.
        first = self.create_resource(
            'sample1', sample_meta_dir, {'value': 1})
        second = self.create_resource(
            'sample2', sample_meta_dir,
        )
        signals.connect(first, second)
        self.assertEqual(first.args['value'], second.args['value'])

    creating_process()
    # Drop the cached clients to force re-reading connections on load.
    signals.CLIENTS = {}

    loaded1 = resource.load('sample1')
    loaded2 = resource.load('sample2')
    loaded1.update({'value': 2})
    self.assertEqual(loaded1.args['value'], loaded2.args['value'])
def add_solar_agent(i):
    """Create solar-agent transport resources for node *i* and wire them."""
    agent = cr.create(
        "solar_agent_transport%s" % i,
        "resources/transport_solar_agent",
        {"solar_agent_user": "******",
         "solar_agent_password": "******"},
    )[0]
    node_transports = resource.load("transports%s" % i)
    ssh = resource.load("ssh_transport%s" % i)
    helper = cr.create(
        "transports_for_solar_agent%s" % i, "resources/transports")[0]
    # install solar_agent with ssh
    signals.connect(helper, agent, {})
    signals.connect(
        ssh, helper,
        {"key": "transports:key",
         "user": "******",
         "port": "transports:port",
         "name": "transports:name"},
    )
    # add solar_agent to transports on this node
    signals.connect(
        agent, node_transports,
        {"solar_agent_user": "******",
         "solar_agent_port": "transports:port",
         "solar_agent_password": "******",
         "name": "transports:name"},
    )
def test_connections_recreated_after_load(self):
    """
    Create resource in some process. Then in other process load it.
    All connections should remain the same.
    """
    sample_meta_dir = self.make_resource_meta("""
id: sample
handler: ansible
version: 1.0.0
input:
  value:
    schema: int
    value: 0
""")

    def creating_process():
        # The "creator" side: build two resources and connect them.
        first = self.create_resource(
            'sample1', sample_meta_dir, {'value': 1})
        second = self.create_resource(
            'sample2', sample_meta_dir, {})
        signals.connect(first, second)
        self.assertEqual(first.args['value'], second.args['value'])

    creating_process()
    # Reset cached clients so the subsequent loads rebuild connections.
    signals.CLIENTS = {}

    loaded1 = resource.load('sample1')
    loaded2 = resource.load('sample2')
    loaded1.update({'value': 2})
    self.assertEqual(loaded1.args['value'], loaded2.args['value'])
def add_solard(i):
    """Create solard transport resources for node *i* and wire them up."""
    agent = vr.create(
        'solard_transport%s' % i, 'resources/transport_solard',
        {'solard_user': '******',
         'solard_password': '******'})[0]
    node_transports = resource.load('transports%s' % i)
    ssh = resource.load('ssh_transport%s' % i)
    helper = vr.create(
        'transports_for_solard%s' % i, 'resources/transports')[0]
    # install solard with ssh
    signals.connect(helper, agent, {})
    signals.connect(
        ssh, helper,
        {'ssh_key': 'transports:key',
         'ssh_user': '******',
         'ssh_port': 'transports:port',
         'name': 'transports:name'})
    # add solard to transports on this node
    signals.connect(
        agent, node_transports,
        {'solard_user': '******',
         'solard_port': 'transports:port',
         'solard_password': '******',
         'name': 'transports:name'})
def disconnect(receiver, emitter):
    """CLI handler: break the connection from *emitter* to *receiver*."""
    click.echo('Disconnect {} from {}'.format(emitter, receiver))
    emitter_res = sresource.load(emitter)
    receiver_res = sresource.load(receiver)
    click.echo(emitter_res)
    click.echo(receiver_res)
    emitter_res.disconnect(receiver_res)
    show_emitter_connections(emitter_res)
def disconnect(receiver, emitter):
    """CLI handler: remove the signal connection emitter -> receiver."""
    click.echo('Disconnect {} from {}'.format(emitter, receiver))
    emitter_res = sresource.load(emitter)
    receiver_res = sresource.load(receiver)
    click.echo(emitter_res)
    click.echo(receiver_res)
    signals.disconnect(emitter_res, receiver_res)
    show_emitter_connections(emitter_res)
def disconnect(receiver, emitter):
    """CLI handler: disconnect and show the emitter's remaining clients."""
    click.echo('Disconnect {} from {}'.format(emitter, receiver))
    emitter_res = sresource.load(emitter)
    receiver_res = sresource.load(receiver)
    click.echo(emitter_res)
    click.echo(receiver_res)
    signals.disconnect(emitter_res, receiver_res)
    clients = signals.Connections.read_clients()
    show_emitter_connections(emitter_res.name, clients[emitter_res.name])
def _revert_remove(logitem):
    """Resource should be created with all previous connections
    """
    commited = orm.DBCommitedState.load(logitem.res)
    args = dictdiffer.revert(logitem.diff, commited.inputs)
    connections = dictdiffer.revert(
        logitem.signals_diff, sorted(commited.connections))
    # Recreate the resource itself, then re-establish each connection.
    resource.Resource(
        logitem.res, logitem.base_path, args=args, tags=commited.tags)
    for emitter, emitter_input, receiver, receiver_input in connections:
        signals.connect(
            resource.load(emitter), resource.load(receiver),
            {emitter_input: receiver_input})
def create_master():
    """Create the 'master' fuel node resource unless it already exists."""
    master_info = source.master()
    try:
        resource.load('nodemaster')
    except solar.dblayer.model.DBLayerNotFound:
        # Not found: create the master node resource from the
        # (index, ip) data returned by the source.
        cr.create(
            'master', 'f2s/fuel_node',
            {'index': master_info[0], 'ip': master_info[1]},
            tags=['nodemaster'])
def _revert_remove(logitem):
    """Resource should be created with all previous connections"""
    commited = CommitedResource.get(logitem.resource)
    args = dictdiffer.revert(logitem.diff, commited.inputs)
    connections = dictdiffer.revert(
        logitem.connections_diff, sorted(commited.connections))
    # Recreate the resource with connection-aware args, then rebuild
    # every connection it previously had.
    resource.Resource(
        logitem.resource, logitem.base_path,
        args=_get_args_to_update(args, connections),
        tags=commited.tags)
    for emitter, emitter_input, receiver, receiver_input in connections:
        signals.connect(
            resource.load(emitter), resource.load(receiver),
            {emitter_input: receiver_input})
def fuel_data(nobj):
    """Create a fuel_data resource for node *nobj* and wire its events.

    Registers React events so that running/updating the fuel_data
    resource triggers ``pre_deployment_start``, then connects the node
    resource to it.
    """
    uid = str(nobj.data['id'])
    env_id = nobj.data['cluster']
    res = resource.Resource(
        'fuel_data{}'.format(uid), 'f2s/fuel_data',
        {'uid': uid, 'env': env_id},
        tags=['node%s' % nobj.data['id']])
    events = [
        evapi.React(res.name, 'run', 'success',
                    'pre_deployment_start', 'run'),
        evapi.React(res.name, 'update', 'success',
                    'pre_deployment_start', 'run')]
    evapi.add_events(res.name, events)
    # Load the node once, right before connecting. The original code
    # loaded it twice and discarded the first result (dead code).
    node = resource.load('node{}'.format(uid))
    node.connect(res, {})
def connect(mapping, receiver, emitter):
    """CLI handler: connect *emitter* to *receiver* using a mapping string.

    The mapping is parsed as JSON first; on failure it falls back to
    whitespace-separated "src->dst" pairs.
    """
    mapping_parsed = {}
    click.echo('Connect {} to {}'.format(emitter, receiver))
    emitter = sresource.load(emitter)
    receiver = sresource.load(receiver)
    try:
        mapping_parsed.update(json.loads(mapping))
    except ValueError:
        # Not JSON: parse "a->b c->d" style pairs instead.
        for pair in mapping.split():
            src, dst = pair.split('->')
            mapping_parsed[src] = dst
    signals.connect(emitter, receiver, mapping=mapping_parsed)
    show_emitter_connections(emitter)
def disconnect(receiver, emitter, input):
    """CLI handler: disconnect emitter from receiver, optionally one input."""
    if input:
        click.echo('Disconnect {} from {} only {}'.format(
            emitter, receiver, input
        ))
    else:
        click.echo('Disconnect {} from {}'.format(emitter, receiver))
    emitter_res = sresource.load(emitter)
    receiver_res = sresource.load(receiver)
    click.echo(emitter_res)
    click.echo(receiver_res)
    emitter_res.disconnect(receiver_res, input)
    show_emitter_connections(emitter_res)
def backtrack_inputs(resource):
    """Dump, as YAML, where every input of *resource* gets its value from."""
    loaded = sresource.load(resource)

    def trace(item):
        # Recursively follow emitters; lists/dicts are traced element-wise.
        def fmt(inp):
            return '{}::{}'.format(inp.resource.name, inp.name)

        if isinstance(item, list):
            return [trace(sub) for sub in item]
        if isinstance(item, dict):
            return {key: trace(sub) for key, sub in item.items()}
        emitted = item.backtrack_value_emitter(level=1)
        # A self-referencing DBResourceInput terminates the chain.
        if (isinstance(item, orm.DBResourceInput) and
                isinstance(emitted, orm.DBResourceInput) and
                item == emitted):
            return (fmt(item), )
        return (fmt(item), trace(emitted))

    for inp in loaded.resource_inputs().values():
        click.echo(
            yaml.safe_dump({inp.name: trace(inp)}, default_flow_style=False))
def test_save(self):
    """Saving a DBResource makes it loadable and equal to the original."""
    created = orm.DBResource(id='test1', name='test1', base_path='x')
    created.save()
    loaded = resource.load(created.id)
    self.assertEqual(created, loaded.db_obj)
def _discard_update(item):
    """Roll a pending update back to the committed inputs/connections."""
    obj = resource.load(item.res)
    current_connections = obj.connections
    reverted_connections = dictdiffer.revert(
        item.signals_diff, sorted(current_connections))
    reverted_args = dictdiffer.revert(item.diff, obj.args)
    _update_inputs_connections(
        obj, reverted_args, current_connections, reverted_connections)
def test(r):
    """Run the resource's test.py, returning {name: status-dict}.

    Returns {} when the resource has no test script; otherwise a mapping
    with status 'ok' or 'error' (with a traceback message).
    """
    if isinstance(r, basestring):
        r = resource.load(r)
    log.debug('Trying {}'.format(r.name))
    script_path = os.path.join(r.db_obj.base_path, 'test.py')
    if not os.path.exists(script_path):
        log.warning('resource {} has no tests'.format(r.name))
        return {}
    log.debug('File {} found'.format(script_path))
    with open(script_path) as f:
        module = imp.load_module('{}_test'.format(r.name), f, script_path,
                                 ('', 'r', imp.PY_SOURCE))
    try:
        module.test(r)
    except Exception:
        return {
            r.name: {
                'status': 'error',
                'message': traceback.format_exc(),
            }
        }
    return {
        r.name: {
            'status': 'ok',
        },
    }
def populate_log_item(log_item, diff=None):
    """Fill a log item's diff/connections/base_path from a resource diff.

    When *diff* is None it is computed from the log item's resource.
    Returns the (mutated) log item.
    """
    resolved = diff
    if resolved is None:
        resolved = Diff.create_from_resource(resource.load(log_item.resource))
    log_item.base_path = resolved.path
    log_item.connections_diff = resolved.connections
    log_item.diff = resolved.diff
    return log_item
def test(r):
    """Execute a resource's bundled test.py and report its status.

    Returns {} when no test script exists; otherwise a mapping from
    resource name to a status dict ('ok' or 'error' + traceback).
    """
    if isinstance(r, basestring):
        r = resource.load(r)
    log.debug('Trying {}'.format(r.name))
    script_path = os.path.join(r.db_obj.base_path, 'test.py')
    if not os.path.exists(script_path):
        log.warning('resource {} has no tests'.format(r.name))
        return {}
    log.debug('File {} found'.format(script_path))
    with open(script_path) as f:
        module = imp.load_module(
            '{}_test'.format(r.name),
            f,
            script_path,
            ('', 'r', imp.PY_SOURCE)
        )
    try:
        module.test(r)
    except Exception:
        return {
            r.name: {
                'status': 'error',
                'message': traceback.format_exc(),
            }
        }
    return {
        r.name: {
            'status': 'ok',
        },
    }
def effective_input_value(resource, input_name):
    """Show backtrack info and the effective value of one input."""
    inp = sresource.load(resource).resource_inputs()[input_name]
    click.echo(yaml.safe_dump(backtrack_single(inp), default_flow_style=False))
    click.echo('-' * 20)
    click.echo(inp.backtrack_value())
    click.echo('-' * 20)
def connect(mapping, receiver, emitter):
    """CLI handler: connect *emitter* to *receiver*.

    The optional mapping string is parsed as JSON, falling back to
    whitespace-separated "src->dst" pairs; with no mapping, None is
    passed through.
    """
    mapping_parsed = None
    emitter = sresource.load(emitter)
    receiver = sresource.load(receiver)
    click.echo('Connect {} to {}'.format(emitter, receiver))
    if mapping:
        mapping_parsed = {}
        try:
            mapping_parsed.update(json.loads(mapping))
        except ValueError:
            # Not JSON: interpret as "a->b c->d" pairs.
            for pair in mapping.split():
                src, dst = pair.split('->')
                mapping_parsed[src] = dst
    emitter.connect(receiver, mapping=mapping_parsed)
    show_emitter_connections(emitter)
def tag(add, tag_name, resource_name):
    """Add or remove a single tag on a resource, then persist it."""
    click.echo('Tag {} with {} {}'.format(resource_name, tag_name, add))
    res = sresource.load(resource_name)
    if add:
        res.add_tag(tag_name)
    else:
        res.remove_tag(tag_name)
    res.save()
def emitter(self):
    """Return the value of the input feeding this one, or None if unfed."""
    from solar.core import resource
    found = signals.Connections.emitter(self._attached_to_name, self.name)
    if found is None:
        return None
    emitter_name, emitter_input_name = found
    return resource.load(emitter_name).args[emitter_input_name]
def tag(add, tags, resource_name):
    """Add or remove a set of tags on a resource and report the change."""
    res = sresource.load(resource_name)
    if add:
        res.add_tags(*tags)
        click.echo('Tag(s) {} added to {}'.format(tags, resource_name))
    else:
        res.remove_tags(*tags)
        click.echo('Tag(s) {} removed from {}'.format(tags, resource_name))
def receivers(self):
    """Yield the current value of every input fed by this one."""
    from solar.core import resource
    connections = signals.Connections.receivers(
        self._attached_to_name, self.name)
    for receiver_name, receiver_input in connections:
        yield resource.load(receiver_name).args[receiver_input]
def prefetch(name, tag):
    """Prefetch resources selected by --name or --tag and print them.

    Raises a ClickException when neither selector is given (the original
    code fell through with ``resources`` unbound, raising NameError).
    """
    if name:
        resources = [sresource.load(name)]
    elif tag:
        resources = sresource.load_by_tags(set(tag))
    else:
        raise click.ClickException("No resource defined, use --tag or --name")
    for res in resources:
        res.prefetch()
        click.echo(res.color_repr())
def _discard_update(item):
    """Restore a resource's inputs and connections to committed state."""
    obj = resource.load(item.resource)
    current = obj.connections
    reverted = dictdiffer.revert(item.connections_diff, sorted(current))
    inputs = dictdiffer.revert(item.diff, obj.args)
    _update_inputs_connections(
        obj, _get_args_to_update(inputs, current), current, reverted)
def fuel_data(nobj):
    """Create the fuel_data resource for node *nobj* and attach events.

    Adds React events mapping run/update success to
    ``pre_deployment_start`` and connects the node resource to the
    newly created fuel_data resource.
    """
    uid = str(nobj.data['id'])
    env_id = nobj.data['cluster']
    res = resource.Resource(
        'fuel_data{}'.format(uid), 'f2s/fuel_data',
        {'uid': uid, 'env': env_id},
        tags=['node%s' % nobj.data['id']])
    events = [
        evapi.React(res.name, 'run', 'success',
                    'pre_deployment_start', 'run'),
        evapi.React(res.name, 'update', 'success',
                    'pre_deployment_start', 'run')
    ]
    evapi.add_events(res.name, events)
    # Single load right before use; the original loaded the node twice
    # and discarded the first result (dead code).
    node = resource.load('node{}'.format(uid))
    node.connect(res, {})
def add_connections(resource_name, args):
    """Create signal connections described by an args mapping.

    Each value may be a single connection spec or a list of them;
    specs that parse to None are skipped.
    """
    connections = []
    for receiver_input, arg in args.items():
        specs = arg if isinstance(arg, list) else [arg]
        for spec in specs:
            parsed = parse_connection(resource_name, receiver_input, spec)
            if parsed is not None:
                connections.append(parsed)
    for conn in connections:
        parent = resource.load(conn['parent'])
        child = resource.load(conn['child'])
        mapping = {conn['parent_input']: conn['child_input']}
        signals.connect(parent, child, mapping, conn['events'])
def _discard_update(item):
    """Revert a pending update, restoring committed inputs/connections."""
    loaded = resource.load(item.resource)
    existing = loaded.connections
    wanted = dictdiffer.revert(item.connections_diff, sorted(existing))
    reverted_inputs = dictdiffer.revert(item.diff, loaded.args)
    _update_inputs_connections(
        loaded,
        _get_args_to_update(reverted_inputs, existing),
        existing,
        wanted)
def env(env_id, uids, full):
    """Prepares solar environment based on fuel environment.

    It should perform all required changes for solar to work
    """
    env = Environment(env_id)
    uids = list(uids) if uids else [
        str(n.data['id']) for n in env.get_all_nodes()
    ]
    for nobj in source.nodes(uids):
        try:
            # FIXME
            resource.load('node%s' % nobj.data['id'])
        # Narrowed from a bare ``except:`` which also swallowed
        # SystemExit/KeyboardInterrupt; "not found" still triggers creation.
        except Exception:
            node(nobj)
            fuel_data(nobj)
    _prefetch(env, uids)
    create_master()
    allocate(source.graph(env_id),
             ['null', 'master'] + uids if full else uids)
def env(env_id, uids, full):
    """Prepares solar environment based on fuel environment.

    It should perform all required changes for solar to work
    """
    env = Environment(env_id)
    uids = list(uids) if uids else [
        str(n.data['id']) for n in env.get_all_nodes()]
    for nobj in source.nodes(uids):
        try:
            # FIXME
            resource.load('node%s' % nobj.data['id'])
        # Narrowed from a bare ``except:`` which also swallowed
        # SystemExit/KeyboardInterrupt; load failure still creates the node.
        except Exception:
            node(nobj)
            fuel_data(nobj)
    _prefetch(env, uids)
    create_master()
    allocate(
        source.graph(env_id),
        ['null', 'master'] + uids if full else uids)
def prefetch(name, tag):
    """Prefetch selected resources and print their colored repr."""
    if name:
        selected = [sresource.load(name)]
    elif tag:
        selected = sresource.load_by_tags(set(tag))
    else:
        raise click.ClickException("No resource defined, use --tag or --name")
    for res in selected:
        res.prefetch()
        click.echo(res.color_repr())
def update(name, args):
    """CLI handler: update resource inputs from JSON or k=v arguments.

    Each argument is tried as JSON first; otherwise it is parsed as a
    single "key=value" pair.
    """
    args_parsed = {}
    for arg in args:
        try:
            args_parsed.update(json.loads(arg))
        except ValueError:
            # Split on the FIRST '=' only, so values may themselves
            # contain '=' (the original split raised ValueError on them).
            k, v = arg.split('=', 1)
            args_parsed[k] = v
    click.echo('Updating resource {} with args {}'.format(name, args_parsed))
    res = sresource.load(name)
    res.update(args_parsed)
def _revert_update(logitem):
    """Revert of update should update inputs and connections
    """
    res_obj = resource.load(logitem.res)
    commited = res_obj.load_commited()
    reverted_args = dictdiffer.revert(logitem.diff, commited.inputs)
    reverted_connections = dictdiffer.revert(
        logitem.signals_diff, sorted(commited.connections))
    _update_inputs_connections(
        res_obj, reverted_args, commited.connections, reverted_connections)
def _prefetch(env, uids):
    """Push fuel deployment facts into each node's fuel_data resource."""
    facts = {node['uid']: node
             for node in env.get_default_facts('deployment', uids)}
    for uid in uids:
        res = resource.load('fuel_data{}'.format(uid))
        node_facts = facts[uid]
        existing_args = res.args
        # Register any fact keys the resource doesn't know about yet.
        for key in node_facts:
            if key not in existing_args:
                res.input_add(key)
        res.update(node_facts)
def _revert_update(logitem):
    """Revert of update should update inputs and connections"""
    res_obj = resource.load(logitem.resource)
    commited = res_obj.load_commited()
    reverted_connections = dictdiffer.revert(
        logitem.connections_diff, sorted(commited.connections))
    reverted_inputs = dictdiffer.revert(logitem.diff, commited.inputs)
    _update_inputs_connections(
        res_obj,
        _get_args_to_update(reverted_inputs, reverted_connections),
        commited.connections,
        reverted_connections)
def add_solar_agent(i):
    """Create solar-agent transports for node *i* and connect them."""
    agent = vr.create(
        'solar_agent_transport%s' % i, 'resources/transport_solar_agent',
        {'solar_agent_user': '******',
         'solar_agent_password': '******'})[0]
    node_transports = resource.load('transports%s' % i)
    ssh = resource.load('ssh_transport%s' % i)
    helper = vr.create(
        'transports_for_solar_agent%s' % i, 'resources/transports')[0]
    # install solar_agent with ssh
    signals.connect(helper, agent, {})
    signals.connect(
        ssh, helper,
        {'ssh_key': 'transports:key',
         'ssh_user': '******',
         'ssh_port': 'transports:port',
         'name': 'transports:name'})
    # add solar_agent to transports on this node
    signals.connect(
        agent, node_transports,
        {'solar_agent_user': '******',
         'solar_agent_port': 'transports:port',
         'solar_agent_password': '******',
         'name': 'transports:name'})
def remove(name, tag, f):
    """Remove resources selected by name/tag/all; force with *f*."""
    if name:
        targets = [sresource.load(name)]
    elif tag:
        targets = sresource.load_by_tags(set(tag))
    else:
        targets = sresource.load_all()
    for res in targets:
        res.remove(force=f)
        if f:
            click.echo('Resource %s removed from database' % res.name)
        else:
            click.echo(
                'Resource %s will be removed after commiting changes.'
                % res.name)
def backtrack(resource, input, values, real_values):
    """Print the connection chain feeding one or all inputs of a resource.

    ``values`` includes leaf values in the dump; ``real_values`` also
    echoes the effective stored value for each input.
    """
    r = sresource.load(resource)
    db_obj = r.db_obj

    def single(resource, name, get_val=False):
        # Depth-first walk over the single-edge graph feeding the input.
        db_obj = sresource.load(resource).db_obj
        edges = tuple(db_obj.inputs._single_edge(name))
        if not edges:
            leaf = dict(resource=resource, name=name)
            if get_val:
                leaf['value'] = db_obj.inputs[name]
            return leaf
        chain = []
        for (rname, rinput), _, meta in edges:
            chain.append(dict(resource=resource, name=name))
            nested = single(rname, rinput, get_val)
            if meta and isinstance(nested, dict):
                nested['meta'] = meta
            chain.append(nested)
        return chain

    inps = {}
    if input:
        inps[input] = single(resource, input, values)
    else:
        for _inp in db_obj.inputs:
            inps[_inp] = single(resource, _inp, values)

    for name, values in inps.iteritems():
        click.echo(yaml.safe_dump({name: values}, default_flow_style=False))
        if real_values:
            click.echo('! Real value: %r\n'
                       % sresource.load(resource).db_obj.inputs[name])
def _update_inputs_connections(res_obj, args, old_connections,
                               new_connections):
    """Update a resource's inputs, then reconcile connection differences.

    Applies ``args`` first, then disconnects edges that disappeared and
    connects edges that were added.
    """
    res_obj.update(args)
    removed = [conn for conn in old_connections
               if conn not in new_connections]
    added = [conn for conn in new_connections
             if conn not in old_connections]
    for emitter, _, receiver, _ in removed:
        signals.disconnect(resource.load(emitter), resource.load(receiver))
    for emitter, emitter_input, receiver, receiver_input in added:
        signals.connect(resource.load(emitter), resource.load(receiver),
                        {emitter_input: receiver_input})
def test_load(self):
    """A loaded resource equals the created one (args and tags)."""
    sample_meta_dir = self.make_resource_meta("""
id: sample
handler: ansible
version: 1.0.0
input:
  value:
    schema: int
    value: 0
""")
    created = self.create_resource('sample', sample_meta_dir, {'value': 1})
    loaded = resource.load('sample')
    self.assertDictEqual(created.args, loaded.args)
    self.assertListEqual(list(created.tags), list(loaded.tags))
def insert(self, changed_resources, changes_graph):
    """Record this event's parent action as a pending node in the graph."""
    changed_resources.append(self.parent)
    # TODO: solve this circular import problem
    from solar.core import resource
    try:
        loaded = resource.load(self.parent)
    except KeyError:
        # orm throws this error when we're NOT using resource there
        location_id = None
    else:
        location_id = loaded.args['location_id']
    changes_graph.add_node(
        self.parent_node,
        status='PENDING',
        target=location_id,
        errmsg=None,
        type='solar_resource',
        args=[self.parent, self.parent_action])
def run(dry_run_mapping, dry_run, action, tags):
    """Execute *action* on every resource whose tags match the expression."""
    if dry_run:
        dry_run_executor = executors.DryRunExecutor(
            mapping=json.loads(dry_run_mapping))
    matching = [r for r in orm.DBResource.all()
                if Expression(tags, r.tags).evaluate()]
    for r in matching:
        actions.resource_action(sresource.load(r['id']), action)
    if dry_run:
        click.echo('EXECUTED:')
        for key in dry_run_executor.executed:
            click.echo('{}: {}'.format(
                click.style(dry_run_executor.compute_hash(key), fg='green'),
                str(key)))
def single(resource, name, get_val=False):
    """Recursively describe where input *name* of *resource* comes from.

    Returns a leaf dict when the input has no incoming edge, otherwise a
    list alternating this input's entry with the traced emitter entries.
    """
    db_obj = sresource.load(resource).db_obj
    edges = tuple(db_obj.inputs._single_edge(name))
    if not edges:
        leaf = dict(resource=resource, name=name)
        if get_val:
            leaf['value'] = db_obj.inputs[name]
        return leaf
    chain = []
    for (rname, rinput), _, meta in edges:
        chain.append(dict(resource=resource, name=name))
        nested = single(rname, rinput, get_val)
        if meta and isinstance(nested, dict):
            nested['meta'] = meta
        chain.append(nested)
    return chain
def action(dry_run_mapping, dry_run, action, resource):
    """Run one action on one resource, optionally as a dry run."""
    if dry_run:
        dry_run_executor = executors.DryRunExecutor(
            mapping=json.loads(dry_run_mapping))
    click.echo('action {} for resource {}'.format(action, resource))
    loaded = sresource.load(resource)
    try:
        actions.resource_action(loaded, action)
    except errors.SolarError as e:
        # Log the failure and exit non-zero for the CLI caller.
        log.debug(e)
        sys.exit(1)
    if dry_run:
        click.echo('EXECUTED:')
        for key in dry_run_executor.executed:
            click.echo('{}: {}'.format(
                click.style(dry_run_executor.compute_hash(key), fg='green'),
                str(key)))
def show(name, tag, json, color):
    """Print resources selected by name/tag/all, optionally as JSON.

    Paged output by default; plain echo when dumping JSON.
    """
    # The `json` parameter shadows the json module, so the original
    # `json.dumps(...)` called .dumps on a bool and raised
    # AttributeError. Alias-import the real module under another name;
    # the parameter name must stay `json` for the CLI option binding.
    import json as json_module
    if name:
        resources = [sresource.load(name)]
    elif tag:
        resources = sresource.load_by_tags(set(tag))
    else:
        resources = sresource.load_all()
    echo = click.echo_via_pager
    if json:
        output = json_module.dumps(
            [r.to_dict() for r in resources], indent=2)
        echo = click.echo
    else:
        if color:
            formatter = lambda r: r.color_repr()
        else:
            formatter = lambda r: unicode(r)
        output = '\n'.join(formatter(r) for r in resources)
    if output:
        echo(output)