def get_node_default_facts(env, nodes=None):
    """Return default deployment facts for the given env's nodes.

    The fact whose uid is "common" is not returned on its own; instead a
    deep copy of it is merged with each per-node fact (per-node values
    passed as the update argument to ``helpers.merge_dicts``).  The last
    "common" entry wins if several are present.  If there is no "common"
    fact (or it is falsy), the per-node facts are returned unchanged.
    """
    facts = env.get_default_facts('deployment', nodes=nodes)
    common_fact = None
    per_node_facts = []
    for fact in facts:
        if fact.get("uid") == "common":
            common_fact = fact
        else:
            per_node_facts.append(fact)
    if not common_fact:
        return per_node_facts
    merged = []
    for fact in per_node_facts:
        merged.append(helpers.merge_dicts(copy.deepcopy(common_fact), fact))
    return merged
def post_restore_action(self, context):
    """Re-register OpenStack releases in Nailgun after a restore.

    Reads the release fixtures shipped inside the nailgun container,
    overlays each fixture's fields on top of the first (base) fixture,
    posts every resulting release to the Nailgun REST API, and finally
    re-syncs deployment tasks from the puppet manifests.

    :param context: restore context; only ``context.password`` is read
        here — NOTE(review): the sibling ``_post_restore_action``
        variants also pass a user name, confirm this poster's signature.
    """
    data, _ = docker.run_in_container(
        "nailgun",
        ["cat", "/usr/share/fuel-openstack-metadata/openstack.yaml"],
        stdout=subprocess.PIPE)
    # NOTE(review): yaml.load without an explicit Loader is deprecated
    # and can construct arbitrary Python objects.  The fixture file
    # ships inside the nailgun container, so the input is presumably
    # trusted — confirm before switching to yaml.safe_load.
    fixtures = yaml.load(data)
    # The first fixture carries the common base fields; every following
    # fixture overlays its own fields on top of that base.
    base_release_fields = fixtures[0]['fields']
    for fixture in fixtures[1:]:
        release = helpers.merge_dicts(
            base_release_fields, fixture['fields'])
        self.__post_data_to_nailgun(
            "/api/v1/releases/", release, context.password)
    # Sync deployment tasks from the puppet manifests into the
    # newly created releases.
    subprocess.call([
        "fuel", "release", "--sync-deployment-tasks",
        "--dir", "/etc/puppet/",
    ])
def _post_restore_action(self):
    """Re-create releases and flag environments as previously deployed.

    After a Fuel master restore this:
      * re-posts every release fixture to the Nailgun REST API,
      * re-syncs deployment tasks from the puppet manifests,
      * sets ``deployed_before`` in the ``generated`` blob of every row
        of the ``attributes`` table, so previously deployed
        environments are recognized as such.
    """
    fixtures_data, _ = docker.run_in_container(
        "nailgun",
        ["cat", magic_consts.OPENSTACK_FIXTURES],
        stdout=subprocess.PIPE)
    # NOTE(review): yaml.load without an explicit Loader can construct
    # arbitrary Python objects; the fixture file ships inside the
    # nailgun container so the input is presumably trusted — confirm
    # before switching to yaml.safe_load.
    fixtures = yaml.load(fixtures_data)
    # The first fixture carries the common base fields; every following
    # fixture overlays its own fields on top of that base.
    base_release_fields = fixtures[0]['fields']
    for fixture in fixtures[1:]:
        release = helpers.merge_dicts(base_release_fields,
                                      fixture['fields'])
        self.__post_data_to_nailgun("/api/v1/releases/", release,
                                    self.context.user,
                                    self.context.password)
    subprocess.call([
        "fuel", "release", "--sync-deployment-tasks",
        "--dir", "/etc/puppet/",
        "--user", self.context.user,
        "--password", self.context.password,
    ])
    # psql invocation prefix, reused for both the select and the update
    # (typo "sql_run_prams" fixed).
    sql_run_params = [
        "sudo", "-u", "postgres", "psql", "nailgun", "--tuples-only", "-c"
    ]
    results, _ = docker.run_in_container(
        "postgres",
        sql_run_params + ["select id, generated from attributes;"],
        stdout=subprocess.PIPE)
    values = []
    sql = 'update attributes as a set generated = b.generated ' \
        'from (values {0}) as b(id, generated) where a.id = b.id;'
    for line in results.strip().split("\n"):
        # An empty attributes table yields a single blank line; skip
        # blanks so the unpack below cannot raise ValueError.
        if not line.strip():
            continue
        c_id, c_data = line.split("|", 1)
        # Renamed from "data" to avoid shadowing the fixtures blob read
        # above.
        generated = json.loads(c_data)
        generated["deployed_before"] = {"value": True}
        values.append((c_id, json.dumps(generated)))
    if values:
        sql = sql.format(','.join(
            ["({0}, '{1}')".format(*v) for v in values]))
        docker.run_in_container("postgres", sql_run_params + [sql],
                                stdout=subprocess.PIPE)
def _post_restore_action(self):
    """Re-create releases and flag environments as previously deployed.

    After a restore this re-posts every release fixture to the Nailgun
    REST API, re-syncs deployment tasks from the puppet manifests, sets
    ``deployed_before`` in the ``generated`` blob of every row of the
    ``attributes`` table, and re-creates remote log links.
    """
    raw_fixtures, _ = docker.run_in_container(
        "nailgun",
        ["cat", magic_consts.OPENSTACK_FIXTURES],
        stdout=subprocess.PIPE)
    fixtures = yaml.load(raw_fixtures)
    # First fixture carries the base fields; later fixtures overlay it.
    base_fields = fixtures[0]['fields']
    for extra in fixtures[1:]:
        release = helpers.merge_dicts(base_fields, extra['fields'])
        self.__post_data_to_nailgun(
            "/api/v1/releases/", release,
            self.context.user, self.context.password)
    subprocess.call(
        [
            "fuel", "release", "--sync-deployment-tasks",
            "--dir", "/etc/puppet/",
        ],
        env=self.context.get_credentials_env())
    rows = self._run_sql_in_container(
        "select id, generated from attributes;")
    updates = []
    for row in rows:
        attr_id, raw_generated = row.split("|", 1)
        generated = json.loads(raw_generated)
        generated["deployed_before"] = {"value": True}
        updates.append("({0}, '{1}')".format(attr_id,
                                             json.dumps(generated)))
    if updates:
        self._run_sql_in_container(
            'update attributes as a set generated = b.generated '
            'from (values {0}) as b(id, generated) '
            'where a.id = b.id;'.format(','.join(updates))
        )
    self._create_links_on_remote_logs()
def test_merge_dicts(mocker, base, update, result):
    """Parametrized check: merging *update* into *base* yields *result*."""
    merged = helpers.merge_dicts(base, update)
    assert merged == result
def extend(obj):
    """Resolve an 'extend' reference in *obj*, recursively.

    If *obj* has no 'extend' key it is returned as-is.  Otherwise the
    value under 'extend' is itself resolved first (and written back to
    ``obj['extend']``), then merged with *obj* via
    ``helpers.merge_dicts`` with *obj* passed as the update argument.
    """
    if 'extend' not in obj:
        return obj
    obj['extend'] = extend(obj['extend'])
    return helpers.merge_dicts(obj['extend'], obj)