def export_to_file(self, posts_list, photo_list_path):
    with open(photo_list_path, "w", encoding='utf-8') as f:
        if "json" in photo_list_path:
            json_str = json.dumps(posts_list, ensure_ascii=True, indent=3,
                                  separators=(',', ': '))
            f.write(json_str.encode('utf-8', 'xmlcharrefreplace')
                    .decode("utf-8").replace(r"\\\u", r"\\u"))
        else:
            # write YAML to the same opened file handle
            yaml.safe_dump(posts_list, f)
def _rewrite(self):
    if self._protect_rewrites:
        raise RuntimeError(
            "The default file is not allowed to be overwritten. Please copy a file using "
            "config.copy_file(<path>) for your use.")
    with open(self._path, "w") as fd:
        yaml.safe_dump(self._config, fd)
def dump_yaml(test, outfile):
    import oyaml as yaml
    yaml.safe_dump(test['metadata'], outfile, default_flow_style=False)
    yaml.safe_dump({'test_cases': test['test_cases']}, outfile,
                   default_flow_style=False)
def deep_set(self, keys, value=None):
    """
    A helper function for setting values in the config without a chain
    of `set()` calls.

    Usage: conf.deep_set(['db', 'mongo', 'MONGO_CONNECTION_STRING'],
                         "https://localhost:3232")

    :param keys: a list of keys representing the value's path in the config.
    :param value: value to be set.
    """
    inner_dict = self._conf_dict
    end = len(keys) - 1
    for index, component in enumerate(keys):
        if index < end or value is None:
            inner_dict = inner_dict.setdefault(component, {})
        else:
            if component not in inner_dict or not isinstance(inner_dict[component], dict):
                inner_dict[component] = value
            else:
                inner_dict[component].update(value)
    with open(self.config_path, "w") as stream:
        yaml.safe_dump(dict(self._conf_dict), stream, default_flow_style=False)
def score_teams(cumulative_xp, req, args):
    """
    Sort the top n teams, write data to YAML file.

    Tradeoff Notes: The YAML library here doesn't support Unicode, unlike the
    latest PyYAML, but it does retain sort order for the yaml.dump() function,
    which is - for IMHO kind of esoteric reasons - not fixed in the current
    PyYAML. I'm also not sure what I'm missing with all the dashes in the
    output here. Possibly I need to use simpler data structures/a better
    library (or just write line by line with proper indentation). Maybe just
    because I don't write out YAML much...
    """
    _arg_n = args.numteams
    # remove entry for team "0"
    if 0 in cumulative_xp:
        del cumulative_xp[0]
    top_teams = nlargest(_arg_n, cumulative_xp, key=cumulative_xp.get)
    _team_list = []
    _player_list = []
    for _team_id in top_teams:
        try:
            team_name = requests.get(uri + "/teams/" + str(_team_id)).json()
            _team_list.append({
                "team_name": team_name["name"],
                "team_wins": team_name["wins"],
                "team_losses": team_name["losses"],
                "team_experience": int(cumulative_xp[_team_id]),
            })
            logging.debug("Added {} to team list.".format(team_name["name"]))
        except (requests.RequestException, KeyError, ValueError):
            # team_name may be undefined if the request itself failed,
            # so log the id rather than the (possibly missing) name
            logging.error("Team {} could not be found, skipping.".format(_team_id))
            continue
        _player_list = []
        for player in req:
            if player["team_id"] == _team_id:
                _player_list.append({
                    "persona": player["personaname"],
                    "experience": int(check_experience(player)),
                    "country": player["country_code"],
                })
        _team_list.append(_player_list)
    try:
        if args.output == stdout:
            print(yaml.dump(_team_list, default_flow_style=False))
        else:
            with open(args.output, "w+") as outfile:
                yaml.safe_dump(_team_list, outfile, default_flow_style=False)
            logging.debug("Wrote file successfully to {}".format(args.output))
    except (OSError, yaml.YAMLError):
        logging.error("Failed writing output to yaml file.")
def change_imgname(imgname, project_dir):
    for device in ['cpu', 'gpu']:
        yml_path = os.path.join(project_dir, 'docker',
                                'docker-compose-{}.yml'.format(device))
        with open(yml_path, 'r') as f:
            dcyml = yaml.safe_load(f)
        dcyml['services']['experiment']['image'] = imgname
        with open(yml_path, 'w') as f:
            yaml.safe_dump(dcyml, f)
def savefile(data: Any, file: str, safe=True, **kwargs) -> None:
    """ Save data to yaml file. """
    if safe:
        with open(file, 'w') as f:
            yaml.safe_dump(data=data, stream=f, **kwargs)
    else:
        with open(file, 'w') as f:
            yaml.dump(data=data, stream=f, **kwargs)
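# A minimal usage sketch for savefile (file names are illustrative): plain
# data takes the safe_dump path, while objects without a safe representer
# (here a complex number) need safe=False, which falls back to yaml.dump.
def _demo_savefile():
    savefile({"values": [1, 2, 3]}, "demo.yaml")
    savefile({"z": 1 + 2j}, "demo_unsafe.yaml", safe=False)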
def save_to_yaml(content, filepath):
    """Save a dictionary to a YAML file."""
    # Open in binary mode: with encoding='utf-8' set, safe_dump writes
    # encoded bytes to the stream.
    with open(filepath, 'wb') as stream:
        yaml.safe_dump(content, stream, indent=2, default_flow_style=False,
                       default_style='', explicit_start=True,
                       encoding='utf-8', allow_unicode=True)
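# A hedged round-trip check for save_to_yaml: allow_unicode=True means
# non-ASCII text should survive a dump/load cycle unescaped. The file name
# is illustrative.
def _check_save_to_yaml_roundtrip(filepath="roundtrip.yaml"):
    data = {'greeting': 'grüß dich'}
    save_to_yaml(data, filepath)
    with open(filepath, 'r', encoding='utf-8') as stream:
        assert yaml.safe_load(stream) == data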
def write_yaml_file(filename, d, overwrite=False):
    """
    Accepts filepath, dictionary. Writes dictionary in yaml to file path,
    recursively creating path if necessary
    """
    if not os.path.exists(os.path.dirname(filename)) and overwrite is False:
        try:
            os.makedirs(os.path.dirname(filename))
        except OSError as exc:
            if exc.errno != errno.EEXIST:
                raise
    logging.debug("Writing yaml file {}".format(filename))
    logging.debug(d)
    with open(filename, 'w+') as f:
        yaml.safe_dump(d, f, default_flow_style=False)
def run(self):
    # Remove Orgname from vars.yml
    # Replace org-name with org in all vpn files
    # Remove 'org' arg for vpn role call in vpn.yml
    for item in self.inventory:
        inventory_path = "inventory/{}".format(item)
        with self.YamlEditor('{}/config/local/vars.yml'.format(
                inventory_path)) as vars_yml:
            vars_yml.remove('org-name')
            vars_yml.remove('secrets_bucket')
            vars_yml.write()
        if os.path.exists(
                "{}/resources/admin-environment/".format(inventory_path)):
            with self.YamlEditor(
                    "{}/resources/admin-environment/vpn.yml".format(
                        inventory_path)) as vpn_yml:
                data = vpn_yml.get_data()
                try:
                    del data[2]['roles'][0]['org']
                except (KeyError, IndexError) as e:
                    logging.error(e)
                self.overwrite_file(
                    "{}/resources/admin-environment/vpn.yml".format(
                        inventory_path),
                    yaml.safe_dump(data, default_flow_style=False))
            with self.YamlEditor(
                    "{}/resources/admin-environment/env.yml".format(
                        inventory_path)) as env_yml:
                env_yml['env'] = "{{ org }}"
                env_yml['open_vpn_host'] = "vpn-{{ org }}.{{ canonical_zone }}"
                env_yml.write()
            with self.YamlEditor(
                    "{}/resources/admin-environment/env.yml".format(
                        inventory_path)) as env_yml:
                data = env_yml.get_data()
                try:
                    del data['vpn_bucket']
                except KeyError:
                    pass
                self.overwrite_file(
                    "{}/resources/admin-environment/env.yml".format(
                        inventory_path),
                    yaml.safe_dump(data, default_flow_style=False))
def set(self, key, value):
    """
    A helper function for setting values in the config without a chain
    of `set()` calls.

    Usage: conf.set('db.mongo.MONGO_CONNECTION_STRING', "https://localhost:3232")

    :param key: A string representing the value's path in the config.
    :param value: value to be set.
    """
    self.data.set(key, value)
    yaml_file = {"cloudmesh": self.data.copy()}
    with open(self.config_path, "w") as stream:
        yaml.safe_dump(yaml_file, stream, default_flow_style=False)
def remove(self, path, key_to_remove):
    """
    Remove `key_to_remove` from the dict found at `path` in the config,
    then persist the config back to disk.
    """
    inner_dict = self._conf_dict
    for key in path:
        inner_dict = inner_dict[key]
    try:
        inner_dict.pop(key_to_remove)
    except KeyError:
        print("{} doesn't exist to remove.".format(key_to_remove))
    with open(self.config_path, "w") as stream:
        yaml.safe_dump(dict(self._conf_dict), stream, default_flow_style=False)
def set(self, key, value):
    """
    A helper function for setting values in the config without a chain
    of `set()` calls.

    Usage: conf.set('db.mongo.MONGO_CONNECTION_STRING', "https://localhost:3232")

    :param key: A string representing the value's path in the config.
    :param value: value to be set.
    """
    # BUG: dict and set operations are different
    self._conf_dict.set(key, value)
    with open(self.config_path, "w") as stream:
        yaml.safe_dump(dict(self._conf_dict), stream, default_flow_style=False)
def data_to_yaml(data, **options):
    opts = dict(indent=4, default_flow_style=False)
    opts.update(options)
    if 'Dumper' not in opts:
        return yaml.safe_dump(data, **opts)
    else:
        return yaml.dump(data, **opts)
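# Usage sketch for data_to_yaml (assumes PyYAML imported as `yaml`): without
# a Dumper option the data goes through safe_dump; passing one switches to
# yaml.dump with that Dumper.
def _demo_data_to_yaml():
    print(data_to_yaml({'a': {'b': 1}}))
    print(data_to_yaml({'a': {'b': 1}}, Dumper=yaml.Dumper))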
def set(self, key, value):
    """
    A helper function for setting the default cloud in the config without
    a chain of `set()` calls.

    Usage: mongo_conn = conf.set('db.mongo.MONGO_CONNECTION_STRING',
                                 "https://localhost:3232")

    :param key: A string representing the value's path in the config.
    :param value: value to be set.
    """
    if value.lower() in ['true', 'false']:
        value = value.lower() == 'true'
    try:
        if "." in key:
            keys = key.split(".")
            #
            # create parents
            #
            parents = keys[:-1]
            location = self.data
            for parent in parents:
                if parent not in location:
                    location[parent] = {}
                location = location[parent]
            #
            # create entry
            #
            location[keys[-1]] = value
        else:
            self.data[key] = value
    except KeyError:
        path = self.config_path
        Console.error(
            "The key '{key}' could not be found in the yaml file '{path}'".format(
                **locals()))
        sys.exit(1)
    except Exception as e:
        print(e)
        sys.exit(1)
    yaml_file = self.data.copy()
    with open(self.config_path, "w") as stream:
        yaml.safe_dump(yaml_file, stream, default_flow_style=False)
def set(self, name='counter', value=None):
    """
    sets a counter associated with a particular user

    :param name: name of the counter
    :param value: the value
    :return:
    """
    # checking if the value is an int
    if isinstance(value, int):
        self.counters[name] = value
        with open(self.counter_file_path, "w") as stream:
            yaml.safe_dump(self.counters.copy(), stream,
                           default_flow_style=False)
    elif value is None:
        raise ValueError("The value for the counter cannot be empty")
    else:
        raise ValueError("The value for the counter must be of type int")
def main():
    with open("insta_config.yaml", "r") as f:
        config_data = yaml.safe_load(f)
    pb_token = config_data.get("pushbullet_token", "")
    log = Log(config_data["logs_folder"])
    log.init_pushbullet(pb_access_token=pb_token)
    log.set_pb_logging_level(levels_list=["s"])
    try:
        scraper_bot = ScraperBot(metadata_path=config_data["metadata_path"],
                                 export_path=config_data["export_path"],
                                 tmp_files_folder=config_data["tmp_files_folder"],
                                 log=log)
        profile_metadata = scraper_bot.scrape_profile_metadata(
            "gabryxx7", {'cookie': config_data['cookie']},
            config_data["query_hash"], max_pages=-1)
        posts_list = scraper_bot.download_profile_media(
            profile_metadata["posts_data"], config_data["photo_base_path"])
        with open("photo-list.yml", "w") as f:
            yaml.safe_dump(posts_list, f)
    except Exception as e:
        log.e("main", f"Exception in the main loop: {e}\n{traceback.format_exc()}")
    log.stop()
def set(self, key, value):  # BUG
    """
    A helper function for setting values in the config without a chain
    of `set()` calls.

    Usage: conf.set('db.mongo.MONGO_CONNECTION_STRING', "https://localhost:3232")

    :param key: A string representing the value's path in the config.
    :param value: value to be set.
    """
    # BUG: `key` is ignored; this always sets the default cloud.
    self.data['cloudmesh']['default']['cloud'] = value
    print("Setting env parameter cloud to: " +
          self.data['cloudmesh']['default']['cloud'])
    yaml_file = self.data.copy()
    with open(self.config_path, "w") as stream:
        print("Writing update to cloudmesh.yaml")
        yaml.safe_dump(yaml_file, stream, default_flow_style=False)
def _display_data(data, output):
    if output == JSON:
        for line in data:
            click.echo(line)
    if output == TABLE:
        click.echo(pretty_table(data))
    if output == YAML:
        click.echo(oyaml.safe_dump(data, default_flow_style=False))
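# Hedged sketch of why the YAML branch uses oyaml: it is a drop-in PyYAML
# wrapper that emits mappings in insertion order, where plain PyYAML's
# safe_dump sorts keys alphabetically by default.
def _demo_oyaml_ordering():
    print(oyaml.safe_dump({'b': 1, 'a': 2}, default_flow_style=False))
    # expected: "b: 1\na: 2\n" (insertion order preserved)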
def save_state(self):
    kanban_dict = {}
    for column_name, column in self.column_dict.items():
        kanban_dict[column_name] = []
        for row in range(column.count()):
            column_item = column.item(row).text()
            kanban_dict[column_name].append(column_item)
        if len(kanban_dict[column_name]) == 0:
            kanban_dict[column_name] = None
    output_dict = self.full_yaml
    output_dict["kanban_state"] = kanban_dict
    with open(self.todo_filename, "w") as file_dump:
        yaml.safe_dump(output_dict, file_dump, default_flow_style=False,
                       line_break="\r")
def persist_state(self, state):
    try:
        self._kubernetes_client.patch_namespaced_config_map(
            self._configmap_name, self._configmap_namespace,
            {'data': {
                "yaml_data": oyaml.safe_dump(state)
            }})
    except ApiException as e:
        self.logger.error(
            f"Error while writing state to configmap '{self._configmap_name}' "
            f"in namespace '{self._configmap_namespace}': {str(e)}"
        )
def save(self, path=None, backup=True):
    """
    #
    # not tested
    #
    saves the dict into the file. It also creates a backup if set to true.
    The backup filename appends a .bak.NO where NO is a number that is not
    yet used in the backup directory.

    :param path:
    :type path:
    :return:
    :rtype:
    """
    path = path_expand(path or self.location.config())
    if backup:
        destination = backup_name(path)
        shutil.copyfile(path, destination)
    yaml_file = self.data.copy()
    # write to the expanded path, matching the backup taken above
    with open(path, "w") as stream:
        yaml.safe_dump(yaml_file, stream, default_flow_style=False)
def change_imgname(project_dir):
    path = project_dir.split('/')
    project_name = path[-1]
    inp = input(
        'Docker image name [default: {}]:'.format(project_name)).lower()
    if inp != "":
        imgname = inp
    else:
        imgname = project_name
    for device in ['cpu', 'gpu']:
        yml_path = os.path.join(
            project_dir, 'docker', 'docker-compose-{}.yml'.format(device))
        with open(yml_path, 'r') as f:
            dcyml = yaml.safe_load(f)
        dcyml['services']['experiment']['image'] = imgname
        with open(yml_path, 'w') as f:
            yaml.safe_dump(dcyml, f)
def cli(file, input, set, output):
    result = OrderedDict()
    if file is not None:
        output = file
        fileyaml = yaml.safe_load(file)
        result = fileyaml
    for a_file in input:
        oneyaml = yaml.safe_load(a_file)
        result = deep_merge(result, oneyaml)
    for a_set in set:
        oneyaml = string_to_dict(a_set)
        result = deep_merge(result, oneyaml)
    if output is not None:
        output.seek(0)
        output.truncate()
        yaml.safe_dump(result, output, default_flow_style=False)
    else:
        yaml.safe_dump(result, sys.stdout, default_flow_style=False)
def create(key, secret, url, profile):
    cred_folder()
    setting_path = cred_path()
    try:
        with open(setting_path, "r") as f:
            yaml.safe_load(f)
    except Exception as e:
        print(e)
        # Only remove the file if it exists; the read may have failed
        # because the file was missing in the first place.
        if os.path.exists(setting_path):
            os.remove(setting_path)
    if not os.path.exists(setting_path):
        my_dict = {profile: {"key": key, "secret": secret, "url": url}}
    else:
        with open(setting_path, "r") as f:
            my_dict = yaml.safe_load(f)
        my_dict[profile] = {"key": key, "secret": secret, "url": url}
    with open(setting_path, "w") as f:
        yaml.safe_dump(my_dict, f)
    return "created value"
def update_config(config, env, version):
    path = config.format(env)
    updated = False
    obj = None
    with io.open(path, 'r') as f:
        obj = oyaml.safe_load(f)
    if 'services' not in obj:
        print("No services listed in '{}'".format(path))
        return
    for _, service in obj['services'].items():
        for k, v in service.items():
            if k == 'image' and v.startswith(IMAGE_BASENAME):
                service['image'] = ':'.join([v.split(':')[0], version])
                updated = True
    if not updated:
        print("No service images modified.")
        return
    with io.open(path, 'w') as f:
        oyaml.safe_dump(obj, stream=f)
def get_tuto_metata(self):
    """Return the string corresponding to the tutorial metadata."""
    metadata = collections.OrderedDict()
    metadata['title'] = self.title
    metadata['zenodo_link'] = self.zenodo_link
    metadata['questions'] = self.questions
    metadata['objectives'] = self.objectives
    metadata['time_estimation'] = self.time
    metadata['key_points'] = self.key_points
    metadata['contributors'] = self.contributors
    return yaml.safe_dump(metadata, indent=2, default_flow_style=False,
                          default_style='', explicit_start=False)
def main():
    # Parsing user input
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-i', '--input_filename', nargs='?', type=str, required=True,
        help='Input file (json).'
    )
    parser.add_argument(
        '-m', '--match', nargs='?', type=str, default='',
        help='Term to match.'
    )
    parser.add_argument(
        '-c', '--column', nargs='?', type=str, default='title',
        help='Column to match.'
    )
    args = parser.parse_args()
    df = pd.read_json(args.input_filename)
    divs = re.compile(r'<[^>]*>')
    nls = re.compile(r'[\\n<>]')
    if args.match:
        founds = df[df[args.column].str.contains(args.match)]
        for index, found in founds.iterrows():
            out = yaml.safe_dump(found.to_dict())
            out = divs.sub('', out)
            out = nls.sub('', out)
            print(out)
            print(80 * '-')
    else:
        print(df)
def create_competition_dump(competition_pk, keys_instead_of_files=True):
    yaml_data = {"version": "2"}
    try:
        # -------- SetUp -------
        logger.info(f"Finding competition {competition_pk}")
        comp = Competition.objects.get(pk=competition_pk)
        zip_buffer = BytesIO()
        zip_name = f"{comp.title}-{comp.created_when.isoformat()}.zip"
        zip_file = zipfile.ZipFile(zip_buffer, "w")

        # -------- Main Competition Details -------
        for field in COMPETITION_FIELDS:
            if hasattr(comp, field):
                value = getattr(comp, field, "")
                if field == 'queue' and value is not None:
                    value = str(value.vhost)
                yaml_data[field] = value
        if comp.logo:
            logger.info("Checking logo")
            try:
                yaml_data['image'] = re.sub(r'.*/', '', comp.logo.name)
                zip_file.writestr(yaml_data['image'], comp.logo.read())
                logger.info(f"Logo found for competition {comp.pk}")
            except OSError:
                logger.warning(
                    f"Competition {comp.pk} has no file associated with the logo, even though the logo field is set."
                )

        # -------- Competition Terms -------
        yaml_data['terms'] = 'terms.md'
        zip_file.writestr('terms.md', comp.terms)

        # -------- Competition Pages -------
        yaml_data['pages'] = []
        for page in comp.pages.all():
            temp_page_data = {}
            for field in PAGE_FIELDS:
                if hasattr(page, field):
                    temp_page_data[field] = getattr(page, field, "")
            page_file_name = f"{slugify(page.title)}-{page.pk}.md"
            temp_page_data['file'] = page_file_name
            yaml_data['pages'].append(temp_page_data)
            zip_file.writestr(temp_page_data['file'], page.content)

        # -------- Competition Tasks/Solutions -------
        yaml_data['tasks'] = []
        yaml_data['solutions'] = []
        task_solution_pairs = {}
        tasks = [task for phase in comp.phases.all() for task in phase.tasks.all()]
        index_two = 0
        # Go through all tasks
        for index, task in enumerate(tasks):
            task_solution_pairs[task.id] = {
                'index': index,
                'solutions': {
                    'ids': [],
                    'indexes': []
                }
            }
            temp_task_data = {
                'index': index
            }
            for field in TASK_FIELDS:
                data = getattr(task, field, "")
                if field == 'key':
                    data = str(data)
                temp_task_data[field] = data
            for file_type in PHASE_FILES:
                if hasattr(task, file_type):
                    temp_dataset = getattr(task, file_type)
                    if temp_dataset:
                        if temp_dataset.data_file:
                            if keys_instead_of_files:
                                temp_task_data[file_type] = str(temp_dataset.key)
                            else:
                                try:
                                    temp_task_data[file_type] = f"{file_type}-{task.pk}.zip"
                                    zip_file.writestr(temp_task_data[file_type], temp_dataset.data_file.read())
                                except OSError:
                                    logger.error(
                                        f"The file field is set, but no actual"
                                        f" file was found for dataset: {temp_dataset.pk} with name {temp_dataset.name}"
                                    )
                        else:
                            logger.warning(f"Could not find data file for dataset object: {temp_dataset.pk}")
            # Now for all of our solutions for the tasks, write those too
            for solution in task.solutions.all():
                # for index_two, solution in enumerate(task.solutions.all()):
                #     temp_index = index_two
                # IF OUR SOLUTION WAS ALREADY ADDED
                if solution.id in task_solution_pairs[task.id]['solutions']['ids']:
                    for solution_data in yaml_data['solutions']:
                        if solution_data['key'] == solution.key:
                            solution_data['tasks'].append(task.id)
                            break
                    break
                # Else if our index is already taken
                elif index_two in task_solution_pairs[task.id]['solutions']['indexes']:
                    index_two += 1
                task_solution_pairs[task.id]['solutions']['indexes'].append(index_two)
                task_solution_pairs[task.id]['solutions']['ids'].append(solution.id)
                temp_solution_data = {
                    'index': index_two
                }
                for field in SOLUTION_FIELDS:
                    if hasattr(solution, field):
                        data = getattr(solution, field, "")
                        if field == 'key':
                            data = str(data)
                        temp_solution_data[field] = data
                if solution.data:
                    temp_dataset = getattr(solution, 'data')
                    if temp_dataset:
                        if temp_dataset.data_file:
                            try:
                                temp_solution_data['path'] = f"solution-{solution.pk}.zip"
                                zip_file.writestr(temp_solution_data['path'], temp_dataset.data_file.read())
                            except OSError:
                                logger.error(
                                    f"The file field is set, but no actual"
                                    f" file was found for dataset: {temp_dataset.pk} with name {temp_dataset.name}"
                                )
                        else:
                            logger.warning(f"Could not find data file for dataset object: {temp_dataset.pk}")
                # TODO: Make sure logic here is right. Needs to be outputted as a list, but what others can we tie to?
                temp_solution_data['tasks'] = [index]
                yaml_data['solutions'].append(temp_solution_data)
                index_two += 1
            # End for loop for solutions; Append tasks data
            yaml_data['tasks'].append(temp_task_data)

        # -------- Competition Phases -------
        yaml_data['phases'] = []
        for phase in comp.phases.all():
            temp_phase_data = {}
            for field in PHASE_FIELDS:
                if hasattr(phase, field):
                    if field == 'start' or field == 'end':
                        temp_date = getattr(phase, field)
                        if not temp_date:
                            continue
                        temp_date = temp_date.strftime("%Y-%m-%d")
                        temp_phase_data[field] = temp_date
                    elif field == 'max_submissions_per_person':
                        temp_phase_data['max_submissions'] = getattr(phase, field)
                    else:
                        temp_phase_data[field] = getattr(phase, field, "")
            task_indexes = [task_solution_pairs[task.id]['index'] for task in phase.tasks.all()]
            temp_phase_data['tasks'] = task_indexes
            temp_phase_solutions = []
            for task in phase.tasks.all():
                temp_phase_solutions += task_solution_pairs[task.id]['solutions']['indexes']
            temp_phase_data['solutions'] = temp_phase_solutions
            yaml_data['phases'].append(temp_phase_data)
        yaml_data['phases'] = sorted(yaml_data['phases'], key=lambda phase: phase['index'])

        # -------- Leaderboards -------
        yaml_data['leaderboards'] = []
        for index, leaderboard in enumerate(comp.leaderboards.all()):
            ldb_data = {
                'index': index
            }
            for field in LEADERBOARD_FIELDS:
                if hasattr(leaderboard, field):
                    ldb_data[field] = getattr(leaderboard, field, "")
            ldb_data['columns'] = []
            for column in leaderboard.columns.all():
                col_data = {}
                for field in COLUMN_FIELDS:
                    if hasattr(column, field):
                        value = getattr(column, field, "")
                        if field == 'computation_indexes' and value is not None:
                            value = value.split(',')
                        if value is not None:
                            col_data[field] = value
                ldb_data['columns'].append(col_data)
            yaml_data['leaderboards'].append(ldb_data)

        # ------- Finalize --------
        logger.info(f"YAML data to be written is: {yaml_data}")
        comp_yaml = yaml.safe_dump(yaml_data, default_flow_style=False, allow_unicode=True, encoding="utf-8")
        logger.info(f"YAML output: {comp_yaml}")
        zip_file.writestr("competition.yaml", comp_yaml)
        zip_file.close()
        logger.info("Creating ZIP file")
        competition_dump_file = ContentFile(zip_buffer.getvalue())
        logger.info("Creating new Data object with type competition_bundle")
        bundle_count = CompetitionDump.objects.count() + 1
        temp_dataset_bundle = Data.objects.create(
            created_by=comp.created_by,
            name=f"{comp.title} Dump #{bundle_count} Created {comp.created_when.date()}",
            type='competition_bundle',
            description='Automatically created competition dump',
            # 'data_file'=,
        )
        logger.info("Saving zip to Competition Bundle")
        temp_dataset_bundle.data_file.save(zip_name, competition_dump_file)
        logger.info("Creating new CompetitionDump object")
        temp_comp_dump = CompetitionDump.objects.create(
            dataset=temp_dataset_bundle,
            status="Finished",
            details="Competition Bundle {0} for Competition {1}".format(temp_dataset_bundle.pk, comp.pk),
            competition=comp
        )
        logger.info(f"Finished creating competition dump: {temp_comp_dump.pk} for competition: {comp.pk}")
    except ObjectDoesNotExist:
        logger.info("Could not find competition with pk {} to create a competition dump".format(competition_pk))
def create(self):
    reporter = ResourceReporter()
    resource = Resource('kubeconf', 'Kubernetes configuration file',
                        Status.not_exist, resource_id=self.kubeconf)
    reporter.progress(resource)
    try:
        if os.path.isfile(self.kubeconf):
            import oyaml as yaml
            with open(self.kubeconf, 'r') as cf:
                # safe_load avoids arbitrary object construction; yaml.load
                # without an explicit Loader is deprecated.
                kc = yaml.safe_load(cf)
            clusters = self._get_components(kc, 'clusters')
            cs = [c for c in clusters if c.get('name') == self.cluster_info.name]
            if not cs:
                clusters.append(OrderedDict([
                    ('cluster', OrderedDict([
                        ('certificate-authority-data', self.cluster_info.cert),
                        ('server', self.cluster_info.endpoint),
                    ])),
                    ('name', self.cluster_info.name),
                ]))
            else:
                for c in cs:
                    c['cluster']['server'] = self.cluster_info.endpoint
                    c['cluster']['certificate-authority-data'] = self.cluster_info.cert
            users = self._get_components(kc, 'users')
            us = [u for u in users if u.get('name') == self.user]
            if not us:
                users.append(OrderedDict([
                    ('name', self.user),
                    ('user', OrderedDict([
                        ('exec', OrderedDict([
                            ('apiVersion', 'client.authentication.k8s.io/v1alpha1'),
                            ('command', self.heptio),
                            ('args', ['token', '-i', self.cluster_info.name])
                        ]))]))]))
            else:
                # update only the matching user entries
                for u in us:
                    u['user'] = OrderedDict([
                        ('exec', OrderedDict([
                            ('apiVersion', 'client.authentication.k8s.io/v1alpha1'),
                            ('command', self.heptio),
                            ('args', ['token', '-i', self.cluster_info.name])
                        ]))])
            contexts = self._get_components(kc, 'contexts')
            cs = [c for c in contexts
                  if c.get('context', {}).get('cluster') == self.cluster_info.name
                  and c.get('context', {}).get('user') == self.user]
            if not cs:
                contexts.append(OrderedDict([
                    ('context', OrderedDict([
                        ('cluster', self.cluster_info.name),
                        ('namespace', 'default'),
                        ('user', self.user),
                    ])),
                    ('name', self.cluster_info.name),
                ]))
            kc['current-context'] = self.cluster_info.name
            with open(self.kubeconf, 'w') as cf:
                cf.write(yaml.safe_dump(kc, default_flow_style=False))
        else:
            s = Environment().from_string(KubeConfig.KUBE_CONFIG_YAML).render(
                ci=self.cluster_info, user=self.user, heptio=self.heptio)
            with open(self.kubeconf, 'w') as cf:
                cf.write(s)
        resource.status = Status.created
        resource.resource_id = self.kubeconf
        reporter.succeed(resource)
    except Exception as e:
        resource.status = Status.failed
        reporter.fail(resource)
        raise EKSCliException(e)
    return