def execute():
    """Create a fresh record file and point the settings file at it.

    Builds a new, empty record blueprint named ``<date>-record.py``, writes
    it into the configured record directory, then rewrites ``settings.py``
    (which lives next to this ``archive.py`` script) so its ``record`` entry
    names the new file.  Ported from Python 2 ``print`` statements.
    """
    newRecordName = '{}-record.py'.format(helpers.date_stamp())
    settings['record'] = newRecordName
    # Serialized settings module content with the updated record name.
    content = "settings = {}".format(helpers.glue_updated_record(settings))
    # Minimal skeleton for a brand-new record file.
    newRecordBlueprint = '''{ "current" : "", "projects" : {} }'''

    print()
    print("===========================================")
    print()
    print("Creating new record file: {}".format(newRecordName))
    helpers.write_file(settings['record_url'] + newRecordName, newRecordBlueprint)
    print('''
Updating settings file:
    ...
    "record" : "{}"
    ...
'''.format(newRecordName))
    print("===========================================")
    print()

    # settings.py sits in the same directory as this archive.py script.
    helpers.write_file(
        os.path.realpath(__file__).replace('archive.py', '') + 'settings.py',
        content)
def execute(PROJECT):
    """Switch the record's current project to PROJECT, creating it if new.

    If PROJECT already exists in the record it simply becomes the current
    project; otherwise a new project entry is created (timestamped, with a
    user-supplied description) before being made current.  The updated
    record is persisted in both cases.
    """
    record = helpers.load_record()
    recordedTime = helpers.time_stamp()

    def verify_exists(VAL, RECORD):
        # BUG FIX: the original iterated the enclosing `record` and ignored
        # its RECORD parameter; use the record actually passed in.
        return VAL in RECORD['projects']

    if verify_exists(PROJECT, record):
        print("Switching to Project: {}".format(PROJECT))
        record['current'] = PROJECT
        helpers.write_file(helpers.recordPath, helpers.glue_updated_record(record))
    else:
        record['current'] = PROJECT
        record['projects'][PROJECT] = helpers.create_new_project(recordedTime)
        record['projects'][PROJECT]['description'] = helpers.add_a_description()
        helpers.write_file(helpers.recordPath, helpers.glue_updated_record(record))
def generate_py_component(path, name, props):
    """Emit the Python widget class bridging a Vue component into Jupyter.

    Builds a traitlets-based VueWidget subclass whose synced traits mirror
    the component's declared properties, then writes the module to `path`.
    """
    prop_items = list(props['properties'].items())
    # Every mapped trait type plus Unicode (needed for the module metadata).
    needed_traits = {traitlet_mapping[ptype] for _, ptype in prop_items} | {'Unicode'}
    traits = ', '.join(needed_traits)
    props_str = '\n            '.join(
        f"{pname} = {traitlet_mapping[ptype]}().tag(sync=True)"
        for pname, ptype in prop_items
    )
    content = fix_indentation(f"""
        from traitlets import ({traits})
        from ipyvue import VueWidget
        from ._version import semver


        class {name}(VueWidget):
            _model_name = Unicode('{name}Model').tag(sync=True)
            _view_name = Unicode('{view_name}').tag(sync=True)
            _view_module = Unicode('{jupyter_extension_name}').tag(sync=True)
            _model_module = Unicode('{jupyter_extension_name}').tag(sync=True)
            _view_module_version = Unicode(semver).tag(sync=True)
            _model_module_version = Unicode(semver).tag(sync=True)

            {props_str}


        __all__ = ['{name}']
    """)
    write_file(path, content)
def test_create_from():
    """Branching from @head~ keeps only the first commit's content."""
    ipvc = get_environment()
    ipvc.repo.init()

    first = REPO / 'test_file.txt'
    write_file(first, 'hello world')
    ipvc.stage.add()
    ipvc.stage.commit('msg1')

    second = REPO / 'test_file2.txt'
    write_file(second, 'hello world2')
    ipvc.stage.add()
    ipvc.stage.commit('msg2')

    # Create a branch starting from the parent of head (the first commit).
    ipvc.branch.create('test', from_commit='@head~')
    assert len(ipvc.branch.history()) == 1

    # The first file survives; the second was rolled back off disk.
    first.stat()
    with pytest.raises(FileNotFoundError):
        second.stat()

    os.remove(first)
    ipvc.diff.run()
    ipvc.print_ipfs_profile_info()
def write_temp_doxy(self, contents):
    """Write `contents` to the temporary doxygen config file.

    Any previous temp file is removed first; in verbose mode the write is
    logged.  Ported from Python 2 `print` statements.
    """
    if os.path.exists(self.conf.temp_doxy_path):
        os.remove(self.conf.temp_doxy_path)
    h.write_file(self.conf.temp_doxy_path, contents)
    if self.V > 0:
        # BUG FIX: message previously read "doy" instead of "doxy".
        print("> Wrote temp doxy file: %s" % self.conf.temp_doxy_path)
def test_create_a_file(basedir):
    """Writing a file into a fresh working tree makes it visible on disk."""
    work_tree = os.path.join(basedir, 'create_a_file')
    init_dir(work_tree)
    os.chdir(work_tree)
    write_file(work_tree, 'greeting', 'Hello, world!\n')
    assert os.path.exists(os.path.join(work_tree, 'greeting'))
def main():
    """Convert one .torrent file (or all .torrent files in a directory) to
    magnet links, printing them or writing JSON/HTML output.

    Directory mode triggers when the input path ends in '/' or '.'.
    """
    import os.path  # local: needed for the directory-join fix below
    args = parser()
    torrentfiles = []
    source = args.input
    if source.endswith('/') or source.endswith('.'):
        # BUG FIX: listdir() yields bare file names; join them with the
        # source directory so get_magnet() can open them regardless of CWD.
        for tfile in listdir(source):
            if tfile.endswith('.torrent'):
                torrentfiles.append(os.path.join(source, tfile))
    else:
        torrentfiles.append(source)

    magnets = {torrent: get_magnet(torrent) for torrent in torrentfiles}

    output = args.output
    if output is None:
        for torrent, magnet in magnets.items():
            print('%s\t%s' % (torrent, magnet))
    elif output.endswith('.json'):
        write_json(output, magnets)
    elif output.endswith('.html'):
        write_file(output, get_html(magnets))
    else:
        print('error: output was not correct. use file.{json,html} as parameter')
def generate_html_report(compare_results, unreachable_results, reference_flows):
    """Render comparison and unreachable-flow results into data/report.html."""
    compare_df = format_df(
        compare_results.frame(),
        COMPARE_RESULTS_COLUMN_NAMES,
        COMPARE_RESULTS_COLUMN_ORDER,
        "Reference Flow Index",
    )
    unreachable_df = format_df(
        unreachable_results.frame(), DIFF_RESULTS_COLUMN_NAMES
    )

    # Annotate both frames with HTML error markers before rendering.
    add_html_errors_pd_frame(compare_df)
    add_html_errors_pd_frame(unreachable_df)

    compare_html = format_html_table(
        compare_df.to_html(index=False, escape=False)
    )
    unreachable_html = format_html_table(
        unreachable_df.to_html(index=False, escape=False)
    )

    rendered = render_report(
        "./acl_auditor/report.j2",
        compare_html,
        unreachable_html,
        reference_flows,
    )
    write_file("data/report.html", rendered)
def write_header_html(self):
    """Fill the docs header template's placeholders and write it to work_dir."""
    header = h.read_file(self.conf.ETC + "fg_docx_header.html")
    # Placeholder -> replacement, applied in the original order.
    substitutions = {
        "___NAV_ITEMS___": self.get_navigation(),
        "___REPO_LINK___": self.get_repo_link(),
        "___REPO_BRANCH___": self.get_branch(),
    }
    for placeholder, value in substitutions.items():
        header = header.replace(placeholder, value)
    h.write_file(self.conf.work_dir + "fg_docx_header.html", header)
def execute(REPRESS_STATUS):
    """Start time tracking on the current project.

    Appends a new open time entry unless one is already running.  With
    REPRESS_STATUS set, only the status message is shown (used right after
    a new project is created).
    """
    record = helpers.load_record()
    current = record['current']
    if current == '':
        msg.no_current_project()
        return

    recordedTime = helpers.time_stamp()

    # Find an already-open entry (one with no end time), if any.
    openEntry = {}
    for entry in record['projects'][current]['time']:
        if entry['end'] == '':
            openEntry = entry

    if REPRESS_STATUS:
        msg.new_project_tracking(current, recordedTime)
    elif not openEntry:
        newItem = helpers.new_time_obj(recordedTime)
        record['projects'][current]['time'].append(newItem)
        helpers.write_file(helpers.recordPath, helpers.glue_updated_record(record))
        msg.tracking_message(newItem, current)
    else:
        msg.already_tracking(current)
def execute():
    """Stop time tracking on the current project.

    Closes the open time entry (the one with an empty end time), recording
    its end timestamp, elapsed time and date, then persists the record.
    """
    record = helpers.load_record()
    current = record['current']
    if current == "":
        msg.no_current_project()
        return

    # Locate the entry still being tracked, if any.
    openEntry = {}
    for entry in record['projects'][current]['time']:
        if entry['end'] == '':
            openEntry = entry

    if not openEntry:
        msg.nothing_being_tracked()
        return

    recordedTime = helpers.time_stamp()
    recordedDate = helpers.date_stamp()
    openEntry['end'] = recordedTime
    openEntry['spent'] = helpers.time_spent(openEntry['start'], openEntry['end'])
    openEntry['spent_date'] = recordedDate
    helpers.write_file(helpers.recordPath, helpers.glue_updated_record(record))
    msg.untracking_message(openEntry)
def test_remove():
    """stage.remove undoes a staged modification without touching the workspace."""
    ipvc = get_environment()
    ipvc.repo.init()

    nested = REPO / 'testdir1' / 'testdir2'
    nested.mkdir(parents=True)
    target = nested / 'test_file.txt'

    write_file(target, 'hello world')
    changes = ipvc.stage.add(target)
    assert isinstance(changes, list) and len(changes) == 1 and changes[0]['Type'] == 0
    assert changes[0]['Path'] == ''
    ipvc.stage.commit('msg')

    # Modification timestamps have one-second resolution; wait before editing.
    time.sleep(1)
    write_file(target, 'hello world2')
    changes = ipvc.stage.add(target)
    assert isinstance(changes, list) and len(changes) == 1 and changes[0]['Type'] == 2
    assert changes[0]['Path'] == ''

    changes = ipvc.stage.remove(target)
    assert isinstance(changes, list) and len(changes) == 1 and changes[0]['Type'] == 2
    assert changes[0]['Path'] == ''

    head_stage, stage_workspace = ipvc.stage.status()
    assert isinstance(head_stage, list)
    assert len(head_stage) == 0 and len(stage_workspace) == 1
def test_mv_rm():
    """repo.mv relocates a repo on disk and MFS; repo.rm removes it from MFS only."""
    ipvc = get_environment()
    ipvc.repo.init()
    target = REPO / 'test_file.txt'
    write_file(target, 'hello_world')
    ipvc.stage.add(target)

    # Invalid moves: nonexistent source, no-op move, and no repo at cwd.
    with pytest.raises(RuntimeError):
        ipvc.repo.mv(REPO2, REPO2)
    with pytest.raises(RuntimeError):
        ipvc.repo.mv(REPO, REPO)
    with pytest.raises(RuntimeError):
        IPVC(Path('/'), NAMESPACE).repo.mv(REPO, None)

    assert ipvc.repo.mv(REPO, REPO2) == True
    assert not REPO.exists()
    assert REPO2.exists()

    def mfs_hash_or_none(path):
        # Helper: MFS hash for path, or None if it isn't tracked.
        try:
            return ipvc.ipfs.files_stat(ipvc.repo.get_mfs_path(path))['Hash']
        except:
            return None

    assert mfs_hash_or_none(REPO2) is not None

    assert ipvc.repo.rm(REPO2) == True
    assert REPO2.exists()  # should still exist on filesystem
    assert mfs_hash_or_none(REPO2) is None  # should not exist on MFS
def main():
    """Shows basic usage of the Drive v3 API.

    Prints the names and ids of the first 10 files the user has access to.
    """
    # Stored credentials from a previous run (token written by h.write_file).
    creds = h.read_file("credentials/GDrive_token.txt")[0]
    if not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            # No usable token: run the interactive OAuth flow.
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials/credentials.json', SCOPES)
            creds = flow.run_local_server()
        # Persist the new/refreshed credentials for the next run.
        h.write_file("credentials/GDrive_token.txt", creds)

    service = build('drive', 'v3', credentials=creds)
    results = service.files().list(
        pageSize=10, fields="nextPageToken, files(id, name)").execute()
    items = results.get('files', [])

    if not items:
        print('No files found.')
        return
    print('Files:')
    for item in items:
        print(u'{0} ({1})'.format(item['name'], item['id']))
def write_info_file(self):
    """Dump project metadata (colour, version, title, timestamp) as JSON."""
    now = datetime.datetime.utcnow()
    info = {
        'color': self.conf.color,
        'version': self.get_version(),
        'title': self.conf.title,
        'project': self.conf.proj,
        'date_updated': now.strftime("%Y-%m-%d %H:%M:%S"),
    }
    h.write_file(self.conf.json_info_path, json.dumps(info))
def execute():
    """Log all unrecorded work time for the current project to its JIRA issue.

    Every closed time entry not yet pushed to JIRA (its 'jira_recorded'
    flag is absent or 'False') is added as a worklog on the issue whose key
    equals the current project name, then flagged 'True'.  The updated
    record is saved afterwards.  Ported from Python 2 `print` statements.
    """
    record = helpers.load_record()
    current = record['current']
    workLogList = helpers.work_log_list(record, current)
    if len(workLogList) == 0:
        msg.nothing_to_log()
        return

    jira = JIRA('https://contentwatch.atlassian.net',
                basic_auth=(passwordHide['username'], passwordHide['password']))
    issue = jira.issue(current)
    msg.jira_item_being_logged(issue.fields.summary)

    print("----------------------------------")
    for item in workLogList:
        print('Time: {time}'.format(time=helpers.time_worked(item['spent'])))
    print()

    for item in record['projects'][current]['time']:
        if item['spent'] == '':
            continue
        # A missing flag means "never logged" — same as an explicit 'False'.
        # This collapses the original's two duplicated branches into one.
        if item.get('jira_recorded', 'False') != 'False':
            continue
        timeWorked = helpers.time_worked(item['spent'])
        if timeWorked != '0h 00m':
            jira.add_worklog(current, timeSpent=timeWorked, timeSpentSeconds=None,
                             adjustEstimate=None, newEstimate=None, reduceBy=None,
                             comment=helpers.work_log_comment(
                                 item['spent_date'], timeWorked),
                             started=None, user=None)
            item['jira_recorded'] = 'True'

    helpers.write_file(helpers.recordPath, helpers.glue_updated_record(record))
    print()
    msg.process_completed()
def generate_archive(posts):
    """Render the archive index page from `posts` and write it to disk."""
    print("Generating archive...")
    _ensure_dir(ARCHIVE_DIR_PATH)
    html = populate_template(
        ARCHIVE_TEMPLATE_NAME,
        data={"page_title": "Archive", "posts": posts},
    )
    write_file(ARCHIVE_PATH, contents=html)
def profile():
    """Create a default profiles/profile.py under the util directory if absent."""
    import helpers
    import os
    utilDir = helpers.path('util')
    if not os.path.exists(utilDir + '/profiles/profile.py'):
        # Skeleton settings structure for a brand-new profile.
        snippet = '''{\n\t"settings" : {\n\n\t\t}\n}'''
        helpers.run_command('mkdir {}/profiles'.format(utilDir), False)
        helpers.write_file(utilDir + '/profiles/profile.py', snippet)
        print("\nprofile added!\n")
    # BUG FIX: was `msg.done` — a bare attribute access that never ran;
    # sibling commands call msg.done().
    msg.done()
def generate_playlist_page(playlist_id):
    """Render the playlist page for `playlist_id` and write it to disk."""
    print("Generating playlist page...")
    _ensure_dir(PLAYLIST_DIR_PATH)
    html = populate_template(
        PLAYLIST_TEMPLATE_NAME,
        data={"page_title": "Playlist", "playlist_id": playlist_id},
    )
    write_file(PLAYLIST_PATH, contents=html)
def test_git_hash_object(basedir):
    """`git hash-object` yields a 40-character SHA-1 for a known file."""
    work_tree = os.path.join(basedir, 'test_git_hash_object')
    init_dir(work_tree)
    os.chdir(work_tree)
    write_file(work_tree, 'greeting', 'Hello, world!\n')

    completed = subprocess.run('git hash-object greeting'.split(),
                               stdout=PIPE, stderr=STDOUT)
    digest = completed.stdout.decode("ascii").strip()
    # Known value: af5626b4a114abcb82d63db7c8082c3c4756e51b
    assert len(digest) == 40
def generate_cache_file():
    """Write a Spotipy-compatible token cache seeded from env credentials."""
    print("Generating cache file...")
    token_cache = {
        "access_token": SPOTIFY_ACCESS_TOKEN,
        "token_type": "Bearer",
        "expires_in": 3600,
        "refresh_token": SPOTIFY_REFRESH_TOKEN,
        "scope": "user-library-read",
        # Already expired: forces an immediate token refresh on first use.
        "expires_at": 0,
    }
    write_file(SPOTIFY_CACHE_PATH, contents=json.dumps(token_cache))
def test_clone():
    """Cloning a published repo restores committed content over local edits."""
    ipvc = get_environment()
    ipvc.repo.init(name='myrepo')
    peer = ipvc.id.create(key='id1', use=True)

    target = REPO / 'test_file.txt'
    write_file(target, 'hello world')
    ipvc.stage.add(target)
    ipvc.stage.commit(message='msg')
    ipvc.repo.publish()
    ipvc.repo.rm()

    # Local edit should be overwritten by the clone below.
    write_file(target, 'other text')
    ipvc.repo.clone(f'{peer}/myrepo')
    with open(target, 'r') as f:
        assert f.read() == 'hello world'
def test_status():
    """stage.status reflects a change moving from workspace into the stage."""
    ipvc = get_environment()
    ipvc.repo.init()
    write_file(REPO / 'test_file.txt', 'hello world')

    # Before staging: one workspace change, nothing staged.
    head_stage, stage_workspace = ipvc.stage.status()
    assert len(head_stage) == 0
    assert len(stage_workspace) == 1 and stage_workspace[0]['Type'] == 0

    # After staging: the change moves over.
    ipvc.stage.add()
    head_stage, stage_workspace = ipvc.stage.status()
    assert len(head_stage) == 1 and head_stage[0]['Type'] == 0
    assert len(stage_workspace) == 0
def execute():
    """Interactively pick a project and make it current ('x' aborts)."""
    record = helpers.load_record()
    choice = helpers.select_project(record)
    if choice == 'x':
        msg.process_completed()
        return
    record['current'] = choice
    helpers.write_file(helpers.recordPath, helpers.glue_updated_record(record))
    msg.switching_project(choice)
def generate_pages(posts):
    """Render one archive page per post into ARCHIVE_DIR_PATH/<date>/index.html."""
    print("Generating pages...")
    for post in posts:
        date = post["date"]
        print(f"Generating page: post {date}...")
        page_dir = ARCHIVE_DIR_PATH / date
        _ensure_dir(page_dir)
        html = populate_template(
            PAGE_TEMPLATE_NAME,
            data={"page_title": f"Archive: {date}", "post": post},
        )
        write_file(page_dir / "index.html", contents=html)
def alias():
    """Ensure the shared .baconrc contains an alias for this utility.

    Reads the utility's action-list.json for the alias name and appends an
    `alias <name>="python <util>/actions.py"` line to the baconrc file if no
    alias line for that name exists yet.
    """
    import json
    import helpers
    actionList = json.loads(
        helpers.read_file('{}/{}'.format(helpers.path('util'), 'action-list.json')))
    bashrcFilepath = helpers.run_command_output('cd {} && cd ../'.format(
        helpers.path('util'))) + '.baconrc'
    contents = helpers.read_file(bashrcFilepath)
    # BUG FIX: the original called .format() with no argument, which raises
    # IndexError at runtime; interpolate (and regex-escape) the alias name.
    pat = re.compile('alias {}='.format(re.escape(actionList['alias'])))
    match = re.search(pat, contents)
    formattedAlias = '\nalias {}="python {}/actions.py"'.format(
        actionList['alias'], helpers.path('util'))
    if not match:
        contents += formattedAlias
        helpers.write_file(bashrcFilepath, contents)
def main():
    """Fetch storage status, diff it against the previous snapshot, persist
    today's data, and post the resulting unit assignments to the site."""
    storage_data = apricot.get_storage_status()
    today = str(date.today())
    # Daily snapshot, named by date.
    write_file(storage_data, f"{today}.json")
    set_member_level(apricot)
    try:
        previous_storage_data = read_file("previous_data.json")
        # NOTE(review): overwriting today's data with the previous snapshot
        # makes calc_storage compare identical inputs — looks wrong, but
        # preserved pending confirmation of intent.
        storage_data = previous_storage_data
    except Exception:
        # First run (or unreadable baseline): seed it with today's data.
        # Narrowed from a bare `except:` so Ctrl-C still propagates.
        previous_storage_data = storage_data
        write_file(storage_data, "previous_data.json")
    free_units, unit_state = calc_storage(storage_data, previous_storage_data)
    assignments = save_storage_data_to_file(storage_data, free_units, unit_state)
    print(assignments)
    post_data_to_site(assignments, apricot)
def helpers():
    """Refresh this utility's helpers.py from the bacon template, keeping
    the custom-helpers section intact."""
    import helpers

    baconRoot = helpers.run_command_output(
        'cd {} && cd ../ && pwd'.format(helpers.path('util'))).replace(
        '\n', '') + '/bacon/template/helpers.py'
    targetPath = '/{}/{}'.format(helpers.path('util'), 'helpers.py')

    # Everything after this marker is user-written and must be preserved.
    marker = "# custom helpers start here\n# ========================="
    customSection = helpers.read_file(targetPath).split(marker)[1]

    # Fresh default helpers from the bacon template, then re-attach customs.
    freshDefaults = helpers.read_file(baconRoot)
    helpers.write_file(targetPath, freshDefaults + customSection)
    msg.done()
def generate_js_component(path, name, props):
    """Emit the JS model class that bridges a Vue component into Jupyter."""
    prop_items = list(props['properties'].items())
    # One `name: null,` default per declared component property.
    props_str = '\n                '.join(
        f"{pname}: null," for pname, _ in prop_items
    )
    import_variant = \
        'LinkedComponent' if single_component_mode else f"{{ {name} as LinkedComponent }}"
    content = fix_indentation(f"""
        /* eslint camelcase: off */
        import {{ VueModel }} from 'jupyter-vue';

        import '../../vue_project_dist_mirror/component.css';
        import {import_variant} from '../../vue_project_dist_mirror/component.umd';

        export class {name}Model extends VueModel {{
            defaults() {{
                return {{
                    ...super.defaults(),
                    ...{{
                        _model_name: '{name}Model',
                        _view_name: '{view_name}',
                        _view_module: '{jupyter_extension_name}',
                        _model_module: '{jupyter_extension_name}',
                        _view_module_version: '^{jupyter_extension_version}',
                        _model_module_version: '^{jupyter_extension_version}',
                        {props_str}
                    }},
                }};
            }}

            getVueTag() {{
                return LinkedComponent;
            }}
        }}

        {name}Model.serializers = {{
            ...VueModel.serializers,
        }};
    """)
    write_file(path, content)
def test_the_beauty_of_commits(basedir):
    """Exercise `git branch -v`, `reset --hard`, and `checkout` on a
    one-commit repository.  (Original commentary translated from Japanese.)"""
    work_tree = os.path.join(basedir, 'test_the_beauty_of_commits')
    init_dir(work_tree)
    os.chdir(work_tree)
    write_file(work_tree, 'greeting', 'Hello, world!\n')
    git_init(work_tree)
    git_add(work_tree, 'greeting')
    git_commit(work_tree, 'Added greeting')

    # Find the commit referenced as HEAD of master, i.e. the newest one.
    o = subprocess.run("git branch -v".split(), stdout=PIPE, stderr=STDOUT)
    msg = o.stdout.decode('ascii').strip()
    # Output looks like: "* master 444a7a7 Added my greeting"
    commit_hash = msg.split()[2]
    # We obtained the first 7 hex digits of the commit object's hash.
    assert re.match(r'\w{7}', commit_hash)

    # A commit hash can be used to reset the working tree; the HEAD alias is
    # moved to point at the given commit.  `git reset --hard <id>` discards
    # every change currently present in the working tree.
    o = subprocess.run("git reset --hard".split() + [commit_hash],
                       stdout=PIPE, stderr=STDOUT)
    # e.g. "HEAD is now at 2c495c1 Added my greeting"

    # `git checkout <id>` is the other way to move the working tree to a
    # commit: it does NOT discard working-tree changes, and the commit that
    # HEAD points to is not changed by it.
    o = subprocess.run("git checkout".split() + [commit_hash],
                       stdout=PIPE, stderr=STDOUT)
def test_introducing_the_blob(basedir):
    """A committed file is stored as a blob object retrievable by hash."""
    work_tree = os.path.join(basedir, 'test_introducing_the_blob')
    init_dir(work_tree)
    os.chdir(work_tree)
    write_file(work_tree, 'greeting', 'Hello, world!\n')
    git_init(work_tree)
    git_add(work_tree, 'greeting')
    git_commit(work_tree, 'Added greeting')

    # The object type for the known hash prefix should be 'blob'.
    result = subprocess.run('git cat-file -t af5626b'.split(),
                            stdout=PIPE, stderr=STDOUT)
    assert 'blob' in result.stdout.decode("ascii").strip()

    # And its content should be exactly what we committed.
    result = subprocess.run('git cat-file blob af5626b'.split(),
                            stdout=PIPE, stderr=STDOUT)
    assert 'Hello, world!' in result.stdout.decode("ascii").strip()
def execute():
    """Register an alias line in .baconrc for every bacon utility found.

    Scans each sibling utility directory; for any that exposes an alias,
    appends an `alias <name>="python <path>/actions.py"` line (plus optional
    goto/showme perks) to the shared .baconrc, unless already present.
    """
    baconBitsPath = helpers.run_command_output(
        'cd {} && cd ../ && pwd'.format(helpers.path('util')), False).replace('\n', '')
    baconrcFile = baconBitsPath + '/.baconrc'
    DATA = helpers.read_file(baconrcFile)
    utilList = os.listdir(baconBitsPath)
    addPerks = helpers.kv_set(settings, 'perks')
    count = 0
    APPENDED_DATA_STR = DATA
    for item in utilList:
        path = baconBitsPath + '/' + item
        try:
            alias = helpers.get_alias(path)
        except Exception:
            # Not a utility directory (no alias metadata) — skip it.
            alias = False
        if not alias:
            continue
        aliasStr = 'alias {ALIAS}="python {PATH}/actions.py"'.format(
            ALIAS=alias, PATH=path)
        # BUG FIX: the original compiled aliasStr as a regex, so '.' and '"'
        # in the path acted as metacharacters; a literal substring test is
        # the intended check.
        if aliasStr not in DATA:
            count += 1
            print('\nadding alias: {}'.format(alias))
            APPENDED_DATA_STR += '\n' + aliasStr
            if addPerks == "True" or addPerks == "true":
                # Each snippet re-emits its placeholder so later utilities
                # can be spliced in after it.
                aliasStrGoto = '''elif [ $1 = "{ALIAS}" ]; then
    cd {PATH}
 #~~~ bacon:goto placeholder'''.format(ALIAS=alias, PATH=path)
                aliasStrShowme = '''elif [ $1 = "{ALIAS}" ]; then
    open {PATH}
 #~~~ bacon:showme placeholder'''.format(ALIAS=alias, PATH=path)
                APPENDED_DATA_STR = APPENDED_DATA_STR.replace(
                    ' #~~~ bacon:goto placeholder', aliasStrGoto)
                APPENDED_DATA_STR = APPENDED_DATA_STR.replace(
                    ' #~~~ bacon:showme placeholder', aliasStrShowme)
    if count > 0:
        helpers.write_file(baconrcFile, APPENDED_DATA_STR)
    else:
        print("\n:: Nothing to add ::")
    msg.done()
def execute():
    """Add goto/showme shell entries to .baconrc for each bacon utility."""
    baconBitsPath = helpers.run_command_output(
        'cd {} && cd ../ && pwd'.format(helpers.path('util')),
        False).replace('\n', '')
    baconrcFile = baconBitsPath + '/.baconrc'
    DATA = helpers.read_file(baconrcFile)
    count = 0
    MODIFIED_DATA_STR = DATA
    for item in os.listdir(baconBitsPath):
        path = baconBitsPath + '/' + item
        try:
            alias = helpers.get_alias(path)
        except:  # noqa: E722 — directories without alias metadata are skipped
            alias = False
        if not alias:
            continue
        # Shell snippets; each ends by re-emitting its placeholder so the
        # next utility can be spliced in after it.
        gotoSnippet = '''elif [ $1 = "{ALIAS}" ]; then
    cd {PATH}
 #~~~ bacon:goto placeholder'''.format(ALIAS=alias, PATH=path)
        showmeSnippet = '''elif [ $1 = "{ALIAS}" ]; then
    open {PATH}
 #~~~ bacon:showme placeholder'''.format(ALIAS=alias, PATH=path)
        pattern = re.compile(
            'elif \[ \$1 = "{ALIAS}" \]; then'.format(ALIAS=alias))
        if not re.search(pattern, DATA):
            count += 1
            print('\nadding utility to goto and showme: {}'.format(alias))
            MODIFIED_DATA_STR = MODIFIED_DATA_STR.replace(
                ' #~~~ bacon:goto placeholder', gotoSnippet)
            MODIFIED_DATA_STR = MODIFIED_DATA_STR.replace(
                ' #~~~ bacon:showme placeholder', showmeSnippet)
    if count > 0:
        helpers.write_file(baconrcFile, MODIFIED_DATA_STR)
    else:
        print("\n:: Nothing to add ::")
    msg.done()
def test_history():
    """After one commit, head metadata paths exist and history has one entry."""
    ipvc = get_environment()
    ipvc.repo.init()
    write_file(REPO / 'test_file.txt', 'hello world')
    ipvc.stage.add()
    ipvc.stage.commit('commit message')

    # Each of these MFS paths must exist once the commit has been made.
    for info in ('head/data/bundle', 'head/data/parent'):
        try:
            ipvc.ipfs.files_stat(
                ipvc.repo.get_mfs_path(REPO, 'master', branch_info=info))
        except:
            assert False

    try:
        metadata = ipvc.repo.mfs_read_json(
            ipvc.repo.get_mfs_path(
                REPO, 'master', branch_info='head/data/commit_metadata'))
    except:
        assert False

    commits = ipvc.branch.history()
    assert len(commits) == 1
    assert ipvc.branch.show(Path('@head')) == 'test_file.txt'
    assert ipvc.branch.show(Path('@head/test_file.txt')) == 'hello world'
    ipvc.print_ipfs_profile_info()
def write(info_dict):
    """Persist the hospital list as JSON under resource/API_resource.

    BUG FIX: the original dumped YAML text into hospital_list.json; the
    .json file now receives actual JSON, matching the sibling JSON writer.
    """
    import json  # local: keeps the fix self-contained
    filename = os.path.join(os.getcwd(), "resource", "API_resource",
                            "hospital_list.json")
    helpers.write_file(filename, json.dumps(info_dict, ensure_ascii=False, indent=2))
def prepare(self):
    """Prepare the doxygen build for this project.

    Creates the work directory, copies the static build assets, writes the
    generated header, then assembles the doxygen config (base file +
    overrides from config.yaml + fg-docs settings) and writes it as the
    temp doxy file.  Ported from Python 2 `print` statements.
    """
    if self.V > 0:
        print("\n=================================================================================")
        print("# Processing: %s" % self.conf.proj)
        print("=================================================================================")

    # -- Ensure the temp/work directory exists -------------------------------
    if self.V > 1:
        print("> Checking if temp/work_dir exists: %s" % self.conf.work_dir)
    if not os.path.exists(self.conf.work_dir):
        if self.V > 1:
            print("  > creating temp/work_dir path: %s" % self.conf.work_dir)
        os.mkdir(self.conf.work_dir)
    else:
        if self.V > 1:
            print("  > path Exists temp/work_dir path: %s" % self.conf.work_dir)

    # -- Build directory -----------------------------------------------------
    # NOTE(review): the original nuke-and-recreate of build_dir is commented
    # out in the source; kept disabled here.
    if not self.conf.is_main:
        if self.V > 0:
            print("> checking build directory exits: %s" % self.conf.build_dir)

    # Copy configured files (see copy_config).
    self.copy_files()

    # -- Copy required static assets into the work dir -----------------------
    if self.V > 0:
        print("> Copying build files:")
    for f in ["fg_xstyle.css", "fg_docx_footer.html"]:
        if self.V > 0:
            print("> copied: %s" % f)
        shutil.copyfile(self.conf.ETC + f, self.conf.work_dir + f)
    self.write_header_html()

    # -- Assemble the doxygen configuration ----------------------------------
    # Base config contents, then the extra doxy vars from config.yaml.
    dox_file_contents = self.get_doxy_file()
    if self.V > 0:
        print("> Checking doxy vars from config.yaml")
    xover = self.get_doxy_args()

    # MAIN project extras: projects index page and project pages source.
    if self.conf.is_main:
        h.write_file(self.conf.work_dir + "projects_index.html",
                     self.get_projects_table_html())
        h.write_file(self.conf.work_dir + "project_pages.cpp",
                     self.get_projects_pages_cpp())

    # Append and override the main settings from here.
    xover.append('PROJECT_NAME="%s"' % self.conf.proj)
    # Version number comes from yaml, or the source file.
    self.version = self.get_version()
    xover.append('PROJECT_NUMBER="%s"' % self.version)
    xover.append('PROJECT_BRIEF="%s"' % self.conf.title)
    xover.append('OUTPUT_DIRECTORY=' + self.conf.build_dir)
    xover.append('HTML_OUTPUT=%s' % "./")
    xover.append('GENERATE_TAGFILE=' + self.conf.build_dir + self.conf.proj + ".tag")
    xover.append('HTML_HEADER = fg_docx_header.html')
    xover.append('HTML_FOOTER = fg_docx_footer.html')
    xover.append('HTML_EXTRA_STYLESHEET = "fg_xstyle.css"')
    xover.append('TREEVIEW_WIDTH = 250')
    xover.append('HAVE_DOT = ')
    # Referenced by the (currently disabled) FILTER_PATTERNS override.
    py_processor = self.conf.ETC + "doxypy.py"
    xover.append('WARN_LOGFILE = %s' % self.conf.build_dir + "warnings.txt")

    # Cross-project tag files so linked docs resolve between projects.
    if self.conf.tags:
        tag_list = []
        for t in self.conf.tags:
            tag_list.append(self.conf.BUILD + t + "/" + t + ".tag=../" + t)
        xover.append("TAGFILES = %s \n" % " ".join(tag_list))

    dox_override = "\n".join(xover)
    if self.V > 0:
        print("> Overides for fg-docs output")
        for oo in xover:
            print("  > " + oo)

    # Final config string: base file + overrides, written to the temp file.
    dox_config_str = dox_file_contents + dox_override
    self.write_temp_doxy(dox_config_str)
def export(self):
    """Write the assembled fit graph to the configured output file."""
    helpers.write_file(self.output_file, self.fitgraph)
def write(info_dict):
    """Serialize info_dict as pretty-printed JSON to
    resource/API_resource/hospital_list.json (relative to the CWD)."""
    target = os.path.join(os.getcwd(), 'resource', 'API_resource',
                          'hospital_list.json')
    serialized = json.dumps(info_dict, ensure_ascii=False, indent=2)
    helpers.write_file(filename=target, content=serialized)
def write(info_dict):
    """Dump info_dict as YAML to resource/API_resource/hospital_list.yml
    (relative to the CWD), keeping non-ASCII characters readable."""
    target = os.path.join(os.getcwd(), 'resource', 'API_resource',
                          'hospital_list.yml')
    helpers.write_file(target, yaml.safe_dump(info_dict, allow_unicode=True))