def add_changelog_template(token: str, patch_version: int, cwd: str) -> None:
    """Create a changelog branch, generate the changelog post, commit and push it.

    Runs entirely inside `cwd`; pushes the new branch to origin.
    """
    new_branch = changelog_branch_name(patch_version)
    git_command("checkout", "-b", new_branch, cwd=cwd)
    # changelog.py needs the GitHub token to query the API
    execute_command("python", "changelog.py", "--token", token, cwd=cwd)
    git_command("add", "_posts", cwd=cwd)
    git_command("commit", "-m", f"Changelog {patch_version}", cwd=cwd)
    git_command("push", "origin", new_branch, cwd=cwd)
def pull_branch(branch):
    """Check out `branch` and pull its latest state from origin.

    Prints a red error message and returns early when the checkout fails
    (i.e. no local branch with that name exists).
    """
    output = execute_command('git checkout ' + branch)
    # git reports failures on a line beginning with 'error:'
    if 'error:' in output:
        print_in_color("no branch exist for this name".upper(), 'red')
        return
    execute_command('git pull origin ' + branch)
    # fixed typo: 'succesfully' -> 'successfully'
    print_in_color('successfully pulled latest '.upper() + branch + ' IN REPO', 'green')
def _update_locally(self) -> None:
    """Build and then run the attributes-info binary through cargo."""
    manifest = "attributes-info/Cargo.toml"
    execute_command("cargo", "build",
                    "--manifest-path", manifest,
                    "--package", "attributes-info")
    execute_command("cargo", "run",
                    "--manifest-path", manifest,
                    "--package", "attributes-info",
                    "--bin", "attributes-info")
def set_cap_source(cap_source, option):
    """Set the Capture Source control via amixer.

    cap_source: list of amixer output lines for the Capture Source control;
        the first entry carries the 'numid=X,...' identifier.
    option: 'Mic' (microphone) or 'Mix' (soundcard output).

    Does nothing when the control is missing or the option cannot be found.
    """

    def get_numid_str(s):
        """Extract the X value from 'numid=X,...', or None if absent."""
        result = re.search(r'numid=(\d+),', s)
        return result.group(1) if result else None

    if not cap_source:
        return

    id_str = get_numid_str(cap_source[0])
    option_id_str = None
    if id_str:
        for line in cap_source:
            # re.escape guards against regex metacharacters in `option`
            result = re.search(r'Item #(\d+) ' + "'" + re.escape(option) + "'", line)
            if result:
                option_id_str = result.group(1)
                break
    if id_str and option_id_str:
        command = "amixer cset %s %s" % (cap_source[0], option_id_str)
        sys.stdout.write(execute_command(command))
def set_cap_switch(cap_switch, option):
    """Toggle the Capture Switch control to `option` ('on' or 'off')."""
    if not cap_switch:
        return
    cset_cmd = "amixer cset %s %s" % (cap_switch[0], option)
    sys.stdout.write(execute_command(cset_cmd))
def set_cap_switch(cap_switch, option):
    """Set the Capture Switch ('on'/'off') with an amixer cset call."""
    if len(cap_switch) > 0:
        sys.stdout.write(
            execute_command("amixer cset %s %s" % (cap_switch[0], option)))
def set_cap_source(cap_source, option):
    """Select the Capture Source.

    option: 'Mic' (microphone) or 'Mix' (soundcard output).
    Silently does nothing when the control or the option is not present.
    """

    def extract_numid(text):
        """Pull X out of 'numid=X,...'; None when not found."""
        found = re.search(r'numid=(\d+),', text)
        if found:
            return found.group(1)
        return None

    if not cap_source:
        return

    numid = extract_numid(cap_source[0])
    chosen = None
    if numid:
        pattern = r'Item #(\d+) ' + "'" + option + "'"
        for entry in cap_source:
            hit = re.search(pattern, entry)
            if hit:
                chosen = hit.group(1)
                break
    if numid and chosen:
        sys.stdout.write(
            execute_command("amixer cset %s %s" % (cap_source[0], chosen)))
def unpushed_commit_message():
    """Return local-only commit summaries as 'hash:email:subject' strings.

    Returns an empty list when the git command fails.
    """
    result = common.execute_command(
        'git log --branches --not --remotes --pretty=format:%h:%aE:%s')
    if result.status == 0:
        return result.out.decode().split('\n')
    return []
def export(self):
    """Run the exporter script for this accession, reporting any error output.

    The exporter writes into the parent directory of self.out_dir.
    """
    parent_dir = '/'.join(self.out_dir.split('/')[:-1])
    cmd = "%s --no-adf --output %s %s" % (EXPORTER_SCRIPT, parent_dir, self.accession)
    out, err = execute_command(cmd)
    if 'error' in err.lower():
        print('EXPORT ERROR %s\n' % self.accession, out, err)
def gitlog(rev, rev2, options=''):
    """Return `git log --first-parent` output for the range rev..rev2.

    options: extra flags appended to the git command.
    Raises Exception when the git command exits non-zero.
    """
    command = 'git log --first-parent ' + options + ' ' + rev + '..' + rev2
    result = common.execute_command(command)
    if result.status == 0:
        return result.out.decode()
    # bug fix: the message was never %-formatted (the command was passed as a
    # second Exception argument instead of being interpolated)
    raise Exception('Error executing "%s"' % command)
def git_command(*args, print_stdout=True, check=True, cwd: Optional[str] = None) -> str:
    """Run `git <args>` through execute_command and return its output."""
    full_args = ("git",) + args
    return execute_command(*full_args, print_stdout=print_stdout, check=check, cwd=cwd)
def delete_these_branches(branches):
    """Force-delete each local git branch, echoing git's output in color."""
    print_in_color("Started Deleting Operation", 'green')
    for name in branches:
        result = execute_command('git branch -D ' + name)
        # git failures are shown in red, successes in orange
        color = 'red' if "error" in result else 'orange'
        print_in_color(result, color, "")
    print_in_color("Completed Deleting Operation", 'green')
def run(self):
    """Optionally wipe the existing submission, then convert and (re)submit it."""
    logging.debug("Start")
    creds = (SUBMISSION_TOOL_PATH, SUBMISSION_SERVER,
             SUBMISSION_USERNAME, SUBMISSION_PASSWORD)
    if self.force:
        # remove the previous submission before re-submitting
        delete_command = "java -jar %s -o TRANKLUCATE -s %s -u %s -p %s '%s'" % (
            creds + (self.accession,))
        print(delete_command)
        del_out, del_err = execute_command(delete_command)
        print(del_out)
        print(del_err)
    self.convert()
    command = "java -jar %s -o createupdate -s %s -u %s -p %s -d %s" % (
        creds + (self.page_tab_filename,))
    print(command)
    out, err = execute_command(command)
    print(out)
    print(err)
    logging.debug("end")
def commit_in_path(old_path=None, new_path=None):
    """List 'hash:email:subject' commit summaries, optionally limited to a range.

    old_path / new_path: when non-empty, appended as '<old>..<new>' revision
    range arguments to `git log`.
    Returns [] when the git command fails.
    """
    git_command = 'git log --first-parent --pretty=format:%h:%aE:%s'
    # truthiness covers both None and empty string
    if old_path:
        git_command += ' ' + old_path
    if new_path:
        git_command += '..' + new_path
    command_result = common.execute_command(git_command)
    if command_result.status != 0:
        return []
    return command_result.out.decode().split('\n')
def run(self):
    """Delete the old submission when forced, then submit the page-tab file.

    Prints diagnostics only when the tool reports an error.
    """
    if self.force:
        delete_command = "java -jar %s -o TRANKLUCATE -s %s -u %s -p %s '%s'" % (
            SUBMISSION_TOOL_PATH, SUBMISSION_SERVER, SUBMISSION_USERNAME,
            SUBMISSION_PASSWORD, self.accession)
        del_out, del_err = execute_command(delete_command)
    command = "java -jar %s -o createupdate -s %s -u %s -p %s -d %s" % (
        SUBMISSION_TOOL_PATH, SUBMISSION_SERVER, SUBMISSION_USERNAME,
        SUBMISSION_PASSWORD, self.page_tab_filename)
    out, err = execute_command(command)
    if 'error' in err.lower() or 'error' in out.lower():
        print(self.accession)
        print(out)
        print(err)
        print('=' * 50)
def main():
    """Open a PR that bumps the pinned nightly rustc date in the CI workflow.

    Skips out early when the nightly branch already exists or the workflow
    already pins the locally installed nightly version.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--token", type=str, required=True, help="github token")
    args = parser.parse_args()
    repo = env("GITHUB_REPOSITORY")

    if get_branch(repo, args.token, NIGHTLY_BRANCH) is not None:
        print("Repo already has nightly branch")
        return

    git_command("checkout", "-b", NIGHTLY_BRANCH)
    # local rustc reports the nightly date we should pin
    rustc_output = execute_command("rustc", "-V")
    date = RUSTC_VERSION_RE.match(rustc_output).group(1)

    with open(CHECK_WORKFLOW_PATH) as f:
        workflow_text = f.read()
    if re.search(WORKFLOW_RUSTC_VERSION_RE, workflow_text) is None:
        raise ValueError("Failed to find the current version of nightly rust")
    new_workflow_text = re.sub(WORKFLOW_RUSTC_VERSION_RE,
                               f"\\g<1>{date}\\g<2>", workflow_text)
    if new_workflow_text == workflow_text:
        print("The latest nightly rustc version is already used")
        return
    with open(CHECK_WORKFLOW_PATH, "w") as f:
        f.write(new_workflow_text)

    if has_git_changes():
        git_command("add", CHECK_WORKFLOW_PATH)
        git_command("commit", "-m", ":arrow_up: nightly")
        git_command("push", "origin", NIGHTLY_BRANCH)
        pull_request = create_pull_request(repo, args.token, NIGHTLY_BRANCH,
                                           ":arrow_up: nightly")
        add_assignee(repo, args.token, pull_request["number"], DEFAULT_ASSIGNEE)
    else:
        print("Everything is up to date")
def is_alive(self):
    """Poll bjobs for this job's state.

    Returns True while the job is running or pending, False once done.
    Raises Exception for unexpected statuses or when the job was never
    submitted. extract_output() is called on any terminal state.
    """
    if not self.submitted:
        raise Exception('Job has not been submitted yet')
    out, err = execute_command('bjobs %s' % self.job_id)
    # status is the third whitespace-separated token on the second output line
    status = [tok for tok in out.split('\n')[1].split(' ') if tok != ''][2]
    state = status.lower()
    if state == 'done':
        self.extract_output()
        return False
    if state == 'run' or state == 'pend':
        return True
    self.extract_output()
    raise Exception('Job %s exited with status %s' % (self.job_id, status))
def _update_locally(self):
    """Rewrite the CI workflow to pin the nightly date reported by local rustc."""
    rustc_version = execute_command("rustc", "-V")
    date = RUSTC_VERSION_RE.match(rustc_version).group(1)
    with open(CHECK_WORKFLOW_PATH) as fh:
        workflow_text = fh.read()
    if re.search(WORKFLOW_RUSTC_VERSION_RE, workflow_text) is None:
        raise ValueError(
            "Failed to find the current version of nightly rust")
    updated = re.sub(WORKFLOW_RUSTC_VERSION_RE,
                     f"\\g<1>{date}\\g<2>", workflow_text)
    if updated == workflow_text:
        print("The latest nightly rustc version is already used")
        return
    with open(CHECK_WORKFLOW_PATH, "w") as fh:
        fh.write(updated)
def get_captures():
    """Split `amixer contents` output into the three Capture control blocks.

    Returns (cap_source, cap_switch, cap_volume), each a list of the lines
    belonging to that control (empty list when absent).
    """
    cap_source = cap_switch = cap_volume = []
    output = execute_command("amixer contents")
    blocks = []
    for chunk in output.split("numid="):
        chunk = chunk.rstrip("\n")
        if not chunk:
            continue
        # re-attach the delimiter so each block starts with 'numid='
        blocks.append(("numid=" + chunk).split("\n"))
    for block in blocks:
        header = block[0]
        if "Capture Source" in header:
            cap_source = block
        elif "Capture Switch" in header:
            cap_switch = block
        elif "Capture Volume" in header:
            cap_volume = block
    return (cap_source, cap_switch, cap_volume)
def get_captures():
    """Return (source, switch, volume) Capture control blocks from amixer.

    Each element is the list of lines for that control; [] when not found.
    """
    cap_source = cap_switch = cap_volume = []
    raw = execute_command("amixer contents")
    # split on the delimiter, drop empties, and re-prefix each block
    stripped = (part.rstrip("\n") for part in raw.split("numid="))
    parsed = [("numid=" + s).split("\n") for s in stripped if len(s) != 0]
    for block in parsed:
        first = block[0]
        if "Capture Source" in first:
            cap_source = block
        elif "Capture Switch" in first:
            cap_switch = block
        elif "Capture Volume" in first:
            cap_volume = block
    return (cap_source, cap_switch, cap_volume)
def delete_feature_branches(path, protected_branches):
    """Interactively delete local git branches in `path`, sparing protected ones.

    path: repository directory (converted via converted_path before chdir).
    protected_branches: branch names that must never be deleted.

    Shows the candidate list and only deletes after the user confirms with 'Y'.
    """
    os.chdir(converted_path(path))
    output = execute_command('git branch')
    branches = output.replace('*', '')
    for branch in protected_branches:
        branches = branches.replace(" " + branch + "\n", '')
    # idiomatic comprehension instead of map/filter/lambda chain
    to_delete = [item.strip() for item in branches.split('\n') if item.strip()]
    if not to_delete:
        print_in_color("No branch To Delete", 'green')
        print_in_color("PROTECTED BRANCHES :: ", 'green', '')
        print_in_color(protected_branches, 'orange')
    else:
        print_in_color(
            "Are You Sure You Want To delete These Branches ? :: ", 'orange', '')
        print_in_color(to_delete, 'red')
        print_in_color("Enter Y To Delete :: ", 'green', '')
        answer = input()
        if answer.upper() == 'Y':
            delete_these_branches(to_delete)
        else:
            print_in_color("CANCELLED THE OPERATION", 'red')
def codingstyle(files, enable_reformat, check_lgpl, check_commits_date):
    """Check (and optionally reformat) the coding style of the given files.

    Returns (ret, reformatted_list): ret is True when any file had a
    formatting error, reformatted_list holds the paths that were rewritten.
    Returns [] early when uncrustify cannot be launched.
    """
    source_patterns = common.get_option('codingstyle-hook.source-patterns',
                                        default='*.cpp *.cxx *.c').split()
    header_patterns = common.get_option('codingstyle-hook.header-patterns',
                                        default='*.hpp *.hxx *.h').split()
    misc_patterns = common.get_option('codingstyle-hook.misc-patterns',
                                      default='*.cmake *.txt *.xml *.json').split()
    code_patterns = source_patterns + header_patterns
    include_patterns = code_patterns + misc_patterns
    sort_includes = common.get_option('codingstyle-hook.sort-includes',
                                      default="true", type='--bool') == "true"

    global repoRoot
    repoRoot = common.get_repo_root()
    if repoRoot is None:
        common.warn("Cannot find 'fw4spl' repository structure")
        parent_repo = ""
    else:
        parent_repo = os.path.abspath(os.path.join(repoRoot, os.pardir))

    fw4spl_configured_projects = common.get_option(
        'codingstyle-hook.additional-projects', default=None)
    fw4spl_projects = []
    if fw4spl_configured_projects is None:
        # no additional-projects specified in config file; default is the
        # parent repository folder
        fw4spl_projects.append(parent_repo)
    else:
        fw4spl_projects = fw4spl_configured_projects.split(";")
        # add the current repository folder to the configured projects
        fw4spl_projects.append(repoRoot)
    # normalize pathnames and drop duplicates
    fw4spl_projects = list(map(os.path.normpath, fw4spl_projects))
    fw4spl_projects = list(set(fw4spl_projects))

    global UNCRUSTIFY_PATH
    if common.g_uncrustify_path_arg is not None and len(common.g_uncrustify_path_arg) > 0:
        UNCRUSTIFY_PATH = common.g_uncrustify_path_arg
    else:
        UNCRUSTIFY_PATH = common.get_option('codingstyle-hook.uncrustify-path',
                                            default=UNCRUSTIFY_PATH,
                                            type='--path').strip()
    common.note('Using uncrustify: ' + UNCRUSTIFY_PATH)
    if common.execute_command(UNCRUSTIFY_PATH + ' -v -q').status != 0:
        common.error('Failed to launch uncrustify.\n')
        return []

    checked = set()
    reformatted_list = []
    sortincludes.find_libraries_and_bundles(fw4spl_projects)

    ret = False
    checked_count = 0
    reformatted_count = 0
    for hook_file in files:
        if hook_file in checked or not any(hook_file.fnmatch(p) for p in include_patterns):
            continue
        file_content = hook_file.contents
        if not common.binary(file_content):
            # Do this last because the file contents will be modified by
            # uncrustify, so `file_content` would no longer reflect the file
            file_path = os.path.join(repoRoot, hook_file.path)
            if os.path.isfile(file_path):
                result = format_file(file_path, enable_reformat, code_patterns,
                                     header_patterns, misc_patterns, check_lgpl,
                                     sort_includes, hook_file.status,
                                     check_commits_date)
                checked_count += 1
                if result == FormatReturn.Modified:
                    reformatted_list.append(hook_file.path)
                    reformatted_count += 1
                elif result == FormatReturn.Error:
                    # error while reformatting
                    ret = True
        checked.add(hook_file)

    common.note('%d file(s) checked, %d file(s) reformatted.'
                % (checked_count, reformatted_count))
    return ret, reformatted_list
def format_file(source_file, enable_reformat, code_patterns, header_patterns,
                misc_patterns, check_lgpl, sort_includes, status, check_commits_date):
    """Check/fix one file: uncrustify for code, line-ending cleanup for misc files.

    Returns a FormatReturn value (Modified / NotModified / Error); files that
    match neither pattern set are left untouched.
    """
    # Invoke uncrustify for source code files
    if any(fnmatch(source_file, p) for p in code_patterns):
        common.trace('Launching uncrustify on : ' + source_file)
        cfg_path = os.path.join(os.path.dirname(__file__), 'uncrustify.cfg')
        ret = FormatReturn()
        # Fix license year
        if check_lgpl is True:
            ret.add(fix_license_year(source_file, enable_reformat, status,
                                     check_commits_date))
        # Sort headers
        if sort_includes is True:
            ret.add(sortincludes.sort_includes(source_file, enable_reformat))
        if any(fnmatch(source_file, p) for p in header_patterns):
            ret.add(fix_header_guard(source_file, enable_reformat))
        # Uncrustify: %s is replaced with the mode flags below
        command = UNCRUSTIFY_PATH + ' -c ' + cfg_path + ' -q %s ' + source_file
        if enable_reformat is True:
            # check first, reformat only when the check fails
            uncrustify = common.execute_command(command % '--check')
            if uncrustify.status != 0:
                uncrustify = common.execute_command(
                    command % '--replace --no-backup --if-changed')
                if uncrustify.status != 0:
                    common.error('Uncrustify failure on file: ' + source_file)
                    common.error(uncrustify.out.decode())
                    return FormatReturn.Error
                ret.add(FormatReturn.Modified)
        else:
            uncrustify = common.execute_command(command % '--check')
            if uncrustify.status != 0:
                common.error('Uncrustify failure on file: ' + source_file)
                return FormatReturn.Error
        return ret.value
    # Replace only YEAR, TAB, CRLF and CR for miscellaneous files
    elif any(fnmatch(source_file, p) for p in misc_patterns):
        common.trace('Parsing: ' + source_file + ' to replace CR, CRLF and TABs')
        raw_bytes = open(source_file, 'rb').read()
        no_tabs = re.sub('\t', ' ', raw_bytes.decode())
        no_crlf = re.sub('\r\n', '\n', no_tabs)
        cleaned = re.sub('\r', '\n', no_crlf)
        if raw_bytes.decode() == cleaned:
            return FormatReturn.NotModified
        # Something has been changed, write the new file
        open(source_file, 'wb').write(cleaned.encode())
        return FormatReturn.Modified
def main():
    """Stage a MAGE-TAB accession, convert it to page-tab, and submit it.

    Copies the load directory into a temp area, unzips archives, converts,
    submits via the BioStudies CLI, then cleans up (unless skip_remove).
    """
    args = extract_args()
    dir_path = os.path.join(LOAD_DIR, args.accession.split('-')[1], args.accession)
    tmp_dir = os.path.join(MAGIC_DIR, args.accession)
    skip_copy = args.skip_copy
    skip_remove = args.skip_remove

    # wipe any leftovers from a previous run
    if os.path.exists(tmp_dir):
        print(execute_command('chmod 777 -R ' + tmp_dir))
        print(execute_command('rm -rf ' + tmp_dir))
    try:
        os.mkdir(tmp_dir)
        if skip_copy:
            print('skipping copy')
            execute_command('cp -r %s/*.txt %s' % (dir_path, tmp_dir))
        else:
            execute_command('cp -r %s/* %s' % (dir_path, tmp_dir))
        # extract any zip archives that were copied over
        for entry in os.listdir(tmp_dir):
            entry = os.path.join(tmp_dir, entry)
            if os.path.isdir(entry):
                continue
            entry_name = os.path.split(entry)[-1]
            if entry_name.endswith('.zip'):
                print("extracting:: " + entry_name + ' to ' + tmp_dir)
                execute_command("Unzip %s -d %s" % (entry, tmp_dir))

        converter = MAGETABConverter(args.accession, tmp_dir)
        file_lists = converter.page_tab.export()
        bst_usr = register_user(args.accession)

        files = ' -a ' + ','.join(file_lists) if file_lists else ''
        submit = 'submitAsync' if args.asyn else 'submit'
        command = "java -jar {jar} {submit} -s {server} -u {user} -p {password} -i {page_tab} {files}".format(
            jar=BST_CLI_PATH, submit=submit, server=SUBMISSION_SERVER,
            user=SUBMISSION_USERNAME, password=SUBMISSION_PASSWORD,
            page_tab=os.path.join(converter.out_dir, args.accession + '.pagetab.tsv'),
            files=files)
        print(command)
        out, err = execute_command(command)
        print(out)
        print(err)
        update_owner(args.accession, bst_usr['id'])
        if not skip_remove:
            execute_command('chmod 777 -R ' + tmp_dir)
            execute_command('rm -rf ' + tmp_dir)
    except Exception as e:
        print(e)
        if not skip_remove:
            execute_command('chmod 777 -R ' + tmp_dir)
            execute_command('rm -rf ' + tmp_dir)
        raise
def submit(self):
    """Submit the job command line and record the LSF job id on success."""
    stdout, _stderr = execute_command(self.cmd)
    if stdout:
        # bsub replies like 'Job <12345> is submitted ...'
        job_token = stdout.split('<')[1]
        self.job_id = job_token.split('>')[0]
        self.submitted = True
def __init__(self, accession, out_dir, skip_remove=False):
    """Convert an ArrayExpress ADF into page-tab format and submit it.

    accession: ADF accession (e.g. 'A-XXXX-1'); out_dir: working directory,
    created if missing and removed afterwards unless skip_remove is True.
    """
    self.accession = accession
    self.out_dir = out_dir
    if not os.path.exists(self.out_dir):
        os.makedirs(self.out_dir)
    self.adf_dict = {}
    self.adf_dir = os.path.join(ARRAY_DIR, accession.split('-')[1], accession)
    self.page_tab_filename = os.path.join(self.out_dir,
                                          self.accession + '.pagetab.tsv')
    self.files = []
    self.get_ftp_files()
    self.copy_to_tmp()
    self.adf_file_path = os.path.join(self.out_dir, self.accession + '.adf.txt')
    fix_encoding(self.adf_file_path)
    self.adf_content = []
    self.parse_adf()

    # release date: prefer the explicit ADF fields, default to today
    release_date = datetime.datetime.now().date().isoformat()
    if 'Public Release Date' in self.adf_dict:
        release_date = self.adf_dict['Public Release Date'][0]
    elif 'ArrayExpressReleaseDate' in self.adf_dict:
        release_date = self.adf_dict['ArrayExpressReleaseDate'][0]

    self.page_tab = PageTab(title=self.adf_dict['Array Design Name'][0],
                            accession=self.accession,
                            release_date=release_date,
                            out_dir=self.out_dir)
    if not self.adf_dict.get('Provider', ['']):
        self.adf_dict['Provider'] = ['']
    section_fields = OrderedDict([
        ("Title", self.adf_dict['Array Design Name'][0]),
        ("Provider", self.adf_dict.get('Provider', [''])[0]),
        ("Description", self.adf_dict.get('Description', [''])[0]),
        ("Organism", self.adf_dict.get('Organism', [''])[0]),
        ("MIAME Compliant ADF", 'Yes' if self.adf_dict['score']['score'] else 'No'),
        ("Link", [Field(name='Link', value=self.accession,
                        extras=[["Type", "BioStudies Search"],
                                ["Description", "Studies using this array"]])]),
    ])
    table = None
    if self.files:
        table = [['Files', 'Type', 'Description']] + self.files
    self.page_tab.sections.append(
        Section(name="Array", section_id='s-' + self.accession,
                section_fields=section_fields, table=table))

    bst_usr = register_user(self.accession)
    self.page_tab.export()
    command = "java -jar {jar} -s {server} -u {user} -p {password} -i {page_tab} {files}".format(
        jar=SUBMISSION_TOOL_PATH, server=SUBMISSION_SERVER,
        user=SUBMISSION_USERNAME, password=SUBMISSION_PASSWORD,
        page_tab=self.page_tab_filename, files='')
    update_owner(self.accession, bst_usr['id'])
    print(command)
    out, err = execute_command(command)
    print(out)
    print(err)
    if not skip_remove:
        execute_command('chmod 777 -R ' + self.out_dir)
        execute_command('rm -rf ' + self.out_dir)
def main():
    """Unload queued accessions from BioStudies, reload the load queue, and
    email a report of any failures.

    Queue files live under STAGING_DIR/{unload,load}; a staged file is removed
    only after its accession was processed without error.
    """
    error_unload = {}
    error_load = {}
    unload_accs = os.listdir(os.path.join(STAGING_DIR, 'unload'))
    load_accs = os.listdir(os.path.join(STAGING_DIR, 'load'))
    # anything queued for (re)load will be unloaded by the loader anyway
    unload_accs = [acc for acc in unload_accs if acc not in load_accs]

    for acc in unload_accs:
        print('unloading:', acc)
        delete_command = "java -jar %s delete -s %s -u %s -p %s %s" % (
            BST_CLI_PATH, SUBMISSION_SERVER, SUBMISSION_USERNAME,
            SUBMISSION_PASSWORD, acc)
        out, err = execute_command(delete_command)
        if out is None:
            out = ''
        if err is None:
            err = ''
        combined = out + err
        if 'error' in combined.lower() or 'exception' in combined.lower():
            error_unload[acc] = combined
            continue
        os.remove(os.path.join(STAGING_DIR, 'unload', acc))

    runner_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                               'lsf', 'runner.py')
    if load_accs:
        log_f_name = 'DUAL_' + datetime.datetime.utcnow().isoformat()
        load_command = ("export PYTHONPATH=\"${PYTHONPATH}:/nfs/biostudies/pyBiostudies\"; export "
                        "LD_LIBRARY_PATH=/nfs/biostudies/instantclient_12_2:$LD_LIBRARY_PATH;source "
                        "/nfs/biostudies/pyBiostudies/virtual_env/bin/activate;python %s -f -sync -acc %s -lg %s"
                        % (runner_path, ' '.join(load_accs), log_f_name))
        print(load_command)
        print(execute_command(load_command))

        log_path = os.path.join(settings.LOG, log_f_name + ".csv")
        if os.path.exists(log_path):
            with open(log_path, 'r') as lg_file:
                log = lg_file.read()
            # records start on lines beginning with 'E-'; continuation lines
            # are glued back onto the current record
            records = []
            current = ''
            for line in log.split('\n'):
                if line.startswith('E-'):
                    if current:
                        records.append(current)
                    current = line
                else:
                    current += line
            for record in records:
                cells = record.split('\t')
                acc = cells[0]
                err = cells[-1]
                if err.lower() != 'loaded':
                    error_load[acc] = err

    email = ""
    if error_unload:
        email = "There's been error unloading the following from Biostudies:\n"
        for k, v in error_unload.items():
            email += '%s: %s\n' % (k, v)
        email += '\n\n'
    if error_load:
        # NOTE(review): this assignment overwrites any unload-error text built
        # above — possibly intended to be '+='; preserved as-is
        email = "There's been error loading the following into Biostudies:\n"
        for k, v in error_load.items():
            email += '%s: %s\n' % (k, v)
    if email:
        send_email(from_email="PyBiostudies<pybiostudies.ebi.ac.uk",
                   to_emails=['*****@*****.**'],
                   subject="Biiostudies Dual Load Error",
                   body=email)
    for acc in load_accs:
        if acc not in error_load:
            os.remove(os.path.join(STAGING_DIR, 'load', acc))