def start():
    # Create the root HTB qdisc on the interface
    check_output('%s qdisc add dev %s root handle %i: htb default %i'
                 % (TC, args.interface, ROOT_Q_HANDLE, DEFAULT_CLASS))
    # make a default class for normal traffic
    check_output('%s class replace dev %s parent %i: classid %i:%i htb rate 1000mbit ceil 1000mbit'
                 % (TC, args.interface, ROOT_Q_HANDLE, ROOT_Q_HANDLE, DEFAULT_CLASS))
def install_filters(links_file):
    # The with-statement closes the file on exit; the original trailing
    # `linksf.closed` was a no-op and has been dropped.
    with open(links_file, 'r') as linksf:
        for line in strip_comments(linksf):
            elems = line.split(' ')
            check_output('%s update %s %s -c %i'
                         % (TC_SETUP, elems[0], elems[2],
                            int(elems[1].split('link')[1])))
def start_network():
    if network_running():
        logging.getLogger(__name__).info("Some network components already running...")
        stop_network()
    logging.getLogger(__name__).info("Starting simulated network...")

    # Create fake NICs
    logging.getLogger(__name__).info("Creating network interfaces...")
    autogen_click_conf(get_topo_file("servers"),
                       get_topo_file("clients"),
                       get_topo_file("dns"))
    # Prefer the installed click binary; fall back to the local build
    if os.path.isfile(CLICK):
        run_bg("%s %s" % (CLICK, CLICK_CONF))
    else:
        run_bg("%s %s" % (CLICK_LOCAL, CLICK_CONF))

    # Set up traffic shaping
    logging.getLogger(__name__).info("Enabling traffic shaping...")
    try:
        check_output("%s start" % TC_SETUP)
        install_filters(get_topo_file("bottlenecks"))
    except Exception as e:
        logging.getLogger(__name__).error(e)

    # Launch apache instances
    logging.getLogger(__name__).info("Configuring apache...")
    try:
        configure_apache(get_server_ip_list())
        restart_apache()
    except Exception as e:
        logging.getLogger(__name__).error(e)

    logging.getLogger(__name__).info("Network started.")
def stop_network():
    logging.getLogger(__name__).info('Stopping simulated network...')

    # stop apache instances
    logging.getLogger(__name__).info('Stopping apache...')
    try:
        reset_apache(get_server_ip_list())
        restart_apache()
    except Exception as e:
        logging.getLogger(__name__).error(e)

    # Stop traffic shaping
    logging.getLogger(__name__).info('Disabling traffic shaping...')
    try:
        check_output('%s stop' % TC_SETUP)
    except Exception as e:
        logging.getLogger(__name__).error(e)

    # Destroy fake NICs
    logging.getLogger(__name__).info('Destroying network interfaces...')
    try:
        check_both('killall -9 click', shouldPrint=False)
        time.sleep(0.1)
    except Exception:
        pass

    logging.getLogger(__name__).info('Network stopped.')
def run_events(self, events_file=None, bg=False):
    cmd = '%s %s run' % (NETSIM, self.topo_dir)
    if events_file:
        cmd += ' -e %s' % events_file
    if bg:
        run_bg(cmd)
    else:
        check_output(cmd)
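# A minimal usage sketch for run_events() above (hypothetical events file;
# assumes a surrounding simulator object exposing the method):
#
#   sim.run_events()                       # replay the default schedule, blocking
#   sim.run_events('events.txt', bg=True)  # replay a custom schedule in the background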
def execute_event(event):
    logging.getLogger(__name__).info("Updating link: %s" % " ".join(event))
    try:
        check_output("%s update -c %i -b %s -l %s"
                     % (TC_SETUP, int(event[1].split("link")[1]),
                        event[2], event[3]))
        if args.log:
            # The with-statement closes the file; the original trailing
            # `logfile.closed` was a no-op and has been dropped.
            with open(args.log, "a") as logfile:
                logfile.write("%f %s %s %s\n"
                              % (time.time(), event[1],
                                 bw_to_kbps(event[2]), lat_to_ms(event[3])))
    except Exception as e:
        logging.getLogger(__name__).error(e)
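# Sketch of the event format execute_event() expects: a sequence whose second
# element names the link ("link<N>", where N doubles as the tc class id),
# followed by a bandwidth and a latency in tc notation. All values here are
# illustrative, not taken from a real topology file.
example_event = ('5.0', 'link3', '10mbit', '20ms')
# execute_event(example_event)  # would run: <TC_SETUP> update -c 3 -b 10mbit -l 20ms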
def pdb_to_top_and_crds(force_field, pdb, basename, solvent_buffer=10.0):
    """
    Creates CHARMM .coor and .psf file for NAMD simulation.
    """
    solv_dir = basename + '.solvate'
    save_dir = os.getcwd()
    pdb = os.path.abspath(pdb)
    util.goto_dir(solv_dir)

    # Remove all but protein heavy atoms in a single clean conformation
    stripped_pdb = basename + '.clean.pdb'
    pdbtext.clean_pdb(pdb, stripped_pdb)

    # Make input script for psfgen
    psfgen_psf = basename + '.psfgen.psf'
    psfgen_pdb = basename + '.psfgen.pdb'
    script = module_load_script
    script += make_chain_loading_script(stripped_pdb, basename)
    script += make_disulfide_script(stripped_pdb)
    script += write_script
    script = script % {
        # load the included CHARMM22 atom topologies
        'topology': os.path.join(data.data_dir, 'charmm22.topology'),
        'out_pdb': psfgen_pdb,
        'out_psf': psfgen_psf,
    }

    psfgen_in = basename + ".psfgen.in"
    open(psfgen_in, "w").write(script)
    data.binary('psfgen', psfgen_in, basename + '.psfgen')
    util.check_output(psfgen_psf)
    util.check_output(psfgen_pdb)

    solvate_psf(psfgen_psf, psfgen_pdb, basename, solvent_buffer)

    psf = basename + '.psf'
    coor = basename + '.coor'
    pdb = basename + '.pdb'
    os.rename(pdb, coor)
    convert_restart_to_pdb(basename, pdb)
    shutil.copy(psf, save_dir)
    shutil.copy(coor, save_dir)
    shutil.copy(pdb, save_dir)
    os.chdir(save_dir)

    return psf, coor
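# A minimal usage sketch for pdb_to_top_and_crds() (hypothetical file names;
# note that in the code above force_field does not alter the CHARMM22 data used):
#
#   psf, coor = pdb_to_top_and_crds('NAMD', 'protein.pdb', 'sim')
#   # -> sim.psf and sim.coor copied into the current directory,
#   #    solvated with the default 10 A buffer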
def read_top(top, chain=' '):
    """
    Returns a list of (mass, charge, chain_id) for the atoms in
    the topology file.
    """
    # The chain parameter replaces the old local default: the original
    # recursed with read_top(itp, chain) against a one-argument signature.
    util.check_output(top)
    lines = open(top).readlines()

    atoms = []
    is_chain_topologies = False
    top_dir = os.path.dirname(top)

    # Recurse into per-chain #include'd .itp files, deriving the chain id
    # from the included filename
    for l in lines:
        if not is_chain_topologies:
            if 'chain topologies' in l:
                is_chain_topologies = True
            continue
        if l.startswith("#include"):
            itp = l.split()[1][1:-1]
            itp = os.path.join(top_dir, itp)
            if os.path.isfile(itp):
                full_chain_name = os.path.splitext(itp)[0]
                chain = full_chain_name.split('_')[-1]
                these_atoms = read_top(itp, chain)
                atoms.extend(these_atoms)
        if l.startswith(";"):
            break

    # Parse the [ atoms ] section of this file
    is_atoms = False
    for l in lines:
        if not is_atoms:
            if '[ atoms ]' in l:
                is_atoms = True
            continue
        if l.startswith('['):
            break
        if l.startswith(";"):
            continue
        if not l.strip():
            continue
        words = l.split()
        n = int(words[0])        # atom number (unused)
        res_num = int(words[2])  # residue number (unused)
        res_type = words[3]      # residue type (unused)
        q = float(words[6])
        mass = float(words[7])
        atoms.append((mass, q, chain))

    return atoms
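# Sketch of the return shape of read_top(): one (mass, charge, chain_id)
# tuple per atom, in file order. File name and values are illustrative only.
#
#   atoms = read_top('sim.top')
#   # atoms == [(14.01, -0.47, 'A'), (1.008, 0.31, 'A'), ...]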
def update():
    # Figure out which traffic class we're updating
    if args.traffic_class:
        traffic_class = args.traffic_class
    elif args.ip_pair:
        traffic_class = class_for_ip_pair(args.ip_pair)
    else:
        traffic_class = DEFAULT_CLASS

    # Update the queues for the traffic class with the new BW/latency
    check_output('%s class replace dev %s parent %i: classid %i:%i htb rate %s ceil %s'
                 % (TC, args.interface, ROOT_Q_HANDLE, ROOT_Q_HANDLE,
                    traffic_class, args.bandwidth, args.bandwidth))
    check_output('%s qdisc replace dev %s parent %i:%i handle %i: netem delay %s'
                 % (TC, args.interface, ROOT_Q_HANDLE, traffic_class,
                    traffic_class, args.latency))

    # Update the rules mapping IP address pairs to the traffic class
    if args.ip_pair:
        U32 = ('%s filter replace dev %s protocol ip parent %i: prio 1 u32'
               % (TC, args.interface, ROOT_Q_HANDLE))
        check_output('%s match ip dst %s match ip src %s flowid %i:%i'
                     % (U32, args.ip_pair[0], args.ip_pair[1],
                        ROOT_Q_HANDLE, traffic_class))
        check_output('%s match ip dst %s match ip src %s flowid %i:%i'
                     % (U32, args.ip_pair[1], args.ip_pair[0],
                        ROOT_Q_HANDLE, traffic_class))
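# Illustrative sketch of the tc commands update() issues for an IP pair,
# assuming ROOT_Q_HANDLE == 1, traffic class 3, interface eth0, 10mbit/20ms;
# all of these values are hypothetical:
#
#   tc class replace dev eth0 parent 1: classid 1:3 htb rate 10mbit ceil 10mbit
#   tc qdisc replace dev eth0 parent 1:3 handle 3: netem delay 20ms
#   tc filter replace dev eth0 protocol ip parent 1: prio 1 u32 \
#       match ip dst 10.0.0.2 match ip src 10.0.0.1 flowid 1:3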
def solvate_psf(in_psf, in_pdb, basename, solvent_buffer=10.0):
    """
    Uses VMD to add explicit waters to a .psf topology file
    """
    parms = {
        'in_psf': in_psf,
        'in_pdb': in_pdb,
        'name': basename,
        'solvent_buffer': solvent_buffer,
    }
    tcl = basename + '.vmd.tcl'
    open(tcl, 'w').write(solvate_vmd_script % parms)
    data.binary('vmd', '-dispdev text -eofexit', basename + '.vmd', tcl)
    util.check_output(basename + '.vmd.pdb')
    util.check_output(basename + '.pdb')
def _regenerate_dependency_cache(self):
    """Regenerate the Maven dependencies for this project. This normally
    happens when the cached dependencies are determined to be stale."""
    # Bootstrap with our skeleton environment
    shutil.copytree(os.path.join(os.path.dirname(__file__), "skeleton"),
                    self.__cached_project_root)

    # Use skeleton environment Maven to copy dependencies into output dir
    cmd = ". %s; which mvn" % os.path.join(self.__cached_project_root,
                                           "environment.source")
    logger.info("Detecting environment mvn via `%s`", cmd)
    env_mvn = util.check_output(cmd, shell=True, cwd=self.__cached_project_root)
    if env_mvn.endswith("\n"):
        env_mvn = env_mvn[:-1]
    env_mvn = env_mvn + " " + self.__maven_flags

    cached_m2_repo = os.path.join(self.__cached_project_root,
                                  Packager._MAVEN_REL_ROOT)
    settings_xml = os.path.join(self.__cached_project_root, "settings.xml")

    # copy-dependencies
    copy_deps_flags = ""
    if self.__maven_repo is not None:
        copy_deps_flags += "-Dmaven.repo.local=%s" % self.__maven_repo
    quiet_flag = "-q"
    if self.__verbose:
        quiet_flag = ""
    cmd = """%s --settings %s %s dependency:copy-dependencies \
-Dmdep.useRepositoryLayout=true -Dmdep.copyPom \
-DoutputDirectory=%s %s"""
    cmd = cmd % (env_mvn, settings_xml, quiet_flag, cached_m2_repo,
                 copy_deps_flags)
    Packager.__shell(cmd, self.__project_root)

    # mvn test without running tests
    cmd = """%s --settings %s %s -Dmaven.repo.local=%s \
-Dmaven.artifact.threads=100 surefire:test -DskipTests"""
    cmd = cmd % (env_mvn, settings_xml, quiet_flag, cached_m2_repo)
    Packager.__shell(cmd, self.__project_root)
def run(in_parms): """ Runs a NAMD simulation using the PDBREMIX in_parms dictionary. """ parms = copy.deepcopy(in_parms) name = parms['output_basename'] # load the included CHARMM2 energy parameters parms['parameter'] = os.path.join(data.data_dir, 'charmm22.parameter') parms['psf_type'] = 'paraTypeCharmm on' # copy over input xsc and topology files (same basename) xsc = parms['topology'].replace('.psf', '.xsc') if os.path.isfile(xsc): shutil.copy(xsc, name + '.in.xsc') parms['xsc'] = name + '.in.xsc' else: parms['xsc'] = '' shutil.copy(parms['topology'], name + '.psf') parms['topology'] = name + '.psf' # copy over coordinates shutil.copy(parms['input_crds'], name + '.in.coor') parms['input_crds'] = name + '.in.coor' # copy over velocities if 'input_vels' in parms and parms['input_vels']: shutil.copy(parms['input_vels'], name + '.in.vel') parms['input_vels'] = name + '.in.vel' else: parms['input_vels'] = '' # copy over restraint coordinates if 'restraint_pdb' in parms and parms['restraint_pdb']: shutil.copy(parms['restraint_pdb'], name + '.restraint.coor') parms['restraint_pdb'] = name + '.restraint.coor' else: parms['restraint_pdb'] = '' namd_in = name + ".namd2.in" open(namd_in, "w").write(make_namd_input_file(parms)) data.binary('namd2', namd_in, name + '.namd2') top, crds, vels = get_restart_files(name) util.check_output(top) util.check_output(crds)
def execute(self):
    try:
        self._output = util.check_output(self._command_line(), shell=True)
        self.reset()
    except subprocess.CalledProcessError as e:
        print(e)
        return False
    return True
def soup_from_top_gro(top, gro, skip_solvent=False):
    """
    Returns a Soup built from GROMACS restart files.
    If skip_solvent=True, will skip all solvent molecules.
    """
    util.check_output(top)
    util.check_output(gro)

    soup = pdbatoms.Soup()
    soup.remaining_text = ""
    soup.n_remaining_text = 0

    atoms = []
    # Read from .gro because .top does not contain water
    # residue information, which is "inferred"
    lines = open(gro, 'r').readlines()
    for i_line, line in enumerate(lines[2:-1]):
        atom = AtomFromGroLine(line)
        if skip_solvent and atom.res_type == "SOL":
            soup.remaining_text = "".join(lines[i_line+2:-1])
            soup.n_remaining_text = len(lines[i_line+2:-1])
            break
        atoms.append(atom)
    soup.box = [float(w) for w in lines[-1].split()]

    for atom, (mass, q, chain_id) in zip(atoms, read_top(top)):
        atom.mass = mass
        atom.charge = q

    curr_res_num = -1
    for a in atoms:
        if curr_res_num != a.res_num:
            res = pdbatoms.Residue(a.res_type, a.chain_id, a.res_num)
            soup.append_residue(res.copy())
            curr_res_num = a.res_num
        soup.insert_atom(-1, a)

    convert_to_pdb_atom_names(soup)
    protein.find_chains(soup)
    return soup
def link_jars(libs, directory):
    makedirs(directory)
    cp = check_output(['buck', 'audit', 'classpath'] + libs)
    for j in cp.strip().splitlines():
        if j not in jars:
            jars.add(j)
            n = path.basename(j)
            if j.startswith('buck-out/gen/gerrit-'):
                n = j.split('/')[2] + '-' + n
            symlink(path.join(root, j), path.join(directory, n))
def make_restraint_script(pdb, force=100.0):
    """
    Generates a sander input fragment that specifies the atoms to be
    restrained. The function reads a PDB file that was generated by the
    topology functions above, and uses the B-factor field (B > 0) to
    determine which atoms are to be restrained. The atoms are restrained
    by a spring of the given force in kcal/mol/angs**2.
    """
    util.check_output(pdb)
    script = "Restrained atoms from %s\n" % pdb
    script += "%s\n" % force
    script += restraint_script
    for i, atom in enumerate(pdbatoms.read_pdb(pdb)):
        if atom.bfactor > 0.0:
            script += "ATOM %d %d\n" % (i+1, i+1)
    script += "END\n"
    script += "END\n"
    return script
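# A sketch of the B-factor convention used above: mark the atoms you want
# pinned by writing B > 0 before calling make_restraint_script. This helper
# is hypothetical and only illustrates the convention; it assumes the
# pdbatoms Soup API used elsewhere in this file.
def mark_backbone_for_restraint(in_pdb, out_pdb):
    soup = pdbatoms.Soup(in_pdb)
    for atom in soup.atoms():
        # restrain backbone atoms only; everything else stays free
        atom.bfactor = 1.0 if atom.type in ('CA', 'N', 'C') else 0.0
    soup.write_pdb(out_pdb)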
def show():
    print('=============== Queue Disciplines ===============')
    check_output('%s -s qdisc show dev %s' % (TC, args.interface))
    print('\n================ Traffic Classes ================')
    check_output('%s -s class show dev %s' % (TC, args.interface))
    print('\n==================== Filters ====================')
    check_output('%s -s filter show dev %s' % (TC, args.interface))
def build_from_project(project_root):
    retcode = subprocess.call("git show-ref --quiet", shell=True,
                              cwd=project_root)
    if retcode != 0:
        raise Exception("Directory %s is not a git repository" % project_root)

    grind_git_hash = util.check_output("git show-ref --head -s HEAD",
                                       shell=True,
                                       cwd=os.path.dirname(__file__))
    if grind_git_hash.endswith("\n"):
        grind_git_hash = grind_git_hash[:-1]

    git_hash = util.check_output("git show-ref --head -s HEAD",
                                 shell=True, cwd=project_root)
    if git_hash.endswith("\n"):
        git_hash = git_hash[:-1]

    git_branch = "(no branch)"
    try:
        git_branch = util.check_output("git symbolic-ref HEAD",
                                       shell=True, cwd=project_root)
    except Exception:
        # Ignore an error here, can happen if we're on a detached HEAD
        pass
    if git_branch.endswith("\n"):
        git_branch = git_branch[:-1]

    # Hash the deps file to look for changes
    deps_checksum = None
    return Manifest(grind_git_hash, os.path.normpath(project_root),
                    git_branch, git_hash, datetime.datetime.now(),
                    deps_checksum)
def disulfide_script_and_rename_cysteines(in_pdb, out_pdb):
    """
    Returns the tleap script for disulfide bonds in the in_pdb file.

    This function opens in_pdb in a Soup, and searches for CYS residues
    where the SG-SG distance < 3 angs. These residues are then renamed
    to CYX and written to out_pdb. The disulfide bonds are returned
    as a .tleap script fragment.
    """
    soup = pdbatoms.Soup(in_pdb)
    script = " # disulfide bonds\n"
    n = len(soup.residues())
    for i in range(n):
        for j in range(i+1, n):
            # Compare with == rather than the original `in 'CYS'`, which
            # was a substring test and would also match types like 'C' or 'S'
            if soup.residue(i).type == 'CYS' and soup.residue(j).type == 'CYS':
                p1 = soup.residue(i).atom('SG').pos
                p2 = soup.residue(j).atom('SG').pos
                if v3.distance(p1, p2) < 3.0:
                    soup.residue(i).set_type('CYX')
                    soup.residue(j).set_type('CYX')
                    script += "bond pdb.%d.SG pdb.%d.SG\n" % (i+1, j+1)
    soup.write_pdb(out_pdb)
    util.check_output(out_pdb)
    return script
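# Usage sketch (hypothetical file name), mirroring how run_tleap() below calls
# this with the same file for input and output, renaming in place:
#
#   bond_script = disulfide_script_and_rename_cysteines('clean.pdb', 'clean.pdb')
#   # bond_script: " # disulfide bonds\nbond pdb.3.SG pdb.40.SG\n..."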
def get_rev(repo, rev):
    validate_rev(rev)
    return check_output(["git", "-C", repo, "rev-parse", rev],
                        stdin=DEVNULL).decode(PREFERREDENCODING).strip()
def install_filters(links_file): with open(links_file, "r") as linksf: for line in strip_comments(linksf): elems = line.split(" ") check_output("%s update %s %s -c %i" % (TC_SETUP, elems[0], elems[2], int(elems[1].split("link")[1]))) linksf.closed
def restart_apache_script(script):
    check_output('%s restart' % script, shouldPrint=False)
def restart_apache_binary(bin):
    check_output('%s -k restart' % bin, shouldPrint=True)
jar = ['-Dpackaging=jar']
src = ['-Dpackaging=java-source']
cmd = {
    'deploy': ['mvn',
               'deploy:deploy-file',
               '-DrepositoryId=gerrit-api-repository',
               '-Durl=%s' % URL],
    'install': ['mvn', 'install:install-file'],
}

try:
    check_output(cmd[action] + plugin + common + jar +
                 ['-Dfile=%s' % plugin_jar])
    check_output(cmd[action] + plugin + common + src +
                 ['-Dfile=%s' % plugin_src])
    check_output(cmd[action] + extension + common + jar +
                 ['-Dfile=%s' % extension_jar])
    # The original snippet was truncated mid-call; the fourth invocation and
    # the error handler below are completed from the symmetric pattern above
    # and the handler style of the deploy fragment later in this file.
    check_output(cmd[action] + extension + common + src +
                 ['-Dfile=%s' % extension_src])
except Exception as e:
    print('%s command failed: %s' % (action, e), file=stderr)
    exit(1)
def run(in_parms): """ Run a GROMACS simulations using the PDBREMIX parms dictionary. """ parms = copy.deepcopy(in_parms) basename = parms['output_basename'] # Copies across topology and related *.itp files, with appropriate # filename renaming in #includes top = basename + '.top' in_top = parms['topology'] shutil.copy(in_top, top) in_name = os.path.basename(in_top).replace('.top', '') in_dir = os.path.dirname(in_top) file_tag = "%s/%s_*itp" % (in_dir, in_name) new_files = [top] for f in glob.glob(file_tag): new_f = os.path.basename(f) new_f = new_f.replace(in_name, basename) shutil.copy(f, new_f) new_files.append(new_f) for f in new_files: replace_include_file(f, in_name + "_", basename + "_") # Copy over input coordinates/velocities in_gro = basename + '.in.gro' shutil.copy(parms['input_crds'], in_gro) # Generates a postiional-restraint topology file if parms['restraint_pdb']: # 1kcal*mol*A**-2 = 4.184 kJ*mol*(0.1 nm)**-2 kcalmolang2_to_kJmolnm2 = 400.184 open(basename + '_posre.itp', 'w').write( make_restraint_itp( parms['restraint_pdb'], parms['restraint_force'] * kcalmolang2_to_kJmolnm2)) # Generate .mdp file based on parms in_mdp = basename + '.grompp.mdp' open(in_mdp, 'w').write(make_mdp(parms)) # Now run .grompp to generate this .tpr file tpr = basename + '.tpr' # .mdp to save complete set of parameters mdp = basename + '.mdrun.mdp' data.binary( 'grompp', '-f %s -po %s -c %s -p %s -o %s' \ % (in_mdp, mdp, in_gro, top, tpr), basename + '.grompp') util.check_files(tpr) # Run simulation with the .tpr file data.binary( 'mdrun', '-v -deffnm %s' % (basename), basename + '.mdrun') top, crds, vels = get_restart_files(basename) util.check_output(top) util.check_output(crds) # Cleanup delete_backup_files(basename)
def run(in_parms): """ Run a AMBER simulations using the PDBREMIX in_parms dictionary. """ parms = copy.deepcopy(in_parms) basename = parms['output_basename'] # Copies across topology file input_top = parms['topology'] util.check_files(input_top) new_top = basename + '.top' shutil.copy(input_top, new_top) # Copies over coordinate/velocity files input_crd = parms['input_crds'] util.check_files(input_crd) if input_crd.endswith('.crd'): new_crd = basename + '.in.crd' else: new_crd = basename + '.in.rst' shutil.copy(input_crd, new_crd) # Decide on type of output coordinate/velocity file if 'n_step_minimization' in parms: rst = basename + ".crd" else: rst = basename + ".rst" # Construct the long list of arguments for sander trj = basename + ".trj" vel_trj = basename + ".vel.trj" ene = basename + ".ene" inf = basename + ".inf" sander_out = basename + ".sander.out" sander_in = basename + ".sander.in" args = "-O -i %s -o %s -p %s -c %s -r %s -x %s -v %s -e %s -inf %s" \ % (sander_in, sander_out, new_top, new_crd, rst, trj, vel_trj, ene, inf) # Make the input script script = make_sander_input_file(parms) # If positional restraints if parms['restraint_pdb']: # Generate the AMBER .crd file that stores the constrained coordinates pdb = parms['restraint_pdb'] soup = pdbatoms.Soup(pdb) ref_crd = basename + '.restraint.crd' write_soup_to_rst(soup, ref_crd) util.check_output(ref_crd) # Add the restraints .crd to the SANDER arguments args += " -ref %s" % ref_crd # Add the restraint forces and atom indices to the SANDER input file script += make_restraint_script(pdb, parms['restraint_force']) open(sander_in, "w").write(script) # Run the simulation data.binary('sander', args, basename) # Check if output is okay util.check_output(sander_out, ['FATAL']) top, crds, vels = get_restart_files(basename) util.check_output(top) util.check_output(crds)
def run_tleap(force_field, pdb, name, solvent_buffer=0.0, excess_charge=0):
    """
    Generates AMBER topology and coordinate files from PDB.

    If excess_charge is non-zero, will also generate counterions.
    If solvent_buffer is non-zero, will generate explicit waters;
    otherwise no waters are generated. No waters is used for implicit
    solvent simulations.
    """
    util.check_output(pdb)

    # Remove all but protein heavy atoms in a single clean conformation
    tleap_pdb = name + '.clean.pdb'
    pdbtext.clean_pdb(pdb, tleap_pdb)

    # The restart files to be generated
    top = name + '.top'
    crd = name + '.crd'

    # Dictionary to substitute into tleap scripts
    params = {
        'top': top,
        'crd': crd,
        'pdb': tleap_pdb,
        'data_dir': data.data_dir,
        'solvent_buffer': solvent_buffer,
    }

    # Use the best force field for the versions of AMBER the author has tested
    if 'AMBER11' in force_field:
        params['amber_ff'] = "leaprc.ff99SB"
    elif 'AMBER14' in force_field:
        params['amber_ff'] = "leaprc.ff14SB"
    elif 'AMBER8' in force_field:
        params['amber_ff'] = "leaprc.ff96"
    else:
        raise Exception("Don't know which version of AMBER(8|11|14) to use.")

    # Make the tleap input script
    script = force_field_script

    # Check for a few non-standard residues that have been included
    residues = [r.type for r in pdbatoms.Soup(tleap_pdb).residues()]
    if 'PHD' in residues:
        leaprc = open("%s/phd.leaprc" % data.data_dir).read()
        script += leaprc
    if 'ZNB' in residues:
        leaprc = open("%s/znb.leaprc" % data.data_dir).read()
        script += leaprc

    script += "pdb = loadpdb %(pdb)s\n"
    script += disulfide_script_and_rename_cysteines(tleap_pdb, tleap_pdb)

    if 'GBSA' not in force_field:
        # Add explicit waters, as this is not GBSA implicit solvent
        if excess_charge != 0:
            # Add script to add counterions, must specify + or -
            if excess_charge > 0:
                script += "addions pdb Cl- 0\n"
            else:
                script += "addions pdb Na+ 0\n"
        solvent_buffer = 10
        params['solvent_buffer'] = solvent_buffer
        script += explicit_water_box_script

    script += save_and_quit_script
    script = script % params

    # Now write script to input file
    tleap_in = name + ".tleap.in"
    open(tleap_in, "w").write(script)

    # Now run tleap with tleap_in
    data.binary('tleap', "-f " + tleap_in, name + '.tleap')

    # Check output is okay
    if os.path.isfile('leap.log'):
        os.rename('leap.log', name + '.tleap.log')
        util.check_output(name + '.tleap.log', ['FATAL'])
    util.check_output(top)
    util.check_output(crd)

    return top, crd
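# Usage sketch for run_tleap() (hypothetical inputs and force-field labels,
# chosen only to exercise the branches above): an explicit-water AMBER14 run,
# then a GBSA variant that skips solvation entirely.
#
#   top, crd = run_tleap('AMBER14', 'protein.pdb', 'sim', solvent_buffer=10.0)
#   top, crd = run_tleap('AMBER14-GBSA', 'protein.pdb', 'sim_gbsa')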
def get_file_contents(repo, rev, path):
    validate_rev(rev)
    return check_output(["git", "-C", repo, "show", rev + ":" + path],
                        stdin=DEVNULL)
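# Usage sketch combining the two git helpers above (hypothetical repo path
# and file): resolve a rev to a full SHA, then read a blob at that rev.
#
#   sha = get_rev('/path/to/repo', 'HEAD~1')
#   blob = get_file_contents('/path/to/repo', sha, 'setup.py')  # raw bytes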
elif 'deploy' == args.a:
    cmd = mvn(args.a) + [
        'deploy:deploy-file',
        '-DrepositoryId=%s' % args.repository,
        '-Durl=%s' % args.url,
    ] + common
else:
    print("unknown action -a %s" % args.a, file=stderr)
    exit(1)

for spec in args.s:
    artifact, packaging_type, src = spec.split(':')
    cmds = cmd + [
        '-DartifactId=%s' % artifact,
        '-Dpackaging=%s' % packaging_type,
        '-Dfile=%s' % src,
    ]
    try:
        check_output(cmds)
    except Exception as e:
        cmds_str = ' '.join(pipes.quote(c) for c in cmds)
        print("%s command failed: `%s`: %s" % (args.a, cmds_str, e),
              file=stderr)
        exit(1)

with open(args.o, 'w') as fd:
    if args.repository:
        print('Repository: %s' % args.repository, file=fd)
    if args.url:
        print('URL: %s' % args.url, file=fd)
    print('Version: %s' % args.v, file=fd)