def run_autobahn():
    """Spawn the autobahn test suite in a subprocess and check its report.

    Raises RuntimeError if wstest itself exits non-zero, SystemExit(1) if
    the generated report contains gevent_websocket failures.
    """
    import os.path
    cmd = ['wstest -m fuzzingclient -s %s/autobahn.json' % (
        os.path.dirname(__file__),)]
    wstest = Popen(cmd, stderr=PIPE, stdout=PIPE, shell=True)
    if wstest.wait():
        # something went wrong, it's boom time.
        stdout, stderr = wstest.communicate(None)
        sys.stderr.write(stderr)
        sys.stderr.flush()
        sys.stdout.write(stdout)
        # bug fix: previously flushed sys.stderr a second time here, so the
        # captured stdout could be lost on the way out.
        sys.stdout.flush()
        raise RuntimeError
    # parse the generated report to see if we have failures
    chk = Popen(
        'fgrep gevent_websocket reports/clients/index.html | grep Fail',
        stdout=PIPE, shell=True)
    stdout, stderr = chk.communicate(None)
    if stdout:
        sys.stderr.write('Autobahn test failures:\n' + stdout)
        raise SystemExit(1)
def test_ia_upload():
    """End-to-end checks of `ia upload`: stdin uploads, file upload,
    --debug mode, and a rejected (403) upload."""
    def run(command):
        # Run `command` in a shell; return (returncode, stdout, stderr).
        child = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
        out, err = child.communicate()
        return child.returncode, out, err

    # upload from stdin.
    rc, out, err = run('echo "Hello World!" |'
                       'ia upload iacli-test-item - --remote-name="stdin.txt" --size-hint=8')
    assert rc == 0

    rc, out, err = run('echo "Hello World!" |'
                       'ia upload iacli-test-item -')
    assert rc == 1
    assert err == '--remote-name is required when uploading from stdin.\n'

    # upload file.
    rc, out, err = run('ia upload iacli-test-item setup.py')
    assert rc == 0

    # upload debug.
    rc, out, err = run('ia upload iacli-test-item setup.py --debug')
    assert rc == 0

    # upload non-200 status_code.
    rc, out, err = run('echo "Hello World!" |'
                       'ia upload iacli-test-item - --remote-name="iacli-test-item_meta.xml"')
    assert err == 'error "AccessDenied" (403): Access Denied\n'
    assert rc == 1
def test_ia_mine():
    """Exercise `ia mine` with a file of identifiers, --cache, --output,
    and identifiers piped on stdin; clean up generated files afterwards."""
    with open('testlist.txt', 'w') as fp:
        fp.write('\n'.join(['nasa', 'iacli-test-item']))

    commands = (
        'ia mine testlist.txt --cache',
        'ia mine testlist.txt --output=d.json',
        'ia mine testlist.txt',
        # Test ids from stdin.
        'echo "nasa" | ia mine -',
    )
    for command in commands:
        child = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
        child.communicate()
        assert child.returncode == 0

    try:
        os.remove('testlist.txt')
        os.remove('d.json')
        os.remove('nasa_meta.json')
        os.remove('iacli-test-item_meta.json')
    except OSError:
        pass
def save_weather_data( location, filename ):
    # Render the weather for `location` with wego and write both a plain
    # ANSI version (`filename`) and an HTML version (`filename`.html).
    if location == NOT_FOUND_LOCATION:
        # Unknown location: fall back to the default location and append a
        # "not found" note to the rendered output below.
        location_not_found = True
        location = DEFAULT_LOCATION
    else:
        location_not_found = False
    # NOTE(review): a sibling implementation passes '--city=%s' to wego;
    # confirm which flag the deployed wego build actually accepts.
    p = Popen( [ WEGO, '-location=%s' % location ], stdout=PIPE, stderr=PIPE )
    stdout, stderr = p.communicate()
    if p.returncode != 0:
        error( stdout + stderr )
    # Make sure the output directory exists before writing.
    dirname = os.path.dirname( filename )
    if not os.path.exists( dirname ):
        os.makedirs( dirname )
    if location_not_found:
        stdout += NOT_FOUND_MESSAGE
    open( filename, 'w' ).write( stdout )
    # Convert the ANSI output to HTML via the ansi2html helper script.
    p = Popen( [ "bash", ANSI2HTML, "--palette=solarized", "--bg=dark" ],
               stdin=PIPE, stdout=PIPE, stderr=PIPE )
    stdout, stderr = p.communicate( stdout )
    if p.returncode != 0:
        error( stdout + stderr )
    open( filename+'.html', 'w' ).write( stdout )
def logs_downloader(logs_queued, recipient):
    # Build a tar archive of the requested log files and notify the client
    # that it is ready for download.
    # NOTE(review): this body references `self` but the signature takes no
    # `self` parameter -- presumably a closure defined inside a method;
    # confirm against the enclosing scope.
    archive_path = '%ssrc/rockstor/logs/' % settings.ROOT_DIR
    archive_file = 'requested_logs.tgz'
    # If log download requested by Log Reader serve a personalized tgz
    # file with log file name
    if (recipient == 'reader_response'):
        archive_file = '%s.tgz' % logs_queued[0]
    archive_path += archive_file
    download_command = []
    download_command.extend(self.tar_utility)
    download_command.append(archive_path)
    # Get every log location via logs dictionary
    for log in logs_queued:
        download_command.append(self.build_log_path(log))
    # Build download archive
    download_process = Popen(download_command, bufsize=1, stdout=PIPE)
    download_process.communicate()
    # Return ready state for logs archive download specifying recipient
    # (logManager or LogDownloader)
    self.emit('logsdownload',
              {'key': 'logManager:logsdownload',
               'data': {'archive_name': '/logs/%s' % archive_file,
                        'recipient': recipient}})
def test_ia_metadata_exists():
    """Check the `ia ls` command: plain listing, --glob filtering, and the
    --all/--verbose/--location flag combinations."""
    nasa_files = ['NASAarchiveLogo.jpg', 'globe_west_540.jpg',
                  'nasa_reviews.xml', 'nasa_meta.xml',
                  'nasa_archive.torrent', 'nasa_files.xml']

    child = Popen('ia ls nasa', shell=True, stdout=PIPE, stderr=PIPE)
    out, err = child.communicate()
    listed = [line.strip() for line in out.split('\n')]
    assert all(expected in listed for expected in nasa_files)
    assert child.returncode == 0

    child = Popen('ia ls nasa --glob="*torrent"',
                  shell=True, stdout=PIPE, stderr=PIPE)
    out, err = child.communicate()
    assert out == 'nasa_archive.torrent\r\n'
    assert child.returncode == 0

    for flags in ('--all --verbose', '--location'):
        child = Popen('ia ls nasa %s' % flags,
                      shell=True, stdout=PIPE, stderr=PIPE)
        child.communicate()
        assert child.returncode == 0
def run(self, command_line, split_lines, pipe_as_input):
    """Execute `command_line`, capturing stdout/stderr and the exit code.

    When `pipe_as_input` is not None it is UTF-8 encoded and fed to the
    child's stdin.  With `split_lines` the raw stdout bytes are stored as
    a list of lines in self.lines; otherwise self.output holds the decoded
    text.  self.error_out receives raw stderr.  Returns the exit code.
    """
    logging.info("command line: %s", command_line)
    argv = shlex.split(command_line)
    if pipe_as_input is None:
        child = Popen(argv, stdout=PIPE, stderr=PIPE)
        output, error_out = child.communicate()
    else:
        child = Popen(argv, stdin=PIPE, stdout=PIPE, stderr=PIPE)
        output, error_out = child.communicate(
            input=pipe_as_input.encode("utf-8"))
    # communicate() has already reaped the child; wait() just reads the code.
    self.exit_code = child.wait()
    if split_lines:
        self.lines = output.splitlines()
    else:
        self.output = output.decode("utf-8")
    self.error_out = error_out
    return self.exit_code
def get_moon(location, html=False, lang=None):
    """Render the current moon phase with pyphoon.

    `location` may carry an '@<date>' suffix selecting the date to render.
    With html=True the ANSI output is converted to HTML via ansi2html.
    Returns the rendered output (raises via error() on HTML failure).
    """
    date = None
    if '@' in location:
        date = location[location.index('@') + 1:]
        location = location[:location.index('@')]

    cmd = [PYPHOON]
    if date:
        try:
            # Validate the date before passing it through to pyphoon.
            dateutil.parser.parse(date)
        except (ValueError, OverflowError):
            # bug fix: was a bare `except: pass`, which also swallowed
            # KeyboardInterrupt/SystemExit; invalid dates are dropped.
            pass
        else:
            cmd += [date]

    env = os.environ.copy()
    if lang:
        env['LANG'] = lang
    # bug fix: removed a leftover debug `print cmd` statement.
    p = Popen(cmd, stdout=PIPE, stderr=PIPE, env=env)
    stdout = p.communicate()[0]
    if html:
        p = Popen(["bash", ANSI2HTML, "--palette=solarized", "--bg=dark"],
                  stdin=PIPE, stdout=PIPE, stderr=PIPE)
        stdout, stderr = p.communicate(stdout)
        if p.returncode != 0:
            error(stdout + stderr)
    return stdout
def test_ia_upload():
    # End-to-end checks of `ia upload`: stdin uploads (with and without
    # --remote-name), a plain file upload, --debug mode, and a rejected
    # (403 AccessDenied) upload to a protected remote name.
    # upload from stdin.
    cmd = (
        'echo "Hello World!" |'
        'ia upload iacli-test-item - --remote-name="stdin.txt" --size-hint=8')
    proc = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    assert proc.returncode == 0
    # Without --remote-name a stdin upload must be rejected.
    cmd = ('echo "Hello World!" |'
           'ia upload iacli-test-item -')
    proc = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    assert proc.returncode == 1
    assert stderr == '--remote-name is required when uploading from stdin.\n'
    # upload file.
    cmd = 'ia upload iacli-test-item setup.py'
    proc = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    assert proc.returncode == 0
    # upload debug.
    cmd = 'ia upload iacli-test-item setup.py --debug'
    proc = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    assert proc.returncode == 0
    # upload non-200 status_code.
    cmd = (
        'echo "Hello World!" |'
        'ia upload iacli-test-item - --remote-name="iacli-test-item_meta.xml"')
    proc = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    assert stderr == 'error "AccessDenied" (403): Access Denied\n'
    assert proc.returncode == 1
def test_ia_metadata_exists():
    # Check `ia ls`: the plain listing of the nasa item must contain the
    # expected files; also exercise --glob, --all/--verbose and --location.
    nasa_files = ['NASAarchiveLogo.jpg', 'globe_west_540.jpg',
                  'nasa_reviews.xml', 'nasa_meta.xml',
                  'nasa_archive.torrent', 'nasa_files.xml']
    cmd = 'ia ls nasa'
    proc = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    output = [x.strip() for x in stdout.split('\n')]
    assert all(f in output for f in nasa_files)
    assert proc.returncode == 0
    # Glob filtering should return exactly the torrent file.
    cmd = 'ia ls nasa --glob="*torrent"'
    proc = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    assert stdout == 'nasa_archive.torrent\r\n'
    assert proc.returncode == 0
    cmd = 'ia ls nasa --all --verbose'
    proc = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    assert proc.returncode == 0
    cmd = 'ia ls nasa --location'
    proc = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    assert proc.returncode == 0
def save_weather_data( location, filename ):
    # Render the weather for `location` with wego and write both a plain
    # ANSI version (`filename`) and an HTML version (`filename`.html).
    if location == NOT_FOUND_LOCATION:
        # Unknown location: render the default location instead and append
        # a "not found" note to the output below.
        location_not_found = True
        location = DEFAULT_LOCATION
    else:
        location_not_found = False
    p = Popen( [ WEGO, '--city=%s' % location ], stdout=PIPE, stderr=PIPE )
    stdout, stderr = p.communicate()
    if p.returncode != 0:
        error( stdout + stderr )
    # Make sure the output directory exists before writing.
    dirname = os.path.dirname( filename )
    if not os.path.exists( dirname ):
        os.makedirs( dirname )
    if location_not_found:
        stdout += NOT_FOUND_MESSAGE
    open( filename, 'w' ).write( stdout )
    # Convert the ANSI output to HTML via the ansi2html helper script.
    p = Popen( [ "bash", ANSI2HTML, "--palette=solarized", "--bg=dark" ],
               stdin=PIPE, stdout=PIPE, stderr=PIPE )
    stdout, stderr = p.communicate( stdout )
    if p.returncode != 0:
        error( stdout + stderr )
    open( filename+'.html', 'w' ).write( stdout )
def logs_downloader(logs_queued, recipient):
    # Build a tar archive of the requested log files and notify the client
    # that it is ready for download.
    # NOTE(review): this body references `self` but the signature takes no
    # `self` parameter -- presumably a closure defined inside a method;
    # confirm against the enclosing scope.
    archive_path = "%ssrc/rockstor/logs/" % settings.ROOT_DIR
    archive_file = "requested_logs.tgz"
    # If log download requested by Log Reader serve a personalized tgz
    # file with log file name
    if recipient == "reader_response":
        archive_file = "%s.tgz" % logs_queued[0]
    archive_path += archive_file
    download_command = []
    download_command.extend(self.tar_utility)
    download_command.append(archive_path)
    # Get every log location via logs dictionary
    for log in logs_queued:
        download_command.append(self.build_log_path(log))
    # Build download archive
    download_process = Popen(download_command, bufsize=1, stdout=PIPE)
    download_process.communicate()
    # Return ready state for logs archive download specifying recipient
    # (logManager or LogDownloader)
    self.emit(
        "logsdownload",
        {
            "key": "logManager:logsdownload",
            "data": {
                "archive_name": "/logs/%s" % archive_file,
                "recipient": recipient,
            },
        },
    )
def logs_downloader(logs_queued, recipient):
    # Build a tar archive of the requested log files and notify the client
    # that it is ready for download.
    # NOTE(review): this body references `self` but the signature takes no
    # `self` parameter -- presumably a closure defined inside a method;
    # confirm against the enclosing scope.
    archive_path = '%ssrc/rockstor/logs/' % settings.ROOT_DIR
    archive_file = 'requested_logs.tgz'
    # If log download requested by Log Reader serve a personalized tgz
    # file with log file name
    if (recipient == 'reader_response'):
        archive_file = '%s.tgz' % logs_queued[0]
    archive_path += archive_file
    download_command = []
    download_command.extend(self.tar_utility)
    download_command.append(archive_path)
    # Get every log location via logs dictionary
    for log in logs_queued:
        download_command.append(self.build_log_path(log))
    # Build download archive
    download_process = Popen(download_command, bufsize=1, stdout=PIPE)
    download_process.communicate()
    # Return ready state for logs archive download specifying recipient
    # (logManager or LogDownloader)
    self.emit(
        'logsdownload',
        {
            'key': 'logManager:logsdownload',
            'data': {
                'archive_name': '/logs/%s' % archive_file,
                'recipient': recipient
            }
        })
def test_invoke():
    """Spawn a trivial subprocess to verify that libev handles SIGCHLD.

    This could *probably* be simplified to use just
    hub.loop.install_sigchld.
    """
    child = Popen("true", stdout=PIPE, stderr=PIPE)
    gevent.sleep(0)
    child.communicate()
    gevent.sleep(0)
def test_ia_metadata_exists():
    """`ia metadata --exists` must fail for a missing item and succeed for
    an existing one."""
    cases = [
        ('ia metadata --exists iacli_test-doesnotexist', 1),
        ('ia metadata --exists nasa', 0),
    ]
    for command, expected_rc in cases:
        child = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
        child.communicate()
        assert child.returncode == expected_rc
def test_ia_search():
    """Check `ia search` with a sort option and --number-found output."""
    child = Popen('ia search iacli-test-item --sort=date:asc',
                  shell=True, stdout=PIPE, stderr=PIPE)
    child.communicate()
    assert child.returncode == 0

    child = Popen('ia search "identifier:iacli-test-item" --number-found',
                  shell=True, stdout=PIPE, stderr=PIPE)
    out, err = child.communicate()
    assert out == '1\n'
    assert child.returncode == 0
def bot_start_worker():
    # Launch the botx replay worker as a shell subprocess and block until
    # it exits.  The Popen handle is published through the module-level
    # `botx_subp` so other code can signal/kill the bot.
    # NOTE(review): the binary path is a hard-coded developer machine path.
    global botx_subp
    sub = Popen([
        '/Users/timesking/Projects/PlanX/ServerX/src/vcs.taiyouxi.net/botx/botx mbot -s 0 replay'
    ], stdout=PIPE, shell=True)
    botx_subp = sub
    # for l in sub.stdout.readline():
    #     logger.info(l)
    logger.info("bot start")
    # communicate() also drains stdout so the child cannot block on a
    # full pipe while we wait for it.
    sub.communicate()
def test_ia_metadata_modify():
    """A fresh timestamped key must be accepted; a key starting with '-'
    must be rejected as illegal."""
    # Modify test item.
    valid_key = "foo-{k}".format(k=int(time()))
    child = Popen(
        'ia metadata --modify="{k}:test_value" iacli_test_item'.format(
            k=valid_key),
        shell=True, stdout=PIPE, stderr=PIPE)
    out, err = child.communicate()
    assert child.returncode == 0

    # Submit illegal modification.
    child = Popen('ia metadata --modify="-foo:test_value" iacli_test_item',
                  shell=True, stdout=PIPE, stderr=PIPE)
    out, err = child.communicate()
    assert child.returncode == 1
    assert "Illegal tag name" in err
def analyze(cls, pcap_path, name=None, description=None, keep_pcap=True):
    """
    Given a PCAP path on disk; analyze that pcap with Zeek storing the results
    in a directory deterministically identified by hashing the pcap file.

    :param pcap_path: The path to the pcap file on disk
    :param name: The name of the pcap (short descriptor)
    :param description: A long description for the pcap
    :param keep_pcap: If True, we'll save a copy of the pcap to disk after analysis
    :return: A ZeekReplay instance
    """
    # Sanitize user-supplied metadata: alphanumeric-only name capped at 64
    # chars, description capped at 1024.
    if name:
        name = re.sub("[^0-9a-zA-Z]+", "", name)[0:64]
    if description:
        description = description[0:1024]
    environment_variables = utilities.get_environment_file_dict()
    install_directory = environment_variables.get('ZEEK_HOME')
    scripts_directory = environment_variables.get('ZEEK_SCRIPTS')
    # Session directory name is the MD5 of the pcap, so the same pcap
    # always maps to the same replay session.
    pcap_replay_id = utilities.get_filepath_md5_hash(pcap_path)
    replay_session = os.path.join(REPLAY_ROOT, str(pcap_replay_id))
    utilities.makedirs(replay_session)
    zeek_bin_path = os.path.join(install_directory, 'bin', 'zeek')
    zeek_scripts_config = config.ScriptConfigManager(scripts_directory)
    # -r: read the pcap; -C: ignore invalid TCP checksums.
    command = 'cd {}; {} -r {} {} -C'.format(
        replay_session, zeek_bin_path, pcap_path,
        ' '.join(zeek_scripts_config.list_enabled_scripts()))
    child = Popen(command, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE,
                  close_fds=True)
    child.communicate()
    # Write our metadata to disk
    with open(os.path.join(replay_session, '.metadata'), 'a') as meta_f:
        meta_f.write(
            json.dumps({
                'time': time.time(),
                'name': name,
                'description': description
            }) + '\n')
    # Copy over the pcap if we want to keep it.
    if keep_pcap:
        shutil.copy(pcap_path,
                    os.path.join(replay_session, pcap_replay_id + '.pcap'))
    return cls(pcap_replay_id)
def test_ia_metadata_modify():
    # A fresh timestamped key must be accepted; a key starting with '-'
    # must be rejected as an illegal tag name.
    # Modify test item.
    valid_key = "foo-{k}".format(k=int(time()))
    cmd = 'ia metadata --modify="{k}:test_value" iacli_test_item'.format(
        k=valid_key)
    proc = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    assert proc.returncode == 0
    # Submit illegal modification.
    cmd = 'ia metadata --modify="-foo:test_value" iacli_test_item'
    proc = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
    stdout, stderr = proc.communicate()
    assert proc.returncode == 1
    assert "Illegal tag name" in stderr
def copy_untar_ref_db(source_file, destination, logger):
    """
    Copies and uncompresses gzipped tar file containing reference database
    to destination.

    Returns `destination`.  Raises PipelineError if tar/gunzip exit
    non-zero, FileFormatError for unrecognized file extensions.
    """
    workdir = os.path.dirname(destination)
    tar_call = [utils.resolve_executable("tar"), "-xf", source_file]
    if source_file.lower().endswith((".tar.gz", ".tar", ".tgz")):
        logger.info("It appears reference DB '%s' is in tar/gz format",
                    source_file)
        logger.info("Extracting database tarball...")
        shutil.copy(source_file, destination)
        tar = Popen(tar_call, stdout=PIPE, stderr=PIPE, cwd=workdir)
        logger.debug("tar call:{}".format(tar_call))
        tar_stream_data = tar.communicate()
        # bug fix: `is not 0` compared identity, not value.
        if tar.returncode != 0:
            logger.error("tar returncode {}".format(tar.returncode))
            # bug fix: the (stdout, stderr) tuple must be unpacked to fill
            # both placeholders; a single tuple arg raised IndexError here.
            logger.error(
                "tar stdout: {}\ntar stderr: {}".format(*tar_stream_data))
            raise PipelineError("tar exited with return code {}".format(
                tar.returncode))
        else:
            logger.info("Untar of reference DB successful.")
    elif source_file.lower().endswith((".gz")):
        logger.info("It appears reference DB '%s' is in gz format",
                    source_file)
        logger.info("Gunzipping database file...")
        shutil.copy(source_file, destination)
        gunzip_call = [utils.resolve_executable("gunzip"), source_file]
        gunzip = Popen(gunzip_call, stdout=PIPE, stderr=PIPE, cwd=workdir)
        # bug fix: previously logged tar_call instead of gunzip_call.
        logger.debug("gunzip call:{}".format(gunzip_call))
        gunzip_stream_data = gunzip.communicate()
        if gunzip.returncode != 0:
            logger.error("gunzip returncode {}".format(gunzip.returncode))
            logger.error("gunzip stdout: {}\ngunzip stderr: {}".format(
                *gunzip_stream_data))
            raise PipelineError("gunzip exited with return code {}".format(
                gunzip.returncode))
        else:
            logger.info("Gunzip of reference DB successful.")
    else:
        logger.error(
            "Don't know what to do with {}, it does not look like a (gzipped) tar file"
            .format(source_file))
        raise FileFormatError(source_file)
    return destination
def get_doc_index(document, filepath): words = {} command = 'tesseract ' + filepath + ' -l hrv stdout' #text = subprocess.check_output(command, shell=True) print '>>> ', command sub = Popen([command], stdout=PIPE, shell=True) text, err = sub.communicate() print '>>> ', command, 'DONE' # extract page num from file path match = re.search('.*xl-(\d+).png', filepath) if match: page = int(match.groups()[0]) for word in text.strip().split(): # skip small words if len(word) <= 2: continue if word in words: document_dict = words[word] if document['name'] in document_dict: pages_set = words[word][document['name']] pages_set.add(page) words[word][document['name']] = pages_set else: words[word][document['name']] = set([page]) else: # init word words[word] = {document['name']: set([page])} return len(words)
def link_alive_tcp(link, remote_ip):
    """
    Returns status of the link.
    If the link is alive, returns its rtt to the remote_ip.

    Use this method to check if the link is alive:
    $ nc -v -s 192.168.0.101 -w 1 1.1.1.1 35501
    nc: connect to 1.1.1.1 port 35501 (tcp) timed out: Operation now in progress
    $ nc -v -s 192.168.0.101 -w 1 5.9.243.189 35501
    nc: connect to 5.9.243.189 port 35501 (tcp) failed: Connection refused
    """
    # Only probe ethernet-like interfaces (names starting with 'e' or 'l').
    if link[0] not in 'el':
        return None
    source_ip = ip_of_interface(link)
    if source_ip is None:
        log("Can't detect IP address of %s" % link)
        return None
    probe = Popen(['nc', '-v', '-s', source_ip, '-w', '1', remote_ip, '35501'],
                  stdout=PIPE, stderr=STDOUT)
    output = probe.communicate()[0]
    # "Connection refused" means the packet made the round trip.
    return '1' if 'Connection refused' in output else None
def _get_page(self, topic, request_options=None):
    """Run the external command associated with `topic` and return its
    decoded stdout, or an empty string when no command applies."""
    cmd = self._get_command(topic, request_options=request_options)
    if not cmd:
        return ""
    child = Popen(cmd, stdout=PIPE, stderr=PIPE)
    return child.communicate()[0].decode('utf-8')
def _safe_popen(cmd):
    # Run `cmd` (a shell-style command string) and return (stdout, stderr)
    # as bytes.  Retries forever on EMFILE (fd exhaustion); exits the
    # process on any other failure, or on a non-zero return code that is
    # not the benign svn "property not found" warning (W200017).
    while True:
        try:
            _logger.debug(u'[cmd] ' + cmd)
            if PY3:
                cmd_parts = shlex.split(cmd)
            else:
                # shlex can not handle unicode in Python 2,
                # refer to: https://stackoverflow.com/questions/14218992/shlex-split-still-not-supporting-unicode
                cmd_parts = shlex.split(cmd.encode(locale.getpreferredencoding()))
            pipe = Popen(cmd_parts, stdout=PIPE, stderr=PIPE, shell=False)
            data, err = pipe.communicate()
            # err is a multi-bytes string
            if pipe.returncode != 0 and not u'W200017'.encode('utf-8') in err:
                # W200017: svn property not found
                _logger.error(u'Exec cmd failed: %s, ret: %s\nstdout: %s\nstderr: %s\n',
                              cmd, pipe.returncode, data, err)
                sys.exit(-1)
            return data, err
        except (OSError, IOError) as ex:
            if ex.errno == 24:
                # open fd count exceeds system limits
                sleep(1)  # wait for subprocess finished which will increase available fds
                continue
            else:
                raise
        except Exception as ex:
            _logger.error(u'Exec cmd failed: %s,\nex: %s', cmd, ex)
            sys.exit(-1)  # make sure any exception crash
def ping(ip, task_id):
    """Ping `ip` three times, parse packet loss and average latency from
    the output, and persist them as a Ping record tied to `task_id`."""
    loss = latency = None
    p = Popen(['ping', '-c', '3', ip], stdout=PIPE, stderr=PIPE)
    out, err = p.communicate()
    out = out.split('\n')
    for line in out:
        line = line.rstrip()
        match = re.match('.* ([0-9]+)% packet loss.*', line)
        if match:
            loss = match.group(1)
        # bug fix: the third group was written '([0-9\.])+' (repetition
        # outside the group), which captured only the last character of
        # the max value on the min/avg/max/mdev line.
        match = re.match('.*([0-9\.]+)/([0-9\.]+)/([0-9\.]+)/([0-9\.]+) ms.*',
                         line)
        if match:
            # group(2) is the average rtt.
            latency = match.group(2)
    ping = Ping()
    ping.ip = ip
    ping.task_id = uuid.UUID(task_id)
    if loss:
        ping.loss = loss
    if latency:
        ping.latency = float(latency)
    ping.save()
def get_supported_codecs():
    # Query the encoder binary ("<encoder> -codecs") and parse its codec
    # table.  Returns a (decoders, encoders) tuple of codec-name sets.
    # NOTE(review): on a non-zero exit this returns [] instead of a pair of
    # empty sets -- callers that unpack the result would raise; confirm
    # whether that asymmetry is intentional.
    encoder = get_encoder_name()
    command = [encoder, "-codecs"]
    res = Popen(command, stdout=PIPE, stderr=PIPE)
    output = res.communicate()[0].decode("utf-8")
    if res.returncode != 0:
        return []
    if sys.platform == 'win32':
        # Normalize Windows line endings before splitting on '\n'.
        output = output.replace("\r", "")

    # Flags column: first char 'D' => decoding supported,
    # second char 'E' => encoding supported.
    rgx = re.compile(r"^([D.][E.][AVS.][I.][L.][S.]) (\w*) +(.*)")
    decoders = set()
    encoders = set()
    for line in output.split('\n'):
        match = rgx.match(line.strip())
        if not match:
            continue
        flags, codec, name = match.groups()
        if flags[0] == 'D':
            decoders.add(codec)
        if flags[1] == 'E':
            encoders.add(codec)

    return (decoders, encoders)
def encrypt_file(filename, passwd):
    """
    encrypt file and return new file

    Tars `filename`, pipes it through `openssl des3` keyed by `passwd`,
    and writes the result to a fresh temp file.

    :param filename: path of the file to encrypt
    :param passwd: passphrase handed to openssl
    :return: path of the resulting .des archive, or '' on failure
    """
    from gevent.subprocess import Popen, PIPE
    rc = -1
    target_des = get_temp_filename() + '.des'
    try:
        # SECURITY NOTE(review): `filename` and `passwd` are interpolated
        # into a shell command unquoted -- shell injection if either is
        # attacker-controlled.
        cmd = "tar -zcf - %s | openssl des3 -salt -k %s | dd of=%s" % (
            filename, passwd, target_des)
        # cmd_list = cmd.split(' ')
        # rc = gevent.subprocess.call(cmd_list)
        sub = Popen([cmd], stdout=PIPE, shell=True)
        out, err = sub.communicate()
        # Success is judged by the output file existing, not by exit code.
        if not os.path.exists(target_des):
            target_des = ''
    except:
        traceback.print_exc()
        target_des = ''
    return target_des
def _htmlize(ansi_output, title, parsed_query):
    """Return HTML representation of `ansi_output`.

    Use `title` as the title of the page. Format page according to query
    parameters from `parsed_query`."""
    inverted = parsed_query.get('inverted_colors')
    cmd = ["bash", ANSI2HTML, "--palette=solarized"]
    if not inverted:
        cmd.append("--bg=dark")
    converter = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    raw_out, raw_err = converter.communicate(ansi_output.encode("utf-8"))
    stdout = raw_out.decode("utf-8")
    stderr = raw_err.decode("utf-8")
    if converter.returncode != 0:
        error(stdout + stderr)
    if inverted:
        # Flip the default dark body style to a light one.
        stdout = stdout.replace(
            '<body class="">',
            '<body class="" style="background:white;color:#777777">')
    title = "<title>%s</title>" % title
    opengraph = _get_opengraph(parsed_query)
    return re.sub("<head>", "<head>" + title + opengraph, stdout)
def execute_cmd_with_output(cmd, stdin=None):
    """Run `cmd` and return its captured (stdout, stderr).

    :param cmd: argv sequence (or string) handed to Popen
    :param stdin: optional file object wired to the child's stdin
    :raises Exception: if the command exits with a non-zero status
    """
    log.debug("Running command: %r" % cmd)
    p = Popen(cmd, bufsize=-1, stdout=PIPE, stderr=PIPE, stdin=stdin)
    (msgs, errs) = p.communicate()
    if p.returncode != 0:
        # Include the command, exit status and stderr so failures are
        # diagnosable (the previous message discarded all context).
        raise Exception('Failed to run command %r (exit code %s): %s'
                        % (cmd, p.returncode, errs))
    return (msgs, errs)
def create(self, usernames, passwords, target_iqn, target_lun, size,
           initiator_iqn_list):
    # NB: initiator_iqn_list needs to be a comma separated list of initiator iqn strings
    # Shell out to the provisioning script to create a target/LUN, guarded
    # by a Timeout so a hung script cannot block forever.
    # Returns [script stdout, return code].
    self.logger.debug("Preparing to execute create()")
    timeout = Timeout(self.script_timeout)
    process = Popen(self.scriptfile_path + " -c -q" + " -u " + usernames +
                    " -p " + passwords + " -s " + size + " -m " + target_lun +
                    " -t " + target_iqn + " -i " + initiator_iqn_list,
                    stdout=PIPE, shell=True)
    # Defaults reported when the timeout fires before the script finishes.
    output = "Create operation exceeded execution timeout.\n"
    returncode = 1
    timeout.start()
    try:
        output = process.communicate()[0]
        returncode = process.returncode
    except Timeout:
        process.kill()
        self.logger.warn(
            "Process %s servicing create() " +
            "exceeded execution timeout and was terminated.", process.pid)
        # kill() may have produced a return code by now; prefer it.
        if process.returncode is not None:
            returncode = process.returncode
    finally:
        timeout.cancel()
    return [output, returncode]
def ipv6_supported():
    """Checks whether we can support IPv6 on this host.

    :returns tuple[bool,str]: supported, reason for lack of support or None.
    """
    if not os.path.exists("/proc/sys/net/ipv6"):
        return False, "/proc/sys/net/ipv6 is missing (IPv6 compiled out?)"
    try:
        check_call(["which", "ip6tables"])
    except FailedSystemCall:
        return False, ("ip6tables not installed; Calico IPv6 support requires "
                       "Linux kernel v3.3 or above and ip6tables v1.4.14 or "
                       "above.")
    try:
        # Use -C, which checks for a particular rule. We don't expect the rule
        # to exist but iptables will give us a distinctive error if the
        # rpfilter module is missing.
        proc = Popen(["ip6tables", "-C", "FORWARD", "-m", "rpfilter"],
                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = proc.communicate()
        # NOTE(review): under Python 3 `err` is bytes, so this substring
        # test would raise TypeError -- presumably Python 2 code; confirm.
        if "Couldn't load match" in err:
            return False, (
                "ip6tables is missing required rpfilter match module; "
                "Calico IPv6 support requires Linux kernel v3.3 or "
                "above and ip6tables v1.4.14 or above.")
    except OSError:
        return False, "Failed to execute ip6tables"
    return True, None
def _wego_wrapper(location, parsed_query):
    """Invoke wego for `location`, translating `parsed_query` options into
    command-line flags.  Returns (stdout, stderr, returncode) with the
    streams decoded as UTF-8."""
    lang = parsed_query['lang']
    location_name = parsed_query['override_location_name']

    cmd = [WEGO, '--city=%s' % location]
    # Simple boolean query options map one-to-one onto wego flags.
    for key, flag in (('inverted_colors', '-inverse'),
                      ('use_ms_for_wind', '-wind_in_ms'),
                      ('narrow', '-narrow')):
        if parsed_query.get(key):
            cmd.append(flag)
    if lang and lang in SUPPORTED_LANGS:
        cmd.append('-lang=%s' % lang)
    if parsed_query.get('use_imperial', False):
        cmd.append('-imperial')
    if location_name:
        cmd += ['-location_name', location_name]

    child = Popen(cmd, stdout=PIPE, stderr=PIPE)
    out, err = child.communicate()
    return out.decode("utf-8"), err.decode("utf-8"), child.returncode
def encrypt_file(filename, passwd): """ encrypt file and return new file :param filename: :param passwd: :return: """ rc = -1 target_des = filename + '.des' try: cmd = "tar -zcf - %s | openssl des3x -salt -k %s | dd of=%s" % ( filename, passwd, target_des) cmd_list = cmd.split(' ') sub = Popen([cmd], stdout=PIPE, shell=True) out, err = sub.communicate() print err print sub # print cmd_list # rc = gevent.subprocess.call(cmd_list) except: traceback.print_exc() print 'encrypt_file call return:', rc if rc != 0: target_des = '' return target_des
def html_wrapper(data):
    """Convert ANSI text `data` to HTML via the ansi2html script using the
    xterm palette on a dark background."""
    converter = Popen(["bash", ANSI2HTML, "--palette=xterm", "--bg=dark"],
                      stdin=PIPE, stdout=PIPE, stderr=PIPE)
    stdout, stderr = converter.communicate(data.encode('utf-8'))
    if converter.returncode != 0:
        error(stdout + stderr)
    return stdout.decode('utf-8')
def try_get_video_name(self, video_url):
    # Ask youtube-dl for the title of `video_url`.  Returns youtube-dl's
    # raw stdout (including the trailing newline).
    print "Getting name for %s"%video_url
    cmd=['youtube-dl', '--get-title',video_url]
    temp = Popen(cmd, stdout=PIPE)
    out, err = temp.communicate()
    print out
    return out
def ipv6_supported():
    """Checks whether we can support IPv6 on this host.

    :returns tuple[bool,str]: supported, reason for lack of support or None.
    """
    if not os.path.exists("/proc/sys/net/ipv6"):
        return False, "/proc/sys/net/ipv6 is missing (IPv6 compiled out?)"
    try:
        check_call(["which", "ip6tables"])
    except FailedSystemCall:
        return False, ("ip6tables not installed; Calico IPv6 support requires "
                       "Linux kernel v3.3 or above and ip6tables v1.4.14 or "
                       "above.")
    try:
        # Use -C, which checks for a particular rule. We don't expect the rule
        # to exist but iptables will give us a distinctive error if the
        # rpfilter module is missing.
        proc = Popen(["ip6tables", "-C", "FORWARD", "-m", "rpfilter"],
                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = proc.communicate()
        # NOTE(review): under Python 3 `err` is bytes, so this substring
        # test would raise TypeError -- presumably Python 2 code; confirm.
        if "Couldn't load match" in err:
            return False, (
                "ip6tables is missing required rpfilter match module; "
                "Calico IPv6 support requires Linux kernel v3.3 or "
                "above and ip6tables v1.4.14 or above."
            )
    except OSError:
        return False, "Failed to execute ip6tables"
    return True, None
def gunzip_copy(source_file, destination, logger):
    """
    Takes a source file and a destination and determines whether to gunzip
    the file before moving it to the destination.

    Raises PipelineError if gunzip exits non-zero.
    """
    if source_file.lower().endswith((".gz")):
        logger.info("File %s seems gzipped, uncompressing to node...",
                    source_file)
        # Remove the .gz suffix and redirect stdout to this file
        destination = destination[:-3]
        outfile = open(destination, "w")
        gunzip_call = [utils.resolve_executable("gunzip"), source_file, "-c"]
        gunzip = Popen(gunzip_call, stdout=outfile, stderr=PIPE)
        gunzip_stream_data = gunzip.communicate()
        outfile.close()
        # bug fix: `is not 0` compared identity, not value.
        if gunzip.returncode != 0:
            logger.error("Could not gunzip {} to node".format(source_file))
            # bug fix: the (stdout, stderr) tuple must be unpacked to fill
            # both placeholders; a single tuple arg raised IndexError here.
            logger.error("gunzip stdout: {}\ngunzip stderr: {}".format(
                *gunzip_stream_data))
            raise PipelineError("gunzip exited with return code {}".format(
                gunzip.returncode))
        else:
            logger.info("Successfully gunzipped %s to node.", source_file)
    else:
        # It is probably not compressed (at least not with gzip)
        try:
            logger.info("Copying %s to node...", source_file)
            shutil.copy(source_file, destination)
            logger.info("Successfully copied %s to node.", source_file)
        # bug fix: `except OSError, message` is Python-2-only syntax.
        except OSError as message:
            logger.error("File copy error: %s", message)
def worker():
    # Demo worker: shell out to `sleep 10`, wait for it, and return its
    # (empty) stdout with trailing whitespace stripped.
    print 'start'
    sub = Popen(['sleep 10'], stdout=PIPE, shell=True)
    # sub = Popen(['top'], stdout=PIPE, shell=True)
    out, err = sub.communicate()
    print 'end'
    return out.rstrip()
def get_moon(parsed_query):
    # Render the moon phase with pyphoon for the query's date, language and
    # hemisphere; optionally strip ANSI codes or convert the art to HTML.
    location = parsed_query['orig_location']
    html = parsed_query['html_output']
    lang = parsed_query['lang']
    hemisphere = parsed_query['hemisphere']
    date = None
    # A '@<date>' suffix on the location selects the date to render.
    if '@' in location:
        date = location[location.index('@') + 1:]
        location = location[:location.index('@')]
    cmd = [globals.PYPHOON]
    if lang:
        cmd += ["-l", lang]
    if not hemisphere:
        cmd += ["-s", "south"]
    if date:
        try:
            # Validate before passing through; invalid dates are dropped.
            dateutil.parser.parse(date)
        except Exception as e:
            print("ERROR: %s" % e)
        else:
            cmd += [date]
    p = Popen(cmd, stdout=PIPE, stderr=PIPE)
    stdout = p.communicate()[0]
    stdout = stdout.decode("utf-8")
    if parsed_query.get('no-terminal', False):
        stdout = globals.remove_ansi(stdout)
    if html:
        # Convert the ANSI art to HTML via the ansi2html helper script.
        p = Popen(
            ["bash", globals.ANSI2HTML, "--palette=solarized", "--bg=dark"],
            stdin=PIPE, stdout=PIPE, stderr=PIPE)
        stdout, stderr = p.communicate(stdout.encode("utf-8"))
        stdout = stdout.decode("utf-8")
        stderr = stderr.decode("utf-8")
        if p.returncode != 0:
            globals.error(stdout + stderr)
    return stdout
def fetch(url):
    """Time a request to `url` with curl (using curl-format.txt) and return
    the parsed JSON timing metrics.  Prints a progress dot."""
    sys.stdout.write(".")
    sys.stdout.flush()
    curl = Popen(['curl', '-w', '@curl-format.txt', '-o', '/dev/null',
                  '-s', url], stdout=PIPE)
    out, err = curl.communicate()
    # Some locales emit decimal commas; normalize to dots for json.loads.
    out = re.sub(r'(\d+),(\d+)', r'\1.\2', out)
    return json.loads(out)
def ping(ip, number_of_packages, current_order, speed, host, data,
         requesting_application, requesting_application_name):
    """Ping `ip` with `number_of_packages` packets and record the raw
    result (plus request metadata) in data[current_order]."""
    command = 'ping -i {0} -q -W {1} -c {2} {3}'.format(
        speed, speed * number_of_packages, number_of_packages, ip)
    if DEBUG:
        print(command)
    pinger = Popen([command], stdout=PIPE, stderr=STDOUT, shell=True)
    out, err = pinger.communicate()
    data[current_order] = [ip, out, time.time(), host, command,
                           requesting_application,
                           requesting_application_name]
def _download_by_curl(self, method, url, headers, file_path):
    """Download `url` to `file_path` via curl, forwarding `headers` and the
    session cookie.  Returns curl's stdout.

    NOTE: the header/url values are interpolated into a shell string
    unquoted; callers must pass trusted values only.
    """
    header_str = ' '.join("%s: %s" % (k, v) for k, v in headers.items())
    request_cmd = 'curl -J -k -b sid=%s -H "%s" -X %s -o %s %s -s' % \
        (self.cookies['sid'], header_str, method, file_path, url)
    child = Popen(request_cmd, stdout=PIPE, shell=True)
    response, err = child.communicate()
    return response
def test_ia_metadata_formats():
    """`ia metadata --formats` must report exactly the expected formats."""
    child = Popen('ia metadata --formats iacli_test_item',
                  shell=True, stdout=PIPE, stderr=PIPE)
    out, err = child.communicate()
    expected = {"Text", "Archive BitTorrent", "Metadata"}
    # Drop the trailing newline before splitting into format names.
    assert set(out[:-1].split('\n')) == expected
def worker(code):
    # Download the quote CSV for stock `code` with wget (skipping codes
    # already on disk) and print wget's output.
    m = judgeMarket(code)
    print 'start process %s.%s' % (code, m)
    target_file = os.path.join(DATA_DIR, '%s.csv' % code)
    if os.path.exists(target_file):
        print '%s quote file already exists' % code
    else:
        cmd = "wget -t 10 %s%s.%s -O %s" % (WGET_URL, code, m, target_file)
        print 'fetch data : %s' % cmd
        sub = Popen([cmd], stdout=PIPE, shell=True)
        out, err = sub.communicate()
        print out.rstrip()
def html_wrapper(data):
    """ Convert ANSI text `data` to HTML """
    converter = Popen(["bash", ANSI2HTML, "--palette=solarized", "--bg=dark"],
                      stdin=PIPE, stdout=PIPE, stderr=PIPE)
    stdout, stderr = converter.communicate(data.encode('utf-8'))
    if converter.returncode != 0:
        error(stdout + stderr)
    return stdout.decode('utf-8')
def static_reader(reader, log_path):
    # Read a whole log file via an external reader command and stream it to
    # the client in chunks over a socket `emit`. NOTE(review): references
    # `self` although defined as a plain function — presumably a closure
    # nested inside a handler method; confirm against the enclosing scope.
    if (valid_log(log_path)):
        # Log file exist and greater than 0, perform data collecting
        # Build reader command
        read_command = build_reader_command(reader)
        # Define popen process and once completed split stdout by lines
        reader_process = Popen(read_command, bufsize=1, stdout=PIPE)
        log_content = reader_process.communicate()[0]
        log_contentsize = getsizeof(log_content)
        log_content = log_content.splitlines(True)
        # Starting from content num of lines decide if serve it 1
        # line/time or in 200 lines chunks
        reader_type = 'fast' if (len(log_content) <= 200) else 'slow'
        chunk_size = logs_loader[reader_type]['lines']
        reader_sleep = logs_loader[reader_type]['sleep']
        # Partition the lines into fixed-size chunks (Python 2 xrange).
        log_content_chunks = [log_content[x:x+chunk_size] for x in xrange(0, len(log_content), chunk_size)]  # noqa F821
        total_rows = len(log_content)
    else:
        # Log file missing or size 0, gently inform user
        # Log not exist or empty so we send fake values for rows,
        # chunks, etc to uniform data on existing functions and avoid
        # client side extra checks
        log_content = 'Selected log file is empty or doesn\'t exist'
        log_content = log_content.splitlines(True)
        total_rows = 1
        log_contentsize = getsizeof(log_content)
        log_content_chunks = []
        log_content_chunks.append(log_content)
        reader_sleep = 0
    # Serve each chunk with emit and sleep before next one to avoid
    # client side browser overload
    current_rows = 0
    for data_chunks in log_content_chunks:
        chunk_content = ''.join(data_chunks)
        current_rows += len(data_chunks)
        self.emit('logcontent', {
            'key': 'logManager:logcontent',
            'data': {
                'current_rows': current_rows,
                'total_rows': total_rows,
                'chunk_content': chunk_content,
                'content_size': log_contentsize
            }
        })
        # Cooperative yield between chunks (gevent), throttling the stream.
        gevent.sleep(reader_sleep)
def process_video(self, video_url, timestamp=None):
    # Resolve a video page URL to a direct stream URL with youtube-dl, play
    # it with omxplayer, then report completion on self.task.
    # Python 2 print statements throughout.
    print "Playing %s"%video_url
    cmd=['youtube-dl', '-g','-f best',video_url]
    temp = Popen(cmd, stdout=PIPE)
    out, err = temp.communicate()
    video_direct = out  # direct media URL printed by `youtube-dl -g`
    # [:-1] trims the trailing newline from youtube-dl's output.
    cmd=['omxplayer', '-o', 'hdmi', '--vol', '-2000', video_direct[:-1]]
    print " ".join(cmd)
    self.sub = Popen(cmd, stdin=PIPE)
    omxdbus = OmxDbus()  # NOTE(review): created but unused here — confirm side effects of construction
    # Block until omxplayer exits.
    self.sub.communicate()
    self.sub = None
    # Signal playback completion with the caller-supplied timestamp.
    self.task.put({'play_complete':timestamp})
def _upload_by_curl(self, method, url, headers, files):
    """Upload `files` to `url` with curl (-T) and return the server response.

    Authenticates with the sid cookie from self.cookies; raises
    SelfServerException when curl produced no output at all.
    """
    header_str = ' '.join('%s: %s' % pair for pair in headers.items())
    request_cmd = 'curl -k -b sid=%s -H "%s" -X %s -T %s %s -s' % (
        self.cookies['sid'], header_str, method, files, url)
    proc = Popen(request_cmd, stdout=PIPE, shell=True)
    response, _ = proc.communicate()
    if response:
        return response
    raise SelfServerException(
        'Did not receive response from server after '
        'running command \'%s\'' % request_cmd)
def ip_of_interface( iface ):
    """Return the first IPv4 address of `iface`, or None.

    Runs `/sbin/ip addr show dev <iface>` (stderr merged into stdout) and
    parses the first line containing ' inet ', e.g.
    "    inet 192.0.2.1/24 brd ..." -> "192.0.2.1".

    Returns None when the command exits non-zero, no inet line is present,
    or the matching line cannot be split as expected.
    """
    cmd = [ '/sbin/ip', 'addr', 'show', 'dev', iface ]
    p = Popen(cmd, shell=False, stdout=PIPE, stderr=STDOUT)
    stdout = p.communicate()[0]
    if p.returncode:
        # `ip` failed (e.g. unknown interface name).
        return None
    for line in stdout.splitlines():
        if ' inet ' in line:
            try:
                # second token is "addr/prefix"; keep only the address
                return line.strip().split()[1].split('/')[0]
            except IndexError:
                # Narrowed from a bare `except:` so that real bugs and
                # KeyboardInterrupt are no longer silently swallowed; a
                # malformed inet line still yields None as before.
                return None
    # No IPv4 address configured on the interface.
    return None
def change_default_gw( new_gw ):
    # Replace the system default route with `new_gw`, bouncing OpenVPN
    # around the change. The script is fed to `sh -s` on stdin; stdout and
    # stderr are captured together and logged on failure.
    # NOTE(review): the `\$3` escape inside the awk program is meant to
    # survive the Python string and shell layers so awk sees its field $3 —
    # confirm the exact escaping against the target shell before touching it.
    cmds = """
    set -x
    /etc/init.d/openvpn stop
    OLD_GW=`ip route show | awk '/default/{print \$3}'`
    echo OLD_GW=,$OLD_GW,
    [ -z "$OLD_GW" ] || ip route delete default via $OLD_GW
    ip route add default via %(new_gw)s
    /etc/init.d/openvpn start
    """ % locals()
    p = Popen( ["sh", "-s"], shell=False, stdin=PIPE, stdout=PIPE, stderr=STDOUT )
    output = p.communicate( cmds )[0]
    if p.returncode:
        # Any failing command in the script surfaces here; keep the trace
        # (`set -x`) in the log for diagnosis.
        log( 'Non-exit return code. Output:\n' + output )
        return False
    return True
def _get_tldr(topic):
    """Fetch the tldr page for `topic` and reformat it as commented shell text.

    Each line loses its two-column ANSI margin; description lines (leading
    '-' or any unindented text) are turned into '#' comments, blank lines
    are kept, and indented command lines pass through untouched. Returns a
    unicode string ending with a newline.
    """
    proc = Popen(["tldr", topic], stdout=PIPE, stderr=PIPE)
    raw = proc.communicate()[0]
    out_lines = []
    for raw_line in raw.splitlines():
        line = raw_line[2:]  # drop the 2-char left margin
        if line.startswith('-'):
            out_lines.append('# ' + line[2:])
        elif line == "":
            out_lines.append(line)
        elif not line.startswith(' '):
            out_lines.append("# " + line)
        else:
            out_lines.append(line)
    return ("\n".join(out_lines) + "\n").decode('utf-8')
def get_git_refs():
    # Flask endpoint: list tag/branch names of a remote git repo given by the
    # `ep` query parameter, using `git ls-remote` with a 20s gevent timeout.
    # Returns (json, http-status) tuples on every path.
    if DISABLE_NEW_EXTENSIONS:
        return 'Disabled', 403
    git_endpoint = request.values.get('ep', None)
    if git_endpoint is None:
        return jsonify(error={'message': 'Missing endpoint'}), 400
    # Cheap validation only — the endpoint must at least look like a git URL.
    if not git_endpoint.endswith('.git'):
        return jsonify(error={'message': 'Invalid git endpoint'}), 400
    git_path = config.get('MINEMELD_GIT_PATH', None)
    if git_path is None:
        return jsonify(error={'message': 'MINEMELD_GIT_PATH not set'}), 500
    # -t tags, -h heads; endpoint is passed as a separate argv element
    # (no shell), so it cannot inject shell syntax.
    git_args = [git_path, 'ls-remote', '-t', '-h', git_endpoint]
    git_process = Popen(
        args=git_args,
        close_fds=True,
        shell=False,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE
    )
    # gevent Timeout: raises in this greenlet if communicate() blocks > 20s.
    timeout = Timeout(20.0)
    timeout.start()
    try:
        git_stdout, git_stderr = git_process.communicate()
    except Timeout:
        git_process.kill()
        return jsonify(error={'message': 'Timeout accessing git repo'}), 400
    finally:
        # Always cancel so the pending timeout can't fire later in this greenlet.
        timeout.cancel()
    if git_process.returncode != 0:
        LOG.error('Error running {}: {}'.format(git_args, git_stderr))
        return jsonify(error={'message': 'Error running git: {}'.format(git_stderr)}), 400
    # ls-remote lines look like "<sha>\trefs/heads/<name>"; keep only <name>.
    return jsonify(result=[line.rsplit('/', 1)[-1] for line in git_stdout.splitlines()])
def do_task(**post_data):
    """Save an uploaded file into the upload directory and rebuild the yum
    repository index with createrepo.

    Expected post_data keys: 'upfile' (client-side path, used only for its
    basename), 'filename' (the file CONTENT to write), and optional
    'callback_url', 'accept_key', 'task_id' overriding module defaults.

    Returns a dict with the task id, callback info, saved filename, and the
    createrepo stdout/stderr/returncode.
    """
    callback = post_data.get('callback_url', callback_url)
    acceptkey = post_data.get('accept_key', accept_key)
    task_id = post_data.get('task_id', 0)
    # Normalise Windows path separators before taking the basename.
    filepath = post_data.get('upfile').replace('\\', '/')
    filename = filepath.split('/')[-1]
    newFile = os.path.join(uploadpath, filename)
    if not os.path.exists(uploadpath):
        os.mkdir(uploadpath)
    # `with` guarantees the handle is closed even if write() raises
    # (the original open/write/close leaked the handle on error).
    with open(newFile, 'w') as fout:
        fout.write(post_data.get('filename'))
    # Rebuild the yum repository index.
    p = Popen(
        "cd %s && createrepo %s" % (rpmdir, yumname),
        shell=True, stdout=PIPE, stderr=PIPE)
    try:
        stdout, stderr = p.communicate()
    finally:
        # Explicitly release the pipe file descriptors.
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode
    mylggr.debug('task id %d return stdout %s ,stderr %s!' % (task_id, stdout, stderr))
    return {
        'task_id': task_id,
        'callback_url': callback,
        'accept_key': acceptkey,
        'filename': filename,
        'stdout': stdout,
        'stderr': stderr,
        'returncode': rc
    }
def _get_answer_for_question(topic):
    """
    Find answer for the `topic` question.

    Guesses the question's human language with polyglot's Detector and,
    for non-English queries, passes --human-language to the external
    answer script. Falls back to English on detection failure.
    """
    words = topic.replace('+', ' ').strip().split()
    topic = " ".join(words)
    lang = 'en'
    try:
        query_text = topic  # " ".join(topic)
        # Strip any leading path component and the trailing slash.
        query_text = re.sub('^[^/]*/+', '', query_text.rstrip('/'))
        # Remove up to two trailing numeric path segments.
        query_text = re.sub('/[0-9]+$', '', query_text)
        query_text = re.sub('/[0-9]+$', '', query_text)
        detector = Detector(query_text)
        print("query_text = ", query_text)
        supposed_lang = detector.languages[0].code
        print("supposed lang = ", supposed_lang)
        # Trust the detector only for longer queries or a known language set.
        if len(words) > 2 or supposed_lang in ['az', 'ru', 'uk', 'de', 'fr', 'es', 'it']:
            lang = supposed_lang
        # Collapse regional variants.
        if supposed_lang.startswith('zh_') or supposed_lang == 'zh':
            lang = 'zh'
        elif supposed_lang.startswith('pt_'):
            lang = 'pt'
        if supposed_lang in ['ja', 'ko']:
            lang = supposed_lang
    except UnknownLanguage:
        print("Unknown language (%s)" % query_text)

    cli_args = [topic] if lang == 'en' else ['--human-language', lang, topic]
    cmd = ["/home/igor/cheat.sh/bin/get-answer-for-question"] + cli_args
    proc = Popen(cmd, stdout=PIPE, stderr=PIPE)
    return proc.communicate()[0].decode('utf-8')