Example #1
def check_call(*popenargs, **kwargs):
    retcode = subprocess.call(*popenargs, **kwargs)
    cmd = kwargs.get("args")
    if cmd is None:
        cmd = popenargs[0]
    if retcode:
        raise subprocess.CalledProcessError(retcode, cmd)
    return retcode
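
This is the classic pure-Python fallback for subprocess.check_call, and it behaves the same way as the standard-library version. A quick usage sketch (assuming the POSIX true/false commands are available):

import subprocess

# Exit code 0: check_call returns 0.
check_call(["true"])

# Non-zero exit code: CalledProcessError is raised.
try:
    check_call(["false"])
except subprocess.CalledProcessError as error:
    print("command failed with exit code", error.returncode)
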
def process_mutant(key, replacementFileRel, replacementFile):
    """
    e.g.
    replacementFileRel  = exampleCopy.java/3.java
    replacementFile     = Example/../mutated/exampleCopy.java/3.java
    """
    options = optionParser()
    original_f = False
    try:
        symlink_nfs()
        LOGGER.debug("testing mutant file: " + replacementFileRel)

        try:
            if os.path.basename(options.buildPath) == "pom.xml":
                assert os.path.isfile(options.buildPath)
                buildDir = os.path.abspath(os.path.dirname(options.buildPath))
            else:
                assert os.path.isdir(options.buildPath)
                buildDir = os.path.abspath(options.buildPath)

        except AssertionError as exception:
            LOGGER.debug("build system working directory should be a directory.")


        # check if we have separate test-suite
        if options.testCommand != "***dummy***":
            separateTestSuite = True
            if options.testPath == "***dummy***":
                testDir = buildDir
            else:
                try:
                    if os.path.basename(options.testPath) == "pom.xml":
                        assert os.path.isfile(options.testPath)
                        testDir = os.path.abspath(os.path.dirname(options.testPath))
                    else:
                        assert os.path.isdir(options.testPath)
                        testDir = os.path.abspath(options.testPath)

                except AssertionError as exception:
                    LOGGER.debug("test path should be a pom.xml file or a directory.")

        else:
            separateTestSuite = False


        success = False

        # run the build, store the results
        runOutputTest = ""

        with TimeIt.time_context('MutatorWrapper', 'readAndReplaceMutantFileForExecution'):
            # replace the original file with the mutant
            with open(replacementFile, 'r') as mutant_file:
                mutantFile = mutant_file.read()
            with open(os.path.join(options.sourcePath, key), 'r+') as f:
                f.seek(0)
                original_f = f.read()
                f.seek(0)
                f.write(mutantFile)
                f.truncate()

        commandString = options.buildCommand.split(',')
        if separateTestSuite:
            testCommandString = options.testCommand.split(',')

        try:
            # if we have timeout support, simply run the command with timeout support from subprocess32
            # if timeoutSupport:
            #     runOutput = subprocess.check_output(commandString, stderr=subprocess.STDOUT, cwd=buildDir,
            #                                         timeout=int(options.timeout))
            #     if separateTestSuite:
            #         runOutput += subprocess.check_output(testCommandString, stderr=subprocess.STDOUT, cwd=testDir,
            #                                         timeout=int(options.timeout))

            # else, run our alternative method
            # else:
            processKilled, processExitCode, runOutput = LittleDarwin.timeoutAlternative(commandString,
                                                                      workingDirectory=buildDir,
                                                                      timeout=int(options.timeout))

            # raise the same exception as the original check_output.
            if processKilled or processExitCode:
                raise subprocess.CalledProcessError(1 if processKilled else processExitCode, commandString,
                                                        runOutput)

            if separateTestSuite:
                processKilled, processExitCode, runOutputTest = LittleDarwin.timeoutAlternative(testCommandString,
                                                      workingDirectory=testDir, timeout=int(options.timeout))

                # raise the same exception as the original check_output.
                if processKilled or processExitCode:
                    raise subprocess.CalledProcessError(1 if processKilled else processExitCode,
                                                      commandString, "\n".join([runOutput, runOutputTest]))


            # if we are here, it means no exceptions happened, so let's count this run as a success.
            runOutput += "\n" + runOutputTest
            success = True

        # putting two exceptions in one except clause, especially when one of them is not defined on some
        # platforms, does not look like a good idea, even though both of them do exactly the same thing.
        except subprocess.CalledProcessError as exception:
            runOutput = exception.output
            # oops, error. let's keep this as a failure.

        # except subprocess.TimeoutExpired as exception:
        #     runOutput = exception.output
        #     failureList.append(os.path.basename(replacementFile))

        # if there's a cleanup option, execute it. the results will be ignored because we don't want our process
        #  to be interrupted if there's nothing to clean up.
        if options.cleanUp != "***dummy***":
            subprocess.call(options.cleanUp.split(","), cwd=buildDir)
            if separateTestSuite:
                subprocess.call(options.cleanUp.split(","), cwd=testDir)

        # find path to write runOutput to
        targetTextOutputFile = os.path.splitext(replacementFile)[0] + ".txt"

        with TimeIt.time_context('MutatorWrapper', 'writeMutantBuildOutput'):
            # writing the build output to disk.
            with open(targetTextOutputFile, 'w') as content_file:
                content_file.write(runOutput)

        return {'key': key,
                'success': success,
                'replacementFile': replacementFile}
    finally:
        unlink_nfs()
        if original_f is not False:
            with open(os.path.join(options.sourcePath, key), 'r+') as f:
                f.seek(0)
                f.write(original_f)
                f.truncate()
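
Stripped of the option handling, process_mutant above reduces to a swap/build/restore pattern. A minimal sketch with hypothetical names (run_with_mutant is not part of the original module):

import subprocess

def run_with_mutant(original_path, mutant_path, build_cmd, cwd, timeout):
    # Hypothetical distillation of process_mutant: swap the mutant in,
    # run the build, and always restore the original source.
    with open(original_path) as f:
        original_source = f.read()
    try:
        with open(mutant_path) as f:
            mutant_source = f.read()
        with open(original_path, 'w') as f:
            f.write(mutant_source)
        try:
            subprocess.check_call(build_cmd, cwd=cwd, timeout=timeout)
            return True   # build and tests passed: mutant survived
        except (subprocess.CalledProcessError, subprocess.TimeoutExpired):
            return False  # non-zero exit or timeout: mutant killed
    finally:
        # Always restore the original source, mirroring the finally
        # block in process_mutant.
        with open(original_path, 'w') as f:
            f.write(original_source)
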
Example #3
 def test_port_to_br_not_found(self, check_output):
     check_output.side_effect = subprocess.CalledProcessError(
         1, 'not found')
     self.assertEqual(ovs.port_to_br('br-lb'), None)
Example #4
 def check_popen_fail(*args, **kwargs):
     check_popen_pass(*args, **kwargs)
     raise subprocess.CalledProcessError(returncode=1, cmd=args[0])
Example #5
def run_pipe(*cmds, **kwargs):
    """
    Run several commands that pipe to each other, in a Python-aware way.

    Args:
        *cmds: any number of inputs of cmds (type list) to pipe together
        **kwargs: The only valid kwargs are output_file and return_output. Other
        arbitrary kwargs are ignored.
            output_file (str): filename to redirected stdout to at end of pipe
            return_output (bool): Whether to return stdout at end of pipe

    Note: 
        output_file and return_output kwargs are mutually exclusive.
        If output_file exists, it will be overwritten.

    Returns:
        Piped command is pretty-printed to stderr.
        output (str) is returned if return_output=True is passed as **kwarg.
        If output_file is passed as kwarg, stdout is redirected to the given file.

    Raises:
        TypeError: if *cmds specified are not of type list
        subprocess.CalledProcessError: if any subprocess in pipe returns exit
            code not 0.

    Examples:
        Usage 1: Pipe multiple commands together and print output to file
            example_cmd1 = ['dx', 'download', 'file-xxxx']
            example_cmd2 = ['gunzip']
            out_f = "somefilename.fasta"
            run_pipe(example_cmd1, example_cmd2, output_file=out_f)

            This function will print and execute the following command:
            'dx download file-xxxx | gunzip > somefilename.fasta'

        Usage 2: Pipe multiple commands together and return output
            example_cmd1 = ['gzip', 'file.txt']
            example_cmd2 = ['dx', 'upload', '-', '--brief']
            file_id = run_pipe(example_cmd1, example_cmd2, return_output=True)

            This function will print and execute the following command:
            'gzip file.txt | dx upload - --brief '
            and return the output.

        Usage 3: Pipe a single command with output to file
            run_pipe(['echo', 'hello world'], output_file='test2.txt')
            Note: This calls the run_cmd function instead of run_pipe.

        Usage 4: A command failing mid-pipe should raise CalledProcessError
            >>> run_pipe(['echo', 'hi:bye'], ['grep', 'blah'], ['cut', '-d', ':', '-f', '1'])
            Traceback (most recent call last):
                  ...
            CalledProcessError: Command '['grep', 'blah']' returned non-zero exit status 1
    """
    # parse kwargs
    output_file = kwargs.get('output_file')
    return_output = kwargs.get('return_output', False)

    # copy provided commands into a list instead of a generator
    cmds = [c for c in cmds]

    # check that all cmds are lists and not strings
    if any(not isinstance(cmd, list) for cmd in cmds):
        raise TypeError('Commands in pipe must be of type list')

    num_cmd = len(cmds)
    # if only one command is provided, use run_cmd instead
    if num_cmd == 1:
        return run_cmd(cmds[0], return_output, output_file)

    # from here we can assume multiple commands are provided
    # pretty print the provided command
    cmd_str = list2cmdlines_pipe(*cmds)
    if output_file is not None:
        cmd_str += ' > {0}'.format(output_file)
    print(cmd_str, file=sys.stderr)

    # if multiple commands are provided, we need to pipe them together
    # initialize the first command
    cmd_process = []
    init_process = subprocess.Popen(cmds[0],
                                    stdout=subprocess.PIPE,
                                    stderr=sys.stderr)
    cmd_process.append(init_process)
    # run all commands except the last one by piping
    for i in range(1, num_cmd - 1):
        # start the next command
        prev_process = cmd_process[i - 1]
        process = subprocess.Popen(cmds[i],
                                   stdin=prev_process.stdout,
                                   stdout=subprocess.PIPE,
                                   stderr=sys.stderr)
        # close the previous process's stdout in the parent so it receives
        # SIGPIPE if a downstream process exits before it is done
        cmd_process.append(process)
        prev_process.stdout.close()

    # run the last command
    output = None
    prev_process = cmd_process[-1]
    if output_file is not None:
        with open(output_file, "w") as fopen:
            subprocess.check_call(cmds[-1],
                                  stdin=prev_process.stdout,
                                  stdout=fopen,
                                  stderr=sys.stderr)
            prev_process.stdout.close()
    elif return_output is True:
        output = subprocess.check_output(cmds[-1],
                                         stdin=prev_process.stdout,
                                         stderr=sys.stderr)
        prev_process.stdout.close()
        output = output.strip()
    else:
        subprocess.check_call(cmds[-1],
                              stdin=prev_process.stdout,
                              stderr=sys.stderr)
        prev_process.stdout.close()

    # check that all intermediate commands finished successfully
    for i in range(len(cmd_process)):
        cmd = cmds[i]
        curr_proc = cmd_process[i]
        # Polling is needed first in order to set the returncode attr
        # run the command in an error aware way
        curr_proc.poll()
        returncode = curr_proc.returncode
        if returncode != 0 and returncode is not None:
            raise subprocess.CalledProcessError(returncode, cmd, output=None)

    # if return_output is True, then the variable output contains the subprocess
    # output. If return_output is False, the variable output should be set to 'None',
    # which is the same behavior as a simple `return` statement.
    return output
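
run_pipe relies on a list2cmdlines_pipe helper that is not shown in this excerpt. A plausible minimal reconstruction, assuming it only renders the commands for pretty-printing:

import subprocess

def list2cmdlines_pipe(*cmds):
    # Hypothetical reconstruction: render each argument list as a command
    # string and join them with the shell pipe operator, for display only.
    return ' | '.join(subprocess.list2cmdline(cmd) for cmd in cmds)
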
Example #6
 def raise_error(*args, **kwargs):
     raise subprocess.CalledProcessError(returncode=100, cmd="")
Example #7
 def testQueryFailureRaised(self):
   """Tests that a query failure is properly surfaced."""
   self._process_mock.side_effect = subprocess.CalledProcessError(1, 'cmd')
   with self.assertRaises(subprocess.CalledProcessError):
     queries.QueryBuilder('builder', 'ci', 'pixel', 'project', 5)
Example #8
    def test_plug_VIP(self, mock_pyroute2, mock_check_output, mock_ifaddress,
                      mock_interfaces):

        subnet_info = {
            'subnet_cidr': '10.0.0.0/24',
            'gateway': '10.0.0.1',
            'mac_address': '123'
        }

        # malformed IP
        rv = self.app.post('/' + api_server.VERSION + '/plug/vip/error',
                           data=json.dumps(subnet_info),
                           content_type='application/json')
        self.assertEqual(400, rv.status_code)

        # No subnet info
        rv = self.app.post('/' + api_server.VERSION + '/plug/vip/error')
        self.assertEqual(400, rv.status_code)

        # No interface at all
        mock_interfaces.side_effect = [[]]
        rv = self.app.post('/' + api_server.VERSION + "/plug/vip/203.0.113.2",
                           content_type='application/json',
                           data=json.dumps(subnet_info))
        self.assertEqual(404, rv.status_code)
        self.assertEqual(dict(details="No suitable network interface found"),
                         json.loads(rv.data.decode('utf-8')))

        # Two interfaces down
        mock_interfaces.side_effect = [['blah', 'blah2']]
        mock_ifaddress.side_effect = [['blabla'], ['blabla']]
        rv = self.app.post('/' + api_server.VERSION + "/plug/vip/203.0.113.2",
                           content_type='application/json',
                           data=json.dumps(subnet_info))
        self.assertEqual(404, rv.status_code)
        self.assertEqual(dict(details="No suitable network interface found"),
                         json.loads(rv.data.decode('utf-8')))

        # One Interface down, Happy Path
        mock_interfaces.side_effect = [['blah']]
        mock_ifaddress.side_effect = [[netifaces.AF_LINK], {
            netifaces.AF_LINK: [{
                'addr': '123'
            }]
        }]
        m = mock.mock_open()
        with mock.patch('%s.open' % BUILTINS, m, create=True):
            rv = self.app.post('/' + api_server.VERSION +
                               "/plug/vip/203.0.113.2",
                               content_type='application/json',
                               data=json.dumps(subnet_info))
            self.assertEqual(202, rv.status_code)
            m.assert_called_once_with('/etc/network/interfaces.d/blah.cfg',
                                      'w')
            handle = m()
            handle.write.assert_called_once_with(
                '\n# Generated by Octavia agent\n'
                'auto blah blah:0\n'
                'iface blah inet dhcp\n'
                'iface blah:0 inet static\n'
                'address 203.0.113.2\n'
                'broadcast 203.0.113.255\n'
                'netmask 255.255.255.0')
            mock_check_output.assert_called_with(['ifup', 'blah:0'], stderr=-2)

        mock_interfaces.side_effect = [['blah']]
        mock_ifaddress.side_effect = [[netifaces.AF_LINK], {
            netifaces.AF_LINK: [{
                'addr': '123'
            }]
        }]
        mock_check_output.side_effect = [
            'unplug1',
            subprocess.CalledProcessError(7, 'test', RANDOM_ERROR),
            subprocess.CalledProcessError(7, 'test', RANDOM_ERROR)
        ]
        m = mock.mock_open()
        with mock.patch('%s.open' % BUILTINS, m, create=True):
            rv = self.app.post('/' + api_server.VERSION +
                               "/plug/vip/203.0.113.2",
                               content_type='application/json',
                               data=json.dumps(subnet_info))
            self.assertEqual(500, rv.status_code)
            self.assertEqual(
                {
                    'details': RANDOM_ERROR,
                    'message': 'Error plugging VIP'
                }, json.loads(rv.data.decode('utf-8')))
Example #9
def execute(*command, **kwargs):
    """Helper method to shell out and execute a command through subprocess.
    :param attempts:        How many times to retry running the command.
    :param binary:          On Python 3, return stdout and stderr as bytes if
                            binary is True, as Unicode otherwise.
    :param check_exit_code: Single bool, int, or list of allowed exit
                            codes.  Defaults to [0].  Raise
                            :class:`CalledProcessError` unless
                            the program exits with one of these codes.
    :param command:         The command passed to the subprocess.Popen.
    :param cwd:             Set the current working directory
    :param env_variables:   Environment variables and their values that
                            will be set for the process.
    :param retry_interval:  Interval between execute attempts, in seconds
    :param shell:           whether or not there should be a shell used to
                            execute this command.
    :raises:                :class:`subprocess.CalledProcessError`
    """
    # pylint: disable=too-many-locals

    attempts = kwargs.pop("attempts", ATTEMPTS)
    binary = kwargs.pop('binary', False)
    check_exit_code = kwargs.pop('check_exit_code', [0])
    cwd = kwargs.pop('cwd', None)
    env_variables = kwargs.pop("env_variables", None)
    retry_interval = kwargs.pop("retry_interval", RETRY_INTERVAL)
    shell = kwargs.pop("shell", False)

    if cwd and not os.path.isdir(cwd):
        print("[w] Invalid value for cwd: {cwd}".format(cwd=cwd))
        cwd = None

    command = [str(argument) for argument in command]
    ignore_exit_code = False

    if isinstance(check_exit_code, bool):
        ignore_exit_code = not check_exit_code
        check_exit_code = [0]
    elif isinstance(check_exit_code, int):
        check_exit_code = [check_exit_code]

    while attempts > 0:
        attempts = attempts - 1
        try:
            process = subprocess.Popen(command,
                                       stdin=subprocess.PIPE,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE,
                                       shell=shell,
                                       cwd=cwd,
                                       env=env_variables)
            result = process.communicate()
            return_code = process.returncode

            if six.PY3 and not binary and result is not None:
                # pylint: disable=no-member

                # Decode from the locale using the surrogateescape error
                # handler (decoding cannot fail)
                (stdout, stderr) = result
                stdout = os.fsdecode(stdout)
                stderr = os.fsdecode(stderr)
            else:
                stdout, stderr = result

            if not ignore_exit_code and return_code not in check_exit_code:
                raise subprocess.CalledProcessError(returncode=return_code,
                                                    cmd=command,
                                                    output=(stdout, stderr))
            else:
                return (stdout, stderr)
        except subprocess.CalledProcessError:
            if attempts:
                time.sleep(retry_interval)
            else:
                raise
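
A usage sketch for execute (the command shown here is arbitrary, not from the original module): retry up to three times, one second apart, and accept exit codes 0 and 1 as success.

# execute returns a (stdout, stderr) tuple on success.
stdout, stderr = execute("git", "fetch", "--all",
                         attempts=3,
                         retry_interval=1,
                         check_exit_code=[0, 1])
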
Example #10
    def test_haproxy(self, mock_remove, mock_subprocess, mock_rename,
                     mock_makedirs, mock_exists):
        mock_exists.return_value = True
        m = mock.mock_open()

        # happy case upstart file exists
        with mock.patch('%s.open' % BUILTINS, m, create=True):
            rv = self.app.put('/' + api_server.VERSION +
                              '/listeners/123/haproxy',
                              data='test')
            self.assertEqual(202, rv.status_code)
            m.assert_called_once_with('/var/lib/octavia/123/haproxy.cfg.new',
                                      'w')
            handle = m()
            handle.write.assert_called_once_with(six.b('test'))
            mock_subprocess.assert_called_once_with(
                "haproxy -c -f /var/lib/octavia/123/haproxy.cfg.new".split(),
                stderr=-2)
            mock_rename.assert_called_once_with(
                '/var/lib/octavia/123/haproxy.cfg.new',
                '/var/lib/octavia/123/haproxy.cfg')

        # exception writing
        m = mock.Mock()
        m.side_effect = Exception()  # open crashes
        with mock.patch('%s.open' % BUILTINS, m, create=True):
            rv = self.app.put('/' + api_server.VERSION +
                              '/listeners/123/haproxy',
                              data='test')
            self.assertEqual(500, rv.status_code)

        # check if files get created
        mock_exists.return_value = False
        m = mock.mock_open()

        # happy case, upstart file does not exist yet and gets created
        with mock.patch('%s.open' % BUILTINS, m, create=True):
            rv = self.app.put('/' + api_server.VERSION +
                              '/listeners/123/haproxy',
                              data='test')
            self.assertEqual(202, rv.status_code)
            m.assert_any_call('/var/lib/octavia/123/haproxy.cfg.new', 'w')
            m.assert_any_call(util.UPSTART_DIR + '/haproxy-123.conf', 'w')
            handle = m()
            handle.write.assert_any_call(six.b('test'))
            # skip the template stuff
            mock_makedirs.assert_called_with('/var/lib/octavia/123')

        # unhappy case haproxy check fails
        mock_exists.return_value = True
        mock_subprocess.side_effect = [
            subprocess.CalledProcessError(7, 'test', RANDOM_ERROR)
        ]
        with mock.patch('%s.open' % BUILTINS, m, create=True):
            rv = self.app.put('/' + api_server.VERSION +
                              '/listeners/123/haproxy',
                              data='test')
            self.assertEqual(400, rv.status_code)
            self.assertEqual(
                {
                    'message': 'Invalid request',
                    u'details': u'random error'
                }, json.loads(rv.data.decode('utf-8')))
            m.assert_called_with('/var/lib/octavia/123/haproxy.cfg.new', 'w')
            handle = m()
            handle.write.assert_called_with(six.b('test'))
            mock_subprocess.assert_called_with(
                "haproxy -c -f /var/lib/octavia/123/haproxy.cfg.new".split(),
                stderr=-2)
            mock_remove.assert_called_once_with(
                '/var/lib/octavia/123/haproxy.cfg.new')
Example #11
    def test_plug_network(self, mock_check_output, mock_ifaddress,
                          mock_interfaces):
        port_info = {'mac_address': '123'}

        # No interface at all
        mock_interfaces.side_effect = [[]]
        rv = self.app.post('/' + api_server.VERSION + "/plug/network",
                           content_type='application/json',
                           data=json.dumps(port_info))
        self.assertEqual(404, rv.status_code)
        self.assertEqual(dict(details="No suitable network interface found"),
                         json.loads(rv.data.decode('utf-8')))

        # No interface in a down state
        mock_interfaces.side_effect = [['blah']]
        mock_ifaddress.side_effect = [[netifaces.AF_INET]]
        rv = self.app.post('/' + api_server.VERSION + "/plug/network",
                           content_type='application/json',
                           data=json.dumps(port_info))
        self.assertEqual(404, rv.status_code)
        self.assertEqual(dict(details="No suitable network interface found"),
                         json.loads(rv.data.decode('utf-8')))
        mock_ifaddress.assert_called_once_with('blah')

        # One Interface down, Happy Path
        mock_interfaces.side_effect = [['blah']]
        mock_ifaddress.side_effect = [[netifaces.AF_LINK], {
            netifaces.AF_LINK: [{
                'addr': '123'
            }]
        }]
        m = mock.mock_open()
        with mock.patch('%s.open' % BUILTINS, m, create=True):
            rv = self.app.post('/' + api_server.VERSION + "/plug/network",
                               content_type='application/json',
                               data=json.dumps(port_info))
            self.assertEqual(202, rv.status_code)
            m.assert_called_once_with('/etc/network/interfaces.d/blah.cfg',
                                      'w')
            handle = m()
            handle.write.assert_called_once_with(
                '\n# Generated by Octavia agent\n'
                'auto blah blah:0\n'
                'iface blah inet dhcp')
            mock_check_output.assert_called_with(['ifup', 'blah'], stderr=-2)

        # same as above but ifup fails
        mock_interfaces.side_effect = [['blah']]
        mock_ifaddress.side_effect = [[netifaces.AF_LINK], {
            netifaces.AF_LINK: [{
                'addr': '123'
            }]
        }]
        mock_check_output.side_effect = [
            subprocess.CalledProcessError(7, 'test', RANDOM_ERROR),
            subprocess.CalledProcessError(7, 'test', RANDOM_ERROR)
        ]
        m = mock.mock_open()
        with mock.patch('%s.open' % BUILTINS, m, create=True):
            rv = self.app.post('/' + api_server.VERSION + "/plug/network",
                               content_type='application/json',
                               data=json.dumps(port_info))
            self.assertEqual(500, rv.status_code)
            self.assertEqual(
                {
                    'details': RANDOM_ERROR,
                    'message': 'Error plugging network'
                }, json.loads(rv.data.decode('utf-8')))
Example #12
    shutil.copytree(html_dir, target_dir, ignore=shutil.ignore_patterns('.*'))
    if travis:
        check_call(['git', 'config', '--global', 'user.name', 'amplbot'])
        check_call(['git', 'config', '--global', 'user.email', '*****@*****.**'])
    # Push docs to GitHub pages.
    check_call(['git', 'add', '--all'], cwd=repo)
    if call(['git', 'diff-index', '--quiet', 'HEAD'], cwd=repo):
        check_call(['git', 'commit', '-m', 'Update documentation'], cwd=repo)
        cmd = 'git push'
        if travis:
            cmd += ' https://[email protected]/fmtlib/fmtlib.github.io.git master'
        p = Popen(cmd, shell=True, stdout=PIPE, stderr=STDOUT, cwd=repo)
        # Print the output without the key.
        print(p.communicate()[0].replace(os.environ['KEY'], '$KEY'))
        if p.returncode != 0:
            raise subprocess.CalledProcessError(p.returncode, cmd)
    exit(0)

standard = os.environ['STANDARD']
install_dir = os.path.join(fmt_dir, "_install")
build_dir = os.path.join(fmt_dir, "_build")
test_build_dir = os.path.join(fmt_dir, "_build_test")

# Configure the library.
makedirs_if_not_exist(build_dir)
cmake_flags = [
    '-DCMAKE_INSTALL_PREFIX=' + install_dir, '-DCMAKE_BUILD_TYPE=' + build,
    '-DCMAKE_CXX_STANDARD=' + standard
]

# Make sure the fuzzers still compile.
Example #13
def main():
    # If ARGSFILE_PATH is set this script is being invoked by rustc, which
    # thinks we are a linker. All we do now is write our argv to the specified
    # file and exit. Further processing is done by our grandparent process,
    # also this script but invoked by gn.
    argsfile_path = os.getenv("ARGSFILE_PATH")
    if argsfile_path is not None:
        return capture_args(argsfile_path)

    # Prepare the environment for rustc.
    rustc_env = os.environ.copy()

    # We'll capture the arguments rustc passes to the linker by telling it
    # that this script *is* the linker.
    # On Posix systems, this file is directly executable thanks to its shebang.
    # On Windows, we use a .cmd wrapper file.
    if os.name == "nt":
        rustc_linker_base, rustc_linker_ext = path.splitext(__file__)
        rustc_linker = rustc_linker_base + ".cmd"
    else:
        rustc_linker = __file__

    # Make sure that when rustc invokes this script, it uses the same version
    # of the Python interpreter as we're currently using. On Posix systems this
    # is done by making the Python directory the first element of PATH.
    # On Windows, the wrapper script uses the PYTHON_EXE environment variable.
    if os.name == "nt":
        rustc_env["PYTHON_EXE"] = sys.executable
    else:
        python_dir = path.dirname(sys.executable)
        rustc_env["PATH"] = python_dir + path.pathsep + os.environ["PATH"]

    # Create a temporary file to write captured Rust linker arguments to.
    # Unfortunately we can't use tempfile.NamedTemporaryFile here, because the
    # file it creates can't be opened in two processes at the same time.
    argsfile_fd, argsfile_path = tempfile.mkstemp()
    rustc_env["ARGSFILE_PATH"] = argsfile_path

    try:
        # Build the rustc command line.
        #   * `-Clinker=` tells rustc to use our fake linker.
        #   * `-Csave-temps` prevents rustc from deleting object files after
        #     linking. We need to preserve the file `xx.crate.allocator.rcgu.o`.
        rustc_cmd = [
            "rustc",
            "-Clinker=" + rustc_linker,
            "-Csave-temps",
        ] + sys.argv[1:]

        # Spawn the rust compiler.
        rustc_proc = subprocess.Popen(rustc_cmd,
                                      env=rustc_env,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.STDOUT)

        # Forward rustc's output to stderr.
        for line in rustc_proc.stdout:
            # Suppress the warning:
            #   `-C save-temps` might not produce all requested temporary
            #   products when incremental compilation is enabled.
            # It's pointless, because incremental compilation is disabled.
            if re.match(r"^warning:.*save-temps.*incremental compilation",
                        line):
                continue
            # Also, do not write completely blank lines to stderr.
            if line.strip() == "":
                continue
            sys.stderr.write(line)

        # The rustc process should return zero. If not, raise an exception.
        rustc_retcode = rustc_proc.wait()
        if rustc_retcode != 0:
            raise subprocess.CalledProcessError(rustc_retcode, rustc_cmd)

        # Read captured linker arguments from argsfile.
        argsfile_size = os.fstat(argsfile_fd).st_size
        argsfile_content = os.read(argsfile_fd, argsfile_size)
        args = argsfile_content.split("\n")

    finally:
        # Close and delete the temporary file.
        os.close(argsfile_fd)
        os.unlink(argsfile_path)

    # From the list of captured linker arguments, build the list of ldflags that
    # we actually need.
    ldflags = []
    next_arg_is_flag_value = False
    for arg in args:
        # Note that within the following if/elif blocks, `pass` means that
        # the captured argument gets included in `ldflags`. The final `else`
        # clause filters out unrecognized/unwanted flags.
        if next_arg_is_flag_value:
            # We're looking at a value that follows certain parametric flags,
            # e.g. the path in '-L <path>'.
            next_arg_is_flag_value = False
        elif arg.endswith(".rlib"):
            # Built-in Rust library, e.g. `libstd-8524caae8408aac2.rlib`.
            pass
        elif arg.endswith(".crate.allocator.rcgu.o"):
            # This file is needed because it contains certain allocator
            # related symbols (e.g. `__rust_alloc`, `__rust_oom`).
            # The Rust compiler normally generates this file just before
            # linking an executable. We pass `-Csave-temps` to rustc so it
            # doesn't delete the file when it's done linking.
            pass
        elif arg.endswith(".lib") and not arg.startswith("msvcrt"):
            # Include most Windows static/import libraries (e.g. `ws2_32.lib`).
            # However we ignore Rust's choice of C runtime (`msvcrt*.lib`).
            # Rust insists on always using the release "flavor", even in debug
            # mode, which causes conflicts with other libraries we link with.
            pass
        elif arg.upper().startswith("/LIBPATH:"):
            # `/LIBPATH:<path>`: Linker search path (Microsoft style).
            pass
        elif arg == "-l" or arg == "-L":
            # `-l <name>`: Link with library (GCC style).
            # `-L <path>`: Linker search path (GCC style).
            next_arg_is_flag_value = True  # Ensure flag argument is captured.
        elif arg == "-Wl,--start-group" or arg == "-Wl,--end-group":
            # Start or end of an archive group (GCC style).
            pass
        else:
            # Not a flag we're interested in -- don't add it to ldflags.
            continue

        ldflags += [arg]

    # Write the filtered ldflags to stdout, separated by newline characters.
    sys.stdout.write("\n".join(ldflags))
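
capture_args is called at the top of main() but not shown in this excerpt. A minimal sketch of what it presumably does, given that main() later splits the argsfile contents on newlines (hypothetical reconstruction):

import sys

def capture_args(argsfile_path):
    # Append the linker arguments rustc passed us to the argsfile,
    # one argument per line, then report success back to rustc.
    with open(argsfile_path, "a") as argsfile:
        argsfile.write("\n".join(sys.argv[1:]) + "\n")
    return 0
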
Example #14
 def test_build_check_output_raises(self, mock_check_call, mock_subprocess):
     mock_subprocess.side_effect = subprocess.CalledProcessError(1, 'cmd')
     assert_raises(subprocess.CalledProcessError, build, 'build_root', None)
Example #15
def _check_call_with_sudo_on_chromeos(command):
    p = _popen_with_sudo_on_chromeos(command)
    retcode = p.wait()
    if retcode:
        raise subprocess.CalledProcessError(retcode, ' '.join(command))
Example #16
def start(cmd, output='json', interval=1):
    '''
    Parse stdout of a command and generate an event

    The script engine will scrape stdout of the
    given script and generate an event based on the
    presence of the 'tag' key and its value.

    If there is a data obj available, that will also
    be fired along with the tag.

    Example:

        Given the following json output from a script:

            { "tag" : "lots/of/tacos",
              "data" : { "toppings" : "cilantro" }
            }

        This will fire the event 'lots/of/tacos'
        on the event bus with the data obj as is.

    :param cmd: The command to execute
    :param output: How to deserialize stdout of the script
    :param interval: How often to execute the script.
    '''
    try:
        cmd = shlex.split(cmd)
    except AttributeError:
        cmd = shlex.split(six.text_type(cmd))
    log.debug("script engine using command %s", cmd)

    serializer = _get_serializer(output)

    if __opts__.get('__role') == 'master':
        fire_master = salt.utils.event.get_master_event(
            __opts__, __opts__['sock_dir']).fire_event
    else:
        fire_master = __salt__['event.send']

    while True:

        try:
            proc = subprocess.Popen(cmd,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)

            log.debug("Starting script with pid %d", proc.pid)

            for raw_event in _read_stdout(proc):
                log.debug(raw_event)

                event = serializer.deserialize(raw_event)
                tag = event.get('tag', None)
                data = event.get('data', {})

                if data and 'id' not in data:
                    data['id'] = __opts__['id']

                if tag:
                    log.info("script engine firing event with tag %s", tag)
                    fire_master(tag=tag, data=data)

            log.debug("Closing script with pid %d", proc.pid)
            proc.stdout.close()
            rc = proc.wait()
            if rc:
                raise subprocess.CalledProcessError(rc, cmd)

        except subprocess.CalledProcessError as e:
            log.error(e)
        finally:
            if proc.poll() is None:
                proc.terminate()

        time.sleep(interval)
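
A usage sketch: any script that prints the JSON shape from the docstring will do; the path here is hypothetical.

# Run the script every 10 seconds and fire an event per JSON object
# it prints (e.g. {"tag": "lots/of/tacos", "data": {...}}).
start('/usr/local/bin/taco-status.py', output='json', interval=10)
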
Example #17
def _PackageApk(options, build):
    """Compile and link resources with aapt2.

    Args:
      options: The command-line options.
      build: BuildContext object.
    Returns:
      The manifest package name for the APK.
    """
    dep_subdirs = resource_utils.ExtractDeps(options.dependencies_res_zips,
                                             build.deps_dir)
    path_info = resource_utils.ResourceInfoFile()
    _DuplicateZhResources(dep_subdirs, path_info)
    _RenameLocaleResourceDirs(dep_subdirs, path_info)

    _RemoveUnwantedLocalizedStrings(dep_subdirs, options)

    # Create a function that selects which resource files should be packaged
    # into the final output. Any file that does not pass the predicate will
    # be removed below.
    keep_predicate = _CreateKeepPredicate(
        options.resource_blacklist_regex,
        options.resource_blacklist_exceptions)
    png_paths = []
    for directory in dep_subdirs:
        for f in _IterFiles(directory):
            if not keep_predicate(f):
                os.remove(f)
            elif f.endswith('.png'):
                png_paths.append((f, directory))
    if png_paths and options.png_to_webp:
        _ConvertToWebP(options.webp_binary, png_paths, path_info)
    for directory in dep_subdirs:
        _MoveImagesToNonMdpiFolders(directory, path_info)
        _RemoveImageExtensions(directory, path_info)

    link_command = [
        options.aapt2_path,
        'link',
        '--auto-add-overlay',
        '--no-version-vectors',
        # Set SDK versions in case they are not set in the Android manifest.
        '--min-sdk-version',
        options.min_sdk_version,
        '--target-sdk-version',
        options.target_sdk_version,
    ]

    for j in options.include_resources:
        link_command += ['-I', j]
    if options.version_code:
        link_command += ['--version-code', options.version_code]
    if options.version_name:
        link_command += ['--version-name', options.version_name]
    if options.proguard_file:
        link_command += ['--proguard', build.proguard_path]
        link_command += ['--proguard-minimal-keep-rules']
    if options.proguard_file_main_dex:
        link_command += ['--proguard-main-dex', build.proguard_main_dex_path]
    if options.emit_ids_out:
        link_command += ['--emit-ids', build.emit_ids_path]
    if options.r_text_in:
        shutil.copyfile(options.r_text_in, build.r_txt_path)
    else:
        link_command += ['--output-text-symbols', build.r_txt_path]

    # Note: only one of --proto-format, --shared-lib or --app-as-shared-lib
    #       can be used with recent versions of aapt2.
    if options.shared_resources:
        link_command.append('--shared-lib')

    if options.no_xml_namespaces:
        link_command.append('--no-xml-namespaces')

    if options.package_id:
        link_command += [
            '--package-id',
            hex(options.package_id),
            '--allow-reserved-package-id',
        ]

    fixed_manifest, desired_manifest_package_name = _FixManifest(
        options, build.temp_dir)
    if options.rename_manifest_package:
        desired_manifest_package_name = options.rename_manifest_package
    if options.android_manifest_expected:
        _VerifyManifest(fixed_manifest, options.android_manifest_expected,
                        options.android_manifest_normalized,
                        options.android_manifest_expectations_failure_file)

    link_command += [
        '--manifest', fixed_manifest, '--rename-manifest-package',
        desired_manifest_package_name
    ]

    # Creates a .zip with AndroidManifest.xml, resources.arsc, res/*
    # Also creates R.txt
    if options.use_resource_ids_path:
        _CreateStableIdsFile(options.use_resource_ids_path,
                             build.stable_ids_path,
                             desired_manifest_package_name)
        link_command += ['--stable-ids', build.stable_ids_path]

    partials = _CompileDeps(options.aapt2_path, dep_subdirs, build.temp_dir)
    for partial in partials:
        link_command += ['-R', partial]

    # We always create a binary arsc file first, then convert to proto, so flags
    # such as --shared-lib can be supported.
    arsc_path = build.arsc_path
    if arsc_path is None:
        _, arsc_path = tempfile.mkstemp()
    link_command += ['-o', arsc_path]

    link_proc = subprocess.Popen(link_command)

    # Create .res.info file in parallel.
    _CreateResourceInfoFile(path_info, build.info_path,
                            options.dependencies_res_zips)

    exit_code = link_proc.wait()
    if exit_code:
        raise subprocess.CalledProcessError(exit_code, link_command)

    if options.proguard_file and (options.shared_resources
                                  or options.app_as_shared_lib):
        # Make sure the R class associated with the manifest package does not have
        # its onResourcesLoaded method obfuscated or removed, so that the framework
        # can call it in the case where the APK is being loaded as a library.
        with open(build.proguard_path, 'a') as proguard_file:
            keep_rule = '''
                  -keep class {package}.R {{
                    public static void onResourcesLoaded(int);
                  }}
                  '''.format(package=desired_manifest_package_name)
            proguard_file.write(textwrap.dedent(keep_rule))

    build_utils.CheckOutput([
        options.aapt2_path, 'convert', '--output-format', 'proto', '-o',
        build.proto_path, arsc_path
    ])

    if build.arsc_path is None:
        os.remove(arsc_path)

    if options.optimized_proto_path:
        _OptimizeApk(build.optimized_proto_path, options, build.temp_dir,
                     build.proto_path, build.r_txt_path)
    elif options.optimized_arsc_path:
        _OptimizeApk(build.optimized_arsc_path, options, build.temp_dir,
                     build.arsc_path, build.r_txt_path)

    return desired_manifest_package_name
Example #18
def createInjectionFile(hipe_dir,
                        cp,
                        cpinj,
                        injrun,
                        injection_segment,
                        source_file,
                        ipn_gps=None,
                        usertag=None,
                        verbose=False):
    """
  Creates a master injection file containing all injections for this run.
  Also reads the file and returns its contents
  """
    cpinj = copy.deepcopy(cpinj)

    # get the number of injections to be made
    for opt in ['exttrig-inj-start', 'exttrig-inj-stop']:
        value = int(cpinj.get(injrun, opt))
        cpinj.remove_option(injrun, opt)
        if 'start' in opt:
            injStart = value
        else:
            injEnd = value
    seed = hash_n_bits(hipe_dir, 31)
    numberInjections = injEnd - injStart + 1  # e.g., 1 through 5000 inclusive

    # get the jitter parameters
    if cpinj.has_option(injrun, "jitter-skyloc"):
        jitter_sigma_deg = cpinj.getfloat(injrun, "jitter-skyloc")
        cpinj.remove_option(injrun, "jitter-skyloc")
    else:
        jitter_sigma_deg = None

    # check if the specific Fermi systematic error needs to
    # be added to the location jittering
    if cpinj.has_option(injrun, "jitter-skyloc-fermi"):
        jitter_skyloc_fermi = cpinj.getboolean(injrun, "jitter-skyloc-fermi")
        cpinj.remove_option(injrun, "jitter-skyloc-fermi")
    else:
        jitter_skyloc_fermi = False

    # check if we should align the total angular momentum
    if cpinj.has_option(injrun, "align-total-spin"):
        align_total_spin = cpinj.getboolean(injrun, "align-total-spin")
        cpinj.remove_option(injrun, "align-total-spin")
    else:
        align_total_spin = False

    # set all the arguments
    argument = []
    for (opt, value) in cpinj.items(injrun):
        argument.append("--%s %s" % (opt, value))

    # add arguments on times and time-intervals
    interval = abs(injection_segment)
    injInterval = interval / numberInjections
    argument.append(" --gps-start-time %d" % injection_segment[0])
    argument.append(" --gps-end-time %d" % injection_segment[1])
    argument.append(" --time-interval %f" % injInterval)
    argument.append(" --time-step %f" % injInterval)
    argument.append(" --seed %d" % seed)
    argument.append(" --user-tag %s" % usertag)

    # set output file as exttrig-file or IPN file with IPN GPS time
    if ipn_gps:
        argument.append(" --ipn-gps-time %d" % ipn_gps)
    else:
        argument.append(" --exttrig-file %s" % source_file)

    # execute the command
    executable = cp.get("condor", "inspinj")
    arguments = " ".join(argument)
    inspiralutils.make_external_call(executable + " " + arguments,
                                     show_command=verbose)

    # recreate the output filename
    injFile = "HL-INJECTIONS_" + str(seed)
    if usertag is not None:
        injFile += "_" + usertag
    injFile += "-%d-%d.xml" % (injection_segment[0], abs(injection_segment))

    # move it into the GRB directory to avoid clutter
    new_injFile = hipe_dir + "/" + injFile
    os.rename(injFile, new_injFile)

    # jitter the sky locations of the injections
    if jitter_sigma_deg is not None:
        # rename the original, then have ligolw_cbc_jitter_skyloc create a new one
        os.rename(new_injFile, new_injFile + ".prejitter")
        cmd = ["ligolw_cbc_jitter_skyloc"]
        if jitter_skyloc_fermi:
            cmd.append("--apply-fermi-error")
        cmd.extend([
            "--jitter-sigma-deg",
            str(jitter_sigma_deg), "--output-file", new_injFile,
            new_injFile + ".prejitter"
        ])
        if verbose:
            print " ".join(cmd)
        p = subprocess.Popen(cmd,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        out, err = p.communicate()
        if p.returncode != 0:
            raise subprocess.CalledProcessError(
                p.returncode, "%s: %s" % (" ".join(cmd), err))

    # rotate the binary so that total angular momentum has the current inclination
    if align_total_spin:
        # rename the original then have ligolw_cbc_align_total_spin create a new one
        os.rename(new_injFile, new_injFile + ".prealign")
        cmd = [
            "ligolw_cbc_align_total_spin", "--output-file", new_injFile,
            new_injFile + ".prealign"
        ]
        if verbose:
            print " ".join(cmd)
        p = subprocess.Popen(cmd,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        out, err = p.communicate()
        if p.returncode != 0:
            raise subprocess.CalledProcessError(
                p.returncode, "%s: %s" % (" ".join(cmd), err))

    # read in the file and the tables
    doc = utils.load_filename(new_injFile,
                              contenthandler=lsctables.use_in(
                                  ligolw.LIGOLWContentHandler))
    sims = lsctables.SimInspiralTable.get_table(doc)

    return sims, injInterval, numberInjections, new_injFile
Example #19
class BashFeatureTesting(FeatureTestingBase):
    def setUp(self):
        # logging must be disabled, else it calls time.time(),
        # which would break these unit tests.
        logging.disable(logging.CRITICAL)
        with mock.patch('six.moves.builtins.open'):
            self.feature = feature.BashFeature(project_name=self._project_name,
                                               case_name=self._case_name)

    @mock.patch('subprocess.Popen')
    def test_run_no_cmd(self, mock_subproc):
        self.assertEqual(self.feature.run(), testcase.TestCase.EX_RUN_ERROR)
        mock_subproc.assert_not_called()

    @mock.patch('os.path.isdir', return_value=True)
    @mock.patch('subprocess.Popen',
                side_effect=subprocess.CalledProcessError(0, '', ''))
    def test_run_ko1(self, *args):
        with mock.patch('six.moves.builtins.open', mock.mock_open()) as mopen:
            self._test_run(testcase.TestCase.EX_RUN_ERROR)
        mopen.assert_called_once_with(self._output_file, "w")
        args[0].assert_called_once_with(self._cmd,
                                        shell=True,
                                        stderr=mock.ANY,
                                        stdout=mock.ANY)
        args[1].assert_called_once_with(self.feature.res_dir)

    @mock.patch('os.path.isdir', return_value=True)
    @mock.patch('subprocess.Popen')
    def test_run_ko2(self, *args):
        stream = six.BytesIO()
        stream.write(b"foo")
        stream.seek(0)
        attrs = {'return_value.stdout': stream, 'return_value.returncode': 1}
        args[0].configure_mock(**attrs)
        with mock.patch('six.moves.builtins.open', mock.mock_open()) as mopen:
            self._test_run(testcase.TestCase.EX_RUN_ERROR)
        self.assertIn(mock.call(self._output_file, 'w'), mopen.mock_calls)
        self.assertIn(mock.call(self._output_file, 'r'), mopen.mock_calls)
        args[0].assert_called_once_with(self._cmd,
                                        shell=True,
                                        stderr=mock.ANY,
                                        stdout=mock.ANY)
        args[1].assert_called_once_with(self.feature.res_dir)

    @mock.patch('os.path.isdir', return_value=True)
    @mock.patch('subprocess.Popen')
    def test_run1(self, *args):
        stream = six.BytesIO()
        stream.write(b"foo")
        stream.seek(0)
        attrs = {'return_value.stdout': stream, 'return_value.returncode': 0}
        args[0].configure_mock(**attrs)
        with mock.patch('six.moves.builtins.open', mock.mock_open()) as mopen:
            self._test_run(testcase.TestCase.EX_OK)
        self.assertIn(mock.call(self._output_file, 'w'), mopen.mock_calls)
        self.assertIn(mock.call(self._output_file, 'r'), mopen.mock_calls)
        args[0].assert_called_once_with(self._cmd,
                                        shell=True,
                                        stderr=mock.ANY,
                                        stdout=mock.ANY)
        args[1].assert_called_once_with(self.feature.res_dir)

    @mock.patch('os.path.isdir', return_value=True)
    @mock.patch('subprocess.Popen')
    def test_run2(self, *args):
        stream = six.BytesIO()
        stream.write(b"foo")
        stream.seek(0)
        attrs = {'return_value.stdout': stream, 'return_value.returncode': 0}
        args[0].configure_mock(**attrs)
        with mock.patch('six.moves.builtins.open', mock.mock_open()) as mopen:
            self._test_run_console(True, testcase.TestCase.EX_OK)
        self.assertIn(mock.call(self._output_file, 'w'), mopen.mock_calls)
        self.assertIn(mock.call(self._output_file, 'r'), mopen.mock_calls)
        args[0].assert_called_once_with(self._cmd,
                                        shell=True,
                                        stderr=mock.ANY,
                                        stdout=mock.ANY)
        args[1].assert_called_once_with(self.feature.res_dir)

    @mock.patch('os.path.isdir', return_value=True)
    @mock.patch('subprocess.Popen')
    def test_run3(self, *args):
        stream = six.BytesIO()
        stream.write(b"foo")
        stream.seek(0)
        attrs = {'return_value.stdout': stream, 'return_value.returncode': 0}
        args[0].configure_mock(**attrs)
        with mock.patch('six.moves.builtins.open', mock.mock_open()) as mopen:
            self._test_run_console(False, testcase.TestCase.EX_OK)
        self.assertIn(mock.call(self._output_file, 'w'), mopen.mock_calls)
        self.assertIn(mock.call(self._output_file, 'r'), mopen.mock_calls)
        args[0].assert_called_once_with(self._cmd,
                                        shell=True,
                                        stderr=mock.ANY,
                                        stdout=mock.ANY)
        args[1].assert_called_once_with(self.feature.res_dir)

    @mock.patch('os.makedirs')
    @mock.patch('os.path.isdir', return_value=False)
    @mock.patch('subprocess.Popen')
    def test_run4(self, *args):
        stream = six.BytesIO()
        stream.write(b"foo")
        stream.seek(0)
        attrs = {'return_value.stdout': stream, 'return_value.returncode': 0}
        args[0].configure_mock(**attrs)
        with mock.patch('six.moves.builtins.open', mock.mock_open()) as mopen:
            self._test_run_console(False, testcase.TestCase.EX_OK)
        self.assertIn(mock.call(self._output_file, 'w'), mopen.mock_calls)
        self.assertIn(mock.call(self._output_file, 'r'), mopen.mock_calls)
        args[0].assert_called_once_with(self._cmd,
                                        shell=True,
                                        stderr=mock.ANY,
                                        stdout=mock.ANY)
        args[1].assert_called_once_with(self.feature.res_dir)
        args[2].assert_called_once_with(self.feature.res_dir)
Example #20
 def command_ogr2ogr_fail(args, stdout, stderr, encoding):
     raise subprocess.CalledProcessError(1, 2)
Example #21
    print(timestamp())
    # note: `async` became a reserved word in Python 3.7+, so the flag is
    # written as `use_async` here
    print('invoking{}{}: {}'.format(' async' if use_async else '',
                                    ' fail-ok' if fail_ok else '', invocation))
    print('in: ' + os.getcwd())
    try:
        if capture:
            if use_async:
                raise Exception(
                    "can't respect async and capture simultaneously")
            p = subprocess.Popen(invocation,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT,
                                 shell=True)
            p.wait()
            r = (p.returncode, p.stdout.read().decode('utf-8'))
            if r[0]: raise subprocess.CalledProcessError(r[0], invocation)
        else:
            r = (subprocess.Popen if use_async else subprocess.check_call)(
                invocation, shell=True)
    except:
        if fail_ok: r = None
        else: raise
    os.chdir(start)
    return r


def retry(f):
    i = 0
    while True:
        try:
            return f()
Example #22
        cmdline = ['bash', '-c', source + cd + cmd]
        return cmdline


def runFoamCommand(cmdline, case=None):
    """ Run a command in the OpenFOAM environment and wait until finished. Return output.
        Also print output as we go.
        cmdline - The command line to run as a string
              e.g. transformPoints -scale "(0.001 0.001 0.001)"
        case - Case directory or path
    """
    proc = CfdSynchronousFoamProcess()
    exit_code = proc.run(cmdline, case)
    # Reproduce behaviour of failed subprocess run
    if exit_code:
        raise subprocess.CalledProcessError(exit_code, cmdline)
    return proc.output


class CfdSynchronousFoamProcess:
    def __init__(self):
        self.process = CfdConsoleProcess.CfdConsoleProcess(
            stdoutHook=self.readOutput, stderrHook=self.readOutput)
        self.output = ""

    def run(self, cmdline, case=None):
        print("Running ", cmdline)
        self.process.start(makeRunCommand(cmdline, case),
                           env_vars=getRunEnvironment())
        if not self.process.waitForFinished():
            raise Exception("Unable to run command " + cmdline)
Example #23
def run_cmd(cmd, return_output=False, output_file=None):
    """Print the cmd to stderr and execute using a subprocess.

    Args:
        cmd (str or list): Command to be executed using subprocess. Input of 
            type 'list' is recommended. When input is of type 'string',
            command is executed using /bin/bash and shell=True is specified.
        return_output (bool): If command output should be returned.
        output_file (str): Filename that command stdout should be redirected to.

    Note:
        return_output and output_file inputs are mutually exclusive.
        If output_file exists, it will be overwritten.

    Returns:
        Before executing, 'cmd' input is pretty-printed to stderr.
        If return_output is provided, the output string is returned.
        If output_file is provided, stdout is redirected to the specified file
        and the function returns None.

    Raises:
        CalledProcessError may be raised if the command failed to execute.

    Examples:
        run_cmd(['echo', 'hello world'], output_file='test1.txt')

    """
    if isinstance(cmd, list):
        shell = False
        executable = None
        if output_file:
            print_cmd = cmd + ['>', output_file]
        else:
            print_cmd = cmd
        print(subprocess.list2cmdline(print_cmd), file=sys.stderr)
    else:
        shell = True
        executable = '/bin/bash'
        if output_file:
            print_cmd = cmd + ' > ' + output_file
        else:
            print_cmd = cmd
        print(print_cmd, file=sys.stderr)

    # run the command in an error-aware way; universal_newlines=True keeps the
    # output as text, and capturing stderr lets the error path report it
    # (the original computed `executable` but never passed it to Popen)
    process = subprocess.Popen(cmd, shell=shell, executable=executable,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               universal_newlines=True)
    output, err = process.communicate()
    retcode = process.poll()

    # print full output and raise so the traceback reaches the logs
    if retcode:
        sys.stdout.write(output)
        if err:
            sys.stderr.write(err)
        raise subprocess.CalledProcessError(retcode, cmd, output=output)

    if return_output:
        print(output)
        return output.strip()
    elif output_file:
        with open(output_file, 'w') as fopen:
            fopen.write(output)
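
Two brief usage sketches for run_cmd above (the output file name is illustrative):

# list form: captured output is returned as a stripped string
listing = run_cmd(['echo', 'hello world'], return_output=True)

# string form: runs under /bin/bash, stdout redirected to a file
run_cmd('echo hello | tr a-z A-Z', output_file='shout.txt')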
Example #24
0
def check_call_effect(*args, **kwargs):
    if "xz" in args[0]:
        raise subprocess.CalledProcessError(1, args)
Example #25
0
def run(args, **kwargs):
    if args[0:2] == ["wstool", "init"]:
        raise subprocess.CalledProcessError(1, "foo", b"bar", b"baz")
Example #26
0
def check_call_effect(*args, **kwargs):
    if "unsquashfs" not in args[0]:
        raise subprocess.CalledProcessError(1, args)
Example #27
0
def subprocess_check_output_mock(*unused):
    # note: relies on `self` from an enclosing test-class scope in the source
    if self._subprocess_check_output_code != 0:
        raise subprocess.CalledProcessError(None, None)
    return 0
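
Stubs like those in Examples #24-#27 are normally installed as side_effect callables on a patched subprocess function; a minimal self-contained sketch of that pattern, with the patch target and commands illustrative:

import subprocess
import unittest.mock as mock

def check_call_effect(*args, **kwargs):
    if "xz" in args[0]:
        raise subprocess.CalledProcessError(1, args)

with mock.patch("subprocess.check_call", side_effect=check_call_effect):
    subprocess.check_call(["echo", "ok"])  # the stub lets this pass
    try:
        subprocess.check_call(["xz", "archive.tar"])
    except subprocess.CalledProcessError:
        print("simulated xz failure")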
Example #28
0
    def run_tests(self, install_dirs, schedule_type, owner, test_filter,
                  retry: int, no_testpilot) -> None:
        env = self._compute_env(install_dirs)
        ctest = path_search(env, "ctest")
        cmake = path_search(env, "cmake")

        # On Windows, we also need to update $PATH to include the directories that
        # contain runtime library dependencies.  This is not needed on other
        # platforms, since CMake emits RPATH in the binaries so they can find
        # these dependencies at runtime.
        if self.build_opts.is_windows():
            path_entries = self.get_dev_run_extra_path_dirs(install_dirs)
            path = env.get("PATH")
            if path:
                path_entries.insert(0, path)
            env["PATH"] = ";".join(path_entries)

        # Don't use the cmd_prefix when running tests.  This is vcvarsall.bat on
        # Windows.  vcvarsall.bat is only needed for the build, not tests.  It
        # unfortunately fails if invoked with a long PATH environment variable when
        # running the tests.
        use_cmd_prefix = False

        def get_property(test, propname, defval=None):
            """Extracts a named property from a cmake test info json blob.
            The properties look like:
            [{"name": "WORKING_DIRECTORY", "value": "something"}]
            i.e. each entry carries both the name and the value. We assume
            that it is invalid for the same named property to be listed more
            than once.
            """
            props = test.get("properties", [])
            for p in props:
                if p.get("name", None) == propname:
                    return p.get("value", defval)
            return defval

        def list_tests():
            output = subprocess.check_output([ctest, "--show-only=json-v1"],
                                             env=env,
                                             cwd=self.build_dir)
            try:
                data = json.loads(output.decode("utf-8"))
            except ValueError as exc:
                raise Exception(
                    "Failed to decode cmake test info using %s: %s.  Output was: %r"
                    % (ctest, str(exc), output))

            tests = []
            machine_suffix = self.build_opts.host_type.as_tuple_string()
            for test in data["tests"]:
                working_dir = get_property(test, "WORKING_DIRECTORY")
                labels = []
                machine_suffix = self.build_opts.host_type.as_tuple_string()
                labels.append("tpx-fb-test-type=3")
                labels.append("tpx_test_config::buildsystem=getdeps")
                labels.append(
                    "tpx_test_config::platform={}".format(machine_suffix))

                if get_property(test, "DISABLED"):
                    labels.append("disabled")
                command = test["command"]
                if working_dir:
                    command = [cmake, "-E", "chdir", working_dir] + command

                import os  # kept local as in the original; module top is tidier

                tests.append({
                    "type": "custom",
                    "target": "%s-%s-getdeps-%s"
                              % (self.manifest.name, test["name"], machine_suffix),
                    "command": command,
                    "labels": labels,
                    "env": {},
                    "required_paths": [],
                    "contacts": [],
                    "cwd": os.getcwd(),
                })
            return tests

        if schedule_type == "continuous" or schedule_type == "testwarden":
            # for continuous and testwarden runs, disabling retry gives a
            # clearer signal for flaky tests.
            retry = 0

        testpilot = path_search(env, "testpilot")
        tpx = path_search(env, "tpx")
        if (tpx or testpilot) and not no_testpilot:
            buck_test_info = list_tests()
            import os

            buck_test_info_name = os.path.join(self.build_dir,
                                               ".buck-test-info.json")
            with open(buck_test_info_name, "w") as f:
                json.dump(buck_test_info, f)

            env.set("http_proxy", "")
            env.set("https_proxy", "")
            runs = []
            from sys import platform

            if platform == "win32":
                machine_suffix = self.build_opts.host_type.as_tuple_string()
                testpilot_args = [
                    "parexec-testinfra.exe",
                    "C:/tools/testpilot/sc_testpilot.par",
                    # Need to force the repo type, otherwise testpilot on
                    # Windows can get confused (presumably sparse-profile
                    # related)
                    "--force-repo",
                    "fbcode",
                    "--force-repo-root",
                    self.build_opts.fbsource_dir,
                    "--buck-test-info",
                    buck_test_info_name,
                    "--retry=%d" % retry,
                    "-j=%s" % str(self.num_jobs),
                    "--test-config",
                    "platform=%s" % machine_suffix,
                    "buildsystem=getdeps",
                    "--return-nonzero-on-failures",
                ]
            else:
                testpilot_args = [
                    tpx,
                    "--force-local-execution",
                    "--buck-test-info",
                    buck_test_info_name,
                    "--retry=%d" % retry,
                    "-j=%s" % str(self.num_jobs),
                    "--print-long-results",
                ]

            if owner:
                testpilot_args += ["--contacts", owner]

            if tpx and env:
                testpilot_args.append("--env")
                testpilot_args.extend(f"{key}={val}"
                                      for key, val in env.items())

            if test_filter:
                testpilot_args += ["--", test_filter]

            if schedule_type == "continuous":
                runs.append([
                    "--tag-new-tests",
                    "--collection",
                    "oss-continuous",
                    "--purpose",
                    "continuous",
                ])
            elif schedule_type == "testwarden":
                # One run to assess new tests
                runs.append([
                    "--tag-new-tests",
                    "--collection",
                    "oss-new-test-stress",
                    "--stress-runs",
                    "10",
                    "--purpose",
                    "stress-run-new-test",
                ])
                # And another for existing tests
                runs.append([
                    "--tag-new-tests",
                    "--collection",
                    "oss-existing-test-stress",
                    "--stress-runs",
                    "10",
                    "--purpose",
                    "stress-run",
                ])
            else:
                runs.append(["--collection", "oss-diff", "--purpose", "diff"])

            for run in runs:
                self._run_cmd(
                    testpilot_args + run,
                    cwd=self.build_opts.fbcode_builder_dir,
                    env=env,
                    use_cmd_prefix=use_cmd_prefix,
                )
        else:
            args = [ctest, "--output-on-failure", "-j", str(self.num_jobs)]
            if test_filter:
                args += ["-R", test_filter]

            count = 0
            while count <= retry:
                retcode = self._run_cmd(args,
                                        env=env,
                                        use_cmd_prefix=use_cmd_prefix,
                                        allow_fail=True)

                if retcode == 0:
                    break
                if count == 0:
                    # Only add this option from the second run onwards.
                    args += ["--rerun-failed"]
                count += 1
            # pyre-fixme[61]: `retcode` is undefined, or not always defined.
            if retcode != 0:
                # Raise so the except clause in getdeps.main can catch this and
                # exit gracefully; non-testpilot runs then fail through the same
                # path as failed testpilot runs, which may be handy if post-test
                # processing is added later.
                # pyre-fixme[61]: `retcode` is undefined, or not always defined.
                raise subprocess.CalledProcessError(retcode, args)
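
A standalone sketch of the get_property helper from Example #28, run against a hand-made blob whose shape matches what the code actually reads (name and value in the same property dict):

def get_property(test, propname, defval=None):
    # same logic as in Example #28, extracted for illustration
    for p in test.get("properties", []):
        if p.get("name", None) == propname:
            return p.get("value", defval)
    return defval

blob = {"properties": [{"name": "WORKING_DIRECTORY", "value": "/tmp/build"},
                       {"name": "DISABLED", "value": True}]}
print(get_property(blob, "WORKING_DIRECTORY"))    # -> /tmp/build
print(get_property(blob, "TIMEOUT", defval=600))  # -> 600 (default)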
Example #29
0
    def go(self):
        """
        Contains functionality of the vdsmaker
        """
        super(vdsmaker, self).go()
        # **********************************************************************
        # 1. Load data from disk create output files
        args = self.inputs['args']
        self.logger.debug("Loading input-data mapfile: %s" % args[0])
        data = DataMap.load(args[0])

        # Skip items in `data` that have 'skip' set to True
        data.iterator = DataMap.SkipIterator

        # Create output vds names
        vdsnames = [
            os.path.join(self.inputs['directory'],
                         os.path.basename(item.file) + '.vds') for item in data
        ]

        # *********************************************************************
        # 2. Call vdsmaker
        command = "python %s" % (self.__file__.replace('master', 'nodes'))
        jobs = []
        for inp, vdsfile in zip(data, vdsnames):
            jobs.append(
                ComputeJob(inp.host,
                           command,
                           arguments=[
                               inp.file,
                               self.config.get('cluster', 'clusterdesc'),
                               vdsfile, self.inputs['makevds']
                           ]))
        self._schedule_jobs(jobs, max_per_node=self.inputs['nproc'])
        vdsnames = [
            vds for vds, job in zip(vdsnames, jobs)
            if job.results['returncode'] == 0
        ]
        if not vdsnames:
            self.logger.error("All makevds processes failed. Bailing out!")
            return 1

        # *********************************************************************
        # 3. Combine VDS files to produce GDS
        failure = False
        self.logger.info("Combining VDS files")
        executable = self.inputs['combinevds']
        gvds_out = self.inputs['gvds']
        # Create the gvds directory for output files, needed for combine
        create_directory(os.path.dirname(gvds_out))

        try:
            command = [executable, gvds_out] + vdsnames
            combineproc = subprocess.Popen(command,
                                           close_fds=True,
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.PIPE)
            sout, serr = combineproc.communicate()
            log_process_output(executable, sout, serr, self.logger)
            if combineproc.returncode != 0:
                raise subprocess.CalledProcessError(combineproc.returncode,
                                                    command)
            self.outputs['gvds'] = gvds_out
            self.logger.info("Wrote combined VDS file: %s" % gvds_out)
        except subprocess.CalledProcessError as cpe:
            self.logger.exception("combinevds failed with status %d: %s" %
                                  (cpe.returncode, serr))
            failure = True
    def _exec(self,
              timeout=None,
              env=None,
              update_env=None,
              stdin=None,
              cwd=None,
              raise_on_error=False):

        self._prep()

        if update_env:
            env = env if env is not None else os.environ.copy()
            env.update(self._env)
            env.update(update_env)

        proc = subprocess.Popen(self._cmd,
                                env=env,
                                stdin=stdin,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                universal_newlines=True,
                                bufsize=0,
                                cwd=cwd,
                                preexec_fn=os.setsid)

        start_time = time.time()

        logger.debug('Running command "{cmd}"'.format(cmd=' '.join(self._cmd)))
        try:
            self.stdout, self.stderr = proc.communicate(timeout=timeout)
            self.rc = proc.returncode

        except subprocess.TimeoutExpired:
            # kill the whole process group - the process and its children
            os.killpg(os.getpgid(proc.pid), signal.SIGKILL)

            self.stdout, self.stderr = proc.communicate()
            self.rc = 1
            self.expired = True

            logger.error(
                'Command "{cmd}" timed out after {t} seconds: {stderr}'.format(
                    cmd=' '.join(self._cmd), t=timeout, stderr=self.stderr))

        finally:
            self.duration = (time.time() - start_time)
            self._cleanup()

            if not self.rc:
                # success
                logger.debug('Command {cmd} succeeded in {t} seconds'.format(
                    cmd=self._cmd, t=self.duration))
            else:
                # failure: only log the non-zero return code here, since
                # timeout errors are already logged in the except branch above
                if not self.expired:
                    logger.error(
                        'Command "{cmd}" returned {rc}: {stderr}'.format(
                            cmd=' '.join(self._cmd),
                            rc=self.rc,
                            stderr=self.stderr))

                if raise_on_error:
                    raise subprocess.CalledProcessError(
                        self.rc, self._cmd, self.stderr)
                return False

        return True
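
The timeout path above works because preexec_fn=os.setsid puts the child in its own process group, so os.killpg can take down the command and any children it spawned. A condensed, standalone sketch of that pattern (POSIX-only; the sleep command is illustrative):

import os
import signal
import subprocess

proc = subprocess.Popen(["sleep", "60"],
                        preexec_fn=os.setsid)  # child gets its own process group
try:
    proc.communicate(timeout=2)
except subprocess.TimeoutExpired:
    # kill the whole group, children included, then reap the process
    os.killpg(os.getpgid(proc.pid), signal.SIGKILL)
    proc.communicate()
    print("command timed out and was killed")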