Example No. 1
def run_python_until_end(*args, **env_vars):
    env_required = interpreter_requires_environment()
    if '__isolated' in env_vars:
        isolated = env_vars.pop('__isolated')
    else:
        isolated = not env_vars and not env_required
    cmd_line = [sys.executable, '-X', 'faulthandler']
    if isolated:
        cmd_line.append('-I')
    elif not env_vars and not env_required:
        cmd_line.append('-E')
    if env_vars.pop('__cleanenv', None):
        env = {}
        if sys.platform == 'win32':
            env['SYSTEMROOT'] = os.environ['SYSTEMROOT']
    else:
        env = os.environ.copy()
    if 'TERM' not in env_vars:
        env['TERM'] = ''
    env.update(env_vars)
    cmd_line.extend(args)
    proc = subprocess.Popen(cmd_line,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            env=env)
    with proc:
        try:
            out, err = proc.communicate()
        finally:
            proc.kill()
            subprocess._cleanup()
    rc = proc.returncode
    err = strip_python_stderr(err)
    return _PythonRunResult(rc, out, err), cmd_line
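A minimal usage sketch for a helper like the one above; it assumes run_python_until_end and its companions (interpreter_requires_environment, strip_python_stderr, and the _PythonRunResult namedtuple with fields rc, out, err) live in the same module, as they do in CPython's test.support.script_helper:

# Hedged illustration, not part of the original example.
res, cmd_line = run_python_until_end('-c', 'import sys; print(sys.version_info[0])')
if res.rc != 0:
    raise RuntimeError("child interpreter failed: %r" % (res.err,))
print(res.out.decode('ascii', 'replace').strip())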
Example No. 2
    def analyze(self, **kwargs):
        """Add information about license

        :param file_path: file path

        :returns result: dict of the results of the analysis
        """
        result = {'licenses': []}
        file_path = kwargs['file_path']

        try:
            msg = subprocess.check_output(
                [self.exec_path, '--json-pp', '-', '--license',
                 file_path]).decode("utf-8")
        except subprocess.CalledProcessError as e:
            raise GraalError(cause="Scancode failed at %s, %s" %
                             (file_path, e.output.decode("utf-8")))
        finally:
            subprocess._cleanup()

        licenses_raw = json.loads(msg)
        if 'files' not in licenses_raw:
            return result

        result['licenses'] = licenses_raw['files'][0]['licenses']

        return result
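A hedged driver sketch for an analyzer like the one above; the ScanCode class name and the exec_path value are assumptions made for illustration, and only the analyze(file_path=...) contract and the 'licenses' key come from the snippet itself:

# Hypothetical caller; class name and exec_path are illustrative assumptions.
analyzer = ScanCode(exec_path='/usr/local/bin/scancode')
report = analyzer.analyze(file_path='setup.py')
for entry in report['licenses']:
    print(entry)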
Example No. 3
def run_python_until_end(*args, **env_vars):
    cmd_line = [sys.executable, '-X', 'faulthandler']
    # Need to preserve the original environment, for in-place testing of
    # shared library builds.
    env = os.environ.copy()
    # set TERM='' unless the TERM environment variable is passed explicitly
    # see issues #11390 and #18300
    if 'TERM' not in env_vars:
        env['TERM'] = ''
    # But a special flag that can be set to override -- in this case, the
    # caller is responsible to pass the full environment.
    if env_vars.pop('__cleanenv', None):
        env = {}
    env.update(env_vars)
    cmd_line.extend(args)
    p = subprocess.Popen(cmd_line,
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         env=env)
    try:
        out, err = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode
    err = strip_python_stderr(err)
    return _PythonRunResult(rc, out, err), cmd_line
Example No. 4
def run_python_until_end(*args, **env_vars):
    env_required = _interpreter_requires_environment()
    if '__isolated' in env_vars:
        isolated = env_vars.pop('__isolated')
    else:
        isolated = not env_vars and not env_required
    cmd_line = [sys.executable, '-X', 'faulthandler']
    if isolated:
        # isolated mode: ignore Python environment variables, ignore user
        # site-packages, and don't add the current directory to sys.path
        cmd_line.append('-I')
    elif not env_vars and not env_required:
        # ignore Python environment variables
        cmd_line.append('-E')
    # Need to preserve the original environment, for in-place testing of
    # shared library builds.
    env = os.environ.copy()
    # But a special flag that can be set to override -- in this case, the
    # caller is responsible to pass the full environment.
    if env_vars.pop('__cleanenv', None):
        env = {}
    env.update(env_vars)
    cmd_line.extend(args)
    p = subprocess.Popen(cmd_line, stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         env=env)
    try:
        out, err = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode
    err = strip_python_stderr(err)
    return _PythonRunResult(rc, out, err), cmd_line
Example No. 5
    def analyze(self, **kwargs):
        """Get Jadolint results for a Dockerfile.

        :param file_path: file path
        :param result: dict of the results of the analysis
        """
        results = []
        result = {self.analysis: results}
        file_path = kwargs['file_path']

        if self.analysis == DEPENDENCIES:
            cmd = ['java', '-jar', self.exec_path, file_path, '--deps']
        else:
            cmd = ['java', '-jar', self.exec_path, file_path, '--smells']

        try:
            msg = subprocess.check_output(cmd).decode("utf-8")
        except subprocess.CalledProcessError as e:
            raise GraalError(cause="Jadolint failed at %s, %s" %
                             (file_path, e.output.decode("utf-8")))
        finally:
            subprocess._cleanup()

        results_raw = msg.split('\n')
        for res_raw in results_raw:
            res = res_raw.strip()
            if res:
                results.append(res)

        return result
Example No. 6
    def process(self, the_queue):
        if self.threads < 2:
            worker(self, the_queue)
        else:
            if sys.version_info < (2, 6):
                # work around a race condition in subprocess
                _old_subprocess_cleanup = subprocess._cleanup

                def _cleanup():
                    pass

                subprocess._cleanup = _cleanup

            threads = []

            for i in range(self.threads):
                thread = threading.Thread(target=worker,
                                          args=(self, the_queue))
                thread.start()
                threads.append(thread)
            for thread in threads:
                thread.join()
            if sys.version_info < (2, 6):
                subprocess._cleanup = _old_subprocess_cleanup
                subprocess._cleanup()

        if self.errors:
            logger.error("There have been errors, see messages above.")
            sys.exit(1)
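The "# work around a race condition in subprocess" comment above refers to the pre-Python-2.6 behaviour where Popen.__init__, running in one thread, calls subprocess._cleanup() and may reap a child another thread is still handling. A self-contained sketch of the same save/disable/restore pattern (an illustration, not part of the original project):

import subprocess
from contextlib import contextmanager

@contextmanager
def subprocess_cleanup_disabled():
    # Temporarily make subprocess._cleanup a no-op while worker threads
    # create and wait on their own Popen objects, then restore it and
    # reap anything that finished in the meantime.
    saved_cleanup = subprocess._cleanup
    subprocess._cleanup = lambda: None
    try:
        yield
    finally:
        subprocess._cleanup = saved_cleanup
        subprocess._cleanup()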
Example No. 7
def commandExecutionCygwin2(command):
    """
    :param command: pass the command line statement
    :return: returns the output of command being supplied
    """
    log = cl.custom_logger(logging.DEBUG)

    try:
        log.info("executing command....")
        print(command)

        with Popen([r'C:\cygwin64\bin\bash.exe'],
                   stdin=PIPE,
                   stdout=PIPE,
                   shell=True) as p:
            p.stdin.write(command.encode())
            p.stdin.close()
            out = p.stdout
            output = out.read().splitlines()

    except Exception as e:
        output = ""
        print("error : " + str(e))
        log.error("error with executing command..")
    finally:
        p.terminate()
        p.kill()
        subprocess._cleanup()
    return output
Example No. 8
def do_task(**post_data):
    callback = post_data.get("callback_url", callback_url)
    acceptkey = post_data.get("accept_key", accept_key)
    task_id = post_data.get("task_id", 0)
    playbook = post_data.get("playbook", "")
    extra_vars = post_data.get("extra_vars", "")
    hosts = post_data.get("hosts", "127.0.0.1,")
    p = Popen(
        "/usr/bin/ansible-playbook -i %s  %s --extra-vars='%s' -s" % (hosts, playbook, extra_vars),
        shell=True,
        stdout=PIPE,
        stderr=PIPE,
    )
    try:
        stdout, stderr = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode

    mylggr.debug(
        "task id  %d in hosts %s playbook %s return stdout %s ,stderr %s!" % (task_id, hosts, playbook, stdout, stderr)
    )
    return {
        "task_id": task_id,
        "callback_url": callback,
        "accept_key": acceptkey,
        "hosts": hosts,
        "playbook": playbook,
        "stdout": stdout,
        "stderr": stderr,
        "returncode": rc,
    }
Example No. 9
def run_python_until_end(*args, **env_vars):
    env_required = interpreter_requires_environment()
    if '__isolated' in env_vars:
        isolated = env_vars.pop('__isolated')
    else:
        isolated = not env_vars and not env_required
    cmd_line = [sys.executable, '-X', 'faulthandler']
    if isolated:
        # isolated mode: ignore Python environment variables, ignore user
        # site-packages, and don't add the current directory to sys.path
        cmd_line.append('-I')
    elif not env_vars and not env_required:
        # ignore Python environment variables
        cmd_line.append('-E')
    # Need to preserve the original environment, for in-place testing of
    # shared library builds.
    env = os.environ.copy()
    # But a special flag that can be set to override -- in this case, the
    # caller is responsible to pass the full environment.
    if env_vars.pop('__cleanenv', None):
        env = {}
    env.update(env_vars)
    cmd_line.extend(args)
    p = subprocess.Popen(cmd_line, stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         env=env)
    try:
        out, err = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode
    err = strip_python_stderr(err)
    return _PythonRunResult(rc, out, err), cmd_line
Example No. 10
    def process(self, the_queue):
        if self.threads < 2:
            worker(self, the_queue)
        else:
            if sys.version_info < (2, 6):
                # work around a race condition in subprocess
                _old_subprocess_cleanup = subprocess._cleanup

                def _cleanup():
                    pass

                subprocess._cleanup = _cleanup

            threads = []

            for i in range(self.threads):
                thread = threading.Thread(target=worker, args=(self, the_queue))
                thread.start()
                threads.append(thread)
            for thread in threads:
                thread.join()
            if sys.version_info < (2, 6):
                subprocess._cleanup = _old_subprocess_cleanup
                subprocess._cleanup()

        if self.errors:
            logger.error("There have been errors, see messages above.")
            sys.exit(1)
Example No. 11
    def __analyze_scancode(self, file_path):
        """Add information about license and copyright using scancode

        :param file_path: file path (in case of scancode)
        """
        result = {
            'licenses': [],
            'copyrights': [],
        }
        try:
            msg = subprocess.check_output([
                self.exec_path, '--json-pp', '-', '--license', '--copyright',
                file_path
            ]).decode("utf-8")
        except subprocess.CalledProcessError as e:
            raise GraalError(cause="Scancode failed at %s, %s" %
                             (file_path, e.output.decode("utf-8")))
        finally:
            subprocess._cleanup()

        scancode_raw = json.loads(msg)
        if 'files' in scancode_raw:
            result['licenses'] = scancode_raw['files'][0]['licenses']
            result['copyrights'] = scancode_raw['files'][0]['copyrights']

        return result
Example No. 12
def _assert_python(expected_success, *args, **env_vars):
    cmd_line = [sys.executable]
    if not env_vars:
        cmd_line.append('-E')
    # Need to preserve the original environment, for in-place testing of
    # shared library builds.
    env = os.environ.copy()
    # But a special flag that can be set to override -- in this case, the
    # caller is responsible to pass the full environment.
    if env_vars.pop('__cleanenv', None):
        env = {}
    env.update(env_vars)
    cmd_line.extend(args)
    p = subprocess.Popen(cmd_line, stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         env=env)
    try:
        out, err = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode
    err =  strip_python_stderr(err)
    if (rc and expected_success) or (not rc and not expected_success):
        raise AssertionError(
            "Process return code is %d, "
            "stderr follows:\n%s" % (rc, err.decode('ascii', 'ignore')))
    return rc, out, err
Example No. 13
    def analyze(self, **kwargs):
        """Add information about license

        :param file_path: file path

        :returns result: dict of the results of the analysis
        """
        result = {'licenses': []}
        file_path = kwargs['file_path']

        try:
            msg = subprocess.check_output([self.exec_path,
                                           file_path]).decode("utf-8")
        except subprocess.CalledProcessError as e:
            raise GraalError(cause="Nomos failed at %s, %s" %
                             (file_path, e.output.decode("utf-8")))
        finally:
            subprocess._cleanup()

        licenses_raw = re.findall(self.search_pattern, msg)
        licenses = []
        for license_raw in licenses_raw:
            license_digested = license_raw.split("license(s)")[1].strip()
            licenses.append(license_digested)

        if licenses:
            result['licenses'] = licenses

        return result
Example No. 14
def _assert_python(expected_success, *args, **env_vars):
    cmd_line = [sys.executable]
    if not env_vars:
        cmd_line.append('-E')
    cmd_line.extend(args)
    # Need to preserve the original environment, for in-place testing of
    # shared library builds.
    env = os.environ.copy()
    env.update(env_vars)
    p = subprocess.Popen(cmd_line,
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         env=env)
    try:
        out, err = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode
    err = strip_python_stderr(err)
    if (rc and expected_success) or (not rc and not expected_success):
        raise AssertionError("Process return code is %d, "
                             "stderr follows:\n%s" %
                             (rc, err.decode('ascii', 'ignore')))
    return rc, out, err
Example No. 15
def gen(project_name, database_host, database_user,database,database_passwd='',table='',database_port=3306):
    #gen config
    with open('configs/db.py','wb+') as f:
        f.write("db={ 'host':"+database_host+
                ',port:'+database_port+
                ',user:'+database_user+',password:'+database_passwd+',database:'+database+
                "}")

    #gen model
    p = subprocess.Popen('python -m pwiz -e mysql -u%s -H%s -P%s -p%d %s -t %s >./models/%s.py'%
                     (database_user,database_host,database_passwd,database_port,database,table,database+table),
                     shell=True,stderr=subprocess.PIPE,stdout=subprocess.PIPE)

    try:
        stdout,stderr = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode
    
    if rc !=0 :
        print "gen model error %s" % stderr
        sys.exit(1)
    #gen template

    #gen controller

    #copy
    for item in dirs:
        shutil.copy(item, os.path.join(project_name, item))
Example No. 16
def _assert_python(expected_success, *args, **env_vars):
    cmd_line = [sys.executable]
    if not env_vars:
        cmd_line.append('-E')
    # Need to preserve the original environment, for in-place testing of
    # shared library builds.
    env = os.environ.copy()
    # But a special flag that can be set to override -- in this case, the
    # caller is responsible to pass the full environment.
    if env_vars.pop('__cleanenv', None):
        env = {}
    env.update(env_vars)
    cmd_line.extend(args)
    p = subprocess.Popen(cmd_line,
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         env=env)
    try:
        out, err = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode
    err = strip_python_stderr(err)
    if (rc and expected_success) or (not rc and not expected_success):
        raise AssertionError("Process return code is %d, "
                             "stderr follows:\n%s" %
                             (rc, err.decode('ascii', 'ignore')))
    return rc, out, err
Example No. 17
    def run(self):
        currentDir = os.getcwd()

        try:
            os.chdir(self.pathToFXApp)
            process = subprocess.Popen([shlex.split(self.cmdFXAPP), self.cmdFXArg], shell=True, stdout=subprocess.PIPE)
            self.lgr.info('Exteral application started')
        except Exception as e:
            self.lgr.error('Critical: Cannot execute fluid explorer app. Details: %s', e.message)
            self.emit(QtCore.SIGNAL('update(QString)'), "ERROR")
            return

        finally:
            os.chdir(currentDir)
            subprocess._cleanup()

        while self.running:

            output = process.stdout.readline()
            if output.startswith(self.SEARCH_PATTERN_CMD):
                self.lgr.info('Received event from fluid explorer app')

            if output == '' and process.poll() is not None:
                break
            if output:
                self.lgr.info(output.strip())
                #print output.strip()

        rc = process.poll()
        return rc
Example No. 18
    def analyze(self, **kwargs):
        """Add information using SCC

        :param file_path: file path
        :param repository_level: set to True if analysis has to be performed on a repository
        :returns result: dict of the results of the analysis
        """
        repository_level = kwargs.get('repository_level', False)

        if repository_level:
            file_path = kwargs['repository_path']
        else:
            file_path = kwargs['file_path']

        try:
            scc_command = ['scc', file_path]
            message = subprocess.check_output(scc_command).decode("utf-8")
        except subprocess.CalledProcessError as e:
            message = e.output.decode("utf-8")
        finally:
            subprocess._cleanup()

        if repository_level:
            results = self.__analyze_repository(message)
        else:
            results = self.__analyze_file(message)
            results['ext'] = GraalRepository.extension(file_path)

        return results
Example No. 19
def do_task(**post_data):
    callback = post_data.get('callback_url')
    acceptkey = post_data.get('accept_key')
    task_id = post_data.get('task_id')
    playbook = post_data.get('playbook')
    extra_vars = post_data.get('extra_vars')
    hosts = post_data.get('hosts')
    p = Popen(
        "/usr/bin/ansible-playbook -i %s  %s --extra-vars='%s' -s" %
        (hosts, playbook, extra_vars),
        shell=True,
        stdout=PIPE,
        stderr=PIPE)
    try:
        stdout, stderr = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode

    log_debug(
        'task id  %d in hosts %s playbook %s return stdout %s ,stderr %s!' %
        (task_id, hosts, playbook, stdout, stderr))
    return {
        'task_id': task_id,
        'callback_url': callback,
        'accept_key': acceptkey,
        'hosts': hosts,
        'playbook': playbook,
        'stdout': stdout,
        'stderr': stderr,
        'returncode': rc
    }
Example No. 20
    def analyze(self, **kwargs):
        """Get a UML class diagrams from a Python project.

        :param module_path: module path
        :param result: dict of the results of the analysis
        """
        result = {}
        module_path = kwargs['module_path']

        try:
            subprocess.check_output(['pyreverse', module_path]).decode("utf-8")
        except subprocess.CalledProcessError as e:
            raise GraalError(cause="Pyreverse failed at %s, %s" % (module_path, e.output.decode("utf-8")))
        finally:
            subprocess._cleanup()

        class_diagram = os.path.join(self.tmp_path, CLASSES_FILE_NAME)
        if os.path.exists(class_diagram):
            graph_classes = self.__dotfile2json(class_diagram)
            result['classes'] = graph_classes

        package_diagram = os.path.join(self.tmp_path, PACKAGES_FILE_NAME)
        if os.path.exists(package_diagram):
            graph_packages = self.__dotfile2json(package_diagram)
            result['packages'] = graph_packages

        return result
Example No. 21
    def analyze(self, **kwargs):
        """Add information using CLOC

        :param file_path: file path
        :param repository_level: set to True if analysis has to be performed on a repository

        :returns result: dict of the results of the analysis
        """

        file_path = kwargs['file_path']
        repository_level = kwargs.get('repository_level', False)

        try:
            message = subprocess.check_output(['cloc',
                                               file_path]).decode("utf-8")
        except subprocess.CalledProcessError as e:
            raise GraalError(cause="Cloc failed at %s, %s" %
                             (file_path, e.output.decode("utf-8")))
        finally:
            subprocess._cleanup()

        if repository_level:
            results = self.__analyze_repository(message)
        else:
            results = self.__analyze_file(message)
            results['ext'] = GraalRepository.extension(file_path)

        return results
Example No. 22
def do_task(**post_data):
    callback = post_data.get('callback_url')
    acceptkey = post_data.get('accept_key')
    task_id = post_data.get('task_id')
    playbook = post_data.get('playbook')
    extra_vars = post_data.get('extra_vars')
    hosts = post_data.get('hosts')
    p = Popen(
        "/usr/bin/ansible-playbook -i %s  %s --extra-vars='%s' -s" %
        (hosts, playbook, extra_vars),
        shell=True,
        stdout=PIPE,
        stderr=PIPE)
    try:
        stdout, stderr = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode

    log_debug(
        'task id  %d in hosts %s playbook %s return stdout %s ,stderr %s!' %
        (task_id, hosts, playbook, stdout, stderr))
    return {'task_id': task_id,
            'callback_url': callback,
            'accept_key': acceptkey,
            'hosts': hosts,
            'playbook': playbook,
            'stdout': stdout,
            'stderr': stderr,
            'returncode': rc}
Example No. 23
def _assert_python(expected_success, *args, **env_vars):
    cmd_line = [sys.executable]
    if not env_vars:
        cmd_line.append('-E')
    cmd_line.extend(args)
    # Need to preserve the original environment, for in-place testing of
    # shared library builds.
    env = os.environ.copy()
    env.update(env_vars)
    p = subprocess.Popen(cmd_line, stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         env=env)
    try:
        out, err = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode
    err =  strip_python_stderr(err)
    if (rc and expected_success) or (not rc and not expected_success):
        raise AssertionError(
            "Process return code is %d, "
            "stderr follows:\n%s" % (rc, err.decode('ascii', 'ignore')))
    return rc, out, err
Example No. 24
def _assert_python(expected_success, *args, **env_vars):
    env_required = interpreter_requires_environment()
    if '__isolated' in env_vars:
        isolated = env_vars.pop('__isolated')
    else:
        isolated = not env_vars and not env_required
    cmd_line = [sys.executable, '-X', 'faulthandler']
    if isolated:
        # isolated mode: ignore Python environment variables, ignore user
        # site-packages, and don't add the current directory to sys.path
        cmd_line.append('-I')
    elif not env_vars and not env_required:
        # ignore Python environment variables
        cmd_line.append('-E')
    # Need to preserve the original environment, for in-place testing of
    # shared library builds.
    env = os.environ.copy()
    # But a special flag that can be set to override -- in this case, the
    # caller is responsible to pass the full environment.
    if env_vars.pop('__cleanenv', None):
        env = {}
    env.update(env_vars)
    cmd_line.extend(args)
    p = subprocess.Popen(cmd_line, stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         env=env)
    try:
        out, err = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode
    err = strip_python_stderr(err)
    if (rc and expected_success) or (not rc and not expected_success):
        # Limit to 80 lines to ASCII characters
        maxlen = 80 * 100
        if len(out) > maxlen:
            out = b'(... truncated stdout ...)' + out[-maxlen:]
        if len(err) > maxlen:
            err = b'(... truncated stderr ...)' + err[-maxlen:]
        out = out.decode('ascii', 'replace').rstrip()
        err = err.decode('ascii', 'replace').rstrip()
        raise AssertionError("Process return code is %d\n"
                             "command line: %r\n"
                             "\n"
                             "stdout:\n"
                             "---\n"
                             "%s\n"
                             "---\n"
                             "\n"
                             "stderr:\n"
                             "---\n"
                             "%s\n"
                             "---"
                             % (rc, cmd_line,
                                out,
                                err))
    return rc, out, err
Example No. 25
def kill_python(p):
    """Run the given Popen process until completion and return stdout."""
    p.stdin.close()
    data = p.stdout.read()
    p.stdout.close()
    p.wait()
    subprocess._cleanup()
    return data
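A hedged sketch of how kill_python is normally driven, assuming a child interpreter started with piped stdin/stdout; the -i flag and stderr-to-stdout redirection below are assumptions modelled on how test suites typically spawn an interactive interpreter:

# Illustrative only: start a child Python, feed it one statement, drain it.
import subprocess, sys

p = subprocess.Popen([sys.executable, '-E', '-i'],
                     stdin=subprocess.PIPE,
                     stdout=subprocess.PIPE,
                     stderr=subprocess.STDOUT)
p.stdin.write(b'print("hello from the child")\n')
data = kill_python(p)   # closes stdin, reads stdout, waits, calls _cleanup()
print(data.decode('ascii', 'replace'))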
Example No. 26
def _kill_python(p):
    p.stdin.close()
    data = p.stdout.read()
    p.stdout.close()
    # try to cleanup the child so we don't appear to leak when running
    # with regrtest -R.  This should be a no-op on Windows.
    subprocess._cleanup()
    return data
Example No. 27
def _kill_python(p):
    p.stdin.close()
    data = p.stdout.read()
    p.stdout.close()
    # try to cleanup the child so we don't appear to leak when running
    # with regrtest -R.  This should be a no-op on Windows.
    subprocess._cleanup()
    return data
Example No. 28
def _assert_python(expected_success, *args, **env_vars):
    env_required = interpreter_requires_environment()
    if '__isolated' in env_vars:
        isolated = env_vars.pop('__isolated')
    else:
        isolated = not env_vars and not env_required
    cmd_line = [sys.executable, '-X', 'faulthandler']
    if isolated:
        # isolated mode: ignore Python environment variables, ignore user
        # site-packages, and don't add the current directory to sys.path
        cmd_line.append('-I')
    elif not env_vars and not env_required:
        # ignore Python environment variables
        cmd_line.append('-E')
    # Need to preserve the original environment, for in-place testing of
    # shared library builds.
    env = os.environ.copy()
    # But a special flag that can be set to override -- in this case, the
    # caller is responsible to pass the full environment.
    if env_vars.pop('__cleanenv', None):
        env = {}
    env.update(env_vars)
    cmd_line.extend(args)
    p = subprocess.Popen(cmd_line,
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         env=env)
    try:
        out, err = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode
    err = strip_python_stderr(err)
    if (rc and expected_success) or (not rc and not expected_success):
        # Limit to 80 lines to ASCII characters
        maxlen = 80 * 100
        if len(out) > maxlen:
            out = b'(... truncated stdout ...)' + out[-maxlen:]
        if len(err) > maxlen:
            err = b'(... truncated stderr ...)' + err[-maxlen:]
        out = out.decode('ascii', 'replace').rstrip()
        err = err.decode('ascii', 'replace').rstrip()
        raise AssertionError("Process return code is %d\n"
                             "command line: %r\n"
                             "\n"
                             "stdout:\n"
                             "---\n"
                             "%s\n"
                             "---\n"
                             "\n"
                             "stderr:\n"
                             "---\n"
                             "%s\n"
                             "---" % (rc, cmd_line, out, err))
    return rc, out, err
Example No. 29
 def test_extract_frames_img(self):
     fLOG(__file__,
          self._testMethodName,
          OutputPrint=__name__ == "__main__")
     temp = get_temp_folder(__file__, "temp_video_extract_frames_img")
     vid = os.path.join(temp, '..', 'data', 'videxa.mp4')
     fns = list(video_enumerate_frames(vid, folder=temp, clean=True))
     self.assertEqual(len(fns), 78)
     subprocess._cleanup()
Example No. 30
    def analyze(self, **kwargs):
        """Add quality checks data using Pylint.

        :param module_path: module path
        :param details: if True, it returns information about single modules

        :returns result: dict of the results of the analysis
        """
        module_path = kwargs['module_path']
        details = kwargs['details']

        try:
            msg = subprocess.check_output(
                ['pylint', '-rn', '--output-format=text',
                 module_path]).decode("utf-8")
        except subprocess.CalledProcessError as e:
            msg = e.output.decode("utf-8")
            if not msg.startswith("***"):
                raise GraalError(cause="Pylint failed at %s, %s" %
                                 (module_path, msg))
        finally:
            subprocess._cleanup()

        end = False
        code_quality = None
        mod_details = []
        module_name = ""
        lines = msg.split('\n')
        modules = {}
        for line in lines:
            if line.startswith("***"):
                if mod_details:
                    modules.update({module_name: mod_details})
                module_name = line.strip("*").strip().replace("Module ", "")
                mod_details = []
            elif line.strip() == "":
                continue
            elif line.startswith("----"):
                modules.update({module_name: mod_details})
                end = True
            else:
                if end:
                    code_quality = line.split("/")[0].split(" ")[-1]
                    break
                else:
                    mod_details.append(line)

        result = {
            'quality': code_quality,
            'num_modules': len(modules),
            'warnings': sum([len(mod) for mod in modules])
        }

        if details:
            result['modules'] = modules

        return result
Example No. 31
def kill_python(p):
    p.stdin.close()
    data = p.stdout.read()
    p.stdout.close()
    # try to cleanup the child so we don't appear to leak when running
    # with regrtest -R.
    p.wait()
    subprocess._cleanup()
    return data
Example No. 32
def kill_python(p):
    p.stdin.close()
    data = p.stdout.read()
    p.stdout.close()
    # try to cleanup the child so we don't appear to leak when running
    # with regrtest -R.
    p.wait()
    subprocess._cleanup()
    return data
Example No. 33
def kill_python(p):
    """Run the given Popen process until completion and return stdout."""
    p.stdin.close()
    data = p.stdout.read()
    p.stdout.close()
    # try to cleanup the child so we don't appear to leak when running
    # with regrtest -R.
    p.wait()
    subprocess._cleanup()
    return data
Example No. 34
def exec_cmd(cmd):
    p = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
    try:
        stdout, stderr = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode
    return rc, stdout, stderr
Example No. 35
def kill_python(p):
    """Run the given Popen process until completion and return stdout."""
    p.stdin.close()
    data = p.stdout.read()
    p.stdout.close()
    # try to cleanup the child so we don't appear to leak when running
    # with regrtest -R.
    p.wait()
    subprocess._cleanup()
    return data
Example No. 36
def run_python_until_end(*args, **env_vars):
    env_required = interpreter_requires_environment()
    cwd = env_vars.pop('__cwd', None)
    if '__isolated' in env_vars:
        isolated = env_vars.pop('__isolated')
    else:
        isolated = not env_vars and not env_required
    cmd_line = [sys.executable, '-X', 'faulthandler']
    if isolated:
        # isolated mode: ignore Python environment variables, ignore user
        # site-packages, and don't add the current directory to sys.path
        cmd_line.append('-I')
    elif not env_vars and not env_required:
        # ignore Python environment variables
        cmd_line.append('-E')

    # But a special flag that can be set to override -- in this case, the
    # caller is responsible to pass the full environment.
    if env_vars.pop('__cleanenv', None):
        env = {}
        if sys.platform == 'win32':
            # Windows requires at least the SYSTEMROOT environment variable to
            # start Python.
            env['SYSTEMROOT'] = os.environ['SYSTEMROOT']

        # Other interesting environment variables, not copied currently:
        # COMSPEC, HOME, PATH, TEMP, TMPDIR, TMP.
    else:
        # Need to preserve the original environment, for in-place testing of
        # shared library builds.
        env = os.environ.copy()

    # set TERM='' unless the TERM environment variable is passed explicitly
    # see issues #11390 and #18300
    if 'TERM' not in env_vars:
        env['TERM'] = ''

    env.update(env_vars)
    cmd_line.extend(args)
    proc = subprocess.Popen(cmd_line,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            env=env,
                            cwd=cwd)
    with proc:
        try:
            out, err = proc.communicate()
        finally:
            proc.kill()
            subprocess._cleanup()
    rc = proc.returncode
    err = strip_python_stderr(err)
    return _PythonRunResult(rc, out, err), cmd_line
Example No. 37
def cloud():
    logo()
    target_site = input("Enter the site eg target.com: \n")
    print(B)
    pwd = os.path.dirname(str(os.path.realpath(__file__)))
    cloud = subprocess.Popen('python ' + pwd + "/cloudbuster.py " +
                             str(target_site),
                             shell=True)
    cloud.communicate()
    subprocess._cleanup()
    print("Cloud Resolving Finished")
Example No. 38
def cloud():
    import time
    logo()
    target_site = input("Enter the site eg target.com: \n")
    print(B)
    pwd = os.path.dirname(str(os.path.realpath(__file__)))
    cloud = subprocess.Popen('python ' + pwd + "/cloudbuster.py " + str(target_site), shell=True)
    cloud.communicate()
    subprocess._cleanup()
    print("Cloud Resolving Finished")
    time.sleep(6)
Example No. 39
 def tearDown(self):
     for inst in popen2._active:
         inst.wait()
     popen2._cleanup()
     self.assertFalse(popen2._active, "popen2._active not empty")
     # The os.popen*() API delegates to the subprocess module (on Unix)
     import subprocess
     for inst in subprocess._active:
         inst.wait()
     subprocess._cleanup()
     self.assertFalse(subprocess._active, "subprocess._active not empty")
     reap_children()
Example No. 40
 def tearDown(self):
     for inst in popen2._active:
         inst.wait()
     popen2._cleanup()
     self.assertFalse(popen2._active, "popen2._active not empty")
     # The os.popen*() API delegates to the subprocess module (on Unix)
     import subprocess
     for inst in subprocess._active:
         inst.wait()
     subprocess._cleanup()
     self.assertFalse(subprocess._active, "subprocess._active not empty")
     reap_children()
Example No. 41
 def tearDown(self):
     for inst in popen2._active:
         inst.wait()
     popen2._cleanup()
     self.assertFalse(popen2._active, "popen2._active not empty")
     # The os.popen*() API delegates to the subprocess module (on Unix)
     if not due_to_ironpython_bug("http://ironpython.codeplex.com/workitem/15512"):
         import subprocess
         for inst in subprocess._active:
             inst.wait()
         subprocess._cleanup()
         self.assertFalse(subprocess._active, "subprocess._active not empty")
     reap_children()
Example No. 42
def run_python_until_end(*args, **env_vars):
    env_required = interpreter_requires_environment()
    cwd = env_vars.pop('__cwd', None)
    if '__isolated' in env_vars:
        isolated = env_vars.pop('__isolated')
    else:
        isolated = not env_vars and not env_required
    cmd_line = [sys.executable, '-X', 'faulthandler']
    if isolated:
        # isolated mode: ignore Python environment variables, ignore user
        # site-packages, and don't add the current directory to sys.path
        cmd_line.append('-I')
    elif not env_vars and not env_required:
        # ignore Python environment variables
        cmd_line.append('-E')

    # But a special flag that can be set to override -- in this case, the
    # caller is responsible to pass the full environment.
    if env_vars.pop('__cleanenv', None):
        env = {}
        if sys.platform == 'win32':
            # Windows requires at least the SYSTEMROOT environment variable to
            # start Python.
            env['SYSTEMROOT'] = os.environ['SYSTEMROOT']

        # Other interesting environment variables, not copied currently:
        # COMSPEC, HOME, PATH, TEMP, TMPDIR, TMP.
    else:
        # Need to preserve the original environment, for in-place testing of
        # shared library builds.
        env = os.environ.copy()

    # set TERM='' unless the TERM environment variable is passed explicitly
    # see issues #11390 and #18300
    if 'TERM' not in env_vars:
        env['TERM'] = ''

    env.update(env_vars)
    cmd_line.extend(args)
    proc = subprocess.Popen(cmd_line, stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         env=env, cwd=cwd)
    with proc:
        try:
            out, err = proc.communicate()
        finally:
            proc.kill()
            subprocess._cleanup()
    rc = proc.returncode
    err = strip_python_stderr(err)
    return _PythonRunResult(rc, out, err), cmd_line
Example No. 43
def exit_cleanup():
    """
    Make sure that when the script exits it kills all subprocesses.
    """

    for proc in _active:
        proc.kill()
        proc.communicate()
    _cleanup()
    run("kill -9 $(pgrep cloud_sql_proxy)", shell=True, stderr=PIPE)
    if not _active:
        info("Processes cleaned up.")
    else:
        error("Sub processes could not be cleaned up.")
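A hedged note on wiring: a cleanup function like this is typically registered to run at interpreter shutdown. The atexit registration below is an assumption for illustration, not something the snippet itself shows:

# Illustrative registration of the cleanup handler defined above.
import atexit
atexit.register(exit_cleanup)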
Example No. 44
def cloud():
    try:
        logo()
        target_site = input("Enter the site eg target.com: \n")
        print(B)
        pwd = os.path.dirname(str(os.path.realpath(__file__)))
        cloud = subprocess.Popen('python ' + pwd + "/modules/buster.py -m" +
                                 str(target_site),
                                 shell=True)
        cloud.communicate()
        subprocess._cleanup()
        fmenu()
    except Exception as e:
        print(e)
Example No. 45
def main():
    while PDF_FILES:
        if len(subprocess._active) < 10:
            pdf = PDF_FILES.pop()
            print("Converting {}".format(pdf))
            try:
                pdftohtml("-s", "-p", "-c", "-dataurls", pdf)
            except:
                pass
        else:
            subprocess._cleanup()

    print("Finishing up conversion")
    while subprocess._active:
        pass
Example No. 46
 def tearDown(self):
     for inst in popen2._active:
         inst.wait()
     popen2._cleanup()
     self.assertFalse(popen2._active, "popen2._active not empty")
     # The os.popen*() API delegates to the subprocess module (on Unix)
     if not due_to_ironpython_bug(
             "http://ironpython.codeplex.com/workitem/15512"):
         import subprocess
         for inst in subprocess._active:
             inst.wait()
         subprocess._cleanup()
         self.assertFalse(subprocess._active,
                          "subprocess._active not empty")
     reap_children()
Example No. 47
def exit_sequence(comms_port):
    if process_result(make_request(url_prefix+url_shutdown(activity_id['processor']))) == 'success':
        print "Shutting down waypoint processor activity"
        time.sleep(0.5)
        if process_result(make_request(url_prefix+url_shutdown(activity_id['captain']))) == 'success':
            print "Shutting down captain activity"
            time.sleep(10)
            if process_result(make_request(url_prefix+url_shutdown(activity_id['generator']))) == 'success':
                print "Shutting down waypoint generator activity"
                time.sleep(0.5)
                if process_result(make_request(url_prefix+url_shutdown(activity_id[comms_port]))) == 'success':
                    print "Shutting down communications activity"
                    time.sleep(1)
                    if process_result(make_request(url_prefix+url_shutdown(activity_id['mavlink']))) == 'success':
                        print "Shutting down mavlink activity"
                        time.sleep(2)
                        subprocess._cleanup()
                        print "Everything shut down"
                        return
    print "Could not shutdown all the activities"
Example No. 48
def _assert_python(expected_success, *args, **env_vars):
    env_required = _interpreter_requires_environment()
    if '__isolated' in env_vars:
        isolated = env_vars.pop('__isolated')
    else:
        isolated = not env_vars and not env_required
    cmd_line = [sys.executable, '-X', 'faulthandler']
    if isolated:
        # isolated mode: ignore Python environment variables, ignore user
        # site-packages, and don't add the current directory to sys.path
        cmd_line.append('-I')
    elif not env_vars and not env_required:
        # ignore Python environment variables
        cmd_line.append('-E')
    # Need to preserve the original environment, for in-place testing of
    # shared library builds.
    env = os.environ.copy()
    # But a special flag that can be set to override -- in this case, the
    # caller is responsible to pass the full environment.
    if env_vars.pop('__cleanenv', None):
        env = {}
    env.update(env_vars)
    cmd_line.extend(args)
    p = subprocess.Popen(cmd_line, stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         env=env)
    try:
        out, err = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode
    err = strip_python_stderr(err)
    if (rc and expected_success) or (not rc and not expected_success):
        raise AssertionError(
            "Process return code is %d, command line was: %r, "
            "stderr follows:\n%s" % (rc, cmd_line,
                                     err.decode('ascii', 'ignore')))
    return rc, out, err
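A hedged sketch of how such a helper is consumed by tests; the assert_python_ok / assert_python_failure wrapper names follow CPython's script_helper conventions and are reproduced here as an assumption rather than part of the snippet:

# Thin convenience wrappers around _assert_python(), plus one sample call.
def assert_python_ok(*args, **env_vars):
    return _assert_python(True, *args, **env_vars)

def assert_python_failure(*args, **env_vars):
    return _assert_python(False, *args, **env_vars)

rc, out, err = assert_python_ok('-c', 'print("ok")')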
Example No. 49
def do_task(**post_data):
    callback = post_data.get('callback_url', callback_url)
    acceptkey = post_data.get('accept_key', accept_key)
    task_id = post_data.get('task_id', 0)
    filepath = post_data.get('upfile').replace('\\','/')
    filename = filepath.split('/')[-1]

    newFile = os.path.join(uploadpath, filename)

    if not os.path.exists(uploadpath):
        os.mkdir(uploadpath)

    fout = open(newFile,'w')
    fout.write(post_data.get('filename'))
    fout.close()

    # create the yum repository index
    p = subprocess.Popen("cd %s && createrepo %s" % (rpmdir,yumname),
                     shell=True,
                     stdout=subprocess.PIPE,
                     stderr=subprocess.PIPE)
    try:
        stdout, stderr = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode

    mylggr.debug(
        'task id  %d return stdout %s ,stderr %s!' %
        (task_id, stdout, stderr))
    return {'task_id': task_id,
            'callback_url': callback,
            'accept_key': acceptkey,
            'filename': filename,
            'stdout': stdout,
            'stderr': stderr,
            'returncode': rc}
Example No. 50
def run_python_until_end(*args, **env_vars):
    env_required = interpreter_requires_environment()
    if "__isolated" in env_vars:
        isolated = env_vars.pop("__isolated")
    else:
        isolated = not env_vars and not env_required
    cmd_line = [sys.executable, "-X", "faulthandler"]
    if isolated:
        # isolated mode: ignore Python environment variables, ignore user
        # site-packages, and don't add the current directory to sys.path
        cmd_line.append("-I")
    elif not env_vars and not env_required:
        # ignore Python environment variables
        cmd_line.append("-E")
    # Need to preserve the original environment, for in-place testing of
    # shared library builds.
    env = os.environ.copy()
    # set TERM='' unless the TERM environment variable is passed explicitly
    # see issues #11390 and #18300
    if "TERM" not in env_vars:
        env["TERM"] = ""
    # But a special flag that can be set to override -- in this case, the
    # caller is responsible to pass the full environment.
    if env_vars.pop("__cleanenv", None):
        env = {}
    env.update(env_vars)
    cmd_line.extend(args)
    p = subprocess.Popen(cmd_line, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
    try:
        out, err = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode
    err = strip_python_stderr(err)
    return _PythonRunResult(rc, out, err), cmd_line
Example No. 51
def _assert_python(expected_success, *args, **env_vars):
    if "__isolated" in env_vars:
        isolated = env_vars.pop("__isolated")
    else:
        isolated = not env_vars
    cmd_line = [sys.executable]
    if sys.version_info >= (3, 3):
        cmd_line.extend(("-X", "faulthandler"))
    if isolated and sys.version_info >= (3, 4):
        # isolated mode: ignore Python environment variables, ignore user
        # site-packages, and don't add the current directory to sys.path
        cmd_line.append("-I")
    elif not env_vars:
        # ignore Python environment variables
        cmd_line.append("-E")
    # Need to preserve the original environment, for in-place testing of
    # shared library builds.
    env = os.environ.copy()
    # But a special flag that can be set to override -- in this case, the
    # caller is responsible to pass the full environment.
    if env_vars.pop("__cleanenv", None):
        env = {}
    env.update(env_vars)
    cmd_line.extend(args)
    p = subprocess.Popen(cmd_line, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
    try:
        out, err = p.communicate()
    finally:
        subprocess._cleanup()
        p.stdout.close()
        p.stderr.close()
    rc = p.returncode
    err = strip_python_stderr(err)
    if (rc and expected_success) or (not rc and not expected_success):
        raise AssertionError("Process return code is %d, " "stderr follows:\n%s" % (rc, err.decode("ascii", "ignore")))
    return rc, out, err
Example No. 52
def rhn_popen(cmd, progressCallback=None, bufferSize=16384, outputLog=None):
    """ popen-like function, that accepts execvp-style arguments too (i.e. an
        array of params, thus making shell escaping unnecessary)

        cmd can be either a string (like "ls -l /dev"), or an array of
        arguments ["ls", "-l", "/dev"]

        Returns the command's error code, a stream with stdout's contents
        and a stream with stderr's contents

        progressCallback --> progress bar twiddler
        outputLog --> optional log file file object write method
    """

    subprocess._cleanup()

    cmd_is_list = type(cmd) in (types.ListType, types.TupleType)
    if cmd_is_list:
        cmd = map(str, cmd)
    c = subprocess.Popen(cmd, bufsize=0, stdin=subprocess.PIPE,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                close_fds=True, shell=(not cmd_is_list))

    # We don't write to the child process
    c.stdin.close()

    # Create two temporary streams to hold the info from stdout and stderr
    child_out = tempfile.TemporaryFile(prefix = '/tmp/my-popen-', mode = 'r+b')
    child_err = tempfile.TemporaryFile(prefix = '/tmp/my-popen-', mode = 'r+b')

    # Map the input file descriptor with the temporary (output) one
    fd_mappings = [(c.stdout, child_out), (c.stderr, child_err)]
    exitcode = None
    count = 1

    while 1:
        # Is the child process done?
        status = c.poll()
        if status is not None:
            if status >= 0:
                # Save the exit code, we still have to read from the pipes
                exitcode = status
            else:
                # Some signal sent to this process
                if outputLog is not None:
                    outputLog("rhn_popen: Signal %s received\n" % (-status))
                exitcode = status
                break

        fd_set = map(lambda x: x[0], fd_mappings)
        readfds = select.select(fd_set, [], [])[0]

        for in_fd, out_fd in fd_mappings:
            if in_fd in readfds:
                # There was activity on this file descriptor
                output = os.read(in_fd.fileno(), bufferSize)
                if output:
                    # show progress
                    if progressCallback:
                        count = count + len(output)
                        progressCallback(count)

                    if outputLog is not None:
                        outputLog(output)

                    # write to the output buffer(s)
                    out_fd.write(output)
                    out_fd.flush()

        if exitcode is not None:
            # Child process is done
            break

    for f_in, f_out in fd_mappings:
        f_in.close()
        f_out.seek(0, 0)

    return exitcode, child_out, child_err
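A hedged usage sketch based only on the docstring's contract (a string or argv-list command in, an exit code plus rewound stdout/stderr streams out):

# Illustrative call; both returned streams are temporary files seeked back to 0.
exitcode, out_stream, err_stream = rhn_popen(["ls", "-l", "/dev"])
print("ls exited with %d" % exitcode)
print(out_stream.read().decode("utf-8", "replace"))
out_stream.close()
err_stream.close()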
Example No. 53
def fmenu():
    global vuln
    vuln = []
    if endsub != 1:
        vulnscan()
    logo()
    print("[1] Dork and vuln scan")
    print("[2] Admin page finder")
    print("[3] FTP crawler and vuln scan")
    print("[4] DNS brute")
    print("[5] Enable Tor/Proxy Support")
    print("[6] Misc Options")
    print("[0] Exit\n")
    chce = input(":")

    if chce == '1':
        print(W + "")
        fscan()

    elif chce == '2':
        afsite = input("Enter the site eg target.com: ")
        print(B)
        pwd = os.path.dirname(str(os.path.realpath(__file__)))
        findadmin = subprocess.Popen(pwd + "/modules/adminfinder.py -w modules/adminlist.txt -u " + str(afsite),
                                     shell=True)
        findadmin.communicate()
        subprocess._cleanup()

    elif chce == '3':
        randips = input("How many IP addresses do you want to scan: ")
        print(B)
        pwd = os.path.dirname(str(os.path.realpath(__file__)))
        ftpcrawl = subprocess.Popen(pwd + "/modules/ftpcrawler.py -i " + str(randips), shell=True)
        ftpcrawl.communicate()
        subprocess._cleanup()

    elif chce == '4':
        dnstarg = input("Enter the site eg target.com: ")
        print(B)
        pwd = os.path.dirname(str(os.path.realpath(__file__)))
        dnsbrute = subprocess.Popen(pwd + "/modules/dnsbrute.py -w modules/subdomainsmid.txt -u " + str(dnstarg),
                                    shell=True)
        dnsbrute.communicate()
        subprocess._cleanup()

    elif chce == '5':
        print(W + "")
        enable_proxy()

    elif chce == '0':
        print(R + "\n Exiting ...")
        print(W)
        sys.exit(0)

    elif chce == '6':
        print(W + "")
        os.system('clear')
        logo()
        print("[1] Skip to custom SQLi list checking")
        print("[2] Cloudflare IP Resolver ::= Next Release")
        print("[3] Identify Hash ::= Next Release")
        print("[4] SockStress DDoS Tool ::= Next Release")
        print("[0] Return to main menu")
        chce2 = input(":")
        if chce2 == '1':
            os.system('clear')
            logo()
            try:
                url = [line.strip() for line in open(input("Please Input Custom List Path \n"
                                                       "ie> \n"
                                                       " /home/user/Desktop/samples.txt \n"
                                                       "\n :    :"))]
                classicinj(url)
            except:
                os.system('clear')
                logo()
                print("Target file not found!")
                os.system('clear')
                fmenu()
        elif chce2 == '2':
            os.system('clear')
            logo()
#           cloud()
        elif chce2 == '0':
            fmenu()
Example No. 54
    def __init__(self, args, bufsize=0, executable=None,
                 stdin=None, stdout=None, stderr=None,
                 preexec_fn=None, close_fds=False, shell=False,
                 cwd=None, env=None, universal_newlines=False,
                 startupinfo=None, creationflags=0,
                 verbose=False, fork=True):  #new
        
        # do stuff
        self.opts, self.args = None, args
        self.name = args[0]
        self.usage = string.join(string.split(self. __doc__, '\n')[1:], '\n')
        
        # Argument Processing
        self.parser = cmd_parser(usage=self.usage)  # build parser
        self.parser.add_option('-v', '--verbose',  action='store_true',
            dest='verbose', default=verbose, help='enable verbose output.')
        self.parser.add_option('-q', '--quiet',  action='store_false',
            dest='verbose', default=verbose, help='disable verbose output.')

        try: # see if object needs to register parameters with option parser
            register_opts_func = getattr(self, 'register_opts')
            register_opts_func()
        except AttributeError:  pass
        # Option parser
        try:  self.parse_args(args)
        except ValueError:
            # on error or help (-h), defeat run() function, and print \n
            self.run = self.newline
        
        # run supa-class:
        #super(ConsoleAppBase2, self).__init__(self,args,**kw)
        subprocess._cleanup()

        self.stdin = None
        self.stdout = None
        self.stderr = None
        self.pid = None
        self.returncode = None
        self.universal_newlines = universal_newlines

        # Input and output objects. The general principle is like
        # this:
        #
        # Parent                   Child
        # ------                   -----
        # p2cwrite   ---stdin--->  p2cread
        # c2pread    <--stdout---  c2pwrite
        # errread    <--stderr---  errwrite
        if fork:
            (p2cread, p2cwrite,
             c2pread, c2pwrite,
             errread, errwrite) = self._get_handles(stdin, stdout, stderr)
            # need to implement this for the next line... is stdout a tty?
            if c2pwrite <> None:
                    self.istty = os.isatty(c2pwrite)
            else:   self.istty = os.isatty(sys.stdout.fileno())
    
            self._execute_child(args, executable, preexec_fn, close_fds,
                                cwd, env, universal_newlines,
                                startupinfo, creationflags, shell,
                                p2cread, p2cwrite,
                                c2pread, c2pwrite,
                                errread, errwrite)
            if p2cwrite:
                self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
            if c2pread:
                if universal_newlines:
                    self.stdout = os.fdopen(c2pread, 'rU', bufsize)
                else:
                    self.stdout = os.fdopen(c2pread, 'rb', bufsize)
            if errread:
                if universal_newlines:
                    self.stderr = os.fdopen(errread, 'rU', bufsize)
                else:
                    self.stderr = os.fdopen(errread, 'rb', bufsize)
        else:
            self.istty = os.isatty(sys.stdout.fileno())
            returncode = self.run()  # don't want .run() to have to set rt. explicitly
            if returncode:  self.returncode = returncode
            else:           self.returncode = 0
            
        subprocess._active.append(self)
Example No. 55
 def tearDown(self):
     for inst in subprocess._active:
         inst.wait()
     subprocess._cleanup()
     self.assertFalse(subprocess._active, "subprocess._active not empty")
Example No. 56
    def __init__(self, args, bufsize=0, executable=None,
                 stdin=None, stdout=None, stderr=None,
                 preexec_fn=None, close_fds=False, shell=False,
                 cwd=None, env=None, universal_newlines=False,
                 startupinfo=None, creationflags=0):
        """Create new Popen instance."""
        _subprocess._cleanup()

        self._child_created = False
        if not isinstance(bufsize, (int, long)):
            raise TypeError("bufsize must be an integer")

        if _subprocess.mswindows:
            if preexec_fn is not None:
                raise ValueError("preexec_fn is not supported on Windows "
                                 "platforms")
            if close_fds and (stdin is not None or stdout is not None or
                              stderr is not None):
                raise ValueError("close_fds is not supported on Windows "
                                 "platforms if you redirect stdin/stdout/stderr")
        else:
            # POSIX
            if startupinfo is not None:
                raise ValueError("startupinfo is only supported on Windows "
                                 "platforms")
            if creationflags != 0:
                raise ValueError("creationflags is only supported on Windows "
                                 "platforms")

        self.stdin = None
        self.stdout = None
        self.stderr = None
        self.pid = None
        self.returncode = None
        self.universal_newlines = universal_newlines

        # Input and output objects. The general principle is like
        # this:
        #
        # Parent                   Child
        # ------                   -----
        # p2cwrite   ---stdin--->  p2cread
        # c2pread    <--stdout---  c2pwrite
        # errread    <--stderr---  errwrite
        #
        # On POSIX, the child objects are file descriptors.  On
        # Windows, these are Windows file handles.  The parent objects
        # are file descriptors on both platforms.  The parent objects
        # are None when not using PIPEs. The child objects are None
        # when not redirecting.

        t = self._get_handles(stdin, PIPE, PIPE)
        nreturned = len(t)
        if nreturned == 2:
            p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite = t[0]
            to_close = t[1]
            self._execute_child(args, executable, preexec_fn, close_fds,
                                cwd, env, universal_newlines,
                                startupinfo, creationflags, shell, to_close,
                                p2cread, p2cwrite,
                                c2pread, c2pwrite,
                                errread, errwrite)

        else:
            p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite = t
            self._execute_child(args, executable, preexec_fn, close_fds,
                                cwd, env, universal_newlines,
                                startupinfo, creationflags, shell,
                                p2cread, p2cwrite,
                                c2pread, c2pwrite,
                                errread, errwrite)

        if _subprocess.mswindows:
            if p2cwrite is not None:
                p2cwrite = _subprocess.msvcrt.open_osfhandle(p2cwrite.Detach(), 0)
            if c2pread is not None:
                c2pread = _subprocess.msvcrt.open_osfhandle(c2pread.Detach(), 0)
            if errread is not None:
                errread = _subprocess.msvcrt.open_osfhandle(errread.Detach(), 0)

        if p2cwrite is not None:
            self.stdin = Pipe(p2cwrite, 'wb', bufsize)
        if c2pread is not None:
            if universal_newlines:
                self.stdout = Pipe(c2pread, 'rU', bufsize)
            else:
                self.stdout = Pipe(c2pread, 'rb', bufsize)
        if errread is not None:
            if universal_newlines:
                self.stderr = Pipe(errread, 'rU', bufsize)
            else:
                self.stderr = Pipe(errread, 'rb', bufsize)
Example No. 57
 gc.collect()
 print '\n\nWorking With: '
 print 'Dataset: ' + fileName
 print 'LSH-Canopy-Method: ' + cluster_method 
 try:
     os.makedirs(processed_data_dir)
 except OSError as exception:
     if exception.errno != errno.EEXIST:
         raise        
 shutil.rmtree(processed_data_dir)        
 try:
     os.makedirs(processed_data_dir)
 except OSError as exception:
     if exception.errno != errno.EEXIST:
         raise                
 subprocess._cleanup()
 gc.collect()        
 if copyToTemp==True:     
     try:
         os.makedirs(temp_dir)
     except OSError as exception:
         if exception.errno != errno.EEXIST:
             raise            
     shutil.rmtree(temp_dir)            
     try:
         os.makedirs(temp_dir)
     except OSError as exception:
         if exception.errno != errno.EEXIST:
             raise            
     source=os.path.join(data_dir,fileName)
     destination=os.path.join(temp_dir,fileName)