Example #1
 def csv_reader(self,
                url,
                header=False,
                encoding=None,
                skip_rows=0,
                data=None,
                **kwargs):
     if not data:
         result = urlparse(url)
         if result.scheme == 'ftp':
             data = StringIO()
             ftp = FTP(result.hostname)
             ftp.login(result.username, result.password)
             ftp.retrbinary('RETR {}'.format(result.path),
                            lambda block: data.write(block.decode('utf-8')))
             ftp.quit()
             data.seek(0)
         else:
             response = self.get(url, **kwargs)
             if encoding:
                 response.encoding = encoding
             data = StringIO(response.text.strip())
     if skip_rows:
         for _ in range(skip_rows):
             data.readline()
     if header:
         return csv.DictReader(data)
     else:
         return csv.reader(data)
 def test_filter_encrypted(self):
     stdout = StringIO()
     with patch('sys.stdout', stdout):
         execute_from_command_line(['', 'listbackups', '--encrypted', '-q'])
     stdout.seek(0)
     stdout.readline()
     for line in stdout.readlines():
         self.assertIn('.gpg', line)
 def test_filter_media(self):
     stdout = StringIO()
     with patch('sys.stdout', stdout):
         execute_from_command_line(
             ['', 'listbackups', '--content-type', 'media', '-q'])
     stdout.seek(0)
     stdout.readline()
     for line in stdout.readlines():
         self.assertIn('.tar', line)
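Both tests above share one capture pattern; here is a small self-contained sketch of it (the printed lines are made up, standing in for the real listbackups output):

    from io import StringIO
    from unittest.mock import patch

    out = StringIO()
    with patch('sys.stdout', out):          # everything printed lands in `out`
        print("Backups list:")              # header line the tests skip
        print("daily-2020-01-01.tar.gpg")   # made-up backup file name
    out.seek(0)
    out.readline()                          # skip the header line
    assert '.gpg' in out.readline()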
Example #4
    class _File(object):
        """
        A file-like object representing a file in git

        @todo: We don't support any byte ranges yet.
        """
        def __init__(self, content):
            self._iter = iter
            self._data = StringIO(content)

        def readline(self):
            return self._data.readline()

        def readlines(self):
            return self._data.readlines()

        def read(self, size=None):
            return self._data.read(size)

        def close(self):
            return self._data.close()

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            self.close()
Example #6
    def _parse_result(self, response):
        data = StringIO(BeautifulSoup(response.text).find('pre').text.strip())
        # `header` is e.g.:
        # "u'-LAMBDA-VAC-ANG-|-SPECTRUM--|TT|--------TERM---------|---J-J---|----LEVEL-ENERGY--CM-1----'"
        # `colnames` is then
        # [u'LAMBDA VAC ANG', u'SPECTRUM', u'TT', u'TERM', u'J J',
        #  u'LEVEL ENERGY  CM 1']

        header = data.readline().strip().strip('|')

        colnames = [colname.strip('-').replace('-', ' ')
                    for colname in header.split('|') if colname.strip()]
        indices = [i for i, c in enumerate(header) if c == '|']
        rows = []
        for line in data:
            row = []
            for start, end in zip([0] + indices, indices + [None]):
                # `value` will hold all cell values in the line, so
                # `u'1.010799'`, `u'Zn XXX'` etc.
                value = line[start:end].strip()
                if value:
                    row.append(value)
                else:
                    # maintain table dimensions when data missing
                    row.append('None')
            if row:
                rows.append('\t'.join(row))
        if rows:
            return ascii.read(rows, data_start=0, delimiter='\t',
                              names=colnames, fast_reader=False)
        else:
            # return an empty table if the query yielded no results
            return Table()
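The column boundaries above come from the '|' positions in the header line; a self-contained sketch of that slicing step, using a made-up row laid out in the documented format:

    header = "-LAMBDA-VAC-ANG-|-SPECTRUM--|TT"
    indices = [i for i, c in enumerate(header) if c == '|']
    colnames = [c.strip('-').replace('-', ' ')
                for c in header.split('|') if c.strip()]
    row = "1.010799".ljust(17) + "Zn XXX".ljust(12) + "E1"
    cells = [row[start:end].strip() or 'None'
             for start, end in zip([0] + indices, indices + [None])]
    print(colnames)  # ['LAMBDA VAC ANG', 'SPECTRUM', 'TT']
    print(cells)     # ['1.010799', 'Zn XXX', 'E1']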
Example #7
    def testSaveContentIncludesExpectedKeysAndValues(self):
        """
        When a backend saves, its JSON content must include the expected
        keys and values.
        """
        dbParams = DatabaseParameters(landmarks=[], trigPoints=[],
                                      limitPerLandmark=16, maxDistance=17,
                                      minDistance=18, distanceBase=19.0)
        be = Backend()
        be.configure(dbParams, 'backend', 33)
        fp = StringIO()
        be.save(fp)
        fp.seek(0)

        DatabaseParameters.restore(fp)
        SubjectStore.restore(fp)
        state = loads(fp.readline()[:-1])

        # Keys
        self.assertEqual(
            set(['checksum', 'd', 'name', '_totalCoveredResidues']),
            set(state.keys()))

        # Values
        self.assertEqual(be.checksum(), state['checksum'])
        self.assertEqual({}, state['d'])
        self.assertEqual('backend', state['name'])
        self.assertEqual(0, state['_totalCoveredResidues'])
Example #8
        def test_simple_cmd_with_input_fileobject_and_redirected_output_fileobject(self):
            out_buff = StringIO()
            in_buff = StringIO()
            in_buff.write(self.PHRASE)

            assert not os.path.exists(self.IN_FILE)
            in_buff.seek(0)
            assert in_buff.readline().strip("\n") == self.PHRASE

            ret = ProcessHelper.run_subprocess(self.CAT_COMMAND, input=in_buff, output=out_buff)
            in_buff.close()
            assert ret == 0
            assert not os.path.exists(self.OUT_FILE)
            out_buff.seek(0)
            assert out_buff.readline().strip("\n") == self.PHRASE
            out_buff.close()
Example #9
 def test_simple_cmd_with_redirected_output_fileobject(self):
     buff = StringIO()
     ret = ProcessHelper.run_subprocess(self.ECHO_COMMAND, output=buff)
     assert ret == 0
     assert not os.path.exists(self.OUT_FILE)
     assert buff.readline().strip('\n') == self.PHRASE
     buff.close()
Example #11
    def __init__(self, server):
        self.host, self.port = server.split(':')
        try:
            stat = self.send_cmd('stat\n')
            envi = self.send_cmd('envi\n')
            sio = StringIO(stat)
            line = sio.readline()
            if 'not currently serving requests' in line:
                raise Exception(
                    "This ZooKeeper instance is not currently serving requests"
                )
        except socket.error:
            self.mode = "Unreachable"
            self.sessions = []
            self.version = "Unknown"
            return
        except Exception as e:
            traceback.print_exc()
            self.mode = "Internal error"
            self.sessions = []
            self.version = "Unknown"
            return

        m = re.search(r'.*: (\d+\.\d+\.\d+)-.*', line)
        self.version = m.group(1)
        sio.readline()
        self.sessions = []
        for line in sio:
            if not line.strip():
                break

            self.sessions.append(Session(line.strip()))
        for line in sio:
            attr, value = line.split(':')
            attr = attr.strip().replace(" ", "_").replace("/", "_").lower()
            self.__dict__[attr] = value.strip()

        self.min_latency, self.avg_latency, self.max_latency = self.latency_min_avg_max.split(
            "/")

        self.envi = []
        sio = StringIO(envi)
        for line in sio:
            if not line.strip(): break
            attr, equ, value = line.partition("=")
            if not equ: continue
            self.envi.append((attr, value))
Example #12
 def testSaveContentHasFourParts(self):
     """
     When a simple database saves, its content must include parts for the
     database parameters, the database state, the backend parameters and
     the backend state.
     """
     dbParams = DatabaseParameters(landmarks=[AlphaHelix],
                                   trigPoints=[Peaks])
     db = Database(dbParams)
     fp = StringIO()
     db.save(fp)
     fp.seek(0)
     dbParams = DatabaseParameters.restore(fp)
     loads(fp.readline()[:-1])
     backendParams = DatabaseParameters.restore(fp)
     loads(fp.readline()[:-1])
     self.assertIs(None, dbParams.compare(backendParams))
Example #13
        def test_simple_cmd_with_input_fileobject_and_redirected_output_fileobject(self):
            out_buff = StringIO()
            in_buff = StringIO()
            in_buff.write(self.PHRASE)

            assert not os.path.exists(self.IN_FILE)
            in_buff.seek(0)
            assert in_buff.readline().strip('\n') == self.PHRASE

            ret = ProcessHelper.run_subprocess(self.CAT_COMMAND,
                                               input=in_buff,
                                               output=out_buff)
            in_buff.close()
            assert ret == 0
            assert not os.path.exists(self.OUT_FILE)
            out_buff.seek(0)
            assert out_buff.readline().strip('\n') == self.PHRASE
            out_buff.close()
Example #14
    def __init__(self, server):
        self.host, self.port = server.split(':')
        try:
            stat = self.send_cmd('stat\n')
            envi = self.send_cmd('envi\n')
            sio = StringIO(stat)
            line = sio.readline()
            if 'not currently serving requests' in line:
                raise Exception("This ZooKeeper instance is not currently serving requests")
        except socket.error:
            self.mode = "Unreachable"
            self.sessions = []
            self.version = "Unknown"
            return
        except Exception as e:
            traceback.print_exc()
            self.mode = "Internal error"
            self.sessions = []
            self.version = "Unknown"
            return

        m = re.search(r'.*: (\d+\.\d+\.\d+)-.*', line)
        self.version = m.group(1)
        sio.readline()
        self.sessions = []
        for line in sio:
            if not line.strip():
                break

            self.sessions.append(Session(line.strip()))
        for line in sio:
            attr, value = line.split(':')
            attr = attr.strip().replace(" ", "_").replace("/", "_").lower()
            self.__dict__[attr] = value.strip()

        self.min_latency, self.avg_latency, self.max_latency = self.latency_min_avg_max.split("/")

        self.envi = []
        sio = StringIO(envi)
        for line in sio:
            if not line.strip(): break
            attr, equ, value = line.partition("=")
            if not equ: continue
            self.envi.append((attr, value))
Example #15
        def decode(text, *args, **kwargs):
            """Used by pypy and pylint to deal with a spec file"""
            return_tuple = kwargs.get("return_tuple", True)

            if six.PY3:
                if hasattr(text, 'tobytes'):
                    text = text.tobytes().decode('utf8')
                else:
                    text = text.decode('utf8')

            buffered = StringIO(text)

            # Determine if we need to have imports for this string
            # It may be a fragment of the file
            has_spec = regexes['encoding_matcher'].search(buffered.readline())
            no_imports = not has_spec
            buffered.seek(0)

            # Translate the text
            if six.PY2:
                utf8 = encodings.search_function(
                    'utf8')  # Assume utf8 encoding
                reader = utf8.streamreader(buffered)
            else:
                reader = buffered

            data = self.dealwith(reader.readline, no_imports=no_imports)

            # If nothing was changed, then we want to use the original file/line
            # Also have to replace indentation of original line with indentation of new line
            # To take into account nested describes
            if text and not regexes['only_whitespace'].match(text):
                if regexes['whitespace'].sub(
                        '', text) == regexes['whitespace'].sub('', data):
                    bad_indentation = regexes['leading_whitespace'].search(
                        text).groups()[0]
                    good_indentation = regexes['leading_whitespace'].search(
                        data).groups()[0]
                    data = '%s%s' % (good_indentation,
                                     text[len(bad_indentation):])

            # If text is empty and data isn't, then we should return text
            if len(text) == 0 and len(data) == 1:
                if return_tuple:
                    return "", 0
                else:
                    return ""

            # Return the translated version and its length
            if return_tuple:
                return data, len(data)
            else:
                return data
Example #16
 def csv_reader(self, url, header=False, encoding=None, skip_rows=0, data=None, **kwargs):
     if not data:
         result = urlparse(url)
         if result.scheme == 'ftp':
             data = StringIO()
             ftp = FTP(result.hostname)
             ftp.login(result.username, result.password)
             ftp.retrbinary('RETR {}'.format(result.path), lambda block: data.write(block.decode('utf-8')))
             ftp.quit()
             data.seek(0)
         else:
             response = self.get(url, **kwargs)
             if encoding:
                 response.encoding = encoding
             data = StringIO(response.text.strip())
     if skip_rows:
         for _ in range(skip_rows):
             data.readline()
     if header:
         return csv.DictReader(data)
     else:
         return csv.reader(data)
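A minimal self-contained sketch of the skip_rows/header path above, with made-up CSV content: readline() advances the buffer, so the reader only sees what is left.

    import csv
    from io import StringIO

    data = StringIO("junk to skip\nname,age\nalice,30\n")
    data.readline()                       # skip_rows=1
    reader = csv.DictReader(data)         # header=True branch
    print([dict(row) for row in reader])  # [{'name': 'alice', 'age': '30'}]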
Example #17
        def decode(text, *args, **kwargs):
            """Used by pypy and pylint to deal with a spec file"""
            return_tuple = kwargs.get("return_tuple", True)

            if six.PY3:
                if hasattr(text, 'tobytes'):
                    text = text.tobytes().decode('utf8')
                else:
                    text = text.decode('utf8')

            buffered = StringIO(text)

            # Determine if we need to have imports for this string
            # It may be a fragment of the file
            has_spec = regexes['encoding_matcher'].search(buffered.readline())
            no_imports = not has_spec
            buffered.seek(0)

            # Translate the text
            if six.PY2:
                utf8 = encodings.search_function('utf8') # Assume utf8 encoding
                reader = utf8.streamreader(buffered)
            else:
                reader = buffered

            data = self.dealwith(reader.readline, no_imports=no_imports)

            # If nothing was changed, then we want to use the original file/line
            # Also have to replace indentation of original line with indentation of new line
            # To take into account nested describes
            if text and not regexes['only_whitespace'].match(text):
                if regexes['whitespace'].sub('', text) == regexes['whitespace'].sub('', data):
                    bad_indentation = regexes['leading_whitespace'].search(text).groups()[0]
                    good_indentation = regexes['leading_whitespace'].search(data).groups()[0]
                    data = '%s%s' % (good_indentation, text[len(bad_indentation):])

            # If text is empty and data isn't, then we should return text
            if len(text) == 0 and len(data) == 1:
                if return_tuple:
                    return "", 0
                else:
                    return ""

            # Return the translated version and its length
            if return_tuple:
                return data, len(data)
            else:
                return data
Example #18
    def testSaveContentIncludesExpectedKeysAndValues(self):
        """
        When the database saves, its JSON content must include the expected
        keys and values.
        """
        dbParams = DatabaseParameters(landmarks=[AlphaHelix],
                                      trigPoints=[Peaks])
        db = Database(dbParams)
        fp = StringIO()
        db.save(fp)
        fp.seek(0)
        DatabaseParameters.restore(fp)
        state = loads(fp.readline()[:-1])

        # Keys
        self.assertEqual(['_connectorClassName'], list(state.keys()))

        # Values
        self.assertEqual('SimpleConnector', state['_connectorClassName'])
Example #19
def check_html(name, file_path=True):
    """
    Returns True if the file/string contains HTML code.
    """
    # Handles files if file_path is True or text if file_path is False
    if file_path:
        temp = io.open(name, "r", encoding='utf-8')
    else:
        temp = StringIO(util.unicodify(name))
    try:
        for _ in range(HTML_CHECK_LINES):
            line = temp.readline(CHUNK_SIZE)
            if not line:
                break
            if any(regexp.search(line) for regexp in HTML_REGEXPS):
                return True
    except UnicodeDecodeError:
        return False
    finally:
        temp.close()
    return False
Example #20
 def GET(self, filename, offset):
     arc = open(arc_dir + '/' + filename)
     arc.seek(int(offset))
     size = int(arc.readline().split(' ')[4])
     f = StringIO(arc.read(size))
     f.readline()
     ret = ''
     while True:
         line = f.readline()
         if line == '\r\n':
             break
     while True:
         line = f.readline()
         chunk_size = int(line, 16)
         if chunk_size == 0:
             break
         buf = f.read(chunk_size)
         ret += buf
         f.readline()
     return ret
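A self-contained sketch of the chunk-reading loop above, on a made-up chunked body: every chunk is preceded by its length in hex, and a zero length ends the stream.

    from io import StringIO

    f = StringIO("5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n")
    ret = ''
    while True:
        chunk_size = int(f.readline(), 16)
        if chunk_size == 0:
            break
        ret += f.read(chunk_size)
        f.readline()        # consume the CRLF that follows each chunk
    print(ret)              # 'hello world'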
Example #21
    def upload(self, file, portal, storage, status):
        """Add the redirections from the CSV file `file`. If anything goes wrong, do nothing."""

        # No file picked. There's gotta be a better way to handle this.
        if not file.filename:
            err = _(u"Please pick a file to upload.")
            status.addStatusMessage(err, type='error')
            self.form_errors['file'] = err
            return
        # Turn all kinds of newlines into LF ones. The csv module doesn't do
        # its own newline sniffing and requires either \n or \r.
        contents = safe_text(file.read()).splitlines()
        file = StringIO('\n'.join(contents))

        # Use first two lines as a representative sample for guessing format,
        # in case one is a bunch of headers.
        dialect = csv.Sniffer().sniff(file.readline() + file.readline())
        file.seek(0)

        # key is old path, value is tuple(new path, datetime, manual)
        successes = {}
        had_errors = False
        for i, fields in enumerate(csv.reader(file, dialect)):
            if len(fields) >= 2:
                redirection = fields[0]
                target = fields[1]

                now = None
                manual = True
                if len(fields) >= 3:
                    dt = fields[2]
                    if dt:
                        try:
                            now = DateTime(dt)
                        except DateTimeError:
                            logger.warning('Failed to parse as DateTime: %s',
                                           dt)
                            now = None
                if len(fields) >= 4:
                    manual = fields[3].lower()
                    # Compare first character with false, no, 0.
                    if manual and manual[0] in 'fn0':
                        manual = False
                    else:
                        manual = True
                abs_redirection, err = absolutize_path(redirection,
                                                       is_source=True)
                abs_target, target_err = absolutize_path(target,
                                                         is_source=False)
                if err and target_err:
                    if (i == 0 and not redirection.startswith('/')
                            and not target.startswith('/')):
                        # First line is a header.  Ignore this.
                        continue
                    err = "%s %s" % (err, target_err)  # sloppy w.r.t. i18n
                elif target_err:
                    err = target_err
                else:
                    if abs_redirection == abs_target:
                        # TODO: detect indirect recursion
                        err = _(
                            u"Alternative urls that point to themselves will cause"
                            u"an endless cycle of redirects.")
            else:
                err = _(u"Each line must have 2 or more columns.")

            if not err:
                if not had_errors:  # else don't bother
                    successes[abs_redirection] = (abs_target, now, manual)
            else:
                had_errors = True
                self.csv_errors.append(
                    dict(
                        line_number=i + 1,
                        line=dialect.delimiter.join(fields),
                        message=err,
                    ))

        if not had_errors:
            storage.update(successes)
            status.addStatusMessage(
                _(
                    u"${count} alternative urls added.",
                    mapping={'count': len(successes)},
                ),
                type='info',
            )
        else:
            self.csv_errors.insert(
                0,
                dict(
                    line_number=0,
                    line='',
                    message=_(
                        u'msg_delimiter',
                        default=u"Delimiter detected: ${delimiter}",
                        mapping={'delimiter': dialect.delimiter},
                    ),
                ),
            )
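A self-contained sketch of the sniffing step above, with a made-up two-line sample: the two readline() calls feed the Sniffer, and seek(0) rewinds the buffer so csv.reader starts from the first line again.

    import csv
    from io import StringIO

    f = StringIO("old path;new path\n/old-page;/new-page\n")
    dialect = csv.Sniffer().sniff(f.readline() + f.readline())
    f.seek(0)
    print(dialect.delimiter)             # ';'
    print(list(csv.reader(f, dialect)))  # [['old path', 'new path'], ['/old-page', '/new-page']]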
Example #22
class OnlyReadline:
    def __init__(self, s):
        self.sio = StringIO(s)

    def readline(self):
        return self.sio.readline()
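A possible usage sketch for the wrapper above (the input string is made up); only readline() is exposed, which is all a strictly line-oriented consumer needs:

    src = OnlyReadline("a,b\n1,2\n")
    lines = []
    while True:
        line = src.readline()
        if not line:
            break
        lines.append(line.rstrip("\n"))
    print(lines)  # ['a,b', '1,2']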
Example #23
    def upload(self, file, portal, storage, status):
        """Add the redirections from the CSV file `file`. If anything goes wrong, do nothing."""

        # No file picked. There's gotta be a better way to handle this.
        if not file.filename:
            err = _(u"Please pick a file to upload.")
            status.addStatusMessage(err, type='error')
            self.form_errors['file'] = err
            return
        # Turn all kinds of newlines into LF ones. The csv module doesn't do
        # its own newline sniffing and requires either \n or \r.
        contents = safe_text(file.read()).splitlines()
        file = StringIO('\n'.join(contents))

        # Use first two lines as a representative sample for guessing format,
        # in case one is a bunch of headers.
        dialect = csv.Sniffer().sniff(file.readline() + file.readline())
        file.seek(0)

        successes = []  # list of tuples: (abs_redirection, target)
        had_errors = False
        for i, fields in enumerate(csv.reader(file, dialect)):
            if len(fields) == 2:
                redirection, target = fields
                abs_redirection, err = absolutize_path(redirection,
                                                       is_source=True)
                abs_target, target_err = absolutize_path(target,
                                                         is_source=False)
                if err and target_err:
                    err = "%s %s" % (err, target_err)  # sloppy w.r.t. i18n
                elif target_err:
                    err = target_err
                else:
                    if abs_redirection == abs_target:
                        # TODO: detect indirect recursion
                        err = _(
                            u"Alternative urls that point to themselves will cause"
                            u"an endless cycle of redirects.")
            else:
                err = _(u"Each line must have 2 columns.")

            if not err:
                if not had_errors:  # else don't bother
                    successes.append((abs_redirection, abs_target))
            else:
                had_errors = True
                self.csv_errors.append(
                    dict(
                        line_number=i + 1,
                        line=dialect.delimiter.join(fields),
                        message=err,
                    ))

        if not had_errors:
            for abs_redirection, abs_target in successes:
                storage.add(abs_redirection, abs_target)
            status.addStatusMessage(
                _(
                    u"${count} alternative urls added.",
                    mapping={'count': len(successes)},
                ),
                type='info',
            )
Example #24
class NormalHTTPFile(HTTPFile):
    def __init__(self,
                 path,
                 devid,
                 backup_dests=None,
                 mg=None,
                 fid=None,
                 cls=None,
                 key=None,
                 create_close_arg=None,
                 **kwds):

        super(NormalHTTPFile, self).__init__(mg, fid, key, cls,
                                             create_close_arg)

        if backup_dests is None:
            backup_dests = []
        self._fp = StringIO()
        self._paths = [(devid, path)] + list(backup_dests)
        self._is_closed = 0

    def paths(self):
        return self._paths

    def read(self, n=-1):
        return self._fp.read(n)

    def readline(self, *args, **kwds):
        return self._fp.readline(*args, **kwds)

    def readlines(self, *args, **kwds):
        return self._fp.readlines(*args, **kwds)

    def write(self, content):
        self._fp.write(content)

    def close(self):
        if not self._is_closed:
            self._is_closed = True

            #      content = self._fp.getvalue()
            #      self._fp.close()

            for tried_devid, tried_path in self._paths:
                try:
                    #          self._request(tried_path, "PUT", content)
                    self._fp.seek(0)
                    put.putfile(self._fp, tried_path)
                    devid = tried_devid
                    path = tried_path
                    break
                except HTTPError as e:
                    continue
            else:
                devid = None
                path = None

            self._fp.seek(0, 2)
            size = self._fp.tell()
            self._fp.close()
            if devid:
                params = {
                    'fid': self.fid,
                    'domain': self.mg.domain,
                    'key': self.key,
                    'path': path,
                    'devid': devid,
                    'size': size
                }
                if self.create_close_arg:
                    params.update(self.create_close_arg)
                try:
                    self.mg.backend.do_request('create_close', params)
                except MogileFSError as e:
                    if e.err != 'empty_file':
                        raise

    def seek(self, pos, mode=0):
        return self._fp.seek(pos, mode)

    def tell(self):
        return self._fp.tell()
Example #25
def shell(name, arguments, options, content, lineno, content_offset,
          block_text, state, state_machine):
    """insert a shell command's raw output in a pre block, like::
        
        | .. shell:: 
        |    :run_on_method: some.module.main
        | 
        |    mycmd --arg 1
    
    Also:
    
        | .. shell::
        |    :setup: some.module.setup
        |    :teardown: some.module.teardown
        | 
        |    mycmd --arg 1
    
    """
    printable_cmd_parts = content
    cmd = ' '.join([c.replace("\\", "") for c in content])

    if options.get('setup'):
        setup = get_object_from_path(options['setup'])
        setup()

    if options.get('run_on_method'):
        main = get_object_from_path(options['run_on_method'])

        def decode(s):
            if isinstance(s, unicode):
                s = str(s.decode())
            return s

        def unquot(s):
            if s[0] in ('"', "'"):
                s = s[1:-1]
            return s

        cmdlist = []
        # get args with whitespace normalized:
        for part in re.split(r'\s+', cmd.strip()):
            part = decode(part)
            part = unquot(part)
            e = part.find('=')
            if e != -1:
                # i.e. --where="title='Dune'"
                part = "%s=%s" % (part[:e], unquot(part[e + 1:]))
            cmdlist.append(part)

        stdout = StringIO()
        stderr = StringIO()
        sys.stdout = stdout
        sys.stderr = stderr
        _program = sys.argv[0]
        sys.argv[0] = cmdlist[0]
        try:
            try:
                main(cmdlist[1:])
            except SystemExit as e:
                returncode = e.code
            else:
                returncode = 0
        finally:
            sys.stdout = sys.__stdout__
            sys.stderr = sys.__stderr__
            stdout.seek(0)
            stderr.seek(0)
            sys.argv[0] = _program
    else:
        p = subprocess.Popen(cmd,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             close_fds=True,
                             shell=True)

        returncode = p.wait()
        stdout, stderr = p.stdout, p.stderr

    if returncode != 0:
        raise RuntimeError("%s\n%s (exit: %s)" %
                           (stderr.read(), cmd, returncode))

    if options.get('teardown'):
        teardown = get_object_from_path(options['teardown'])
        teardown()

    # just create a pre block and fill it with command output...
    pad = "  "
    output = ["\n::\n\n"]
    output.append(pad + "$ " + ("%s\n" % pad).join(printable_cmd_parts) + "\n")
    while 1:
        line = stdout.readline()
        if not line:
            break
        output.append(pad + line)
    output.append("\n")

    output = "".join(output)

    include_lines = statemachine.string2lines(output)
    state_machine.insert_input(include_lines, None)
    return []
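The run_on_method branch above relies on the classic redirect-then-rewind pattern; a self-contained sketch of just that part, with made-up output:

    import sys
    from io import StringIO

    buf = StringIO()
    sys.stdout = buf                      # capture anything the command prints
    try:
        print("made-up command output")
    finally:
        sys.stdout = sys.__stdout__       # always restore the real stream
    buf.seek(0)
    print(buf.readline().rstrip("\n"))    # made-up command output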
Example #26
class NormalHTTPFile(HTTPFile):
  def __init__(self,
               path,
               devid,
               backup_dests=None,
               mg=None,
               fid=None,
               cls=None,
               key=None,
               create_close_arg=None,
               **kwds):

    super(NormalHTTPFile, self).__init__(mg, fid, key, cls, create_close_arg)

    if backup_dests is None:
      backup_dests = []
    self._fp = StringIO()
    self._paths = [(devid, path)] + list(backup_dests)
    self._is_closed = 0

  def paths(self):
    return self._paths

  def read(self, n=-1):
    return self._fp.read(n)

  def readline(self, *args, **kwds):
    return self._fp.readline(*args, **kwds)

  def readlines(self, *args, **kwds):
    return self._fp.readlines(*args, **kwds)

  def write(self, content):
    self._fp.write(content)

  def close(self):
    if not self._is_closed:
      self._is_closed = True

#      content = self._fp.getvalue()
#      self._fp.close()

      for tried_devid, tried_path in self._paths:
        try:
#          self._request(tried_path, "PUT", content)
          self._fp.seek(0)
          put.putfile(self._fp, tried_path)
          devid = tried_devid
          path = tried_path
          break
        except HTTPError as e:
          continue
      else:
        devid = None
        path = None

      self._fp.seek(0, 2)
      size = self._fp.tell()
      self._fp.close()
      if devid:
        params = {
                   'fid'   : self.fid,
                   'domain': self.mg.domain,
                   'key'   : self.key,
                   'path'  : path,
                   'devid' : devid,
                   'size'  : size
                 }
        if self.create_close_arg:
          params.update(self.create_close_arg)
        try:
          self.mg.backend.do_request('create_close', params)
        except MogileFSError as e:
          if e.err != 'empty_file':
            raise

  def seek(self, pos, mode=0):
    return self._fp.seek(pos, mode)

  def tell(self):
    return self._fp.tell()
Example #27
def shell(  
        name, arguments, options, content, lineno,
        content_offset, block_text, state, state_machine):
    """insert a shell command's raw output in a pre block, like::
        
        | .. shell:: 
        |    :run_on_method: some.module.main
        | 
        |    mycmd --arg 1
    
    Also:
    
        | .. shell::
        |    :setup: some.module.setup
        |    :teardown: some.module.teardown
        | 
        |    mycmd --arg 1
    
    """
    printable_cmd_parts = content
    cmd = ' '.join([c.replace("\\", "") for c in content])
    
    if options.get('setup'):
        setup = get_object_from_path(options['setup'])
        setup()
        
    if options.get('run_on_method'):
        main = get_object_from_path(options['run_on_method'])
        
        def decode(s):
            if isinstance(s, unicode):
                s = str(s.decode())
            return s
        def unquot(s):
            if s[0] in ('"', "'"):
                s = s[1:-1]
            return s
        cmdlist = []
        # get args with whitespace normalized:
        for part in re.split(r'\s+', cmd.strip()):
            part = decode(part)
            part = unquot(part)
            e = part.find('=')
            if e != -1:
                # i.e. --where="title='Dune'"
                part = "%s=%s" % (part[:e], unquot(part[e+1:]))
            cmdlist.append(part)
        
        stdout = StringIO()
        stderr = StringIO()
        sys.stdout = stdout
        sys.stderr = stderr
        _program = sys.argv[0]
        sys.argv[0] = cmdlist[0]
        try:
            try:
                main(cmdlist[1:])
            except SystemExit as e:
                returncode = e.code
            else:
                returncode = 0
        finally:
            sys.stdout = sys.__stdout__
            sys.stderr = sys.__stderr__
            stdout.seek(0)
            stderr.seek(0)
            sys.argv[0] = _program
    else:
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, 
                stderr=subprocess.PIPE, close_fds=True, shell=True)
    
        returncode = p.wait()
        stdout, stderr = p.stdout, p.stderr
        
    if returncode != 0:
        raise RuntimeError("%s\n%s (exit: %s)" % (
                            stderr.read(), cmd, returncode))
    
    if options.get('teardown'):
        teardown = get_object_from_path(options['teardown'])
        teardown()
        
    # just create a pre block and fill it with command output...
    pad = "  "
    output = ["\n::\n\n"]
    output.append(pad + "$ " + ("%s\n" % pad).join(printable_cmd_parts) + "\n")
    while 1:
        line = stdout.readline()
        if not line:
            break
        output.append(pad + line)
    output.append("\n")
    
    output = "".join(output)
        
    include_lines = statemachine.string2lines(output)
    state_machine.insert_input(include_lines, None)
    return []