Example #1
class MockProcess(object):
    def __init__(self, stdout='MOCK STDOUT\n', stderr='', returncode=0):
        self.pid = 42
        self.stdout = StringIO(stdout)
        self.stderr = StringIO(stderr)
        self.stdin = StringIO()
        self.returncode = returncode

    def wait(self):
        return

    def poll(self):
        # Consider the process completed when all of stdout and stderr have been read.
        if (len(self.stdout.getvalue()) != self.stdout.tell()
                or len(self.stderr.getvalue()) != self.stderr.tell()):
            return None
        return self.returncode

    def communicate(self, *_):
        return (self.stdout.getvalue(), self.stderr.getvalue())

    def kill(self):
        return

    def terminate(self):
        return
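As a quick sketch of how this stub behaves (assuming the MockProcess class above is in scope and StringIO is io.StringIO), poll() only reports completion once both streams have been drained:

proc = MockProcess(stdout='hello\n')
assert proc.poll() is None           # stdout has unread data: still "running"
proc.stdout.read()
proc.stderr.read()
assert proc.poll() == 0              # both buffers drained -> returncode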
Example #2
def sitemap_generator(request, maps, page, current_site):
    output = StringIO()
    protocol = 'https' if request.is_secure() else 'http'
    xml = SimplerXMLGenerator(output, settings.DEFAULT_CHARSET)
    xml.startDocument()
    xml.startElement(
        'urlset', {'xmlns': 'http://www.sitemaps.org/schemas/sitemap/0.9'})
    yield output.getvalue()
    pos = output.tell()
    for site in maps:
        if callable(site):
            if issubclass(site, RequestSitemap):
                site = site(request=request)
            else:
                site = site()
        elif hasattr(site, 'request'):
            site.request = request

        try:
            urls = site.get_urls(page=page, site=current_site,
                                 protocol=protocol)
        except InvalidPage:
            raise Http404('Page not found')

        for url in urls:
            xml.startElement('url', {})
            xml.addQuickElement('loc', url['location'])
            try:
                if url['lastmod']:
                    xml.addQuickElement(
                        'lastmod', url['lastmod'].strftime('%Y-%m-%d'))
            except (KeyError, AttributeError):
                pass
            try:
                if url['changefreq']:
                    xml.addQuickElement('changefreq', url['changefreq'])
            except KeyError:
                pass
            try:
                if url['priority']:
                    xml.addQuickElement('priority', url['priority'])
            except KeyError:
                pass
            xml.endElement('url')
            output.seek(pos)
            yield output.read()
            pos = output.tell()
    xml.endElement('urlset')
    xml.endDocument()
    output.seek(pos)
    last = output.read()
    output.close()
    yield last
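The tell()/seek(pos) bookkeeping above is what turns one growing StringIO into an incremental stream. A minimal, self-contained sketch of that pattern (stream_new_output is a hypothetical helper name):

from io import StringIO

def stream_new_output(chunks):
    # Yield only what was written since the previous yield.
    buf = StringIO()
    pos = 0
    for chunk in chunks:
        buf.write(chunk)
        buf.seek(pos)
        yield buf.read()
        pos = buf.tell()

assert list(stream_new_output(['<a/>', '<b/>'])) == ['<a/>', '<b/>']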
Example #3
 def do_POST(self):
     """Serve a POST request."""
     r, info = self.deal_post_data()
     print(r, info, "by: ", self.client_address)
     f = StringIO()
     f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
     f.write("<html>\n<title>Upload Result Page</title>\n")
     f.write("<body>\n<h2>Upload Result Page</h2>\n")
     f.write("<hr>\n")
     if r:
         f.write("<strong>Success:</strong>")
     else:
         f.write("<strong>Failed:</strong>")
     f.write(info)
     f.write("<br><a href=\"%s\">back</a>" % self.headers['referer'])
     f.write("<hr><small>Powerd By: bones7456, check new version at ")
     f.write("<a href=\"http://li2z.cn/?s=SimpleHTTPServerWithUpload\">")
     f.write("here</a>.</small></body>\n</html>\n")
     length = f.tell()
     f.seek(0)
     self.send_response(200)
     self.send_header("Content-type", "text/html")
     self.send_header("Content-Length", str(length))
     self.end_headers()
     if f:
         self.copyfile(f, self.wfile)
         f.close()
Example #4
    def fetch_data(self):
        # create a data frame directly from the full text of
        # the response from the returned file-descriptor.
        data = self.fetch_url(self.url)
        fd = StringIO()

        if isinstance(data, str):
            fd.write(data)
        else:
            for chunk in data:
                fd.write(chunk)

        self.fetch_size = fd.tell()

        fd.seek(0)

        try:
            # see if pandas can parse csv data
            frames = read_csv(fd, **self.pandas_kwargs)

            frames_hash = hashlib.md5(str(fd.getvalue()).encode('utf-8'))
            self.fetch_hash = frames_hash.hexdigest()
        except pd.parser.CParserError:
            # could not parse the data, raise exception
            raise Exception('Error parsing remote CSV data.')
        finally:
            fd.close()

        return frames
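Here fd.tell() right after the writes doubles as the fetched size, since the cursor sits at the end of what was written. A standalone sketch of just that bookkeeping (not the original class):

import hashlib
from io import StringIO

fd = StringIO()
fd.write('a,b\n1,2\n')
fetch_size = fd.tell()                 # cursor position == characters written
fetch_hash = hashlib.md5(fd.getvalue().encode('utf-8')).hexdigest()
fd.seek(0)                             # rewind before handing fd to a parser
assert fetch_size == 8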
Example #5
    def _send_command(command, host, port, timeout):
        sock = socket.socket()
        sock.settimeout(timeout)
        buf = StringIO()
        chunk_size = 1024
        # try-finally and try-except to stay compatible with python 2.4
        try:
            try:
                # Connect to the zk client port and send the stat command
                sock.connect((host, port))
                sock.sendall(ensure_bytes(command))

                # Read the response into a StringIO buffer
                chunk = ensure_unicode(sock.recv(chunk_size))
                buf.write(chunk)
                num_reads = 1
                max_reads = 10000
                while chunk:
                    if num_reads > max_reads:
                        # Safeguard against an infinite loop
                        raise Exception(
                            "Read %s bytes before exceeding max reads of %s. "
                            % (buf.tell(), max_reads))
                    chunk = ensure_unicode(sock.recv(chunk_size))
                    buf.write(chunk)
                    num_reads += 1
            except (socket.timeout, socket.error):
                raise ZKConnectionFailure()
        finally:
            sock.close()
        return buf
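In the read loop above, buf.tell() serves as a running count of characters received, which is what the safeguard message reports. A trimmed-down sketch of the same loop over a canned source (the chunks iterator stands in for sock.recv):

from io import StringIO

chunks = iter(['imok', ''])            # '' plays the role of a closed socket
buf = StringIO()
chunk = next(chunks)
buf.write(chunk)
while chunk:
    chunk = next(chunks)
    buf.write(chunk)
assert buf.tell() == 4                 # total characters read so far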
Example #8
 def render_tasks(self, iterable, **kw):
     fd = StringIO()
     err = StringIO(u'')
     tasks = tasks_from_iterable(iterable)
     render_tasks(write=fd.write, write_err=err.write, tasks=tasks, **kw)
     if err.tell():
         return fd.getvalue(), err.getvalue()
     return fd.getvalue()
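err.tell() is used here as a cheap "was anything written?" test: it stays zero until the first write. For instance:

from io import StringIO

err = StringIO()
assert not err.tell()        # nothing written yet -> falsy
err.write('warning: x')
assert err.tell()            # position advanced -> at least one write happened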
Example #9
def sitemap_generator(request, maps, page, current_site):
    output = StringIO()
    protocol = 'https' if request.is_secure() else 'http'
    xml = SimplerXMLGenerator(output, settings.DEFAULT_CHARSET)
    xml.startDocument()
    xml.startElement('urlset', {'xmlns':'http://www.sitemaps.org/schemas/sitemap/0.9'})
    yield output.getvalue()
    pos = output.tell()
    for site in maps:
        if callable(site):
            if issubclass(site, RequestSitemap):
                site = site(request=request)
            else:
                site = site()
        elif hasattr(site, 'request'):
            site.request = request
        for url in site.get_urls(page=page, site=current_site, protocol=protocol):
            xml.startElement('url', {})
            xml.addQuickElement('loc', url['location'])
            try:
                if url['lastmod']:
                    xml.addQuickElement('lastmod', url['lastmod'].strftime('%Y-%m-%d'))
            except (KeyError, AttributeError):
                pass
            try:
                if url['changefreq']:
                    xml.addQuickElement('changefreq', url['changefreq'])
            except KeyError:
                pass
            try:
                if url['priority']:
                    xml.addQuickElement('priority', url['priority'])
            except KeyError:
                pass
            xml.endElement('url')
            output.seek(pos)
            yield output.read()
            pos = output.tell()
    xml.endElement('urlset')
    xml.endDocument()
    output.seek(pos)
    last = output.read()
    output.close()
    yield last
Example #10
    def AddModule(self, m):
        f = StringIO()
        m.AppendJSContentsToFile(f, False, None)

        attrs = {'label': '%s (%i)' % (m.name, f.tell())}

        f.close()

        attr_items = ['%s="%s"' % (x, y) for x, y in six.iteritems(attrs)]
        node = 'M%i [%s];' % (m.id, ','.join(attr_items))
        self.nodes.append(node)
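After AppendJSContentsToFile writes into f, f.tell() is the number of characters emitted, so the node label carries the module's rendered size. The same measurement in isolation:

from io import StringIO

f = StringIO()
f.write('function m() {}')
assert f.tell() == len(f.getvalue())   # cursor at end == content length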
Example #11
 def build_csv_response(self, wb, title="report"):
     """ Take a workbook and return a csv file response """
     title = generate_filename(title, ".csv")
     myfile = StringIO()
     sh = wb.get_active_sheet()
     c = csv.writer(myfile)
     for r in sh.rows:
         c.writerow([cell.value for cell in r])
     response = HttpResponse(myfile.getvalue(), content_type="text/csv")
     response["Content-Disposition"] = "attachment; filename=%s" % title
     response["Content-Length"] = myfile.tell()
     return response
Example #12
 def build_csv_response(self, wb, title="report"):
     """ Take a workbook and return a csv file response """
     title = generate_filename(title, '.csv')
     myfile = StringIO()
     sh = wb.get_active_sheet()
     c = csv.writer(myfile)
     for r in sh.rows:
         c.writerow([cell.value for cell in r])
     response = HttpResponse(myfile.getvalue(), content_type='text/csv')
     response['Content-Disposition'] = 'attachment; filename=%s' % title
     response['Content-Length'] = myfile.tell()
     return response
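csv.writer leaves the stream position at the end of the data, so myfile.tell() equals the body length and can feed the Content-Length header directly. A sketch without the Django pieces:

import csv
from io import StringIO

myfile = StringIO()
csv.writer(myfile).writerow(['a', 'b'])
assert myfile.tell() == len(myfile.getvalue())   # usable as Content-Length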
Example #13
def getSize(obj):
    """Calculate the size as cheap as possible
    """
    # Try the cheap variants first.
    # Actually the checks ensure the code never fails but beeing sure
    # is better.
    try:
        # return zero if the length is zero
        if len(obj) == 0:
            return 0
    except:
        pass

    try:
        # check if ``IStreamableReference``
        if IStreamableReference.providedBy(obj):
            size = obj.getSize()
            if size is not None:
                return size
    except:
        pass

    try:
        # string
        if isinstance(obj, types.StringTypes):
            return len(obj)
    except:
        pass

    try:
        # file like object
        methods = dir(obj)
        if "seek" in methods and "tell" in methods:
            currentPos = obj.tell()
            obj.seek(0, 2)
            size = obj.tell()
            obj.seek(currentPos)
            return size
    except:
        pass

    try:
        # fallback: pickling the object
        stream = StringIO()
        p = Pickler(stream, 1)
        p.dump(obj)
        size = stream.tell()
    except:
        size = None

    return size
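The file-like branch above measures size by saving the cursor, seeking to the end, and reading tell(). In isolation (2 is os.SEEK_END):

from io import StringIO

obj = StringIO('hello world')
current = obj.tell()      # remember where the caller left the cursor
obj.seek(0, 2)            # 2 == os.SEEK_END
size = obj.tell()
obj.seek(current)         # restore the cursor
assert size == 11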
Example #14
 def build_csv_response(self, wb, title="report"):
     """ Take a workbook and return a csv file response """
     title = generate_filename(title, '.csv')
     myfile = StringIO()
     sh = wb.active
     c = csv.writer(myfile)
     for r in sh.rows:
         c.writerow([cell.value for cell in r])
     response = HttpResponse(
         myfile.getvalue(),
         content_type='text/csv')
     response['Content-Disposition'] = 'attachment; filename=%s' % title
     response['Content-Length'] = myfile.tell()
     return response
Example #15
 def build_csv_response(self, wb, title="report"):
     """ Take a workbook and return a csv file response """
     title = generate_filename(title, '.csv')
     myfile = StringIO()
     sh = wb.active
     c = csv.writer(myfile)
     for r in sh.rows:
         c.writerow([cell.value for cell in r])
     response = HttpResponse(
         myfile.getvalue(),
         content_type='text/csv')
     self.add_content_disposition_header(response, title)
     response['Content-Length'] = myfile.tell()
     return response
Example #16
    def check(self, instance):
        host = instance.get('host', 'localhost')
        port = int(instance.get('port', 2181))
        timeout = float(instance.get('timeout', 3.0))
        dimensions = self._set_dimensions(
            {'component': 'zookeeper', 'service': 'zookeeper'}, instance)

        sock = socket.socket()
        sock.settimeout(timeout)
        buf = StringIO()
        chunk_size = 1024
        # try-finally and try-except to stay compatible with python 2.4
        try:
            try:
                # Connect to the zk client port and send the stat command
                sock.connect((host, port))
                sock.sendall(b'stat')

                # Read the response into a StringIO buffer
                chunk = encodeutils.safe_decode(sock.recv(chunk_size), 'utf-8')
                buf.write(chunk)
                num_reads = 1
                max_reads = 10000
                while chunk:
                    if num_reads > max_reads:
                        # Safeguard against an infinite loop
                        raise Exception(
                            "Read %s bytes before exceeding max reads of %s. " %
                            (buf.tell(), max_reads))
                    chunk = encodeutils.safe_decode(sock.recv(chunk_size), 'utf-8')
                    buf.write(chunk)
                    num_reads += 1
            except socket.timeout:
                buf = None
        finally:
            sock.close()

        if buf is not None:
            # Parse the response
            metrics, new_dimensions = self.parse_stat(buf)
            if new_dimensions is not None:
                dimensions.update(new_dimensions.copy())

            # Write the data
            for metric, value in metrics:
                self.gauge(metric, value, dimensions=dimensions)
        else:
            # Reading from the client port timed out, track it as a metric
            self.increment('zookeeper.timeouts', dimensions=dimensions)
Example #17
def parse_oldraw(f):
    """Read raw emission potentials from rawfile"""

    buf = StringIO()
    re_identifier = re.compile(r"^#\s*(\d+)\s+(\d+)\s*$")

    x_single = None
    x_pair = None
    i, j = None, None
    meta = None
    for line_idx, line in enumerate(f):
        if line.startswith(META_PREFIX):
            meta = json.loads(line[len(META_PREFIX):].strip())

        elif line.startswith("#"):

            buf.seek(0)

            if x_single is not None:
                x_pair[i, j, :, :] = np.loadtxt(buf)
                x_pair[j, i, :, :] = x_pair[i, j, :, :].T

            else:
                x_single = np.loadtxt(buf)

                ncol = x_single.shape[0]
                x_pair = np.zeros((ncol, ncol, 21, 21))

            buf = StringIO()

            m = re_identifier.match(line)
            if m:
                i, j = int(m.group(1)), int(m.group(2))

            else:
                raise Exception(
                    "Line {0} starts with # but doesn't match regex!".format(
                        line_idx + 1))

        else:
            buf.write(line)

    if x_single is not None and buf.tell():
        buf.seek(0)
        x_pair[i, j, :, :] = np.loadtxt(buf)
        x_pair[j, i, :, :] = x_pair[i, j, :, :].T

    return CCMRaw(ncol, x_single, x_pair, meta)
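The final buf.tell() check distinguishes an empty trailing buffer from one still holding an unparsed block: a freshly written StringIO has a non-zero position, while StringIO('...') starts at zero. Assuming numpy is installed, the loadtxt-from-buffer step looks like:

import numpy as np
from io import StringIO

buf = StringIO()
buf.write('1 2\n3 4\n')
assert buf.tell()              # non-zero: something was buffered
buf.seek(0)                    # rewind before parsing
assert np.loadtxt(buf).shape == (2, 2)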
Example #18
 def test_unseekable_file(self):
     def tell_fails():
         raise IOError()
     ticket = get_ticket(urls=[get_http_ticket(EXAMPLE_URL)])
     for num_retries in range(10):
         temp_file = StringIO()
         temp_file.tell = tell_fails
         with mock.patch("time.sleep") as mock_sleep, \
                 mock.patch("logging.warning") as mock_warning:
             dm = RetryCountDownloadManager(
                 ticket, temp_file, max_retries=num_retries)
             self.assertEqual(dm.max_retries, num_retries)
             self.assertRaises(exceptions.RetryableError, dm.run)
             self.assertEqual(dm.attempt_counts[EXAMPLE_URL], 1)
             self.assertEqual(mock_sleep.call_count, 0)
             self.assertEqual(mock_warning.call_count, 0)
Example #19
    def list_directory(self, path):
        """Helper to produce a directory listing (absent index.html).

        Return value is either a file object, or None (indicating an
        error).  In either case, the headers are sent, making the
        interface the same as for send_head().

        """
        try:
            list = os.listdir(path)
        except os.error:
            self.send_error(404, "No permission to list directory")
            return None
        list.sort(key=lambda a: a.lower())
        f = StringIO()
        displaypath = cgi.escape(unquote(self.path))
        f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
        f.write("<html>\n<title>Directory listing for %s</title>\n" %
                displaypath)
        f.write("<body>\n<h2>Directory listing for %s</h2>\n" % displaypath)
        f.write("<hr>\n")
        f.write("<form ENCTYPE=\"multipart/form-data\" method=\"post\">")
        f.write("<input name=\"file\" type=\"file\"/>")
        f.write("<input type=\"submit\" value=\"upload\"/></form>\n")
        f.write("<hr>\n<ul>\n")
        for name in list:
            fullname = os.path.join(path, name)
            displayname = linkname = name
            # Append / for directories or @ for symbolic links
            if os.path.isdir(fullname):
                displayname = name + "/"
                linkname = name + "/"
            if os.path.islink(fullname):
                displayname = name + "@"
                # Note: a link to a directory displays with @ and links with /
            f.write('<li><a href="%s">%s</a>\n' %
                    (quote(linkname), cgi.escape(displayname)))
        f.write("</ul>\n<hr>\n</body>\n</html>\n")
        length = f.tell()
        f.seek(0)
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.send_header("Content-Length", str(length))
        self.end_headers()
        return f
Example #20
    def list_directory(self, path):
        """Helper to produce a directory listing (absent index.html).

        Return value is either a file object, or None (indicating an
        error).  In either case, the headers are sent, making the
        interface the same as for send_head().

        """
        try:
            list = os.listdir(path)
        except os.error:
            self.send_error(404, "No permission to list directory")
            return None
        list.sort(key=lambda a: a.lower())
        f = StringIO()
        displaypath = cgi.escape(unquote(self.path))
        f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
        f.write("<html>\n<title>Directory listing for %s</title>\n" % displaypath)
        f.write("<body>\n<h2>Directory listing for %s</h2>\n" % displaypath)
        f.write("<hr>\n")
        f.write("<form ENCTYPE=\"multipart/form-data\" method=\"post\">")
        f.write("<input name=\"file\" type=\"file\"/>")
        f.write("<input type=\"submit\" value=\"upload\"/></form>\n")
        f.write("<hr>\n<ul>\n")
        for name in list:
            fullname = os.path.join(path, name)
            displayname = linkname = name
            # Append / for directories or @ for symbolic links
            if os.path.isdir(fullname):
                displayname = name + "/"
                linkname = name + "/"
            if os.path.islink(fullname):
                displayname = name + "@"
                # Note: a link to a directory displays with @ and links with /
            f.write('<li><a href="%s">%s</a>\n'
                    % (quote(linkname), cgi.escape(displayname)))
        f.write("</ul>\n<hr>\n</body>\n</html>\n")
        length = f.tell()
        f.seek(0)
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.send_header("Content-Length", str(length))
        self.end_headers()
        return f
Example #21
    def _get_data(sock, command):
        chunk_size = 1024
        max_reads = 10000
        buf = StringIO()
        sock.sendall(ensure_bytes(command))
        # Read the response into a StringIO buffer
        chunk = ensure_unicode(sock.recv(chunk_size))
        buf.write(chunk)
        num_reads = 1

        while chunk:
            if num_reads > max_reads:
                # Safeguard against an infinite loop
                raise Exception(
                    "Read %s bytes before exceeding max reads of %s. " %
                    (buf.tell(), max_reads))
            chunk = ensure_unicode(sock.recv(chunk_size))
            buf.write(chunk)
            num_reads += 1
        return buf
Example #22
class FormatterTest(unittest.TestCase):
    def setUp(self):
        self.position = 0
        self.logger = structuredlog.StructuredLogger("test_%s" %
                                                     type(self).__name__)
        self.output_file = StringIO()
        self.handler = handlers.StreamHandler(self.output_file,
                                              self.get_formatter())
        self.logger.add_handler(self.handler)

    def set_position(self, pos=None):
        if pos is None:
            pos = self.output_file.tell()
        self.position = pos

    def get_formatter(self):
        raise NotImplementedError(
            "FormatterTest subclasses must implement get_formatter")

    @property
    def loglines(self):
        self.output_file.seek(self.position)
        return [line.rstrip() for line in self.output_file.readlines()]
Example #23
class FormatterTest(unittest.TestCase):

    def setUp(self):
        self.position = 0
        self.logger = structuredlog.StructuredLogger(
            "test_%s" % type(self).__name__)
        self.output_file = StringIO()
        self.handler = handlers.StreamHandler(
            self.output_file, self.get_formatter())
        self.logger.add_handler(self.handler)

    def set_position(self, pos=None):
        if pos is None:
            pos = self.output_file.tell()
        self.position = pos

    def get_formatter(self):
        raise NotImplementedError(
            "FormatterTest subclasses must implement get_formatter")

    @property
    def loglines(self):
        self.output_file.seek(self.position)
        return [line.rstrip() for line in self.output_file.readlines()]
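set_position bookmarks the current end of the log with tell(), and loglines seeks back to that bookmark so each test sees only lines emitted since. The core trick:

from io import StringIO

log = StringIO()
log.write('old line\n')
position = log.tell()          # bookmark after existing output
log.write('new line\n')
log.seek(position)
assert log.readlines() == ['new line\n']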
Example #24
    def _flushHeaderMessage(self, buf, wout, wsz):
        """Write a message for self.HEADERS_CLIENT_TYPE

        This method writes a message using the same logic as
        THeaderTransport._flushHeaderMessage
        but mutates fields included in self._fuzz_fields
        """
        cls = self.__class__

        transform_data = StringIO()
        num_transforms = len(self._THeaderTransport__write_transforms)
        for trans_id in self._THeaderTransport__write_transforms:
            trans_id = self._get_fuzzy_field('transform_id', trans_id, 'i32')
            transform_data.write(getVarint(trans_id))

        if self._THeaderTransport__hmac_func:
            num_transforms += 1
            transform_data.write(getVarint(self.HMAC_TRANSFORM))
            transform_data.write(b'\0')  # size of hmac, fixup later.

        # Add in special flags.
        if self._THeaderTransport__identity:
            id_version = self._get_fuzzy_field(
                'version', self.ID_VERSION, 'str')
            self._THeaderTransport__write_headers[self.ID_VERSION_HEADER] = (
                id_version)
            identity = self._get_fuzzy_field(
                'identity', self._THeaderTransport__identity, 'str')
            self._THeaderTransport__write_headers[self.IDENTITY_HEADER] = (
                identity)

        info_data = StringIO()

        # Write persistent kv-headers
        cls._flush_info_headers(
            info_data,
            self._THeaderTransport__write_persistent_headers,
            self.INFO_PKEYVALUE)

        # Write non-persistent kv-headers
        cls._flush_info_headers(
            info_data,
            self._THeaderTransport__write_headers,
            self.INFO_KEYVALUE)

        header_data = StringIO()
        proto_id = self._get_fuzzy_field(
            'proto_id', self._THeaderTransport__proto_id, 'i32')
        header_data.write(getVarint(proto_id))
        num_transforms = self._get_fuzzy_field(
            'num_transforms', num_transforms, 'i32')
        header_data.write(getVarint(num_transforms))

        header_size = (transform_data.tell() +
                       header_data.tell() +
                       info_data.tell())

        padding_size = 4 - (header_size % 4)

        # Fuzz padding size, but do not let total header size exceed 2**16 - 1
        padding_size = min((2 ** 16 - 1) - header_size,
                           self._get_fuzzy_field('padding', padding_size, 'i16'))
        header_size = header_size + padding_size

        wsz += header_size + 10

        self._write_fuzzy_field(buf, 'length', wsz, 'i32')
        self._write_fuzzy_field(buf, 'magic', self.HEADER_MAGIC, 'i16')
        self._write_fuzzy_field(
            buf, 'flags', self._THeaderTransport__flags, 'i16')
        self._write_fuzzy_field(
            buf, 'seq_id', self._THeaderTransport__seq_id, 'i32')
        self._write_fuzzy_field(buf, 'header_size', header_size // 4, 'i16')

        buf.write(header_data.getvalue())
        buf.write(transform_data.getvalue())
        hmac_loc = buf.tell() - 1  # Fixup hmac size later
        buf.write(info_data.getvalue())

        # Pad out the header with 0x00
        if 'padding_bytes' in self._fuzz_fields:
            # Print that padding bytes are being fuzzed in favor of printing
            # the value of each individual padding byte
            self._print("Fuzzing %d padding bytes" % padding_size)
        old_verbose, self._verbose = self._verbose, False
        for _ in sm.xrange(padding_size):
            self._write_fuzzy_field(
                buf, 'padding_bytes', six.int2byte(0), 'char')
        self._verbose = old_verbose

        self._write_fuzzy_field(buf, 'payload', wout, 'str')

        # HMAC calculation should always be last.
        if self._THeaderTransport__hmac_func:
            hmac_data = buf.getvalue()[4:]
            hmac = self._THeaderTransport__hmac_func(hmac_data)

            # Fill in hmac size.
            buf.seek(hmac_loc)
            self._write_hmac_size(buf, hmac)
            buf.seek(0, os.SEEK_END)
            self._write_hmac(buf, hmac)

            # Fix packet size since we appended data.
            new_sz = buf.tell() - 4
            buf.seek(0)
            self._write_wsz(buf, new_sz)
Example #25
class File(object):
    def __init__(self, name, data=None, type_=None):
        self._name = name
        self.id = None
        self._url = None
        self._acl = None
        self.current_user = None  # TODO
        self._metadata = {
            'owner': 'unknown'
        }
        if self.current_user:
            self._metadata['owner'] = self.current_user.id

        pattern = re.compile(r'\.([^.]*)$')
        extension = pattern.findall(name)
        if extension:
            self.extension = extension[0].lower()
        else:
            self.extension = ''

        if type_:
            self._type = type_
        else:
            self._type = mime_types.get(self.extension, 'text/plain')

        if data is None:
            self._source = None
        elif isinstance(data, StringIO):
            self._source = data
        elif isinstance(data, file):
            data.seek(0, os.SEEK_SET)
            self._source = StringIO(data.read())
        elif isinstance(data, buffer):
            self._source = StringIO(data)
        elif six.PY2:
            import cStringIO
            if isinstance(data, cStringIO.OutputType):
                data.seek(0, os.SEEK_SET)
                self._source = StringIO(data.getvalue())
            else:
                raise TypeError('data must be a StringIO / buffer / file instance')
        else:
            raise TypeError('data must be a StringIO / buffer / file instance')

        if self._source:
            self._source.seek(0, os.SEEK_END)
            self._metadata['size'] = self._source.tell()
            self._source.seek(0, os.SEEK_SET)

    @classmethod
    def create_with_url(cls, name, url, meta_data=None, type_=None):
        f = File(name, None, type_)
        if meta_data:
            f._metadata.update(meta_data)

        f._url = url
        f._metadata['__source'] = 'external'
        return f

    @classmethod
    def create_without_data(cls, object_id):
        f = File('')
        f.id = object_id
        return f

    def get_acl(self):
        return self._acl

    def set_acl(self, acl):
        if not isinstance(acl, leancloud.ACL):
            raise TypeError('acl must be a leancloud.ACL instance')
        self._acl = acl

    @property
    def name(self):
        return self._name

    @property
    def url(self):
        return self._url

    @property
    def size(self):
        return self._metadata['size']

    @property
    def owner_id(self):
        return self._metadata['owner']

    @property
    def metadata(self):
        return self._metadata

    def get_thumbnail_url(self, width, height, quality=100, scale_to_fit=True, fmt='png'):
        if not self._url:
            raise ValueError('invalid url')

        if width < 0 or height < 0:
            raise ValueError('invalid height or width params')

        if quality > 100 or quality <= 0:
            raise ValueError('quality must between 0 and 100')

        mode = 2 if scale_to_fit else 1

        return self.url + '?imageView/{0}/w/{1}/h/{2}/q/{3}/format/{4}'.format(mode, width, height, quality, fmt)

    def destroy(self):
        if not self.id:
            return False
        response = client.delete('/files/{0}'.format(self.id))
        if response.status_code != 200:
            raise LeanCloudError(1, "the file is not sucessfully destroyed")

    def _save_to_qiniu(self):
        self._source.seek(0)
        hex_octet = lambda: hex(int(0x10000 * (1 + random.random())))[-4:]
        key = ''.join(hex_octet() for _ in xrange(4))
        key = '{0}.{1}'.format(key, self.extension)
        data = {
            'name': self._name,
            'key': key,
            'ACL': self._acl,
            'mime_type': self._type,
            'metaData': self._metadata,
        }
        response = client.post('/qiniu', data)
        content = utils.response_to_json(response)
        self.id = content['objectId']
        self._url = content['url']
        uptoken = content['token']
        ret, info = qiniu.put_data(uptoken, key, self._source)

        if info.status_code != 200:
            raise LeanCloudError(1, 'the file is not saved, qiniu status code: {0}'.format(info.status_code))

    def _save_to_leancloud(self):
        self._source.seek(0)
        encoded = codecs.encode(self._source.read(), 'base64')
        data = {
            'base64': encoded,
            '_ContentType': self._type,
            'ACL': self._acl,
            'mime_type': self._type,
            'metaData': self._metadata,
        }
        response = client.post('/files/{}'.format(self._name), data)
        response.raise_for_status()
        content = response.json()
        self.id = content['objectId']
        self._url = content['url']
        self._name = content['name']

    def save(self):
        if self._source:
            if client.REGION == 'US':
                self._save_to_leancloud()
            else:
                self._save_to_qiniu()
        elif self._url and self.metadata.get('__source') == 'external':
            data = {
                'name': self._name,
                'ACL': self._acl,
                'metaData': self._metadata,
                'mime_type': self._type,
                'url': self._url,
            }
            response = client.post('/files/{0}'.format(self._name), data)
            content = utils.response_to_json(response)

            self._name = content['name']
            self._url = content['url']
            self.id = content['objectId']
            if 'size' in content:
                self._metadata['size'] = content['size']
            else:
                raise ValueError

        return self

    def fetch(self):
        response = client.get('/files/{0}'.format(self.id))
        content = utils.response_to_json(response)
        self._name = content.get('name')
        self.id = content.get('objectId')
        self._url = content.get('url')
        self._type = content.get('mime_type')
        self._metadata = content.get('metaData')
Example #26
class SSHSession(Session):

    "Implements a :rfc:`4742` NETCONF session over SSH."

    def __init__(self, device_handler):
        capabilities = Capabilities(device_handler.get_capabilities())
        Session.__init__(self, capabilities)
        self._host_keys = paramiko.HostKeys()
        self._transport = None
        self._connected = False
        self._channel = None
        self._channel_id = None
        self._channel_name = None
        self._buffer = StringIO()
        # parsing-related, see _parse()
        self._device_handler = device_handler
        self._parsing_state10 = 0
        self._parsing_pos10 = 0
        self._parsing_pos11 = 0
        self._parsing_state11 = 0
        self._expchunksize = 0
        self._curchunksize = 0
        self._inendpos = 0
        self._size_num_list = []
        self._message_list = []

    def _parse(self):
        "Messages ae delimited by MSG_DELIM. The buffer could have grown by a maximum of BUF_SIZE bytes everytime this method is called. Retains state across method calls and if a byte has been read it will not be considered again."
        return self._parse10()

    def _parse10(self):

        """Messages are delimited by MSG_DELIM. The buffer could have grown by
        a maximum of BUF_SIZE bytes everytime this method is called. Retains
        state across method calls and if a chunk has been read it will not be
        considered again."""

        logger.debug("parsing netconf v1.0")
        buf = self._buffer
        buf.seek(self._parsing_pos10)
        if MSG_DELIM in buf.read().decode('UTF-8'):
            buf.seek(0)
            msg, _, remaining = buf.read().decode('UTF-8').partition(MSG_DELIM)
            msg = msg.strip()
            if sys.version < '3':
                self._dispatch_message(msg.encode())
            else:
                self._dispatch_message(msg)
            # create a new buffer which contains the remainder of the old buffer
            self._buffer = StringIO()
            self._buffer.write(remaining.encode())
            self._parsing_pos10 = 0
            if len(remaining) > 0:
                # There could be another entire message in the
                # buffer, so we should try to parse again.
                logger.debug('Trying another round of parsing since there is still data')
                self._parse10()
        else:
            # handle case that MSG_DELIM is split over two chunks
            self._parsing_pos10 = buf.tell() - MSG_DELIM_LEN
            if self._parsing_pos10 < 0:
                self._parsing_pos10 = 0

    def _parse11(self):
        logger.debug("parsing netconf v1.1")
        expchunksize = self._expchunksize
        curchunksize = self._curchunksize
        idle, instart, inmsg, inbetween, inend = range(5)
        state = self._parsing_state11
        inendpos = self._inendpos
        num_list = self._size_num_list
        MAX_STARTCHUNK_SIZE = 12 # \#+4294967295+\n
        pre = 'invalid base:1:1 frame'
        buf = self._buffer
        buf.seek(self._parsing_pos11)
        message_list = self._message_list # a message is a list of chunks
        chunk_list = []   # a chunk is a list of characters

        should_recurse = False

        while True:
            x = buf.read(1)
            if not x:
                logger.debug('No more data to read')
                # Store the current chunk to the message list
                chunk = b''.join(chunk_list)
                message_list.append(textify(chunk))
                break # done reading
            logger.debug('x: %s', x)
            if state == idle:
                if x == b'\n':
                    state = instart
                    inendpos = 1
                else:
                    logger.debug('%s (%s: expect newline)'%(pre, state))
                    raise Exception
            elif state == instart:
                if inendpos == 1:
                    if x == b'#':
                        inendpos += 1
                    else:
                        logger.debug('%s (%s: expect "#")'%(pre, state))
                        raise Exception
                elif inendpos == 2:
                    if x.isdigit():
                        inendpos += 1 # == 3 now #
                        num_list.append(x)
                    else:
                        logger.debug('%s (%s: expect digit)'%(pre, state))
                        raise Exception
                else:
                    if inendpos == MAX_STARTCHUNK_SIZE:
                        logger.debug('%s (%s: no. too long)'%(pre, state))
                        raise Exception
                    elif x == b'\n':
                        num = b''.join(num_list)
                        num_list = [] # Reset num_list
                        try: num = int(num)
                        except:
                            logger.debug('%s (%s: invalid no.)'%(pre, state))
                            raise Exception
                        else:
                            state = inmsg
                            expchunksize = num
                            logger.debug('response length: %d'%expchunksize)
                            curchunksize = 0
                            inendpos += 1
                    elif x.isdigit():
                        inendpos += 1 # > 3 now #
                        num_list.append(x)
                    else:
                        logger.debug('%s (%s: expect digit)'%(pre, state))
                        raise Exception
            elif state == inmsg:
                chunk_list.append(x)
                curchunksize += 1
                chunkleft = expchunksize - curchunksize
                if chunkleft == 0:
                    inendpos = 0
                    state = inbetween
                    chunk = b''.join(chunk_list)
                    message_list.append(textify(chunk))
                    chunk_list = [] # Reset chunk_list
                    logger.debug('parsed new chunk: %s'%(chunk))
            elif state == inbetween:
                if inendpos == 0:
                    if x == b'\n': inendpos += 1
                    else:
                        logger.debug('%s (%s: expect newline)'%(pre, state))
                        raise Exception
                elif inendpos == 1:
                    if x == b'#': inendpos += 1
                    else:
                        logger.debug('%s (%s: expect "#")'%(pre, state))
                        raise Exception
                else:
                    inendpos += 1 # == 3 now #
                    if x == b'#':
                        state = inend
                    elif x.isdigit():
                        # More chunks
                        state = instart
                        num_list = []
                        num_list.append(x)
                    else:
                        logger.debug('%s (%s: expect "#")'%(pre, state))
                        raise Exception
            elif state == inend:
                if inendpos == 3:
                    if x == b'\n':
                        inendpos = 0
                        state = idle
                        logger.debug('dispatching message')
                        self._dispatch_message(''.join(message_list))
                        # reset
                        rest = buf.read()
                        buf = BytesIO()
                        buf.write(rest)
                        buf.seek(0)
                        message_list = []
                        self._message_list = message_list
                        chunk_list = []
                        expchunksize = curchunksize = 0
                        inendpos = 0
                        # There could be another entire message in the
                        # buffer, so we should try to parse again.
                        should_recurse = True
                        break
                    else:
                        logger.debug('%s (%s: expect newline)'%(pre, state))
                        raise Exception
            else:
                logger.debug('%s (%s invalid state)'%(pre, state))
                raise Exception

        self._expchunksize = expchunksize
        self._curchunksize = curchunksize
        self._parsing_state11 = state
        self._inendpos = inendpos
        self._size_num_list = num_list
        self._buffer = buf
        self._parsing_pos11 = self._buffer.tell()
        logger.debug('parse11 ending ...')

        if should_recurse:
            logger.debug('Trying another round of parsing since there is still data')
            self._parse11()


    def load_known_hosts(self, filename=None):

        """Load host keys from an openssh :file:`known_hosts`-style file. Can
        be called multiple times.

        If *filename* is not specified, looks in the default locations i.e. :file:`~/.ssh/known_hosts` and :file:`~/ssh/known_hosts` for Windows.
        """

        if filename is None:
            filename = os.path.expanduser('~/.ssh/known_hosts')
            try:
                self._host_keys.load(filename)
            except IOError:
                # for windows
                filename = os.path.expanduser('~/ssh/known_hosts')
                try:
                    self._host_keys.load(filename)
                except IOError:
                    pass
        else:
            self._host_keys.load(filename)

    def close(self):
        if self._transport.is_active():
            self._transport.close()
        self._channel = None
        self._connected = False


    # REMEMBER to update transport.rst if sig. changes, since it is hardcoded there
    def connect(self, host, port=830, timeout=None, unknown_host_cb=default_unknown_host_cb,
                username=None, password=None, key_filename=None, allow_agent=True,
                hostkey_verify=True, look_for_keys=True, ssh_config=None):

        """Connect via SSH and initialize the NETCONF session. First attempts the publickey authentication method and then password authentication.

        To disable attempting publickey authentication altogether, call with *allow_agent* and *look_for_keys* as `False`.

        *host* is the hostname or IP address to connect to

        *port* is by default 830, but some devices use the default SSH port of 22 so this may need to be specified

        *timeout* is an optional timeout for socket connect

        *unknown_host_cb* is called when the server host key is not recognized. It takes two arguments, the hostname and the fingerprint (see the signature of :func:`default_unknown_host_cb`)

        *username* is the username to use for SSH authentication

        *password* is the password used if using password authentication, or the passphrase to use for unlocking keys that require it

        *key_filename* is a filename where the private key to be used can be found

        *allow_agent* enables querying SSH agent (if found) for keys

        *hostkey_verify* enables hostkey verification from ~/.ssh/known_hosts

        *look_for_keys* enables looking in the usual locations for ssh keys (e.g. :file:`~/.ssh/id_*`)

        *ssh_config* enables parsing of an OpenSSH configuration file, if set to its path, e.g. :file:`~/.ssh/config` or to True (in this case, use :file:`~/.ssh/config`).
        """
        # Optionally, parse .ssh/config
        config = {}
        if ssh_config is True:
            ssh_config = "~/.ssh/config" if sys.platform != "win32" else "~/ssh/config"
        if ssh_config is not None:
            config = paramiko.SSHConfig()
            config.parse(open(os.path.expanduser(ssh_config)))
            config = config.lookup(host)
            host = config.get("hostname", host)
            if username is None:
                username = config.get("user")
            if key_filename is None:
                key_filename = config.get("identityfile")

        if username is None:
            username = getpass.getuser()

        sock = None
        if config.get("proxycommand"):
            sock = paramiko.proxy.ProxyCommand(config.get("proxycommand"))
        else:
            for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM):
                af, socktype, proto, canonname, sa = res
                try:
                    sock = socket.socket(af, socktype, proto)
                    sock.settimeout(timeout)
                except socket.error:
                    continue
                try:
                    sock.connect(sa)
                except socket.error:
                    sock.close()
                    continue
                break
            else:
                raise SSHError("Could not open socket to %s:%s" % (host, port))

        t = self._transport = paramiko.Transport(sock)
        t.set_log_channel(logger.name)
        if config.get("compression") == 'yes':
            t.use_compression()

        try:
            t.start_client()
        except paramiko.SSHException:
            raise SSHError('Negotiation failed')

        # host key verification
        server_key = t.get_remote_server_key()

        fingerprint = _colonify(hexlify(server_key.get_fingerprint()))

        if hostkey_verify:
            known_host = self._host_keys.check(host, server_key)
            if not known_host and not unknown_host_cb(host, fingerprint):
                raise SSHUnknownHostError(host, fingerprint)

        if key_filename is None:
            key_filenames = []
        elif isinstance(key_filename, (str, bytes)):
            key_filenames = [ key_filename ]
        else:
            key_filenames = key_filename

        self._auth(username, password, key_filenames, allow_agent, look_for_keys)

        self._connected = True # there was no error authenticating
        # TODO: leopoul: Review, test, and if needed rewrite this part
        subsystem_names = self._device_handler.get_ssh_subsystem_names()
        for subname in subsystem_names:
            c = self._channel = self._transport.open_session()
            self._channel_id = c.get_id()
            channel_name = "%s-subsystem-%s" % (subname, str(self._channel_id))
            c.set_name(channel_name)
            try:
                c.invoke_subsystem(subname)
            except paramiko.SSHException as e:
                logger.info("%s (subsystem request rejected)", e)
                handle_exception = self._device_handler.handle_connection_exceptions(self)
                # Ignore the exception, since we continue to try the different
                # subsystem names until we find one that can connect.
                #have to handle exception for each vendor here
                if not handle_exception:
                    continue
            self._channel_name = c.get_name()
            self._post_connect()
            return
        raise SSHError("Could not open connection, possibly due to unacceptable"
                       " SSH subsystem name.")

    def _auth(self, username, password, key_filenames, allow_agent,
              look_for_keys):
        saved_exception = None

        for key_filename in key_filenames:
            for cls in (paramiko.RSAKey, paramiko.DSSKey, paramiko.ECDSAKey):
                try:
                    key = cls.from_private_key_file(key_filename, password)
                    logger.debug("Trying key %s from %s" %
                              (hexlify(key.get_fingerprint()), key_filename))
                    self._transport.auth_publickey(username, key)
                    return
                except Exception as e:
                    saved_exception = e
                    logger.debug(e)

        if allow_agent:
            for key in paramiko.Agent().get_keys():
                try:
                    logger.debug("Trying SSH agent key %s" %
                                 hexlify(key.get_fingerprint()))
                    self._transport.auth_publickey(username, key)
                    return
                except Exception as e:
                    saved_exception = e
                    logger.debug(e)

        keyfiles = []
        if look_for_keys:
            rsa_key = os.path.expanduser("~/.ssh/id_rsa")
            dsa_key = os.path.expanduser("~/.ssh/id_dsa")
            ecdsa_key = os.path.expanduser("~/.ssh/id_ecdsa")
            if os.path.isfile(rsa_key):
                keyfiles.append((paramiko.RSAKey, rsa_key))
            if os.path.isfile(dsa_key):
                keyfiles.append((paramiko.DSSKey, dsa_key))
            if os.path.isfile(ecdsa_key):
                keyfiles.append((paramiko.ECDSAKey, ecdsa_key))
            # look in ~/ssh/ for windows users:
            rsa_key = os.path.expanduser("~/ssh/id_rsa")
            dsa_key = os.path.expanduser("~/ssh/id_dsa")
            ecdsa_key = os.path.expanduser("~/ssh/id_ecdsa")
            if os.path.isfile(rsa_key):
                keyfiles.append((paramiko.RSAKey, rsa_key))
            if os.path.isfile(dsa_key):
                keyfiles.append((paramiko.DSSKey, dsa_key))
            if os.path.isfile(ecdsa_key):
                keyfiles.append((paramiko.ECDSAKey, ecdsa_key))

        for cls, filename in keyfiles:
            try:
                key = cls.from_private_key_file(filename, password)
                logger.debug("Trying discovered key %s in %s" %
                          (hexlify(key.get_fingerprint()), filename))
                self._transport.auth_publickey(username, key)
                return
            except Exception as e:
                saved_exception = e
                logger.debug(e)

        if password is not None:
            try:
                self._transport.auth_password(username, password)
                return
            except Exception as e:
                saved_exception = e
                logger.debug(e)

        if saved_exception is not None:
            # need pep-3134 to do this right
            raise AuthenticationError(repr(saved_exception))

        raise AuthenticationError("No authentication methods available")

    def run(self):
        chan = self._channel
        q = self._q

        def start_delim(data_len): return '\n#%s\n'%(data_len)

        try:
            while True:
                # select on a paramiko ssh channel object does not ever return it in the writable list, so channels don't exactly emulate the socket api
                r, w, e = select([chan], [], [], TICK)
                # will wake up every TICK seconds to check if there is something to send, more often if there is something to read (due to select returning chan in the readable list)
                if r:
                    data = chan.recv(BUF_SIZE)
                    if data:
                        self._buffer.write(data)
                        if self._server_capabilities:
                            if 'urn:ietf:params:netconf:base:1.1' in self._server_capabilities and 'urn:ietf:params:netconf:base:1.1' in self._client_capabilities:
                                logger.debug("Selecting netconf:base:1.1 for encoding")
                                self._parse11()
                            elif 'urn:ietf:params:netconf:base:1.0' in self._server_capabilities or 'urn:ietf:params:xml:ns:netconf:base:1.0' in self._server_capabilities or 'urn:ietf:params:netconf:base:1.0' in self._client_capabilities:
                                logger.debug("Selecting netconf:base:1.0 for encoding")
                                self._parse10()
                            else: raise Exception
                        else:
                            self._parse10() # HELLO msg uses EOM markers.
                    else:
                        raise SessionCloseError(self._buffer.getvalue())
                if not q.empty() and chan.send_ready():
                    logger.debug("Sending message")
                    data = q.get()
                    try:
                        # send a HELLO msg using v1.0 EOM markers.
                        validated_element(data, tags='{urn:ietf:params:xml:ns:netconf:base:1.0}hello')
                        data = "%s%s"%(data, MSG_DELIM)
                    except XMLError:
                        # this is not a HELLO msg
                        # we publish v1.1 support
                        if 'urn:ietf:params:netconf:base:1.1' in self._client_capabilities:
                            if self._server_capabilities:
                                if 'urn:ietf:params:netconf:base:1.1' in self._server_capabilities:
                                    # send using v1.1 chunked framing
                                    data = "%s%s%s"%(start_delim(len(data)), data, END_DELIM)
                                elif 'urn:ietf:params:netconf:base:1.0' in self._server_capabilities or 'urn:ietf:params:xml:ns:netconf:base:1.0' in self._server_capabilities:
                                    # send using v1.0 EOM markers
                                    data = "%s%s"%(data, MSG_DELIM)
                                else: raise Exception
                            else:
                                logger.debug('HELLO msg was sent, but server capabilities are still not known')
                                raise Exception
                        # we publish only v1.0 support
                        else:
                            # send using v1.0 EOM markers
                            data = "%s%s"%(data, MSG_DELIM)
                    finally:
                        logger.debug("Sending: %s", data)
                        while data:
                            n = chan.send(data)
                            if n <= 0:
                                raise SessionCloseError(self._buffer.getvalue(), data)
                            data = data[n:]
        except Exception as e:
            logger.debug("Broke out of main loop, error=%r", e)
            self._dispatch_error(e)
            self.close()

    @property
    def transport(self):
        "Underlying `paramiko.Transport <http://www.lag.net/paramiko/docs/paramiko.Transport-class.html>`_ object. This makes it possible to call methods like :meth:`~paramiko.Transport.set_keepalive` on it."
        return self._transport
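One detail worth isolating from _parse10: when the delimiter has not fully arrived, the parser rewinds its resume position by MSG_DELIM_LEN so a delimiter split across two reads is still found. A standalone sketch of that rewind (the NETCONF end-of-message delimiter is ]]>]]>):

from io import StringIO

MSG_DELIM = ']]>]]>'
buf = StringIO('<rpc-reply/>]]>')     # only the first half of the delimiter
data = buf.read()
assert MSG_DELIM not in data
# Resume MSG_DELIM characters before the end so the next chunk can
# complete a delimiter that started in this one.
resume = max(buf.tell() - len(MSG_DELIM), 0)
assert resume == len(data) - len(MSG_DELIM)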
Example #27
class DocStreamReceiver(ReadBodyProtocol):
    """
    A protocol implementation that can parse incoming data from server based
    on a line format specified on u1db implementation. Except that we split doc
    attributes from content to ease parsing and increment throughput for larger
    documents.
    [\r\n
    {metadata},\r\n
    {doc_info},\r\n
    {content},\r\n
    ...
    {doc_info},\r\n
    {content},\r\n
    ]
    """
    def __init__(self, response, deferred, doc_reader):
        self.deferred = deferred
        self.status = response.code if response else None
        self.message = response.phrase if response else None
        self.headers = response.headers if response else {}
        self.delimiter = '\r\n'
        self.metadata = ''
        self._doc_reader = doc_reader
        self.reset()

    def reset(self):
        self._line = 0
        self._buffer = StringIO()
        self._properly_finished = False

    def connectionLost(self, reason):
        """
        Deliver the accumulated response bytes to the waiting L{Deferred}, if
        the response body has been completely received without error.
        """
        if self.deferred.called:
            return
        try:
            if reason.check(ResponseDone):
                self.dataBuffer = self.metadata
            else:
                self.dataBuffer = self.finish()
        except errors.BrokenSyncStream as e:
            return self.deferred.errback(e)
        return ReadBodyProtocol.connectionLost(self, reason)

    def consumeBufferLines(self):
        """
        Consume lines from the buffer and rewind it, writing any remaining
        data that didn't form a complete line back into the buffer.
        """
        content = self._buffer.getvalue()[0:self._buffer.tell()]
        self._buffer.seek(0)
        lines = content.split(self.delimiter)
        self._buffer.write(lines.pop(-1))
        return lines

    def dataReceived(self, data):
        """
        Buffer incoming data until a line break comes in. We check only
        the incoming data for efficiency.
        """
        self._buffer.write(data)
        if '\n' not in data:
            return
        lines = self.consumeBufferLines()
        while lines:
            line, _ = utils.check_and_strip_comma(lines.pop(0))
            self.lineReceived(line)
            self._line += 1

    def lineReceived(self, line):
        """
        Protocol implementation.
        0:      [\r\n
        1:      {metadata},\r\n
        (even): {doc_info},\r\n
        (odd):  {data},\r\n
        (last): ]
        """
        if self._properly_finished:
            raise errors.BrokenSyncStream("Reading a finished stream")
        if ']' == line:
            self._properly_finished = True
        elif self._line == 0:
            if line != '[':
                raise errors.BrokenSyncStream("Invalid start")
        elif self._line == 1:
            self.metadata = line
            if 'error' in self.metadata:
                raise errors.BrokenSyncStream("Error from server: %s" % line)
            self.total = json.loads(line).get('number_of_changes', -1)
        elif (self._line % 2) == 0:
            self.current_doc = json.loads(line)
            if 'error' in self.current_doc:
                raise errors.BrokenSyncStream("Error from server: %s" % line)
        else:
            d = self._doc_reader(self.current_doc,
                                 line.strip() or None, self.total)
            d.addErrback(self.deferred.errback)

    def finish(self):
        """
        Checks that ']' came and stream was properly closed.
        """
        if not self._properly_finished:
            raise errors.BrokenSyncStream('Stream not properly closed')
        content = self._buffer.getvalue()[0:self._buffer.tell()]
        self._buffer.close()
        return content
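
To see what consumeBufferLines() does with a partial stream, here is a rough standalone sketch (using io.StringIO): only complete '\r\n'-terminated lines are consumed, and the tail is written back.

from io import StringIO

buf = StringIO()
buf.write('[\r\n{"number_of_changes": 2}')  # no trailing delimiter yet
content = buf.getvalue()[0:buf.tell()]
buf.seek(0)
lines = content.split('\r\n')
buf.write(lines.pop(-1))  # the incomplete tail goes back into the buffer
print(lines)              # ['['] -- only the complete line was consumed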
Example #28
def create_file_reader(input_files,
                       topology,
                       featurizer,
                       chunksize=None,
                       **kw):
    r"""
    Creates a (possibly featured) file reader by a number of input files and either a topology file or a featurizer.
    Parameters
    ----------
    :param input_files:
        A single input file or a list of input files.
    :param topology:
        A topology file. If given, the featurizer argument can be None.
    :param featurizer:
        A featurizer. If given, the topology file can be None.
    :param chunksize:
        The chunk size with which the corresponding reader gets initialized.
    :return: Returns the reader.
    """
    from pyerna.coordinates.data.numpy_filereader import NumPyFileReader
    from pyerna.coordinates.data.py_csv_reader import PyCSVReader
    from pyerna.coordinates.data import FeatureReader
    from pyerna.coordinates.data.fragmented_trajectory_reader import FragmentedTrajectoryReader

    # fragmented trajectories
    if (isinstance(input_files, (list, tuple)) and len(input_files) > 0
            and any(isinstance(item, (list, tuple)) for item in input_files)):
        return FragmentedTrajectoryReader(input_files, topology, chunksize,
                                          featurizer)

    # normal trajectories
    if (isinstance(input_files, str)
            or (isinstance(input_files, (list, tuple)) and
                (any(isinstance(item, str)
                     for item in input_files) or len(input_files) == 0))):
        reader = None
        # check: if single string create a one-element list
        if isinstance(input_files, str):
            input_list = [input_files]
        elif len(input_files) > 0 and all(
                isinstance(item, str) for item in input_files):
            input_list = input_files
        else:
            if len(input_files) == 0:
                raise ValueError("The passed input list should not be empty.")
            else:
                raise ValueError(
                    "The passed list did not exclusively contain strings or was a list of lists "
                    "(fragmented trajectory).")

        # TODO: this does not handle suffixes like .xyz.gz (rare)
        _, suffix = os.path.splitext(input_list[0])

        suffix = str(suffix)

        # check: do all files have the same file type? If not: raise ValueError.
        if all(item.endswith(suffix) for item in input_list):

            # do all the files exist? If not: Raise value error
            all_exist = True
            from six import StringIO
            err_msg = StringIO()
            for item in input_list:
                if not os.path.isfile(item):
                    err_msg.write('\n' if err_msg.tell() > 0 else "")
                    err_msg.write('File %s did not exist or was no file' %
                                  item)
                    all_exist = False
            if not all_exist:
                raise ValueError(
                    'Some of the given input files were directories'
                    ' or did not exist:\n%s' % err_msg.getvalue())
            featurizer_or_top_provided = featurizer is not None or topology is not None
            # we need to check for h5 first, because of mdtraj custom HDF5 traj format (which is deprecated).
            if suffix in ('.h5', '.hdf5') and not featurizer_or_top_provided:
                # This check is potentially expensive for lots of files, we also re-open the file twice (causing atime updates etc.)
                # So we simply require that no featurizer option is given.
                # and not all((_is_mdtraj_hdf5_file(f) for f in input_files)):
                from pyerna.coordinates.data.h5_reader import H5Reader
                reader = H5Reader(filenames=input_files,
                                  chunk_size=chunksize,
                                  **kw)
            # CASE 1.1: file types are MD files
            elif FeatureReader.supports_format(suffix):
                # check: do we either have a featurizer or a topology file name? If not: raise ValueError.
                # create a MD reader with file names and topology
                if not featurizer_or_top_provided:
                    raise ValueError(
                        'The input files were MD files which makes it mandatory to have either a '
                        'Featurizer or a topology file.')

                if suffix in ('.pdb', '.pdb.gz'):
                    raise ValueError(
                        'PyEMMA can not read PDB-fake-trajectories. '
                        'Please consider using a sane trajectory format (e.g. xtc, dcd).'
                    )

                reader = FeatureReader(input_list,
                                       featurizer=featurizer,
                                       topologyfile=topology,
                                       chunksize=chunksize)
            elif suffix in ('.npy', '.npz'):
                reader = NumPyFileReader(input_list, chunksize=chunksize)
            # otherwise we assume that given files are ascii tabulated data
            else:
                reader = PyCSVReader(input_list, chunksize=chunksize, **kw)
        else:
            raise ValueError(
                'Not all elements in the input list were of the type %s!' %
                suffix)
    else:
        raise ValueError(
            'Input "{}" was no string or list of strings.'.format(input_files))
    return reader
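
A hypothetical call, with placeholder file names (MD trajectories require a topology or a featurizer; NumPy files require neither):

reader = create_file_reader(['traj01.xtc', 'traj02.xtc'],
                            topology='structure.pdb',
                            featurizer=None,
                            chunksize=1000)
npy_reader = create_file_reader(['data.npy'], None, None)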
Example #29
class NormalHTTPFile(HTTPFile):
    def __init__(self,
                 path,
                 devid,
                 backup_dests=None,
                 mg=None,
                 fid=None,
                 cls=None,
                 key=None,
                 create_close_arg=None,
                 **kwds):

        super(NormalHTTPFile, self).__init__(mg, fid, key, cls,
                                             create_close_arg)

        if backup_dests is None:
            backup_dests = []
        self._fp = StringIO()
        self._paths = [(devid, path)] + list(backup_dests)
        self._is_closed = 0

    def paths(self):
        return self._paths

    def read(self, n=-1):
        return self._fp.read(n)

    def readline(self, *args, **kwds):
        return self._fp.readline(*args, **kwds)

    def readlines(self, *args, **kwds):
        return self._fp.readlines(*args, **kwds)

    def write(self, content):
        self._fp.write(content)

    def close(self):
        if not self._is_closed:
            self._is_closed = True

            #      content = self._fp.getvalue()
            #      self._fp.close()

            for tried_devid, tried_path in self._paths:
                try:
                    #          self._request(tried_path, "PUT", content)
                    self._fp.seek(0)
                    put.putfile(self._fp, tried_path)
                    devid = tried_devid
                    path = tried_path
                    break
                except HTTPError as e:
                    continue
            else:
                devid = None
                path = None

            self._fp.seek(0, 2)
            size = self._fp.tell()
            self._fp.close()
            if devid:
                params = {
                    'fid': self.fid,
                    'domain': self.mg.domain,
                    'key': self.key,
                    'path': path,
                    'devid': devid,
                    'size': size
                }
                if self.create_close_arg:
                    params.update(self.create_close_arg)
                try:
                    self.mg.backend.do_request('create_close', params)
                except MogileFSError as e:
                    if e.err != 'empty_file':
                        raise

    def seek(self, pos, mode=0):
        return self._fp.seek(pos, mode)

    def tell(self):
        return self._fp.tell()
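
A hypothetical usage sketch (mogile_client, the path and the key are placeholders): the object buffers all writes in memory and only talks to the storage nodes on close().

fp = NormalHTTPFile('/dev1/0/000/000/0000000123.fid', devid=1,
                    mg=mogile_client, fid=123, key='some-key')
fp.write('hello world')
fp.close()  # PUTs to the first reachable path, then issues create_close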
Example #30
    def _flushHeaderMessage(self, buf, wout, wsz):
        """Write a message for CLIENT_TYPE.HEADER

        This method writes a message using the same logic as
        THeaderTransport._flushHeaderMessage
        but mutates fields included in self._fuzz_fields
        """
        transform_data = StringIO()
        num_transforms = len(self._THeaderTransport__write_transforms)
        for trans_id in self._THeaderTransport__write_transforms:
            trans_id = self._get_fuzzy_field('transform_id', trans_id, 'i32')
            transform_data.write(getVarint(trans_id))

        # Add in special flags.
        if self._THeaderTransport__identity:
            id_version = self._get_fuzzy_field(
                'version', self.ID_VERSION, 'str')
            self._THeaderTransport__write_headers[self.ID_VERSION_HEADER] = (
                id_version)
            identity = self._get_fuzzy_field(
                'identity', self._THeaderTransport__identity, 'str')
            self._THeaderTransport__write_headers[self.IDENTITY_HEADER] = (
                identity)

        info_data = StringIO()

        # Write persistent kv-headers
        _flush_info_headers(
            info_data,
            self._THeaderTransport__write_persistent_headers,
            INFO.PERSISTENT)

        # Write non-persistent kv-headers
        _flush_info_headers(
            info_data,
            self._THeaderTransport__write_headers,
            INFO.NORMAL)

        header_data = StringIO()
        proto_id = self._get_fuzzy_field(
            'proto_id', self._THeaderTransport__proto_id, 'i32')
        header_data.write(getVarint(proto_id))
        num_transforms = self._get_fuzzy_field(
            'num_transforms', num_transforms, 'i32')
        header_data.write(getVarint(num_transforms))

        header_size = (transform_data.tell() +
                       header_data.tell() +
                       info_data.tell())

        padding_size = 4 - (header_size % 4)

        # Fuzz padding size, but do not let total header size exceed 2**16 - 1
        padding_size = min((2 ** 16 - 1) - header_size,
            self._get_fuzzy_field('padding', padding_size, 'i16'))
        header_size = header_size + padding_size

        wsz += header_size + 10

        self._write_fuzzy_field(buf, 'length', wsz, 'i32')
        self._write_fuzzy_field(buf, 'magic', HEADER_MAGIC >> 16, 'i16')
        self._write_fuzzy_field(
            buf, 'flags', self.header_flags(), 'i16')
        self._write_fuzzy_field(
            buf, 'seq_id', self.seq_id, 'i32')
        self._write_fuzzy_field(buf, 'header_size', header_size // 4, 'i16')

        buf.write(header_data.getvalue())
        buf.write(transform_data.getvalue())
        buf.write(info_data.getvalue())

        # Pad out the header with 0x00
        if 'padding_bytes' in self._fuzz_fields:
            # Print that padding bytes are being fuzzed in favor of printing
            # the value of each individual padding byte
            self._print("Fuzzing %d padding bytes" % padding_size)
        old_verbose, self._verbose = self._verbose, False
        for _ in sm.xrange(padding_size):
            self._write_fuzzy_field(
                buf, 'padding_bytes', six.int2byte(0), 'char')
        self._verbose = old_verbose

        self._write_fuzzy_field(buf, 'payload', wout, 'str')
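
The alignment arithmetic above is easy to check in isolation. A small sketch with made-up sizes (note that when header_size is already a multiple of 4, this scheme pads by a full 4 bytes rather than 0):

transform_size, field_size, info_size = 3, 2, 17       # illustrative byte counts
header_size = transform_size + field_size + info_size  # 22
padding_size = 4 - (header_size % 4)                   # 2
assert (header_size + padding_size) % 4 == 0
# the frame then advertises (header_size + padding_size) // 4 in its
# 'header_size' field, as written by _write_fuzzy_field above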
Example #33
    def _flushHeaderMessage(self, buf, wout, wsz):
        """Write a message for self.HEADERS_CLIENT_TYPE

        This method writes a message using the same logic as
        THeaderTransport._flushHeaderMessage
        but mutates fields included in self._fuzz_fields
        """
        cls = self.__class__

        transform_data = StringIO()
        num_transforms = len(self._THeaderTransport__write_transforms)
        for trans_id in self._THeaderTransport__write_transforms:
            trans_id = self._get_fuzzy_field('transform_id', trans_id, 'i32')
            transform_data.write(getVarint(trans_id))

        if self._THeaderTransport__hmac_func:
            num_transforms += 1
            transform_data.write(getVarint(self.HMAC_TRANSFORM))
            transform_data.write(b'\0')  # size of hmac, fixup later.

        # Add in special flags.
        if self._THeaderTransport__identity:
            id_version = self._get_fuzzy_field('version', self.ID_VERSION,
                                               'str')
            self._THeaderTransport__write_headers[self.ID_VERSION_HEADER] = (
                id_version)
            identity = self._get_fuzzy_field('identity',
                                             self._THeaderTransport__identity,
                                             'str')
            self._THeaderTransport__write_headers[self.IDENTITY_HEADER] = (
                identity)

        info_data = StringIO()

        # Write persistent kv-headers
        cls._flush_info_headers(
            info_data, self._THeaderTransport__write_persistent_headers,
            self.INFO_PKEYVALUE)

        # Write non-persistent kv-headers
        cls._flush_info_headers(info_data,
                                self._THeaderTransport__write_headers,
                                self.INFO_KEYVALUE)

        header_data = StringIO()
        proto_id = self._get_fuzzy_field('proto_id',
                                         self._THeaderTransport__proto_id,
                                         'i32')
        header_data.write(getVarint(proto_id))
        num_transforms = self._get_fuzzy_field('num_transforms',
                                               num_transforms, 'i32')
        header_data.write(getVarint(num_transforms))

        header_size = (transform_data.tell() + header_data.tell() +
                       info_data.tell())

        padding_size = 4 - (header_size % 4)

        # Fuzz padding size, but do not let total header size exceed 2**16 - 1
        padding_size = min((2**16 - 1) - header_size,
                           self._get_fuzzy_field('padding', padding_size,
                                                 'i16'))
        header_size = header_size + padding_size

        wsz += header_size + 10

        self._write_fuzzy_field(buf, 'length', wsz, 'i32')
        self._write_fuzzy_field(buf, 'magic', self.HEADER_MAGIC, 'i16')
        self._write_fuzzy_field(buf, 'flags', self._THeaderTransport__flags,
                                'i16')
        self._write_fuzzy_field(buf, 'seq_id', self._THeaderTransport__seq_id,
                                'i32')
        self._write_fuzzy_field(buf, 'header_size', header_size // 4, 'i16')

        buf.write(header_data.getvalue())
        buf.write(transform_data.getvalue())
        hmac_loc = buf.tell() - 1  # Fixup hmac size later
        buf.write(info_data.getvalue())

        # Pad out the header with 0x00
        if 'padding_bytes' in self._fuzz_fields:
            # Print that padding bytes are being fuzzed in favor of printing
            # the value of each individual padding byte
            self._print("Fuzzing %d padding bytes" % padding_size)
        old_verbose, self._verbose = self._verbose, False
        for _ in sm.xrange(padding_size):
            self._write_fuzzy_field(buf, 'padding_bytes', six.int2byte(0),
                                    'char')
        self._verbose = old_verbose

        self._write_fuzzy_field(buf, 'payload', wout, 'str')

        # HMAC calculation should always be last.
        if self._THeaderTransport__hmac_func:
            hmac_data = buf.getvalue()[4:]
            hmac = self._THeaderTransport__hmac_func(hmac_data)

            # Fill in hmac size.
            buf.seek(hmac_loc)
            self._write_hmac_size(buf, hmac)
            buf.seek(0, os.SEEK_END)
            self._write_hmac(buf, hmac)

            # Fix packet size since we appended data.
            new_sz = buf.tell() - 4
            buf.seek(0)
            self._write_wsz(buf, new_sz)
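
The HMAC handling relies on a reserve-then-fixup pattern: a one-byte size placeholder is written early, and the code seeks back to fill it in once the digest is known. A minimal runnable sketch of that pattern (BytesIO and the digest value are stand-ins):

from io import BytesIO

buf = BytesIO()
buf.write(b'\x00')               # reserve one byte for the hmac size
hmac_loc = buf.tell() - 1
buf.write(b'...rest of frame...')
digest = b'\xde\xad\xbe\xef'     # stand-in for the real HMAC digest
buf.seek(hmac_loc)
buf.write(bytes([len(digest)]))  # fix up the reserved size byte
buf.seek(0, 2)                   # back to the end (os.SEEK_END)
buf.write(digest)                # the digest is appended last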
Example #34
def sitemap_generator(request, maps, page, current_site):
    output = StringIO()
    xml = SimplerXMLGenerator(output, settings.DEFAULT_CHARSET)
    xml.startDocument()
    ns = {
        'xmlns': 'http://www.sitemaps.org/schemas/sitemap/0.9',
        'xmlns:image': 'http://www.google.com/schemas/sitemap-image/1.1',
    }
    xml.startElement('urlset', ns)
    yield output.getvalue()
    pos = output.tell()
    for site in maps:
        if callable(site):
            if issubclass(site, RequestSitemap):
                site = site(request=request)
            else:
                site = site()
        elif hasattr(site, 'request'):
            site.request = request
        for url in site.get_urls(page=page, site=current_site):
            xml.startElement('url', {})
            xml.addQuickElement('loc', url['location'])
            try:
                if url['lastmod']:
                    xml.addQuickElement('lastmod', url['lastmod'].strftime('%Y-%m-%d'))
            except (KeyError, AttributeError):
                pass
            try:
                if url['changefreq']:
                    xml.addQuickElement('changefreq', url['changefreq'])
            except KeyError:
                pass
            try:
                if url['priority']:
                    xml.addQuickElement('priority', url['priority'])
            except KeyError:
                pass

            try:
                # This will generate image links, if the item has an 'image' attribute
                img = url['item'].image
                xml.startElement('image:image', {})
                xml.addQuickElement('image:loc', request.build_absolute_uri(img.url))
                try:
                    # if it also has name and description attributes, it will add those
                    # to the image sitemaps
                    xml.addQuickElement('image:title', url['item'].name)
                    xml.addQuickElement('image:caption', url['item'].description)
                except Exception:
                    pass
                xml.endElement('image:image')
            except Exception:
                pass

            xml.endElement('url')
            output.seek(pos)
            yield output.read()
            pos = output.tell()
    xml.endElement('urlset')
    xml.endDocument()
    output.seek(pos)
    last = output.read()
    output.close()
    yield last
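
A hypothetical Django view wiring this generator into a streaming response (MySitemap is a placeholder sitemap class):

from django.contrib.sites.shortcuts import get_current_site
from django.http import StreamingHttpResponse

def sitemap_view(request):
    return StreamingHttpResponse(
        sitemap_generator(request, [MySitemap], 1, get_current_site(request)),
        content_type='application/xml')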
Example #35
class BufferedFile (object):
    """
    Reusable base class to implement python-style file buffering around a
    simpler stream.
    """

    _DEFAULT_BUFSIZE = 8192

    SEEK_SET = 0
    SEEK_CUR = 1
    SEEK_END = 2

    FLAG_READ = 0x1
    FLAG_WRITE = 0x2
    FLAG_APPEND = 0x4
    FLAG_BINARY = 0x10
    FLAG_BUFFERED = 0x20
    FLAG_LINE_BUFFERED = 0x40
    FLAG_UNIVERSAL_NEWLINE = 0x80

    def __init__(self):
        self.newlines = None
        self._flags = 0
        self._bufsize = self._DEFAULT_BUFSIZE
        self._wbuffer = StringIO()
        self._rbuffer = ''
        self._at_trailing_cr = False
        self._closed = False
        # pos - position within the file, according to the user
        # realpos - position according the OS
        # (these may be different because we buffer for line reading)
        self._pos = self._realpos = 0
        # size only matters for seekable files
        self._size = 0

    def __del__(self):
        self.close()

    def __iter__(self):
        """
        Returns an iterator that can be used to iterate over the lines in this
        file.  This iterator happens to return the file itself, since a file is
        its own iterator.

        @raise ValueError: if the file is closed.

        @return: an iterator.
        @rtype: iterator
        """
        if self._closed:
            raise ValueError('I/O operation on closed file')
        return self

    def close(self):
        """
        Close the file.  Future read and write operations will fail.
        """
        self.flush()
        self._closed = True

    def flush(self):
        """
        Write out any data in the write buffer.  This may do nothing if write
        buffering is not turned on.
        """
        self._write_all(self._wbuffer.getvalue())
        self._wbuffer = StringIO()
        return

    def next(self):
        """
        Returns the next line from the input, or raises L{StopIteration} when
        EOF is hit.  Unlike python file objects, it's okay to mix calls to
        C{next} and L{readline}.

        @raise StopIteration: when the end of the file is reached.

        @return: a line read from the file.
        @rtype: str
        """
        line = self.readline()
        if not line:
            raise StopIteration
        return line

    def read(self, size=None):
        """
        Read at most C{size} bytes from the file (less if we hit the end of the
        file first).  If the C{size} argument is negative or omitted, read all
        the remaining data in the file.

        @param size: maximum number of bytes to read
        @type size: int
        @return: data read from the file, or an empty string if EOF was
            encountered immediately
        @rtype: str
        """
        if self._closed:
            raise IOError('File is closed')
        if not (self._flags & self.FLAG_READ):
            raise IOError('File is not open for reading')
        if (size is None) or (size < 0):
            # go for broke
            result = self._rbuffer
            self._rbuffer = ''
            self._pos += len(result)
            while True:
                try:
                    new_data = self._read(self._DEFAULT_BUFSIZE)
                except EOFError:
                    new_data = None
                if (new_data is None) or (len(new_data) == 0):
                    break
                result += new_data
                self._realpos += len(new_data)
                self._pos += len(new_data)
            return result
        if size <= len(self._rbuffer):
            result = self._rbuffer[:size]
            self._rbuffer = self._rbuffer[size:]
            self._pos += len(result)
            return result
        while len(self._rbuffer) < size:
            read_size = size - len(self._rbuffer)
            if self._flags & self.FLAG_BUFFERED:
                read_size = max(self._bufsize, read_size)
            try:
                new_data = self._read(read_size)
            except EOFError:
                new_data = None
            if (new_data is None) or (len(new_data) == 0):
                break
            self._rbuffer += new_data
            self._realpos += len(new_data)
        result = self._rbuffer[:size]
        self._rbuffer = self._rbuffer[size:]
        self._pos += len(result)
        return result

    def readline(self, size=None):
        """
        Read one entire line from the file.  A trailing newline character is
        kept in the string (but may be absent when a file ends with an
        incomplete line).  If the size argument is present and non-negative, it
        is a maximum byte count (including the trailing newline) and an
        incomplete line may be returned.  An empty string is returned only when
        EOF is encountered immediately.

        @note: Unlike stdio's C{fgets()}, the returned string contains null
        characters (C{'\\0'}) if they occurred in the input.

        @param size: maximum length of returned string.
        @type size: int
        @return: next line of the file, or an empty string if the end of the
            file has been reached.
        @rtype: str
        """
        # it's almost silly how complex this function is.
        if self._closed:
            raise IOError('File is closed')
        if not (self._flags & self.FLAG_READ):
            raise IOError('File not open for reading')
        line = self._rbuffer
        while True:
            if self._at_trailing_cr and (self._flags & self.FLAG_UNIVERSAL_NEWLINE) and (len(line) > 0):
                # edge case: the newline may be '\r\n' and we may have read
                # only the first '\r' last time.
                if line[0] == '\n':
                    line = line[1:]
                    self._record_newline('\r\n')
                else:
                    self._record_newline('\r')
                self._at_trailing_cr = False
            # check size before looking for a linefeed, in case we already have
            # enough.
            if (size is not None) and (size >= 0):
                if len(line) >= size:
                    # truncate line and return
                    self._rbuffer = line[size:]
                    line = line[:size]
                    self._pos += len(line)
                    return line
                n = size - len(line)
            else:
                n = self._bufsize
            if ('\n' in line) or ((self._flags & self.FLAG_UNIVERSAL_NEWLINE) and ('\r' in line)):
                break
            try:
                new_data = self._read(n)
            except EOFError:
                new_data = None
            if (new_data is None) or (len(new_data) == 0):
                self._rbuffer = ''
                self._pos += len(line)
                return line
            line += new_data
            self._realpos += len(new_data)
        # find the newline
        pos = line.find('\n')
        if self._flags & self.FLAG_UNIVERSAL_NEWLINE:
            rpos = line.find('\r')
            if (rpos >= 0) and ((rpos < pos) or (pos < 0)):
                pos = rpos
        xpos = pos + 1
        if (line[pos] == '\r') and (xpos < len(line)) and (line[xpos] == '\n'):
            xpos += 1
        self._rbuffer = line[xpos:]
        lf = line[pos:xpos]
        line = line[:pos] + '\n'
        if (len(self._rbuffer) == 0) and (lf == '\r'):
            # we could read the line up to a '\r' and there could still be a
            # '\n' following that we read next time.  note that and eat it.
            self._at_trailing_cr = True
        else:
            self._record_newline(lf)
        self._pos += len(line)
        return line

    def readlines(self, sizehint=None):
        """
        Read all remaining lines using L{readline} and return them as a list.
        If the optional C{sizehint} argument is present, instead of reading up
        to EOF, whole lines totalling approximately sizehint bytes (possibly
        after rounding up to an internal buffer size) are read.

        @param sizehint: desired maximum number of bytes to read.
        @type sizehint: int
        @return: list of lines read from the file.
        @rtype: list
        """
        lines = []
        bytes = 0
        while True:
            line = self.readline()
            if len(line) == 0:
                break
            lines.append(line)
            bytes += len(line)
            if (sizehint is not None) and (bytes >= sizehint):
                break
        return lines

    def seek(self, offset, whence=0):
        """
        Set the file's current position, like stdio's C{fseek}.  Not all file
        objects support seeking.

        @note: If a file is opened in append mode (C{'a'} or C{'a+'}), any seek
            operations will be undone at the next write (as the file position
            will move back to the end of the file).

        @param offset: position to move to within the file, relative to
            C{whence}.
        @type offset: int
        @param whence: type of movement: 0 = absolute; 1 = relative to the
            current position; 2 = relative to the end of the file.
        @type whence: int

        @raise IOError: if the file doesn't support random access.
        """
        raise IOError('File does not support seeking.')

    def tell(self):
        """
        Return the file's current position.  This may not be accurate or
        useful if the underlying file doesn't support random access, or was
        opened in append mode.

        @return: file position (in bytes).
        @rtype: int
        """
        return self._pos

    def write(self, data):
        """
        Write data to the file.  If write buffering is on (C{bufsize} was
        specified and non-zero), some or all of the data may not actually be
        written yet.  (Use L{flush} or L{close} to force buffered data to be
        written out.)

        @param data: data to write.
        @type data: str
        """
        if self._closed:
            raise IOError('File is closed')
        if not (self._flags & self.FLAG_WRITE):
            raise IOError('File not open for writing')
        if not (self._flags & self.FLAG_BUFFERED):
            self._write_all(data)
            return
        self._wbuffer.write(data)
        if self._flags & self.FLAG_LINE_BUFFERED:
            # only scan the new data for linefeed, to avoid wasting time.
            last_newline_pos = data.rfind('\n')
            if last_newline_pos >= 0:
                wbuf = self._wbuffer.getvalue()
                last_newline_pos += len(wbuf) - len(data)
                self._write_all(wbuf[:last_newline_pos + 1])
                self._wbuffer = StringIO()
                self._wbuffer.write(wbuf[last_newline_pos + 1:])
            return
        # even if we're line buffering, if the buffer has grown past the
        # buffer size, force a flush.
        if self._wbuffer.tell() >= self._bufsize:
            self.flush()
        return

    def writelines(self, sequence):
        """
        Write a sequence of strings to the file.  The sequence can be any
        iterable object producing strings, typically a list of strings.  (The
        name is intended to match L{readlines}; C{writelines} does not add line
        separators.)

        @param sequence: an iterable sequence of strings.
        @type sequence: sequence
        """
        for line in sequence:
            self.write(line)
        return

    def xreadlines(self):
        """
        Identical to C{iter(f)}.  This is a deprecated file interface that
        predates python iterator support.

        @return: an iterator.
        @rtype: iterator
        """
        return self

    @property
    def closed(self):
        return self._closed


    ###  overrides...


    def _read(self, size):
        """
        I{(subclass override)}
        Read data from the stream.  Return C{None} or raise C{EOFError} to
        indicate EOF.
        """
        raise EOFError()

    def _write(self, data):
        """
        I{(subclass override)}
        Write data into the stream.
        """
        raise IOError('write not implemented')

    def _get_size(self):
        """
        I{(subclass override)}
        Return the size of the file.  This is called from within L{_set_mode}
        if the file is opened in append mode, so the file position can be
        tracked and L{seek} and L{tell} will work correctly.  If the file is
        a stream that can't be randomly accessed, you don't need to override
        this method,
        """
        return 0


    ###  internals...


    def _set_mode(self, mode='r', bufsize=-1):
        """
        Subclasses call this method to initialize the BufferedFile.
        """
        # set bufsize in any event, because it's used for readline().
        self._bufsize = self._DEFAULT_BUFSIZE
        if bufsize < 0:
            # do no buffering by default, because otherwise writes will get
            # buffered in a way that will probably confuse people.
            bufsize = 0
        if bufsize == 1:
            # apparently, line buffering only affects writes.  reads are only
            # buffered if you call readline (directly or indirectly: iterating
            # over a file will indirectly call readline).
            self._flags |= self.FLAG_BUFFERED | self.FLAG_LINE_BUFFERED
        elif bufsize > 1:
            self._bufsize = bufsize
            self._flags |= self.FLAG_BUFFERED
            self._flags &= ~self.FLAG_LINE_BUFFERED
        elif bufsize == 0:
            # unbuffered
            self._flags &= ~(self.FLAG_BUFFERED | self.FLAG_LINE_BUFFERED)

        if ('r' in mode) or ('+' in mode):
            self._flags |= self.FLAG_READ
        if ('w' in mode) or ('+' in mode):
            self._flags |= self.FLAG_WRITE
        if ('a' in mode):
            self._flags |= self.FLAG_WRITE | self.FLAG_APPEND
            self._size = self._get_size()
            self._pos = self._realpos = self._size
        if ('b' in mode):
            self._flags |= self.FLAG_BINARY
        if ('U' in mode):
            self._flags |= self.FLAG_UNIVERSAL_NEWLINE
            # built-in file objects have this attribute to store which kinds of
            # line terminations they've seen:
            # <http://www.python.org/doc/current/lib/built-in-funcs.html>
            self.newlines = None

    def _write_all(self, data):
        # the underlying stream may be something that does partial writes (like
        # a socket).
        while len(data) > 0:
            count = self._write(data)
            data = data[count:]
            if self._flags & self.FLAG_APPEND:
                self._size += count
                self._pos = self._realpos = self._size
            else:
                self._pos += count
                self._realpos += count
        return None

    def _record_newline(self, newline):
        # silliness about tracking what kinds of newlines we've seen.
        # i don't understand why it can be None, a string, or a tuple, instead
        # of just always being a tuple, but we'll emulate that behavior anyway.
        if not (self._flags & self.FLAG_UNIVERSAL_NEWLINE):
            return
        if self.newlines is None:
            self.newlines = newline
        elif (type(self.newlines) is str) and (self.newlines != newline):
            self.newlines = (self.newlines, newline)
        elif newline not in self.newlines:
            self.newlines += (newline,)
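
A minimal subclass sketch using only the documented override points (_read, _write and _set_mode); writes simply loop back to the read side:

class LoopbackFile(BufferedFile):
    """Illustrative only: whatever is written can be read back."""

    def __init__(self, mode='r+', bufsize=-1):
        BufferedFile.__init__(self)
        self._set_mode(mode, bufsize)
        self._queue = ''

    def _read(self, size):
        if not self._queue:
            raise EOFError()
        data, self._queue = self._queue[:size], self._queue[size:]
        return data

    def _write(self, data):
        self._queue += data
        return len(data)

f = LoopbackFile('r+')
f.write('hello paramiko\n')
print(f.readline())  # 'hello paramiko\n'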
Example #37
class Message(object):
    """
    An SSH2 I{Message} is a stream of bytes that encodes some combination of
    strings, integers, bools, and infinite-precision integers (known in python
    as I{long}s).  This class builds or breaks down such a byte stream.

    Normally you don't need to deal with anything this low-level, but it's
    exposed for people implementing custom extensions, or features that
    paramiko doesn't support yet.
    """

    def __init__(self, content=None):
        """
        Create a new SSH2 Message.

        @param content: the byte stream to use as the Message content (passed
            in only when decomposing a Message).
        @type content: string
        """
        if content is not None:
            self.packet = StringIO(content)
        else:
            self.packet = StringIO()

    def __str__(self):
        """
        Return the byte stream content of this Message, as a string.

        @return: the contents of this Message.
        @rtype: string
        """
        return self.packet.getvalue()

    def __repr__(self):
        """
        Returns a string representation of this object, for debugging.

        @rtype: string
        """
        return "paramiko.Message(" + repr(self.packet.getvalue()) + ")"

    def rewind(self):
        """
        Rewind the message to the beginning as if no items had been parsed
        out of it yet.
        """
        self.packet.seek(0)

    def get_remainder(self):
        """
        Return the bytes of this Message that haven't already been parsed and
        returned.

        @return: a string of the bytes not parsed yet.
        @rtype: string
        """
        position = self.packet.tell()
        remainder = self.packet.read()
        self.packet.seek(position)
        return remainder

    def get_so_far(self):
        """
        Returns the bytes of this Message that have been parsed and returned.
        The string passed into a Message's constructor can be regenerated by
        concatenating C{get_so_far} and L{get_remainder}.

        @return: a string of the bytes parsed so far.
        @rtype: string
        """
        position = self.packet.tell()
        self.rewind()
        return self.packet.read(position)

    def get_bytes(self, n):
        """
        Return the next C{n} bytes of the Message, without decomposing into
        an int, string, etc.  Just the raw bytes are returned.

        @return: a string of the next C{n} bytes of the Message, or a string
            of C{n} zero bytes, if there aren't C{n} bytes remaining.
        @rtype: string
        """
        b = self.packet.read(n)
        if len(b) < n:
            return b + "\x00" * (n - len(b))
        return b

    def get_byte(self):
        """
        Return the next byte of the Message, without decomposing it.  This
        is equivalent to L{get_bytes(1)<get_bytes>}.

        @return: the next byte of the Message, or C{'\000'} if there aren't
            any bytes remaining.
        @rtype: string
        """
        return self.get_bytes(1)

    def get_boolean(self):
        """
        Fetch a boolean from the stream.

        @return: C{True} or C{False} (from the Message).
        @rtype: bool
        """
        b = self.get_bytes(1)
        return b != "\x00"

    def get_int(self):
        """
        Fetch an int from the stream.

        @return: a 32-bit unsigned integer.
        @rtype: int
        """
        return struct.unpack(">I", self.get_bytes(4))[0]

    def get_int64(self):
        """
        Fetch a 64-bit int from the stream.

        @return: a 64-bit unsigned integer.
        @rtype: long
        """
        return struct.unpack(">Q", self.get_bytes(8))[0]

    def get_mpint(self):
        """
        Fetch a long int (mpint) from the stream.

        @return: an arbitrary-length integer.
        @rtype: long
        """
        return util.inflate_long(self.get_string())

    def get_string(self):
        """
        Fetch a string from the stream.  This could be a byte string and may
        contain unprintable characters.  (It's not unheard of for a string to
        contain another byte-stream Message.)

        @return: a string.
        @rtype: string
        """
        return self.get_bytes(self.get_int())

    def get_list(self):
        """
        Fetch a list of strings from the stream.  These are trivially encoded
        as comma-separated values in a string.

        @return: a list of strings.
        @rtype: list of strings
        """
        return self.get_string().split(",")

    def add_bytes(self, b):
        """
        Write bytes to the stream, without any formatting.

        @param b: bytes to add
        @type b: str
        """
        self.packet.write(b)
        return self

    def add_byte(self, b):
        """
        Write a single byte to the stream, without any formatting.

        @param b: byte to add
        @type b: str
        """
        self.packet.write(b)
        return self

    def add_boolean(self, b):
        """
        Add a boolean value to the stream.

        @param b: boolean value to add
        @type b: bool
        """
        if b:
            self.add_byte("\x01")
        else:
            self.add_byte("\x00")
        return self

    def add_int(self, n):
        """
        Add an integer to the stream.

        @param n: integer to add
        @type n: int
        """
        self.packet.write(struct.pack(">I", n))
        return self

    def add_int64(self, n):
        """
        Add a 64-bit int to the stream.

        @param n: long int to add
        @type n: long
        """
        self.packet.write(struct.pack(">Q", n))
        return self

    def add_mpint(self, z):
        """
        Add a long int to the stream, encoded as an infinite-precision
        integer.  This method only works on positive numbers.

        @param z: long int to add
        @type z: long
        """
        self.add_string(util.deflate_long(z))
        return self

    def add_string(self, s):
        """
        Add a string to the stream.

        @param s: string to add
        @type s: str
        """
        self.add_int(len(s))
        self.packet.write(s)
        return self

    def add_list(self, l):
        """
        Add a list of strings to the stream.  They are encoded identically to
        a single string of values separated by commas.  (Yes, really, that's
        how SSH2 does it.)

        @param l: list of strings to add
        @type l: list(str)
        """
        self.add_string(",".join(l))
        return self

    def _add(self, i):
        if type(i) is str:
            return self.add_string(i)
        elif type(i) is int:
            return self.add_int(i)
        elif type(i) is long:
            if i > long(0xFFFFFFFF):
                return self.add_mpint(i)
            else:
                return self.add_int(i)
        elif type(i) is bool:
            return self.add_boolean(i)
        elif type(i) is list:
            return self.add_list(i)
        else:
            raise Exception("Unknown type")

    def add(self, *seq):
        """
        Add a sequence of items to the stream.  The values are encoded based
        on their type: str, int, bool, list, or long.

        @param seq: the sequence of items
        @type seq: sequence

        @bug: longs are encoded non-deterministically.  Don't use this method.
        """
        for item in seq:
            self._add(item)
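
A round-trip sketch (this listing is Python 2-era, so strings are byte strings): build a message, then parse the same bytes back out.

m = Message()
m.add_byte('\x14')            # e.g. an SSH message type
m.add_int(42)
m.add_string('ssh-userauth')
m.add_boolean(True)

p = Message(str(m))           # decompose the same byte stream
assert p.get_byte() == '\x14'
assert p.get_int() == 42
assert p.get_string() == 'ssh-userauth'
assert p.get_boolean() is True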
Example #38
class SSHSession(Session):

    "Implements a :rfc:`4742` NETCONF session over SSH."

    def __init__(self, device_handler):
        capabilities = Capabilities(device_handler.get_capabilities())
        Session.__init__(self, capabilities)
        self._host_keys = paramiko.HostKeys()
        self._transport = None
        self._connected = False
        self._channel = None
        self._channel_id = None
        self._channel_name = None
        self._buffer = StringIO()
        # parsing-related, see _parse()
        self._device_handler = device_handler
        self._parsing_state10 = 0
        self._parsing_pos10 = 0
        self._parsing_pos11 = 0
        self._parsing_state11 = 0
        self._expchunksize = 0
        self._curchunksize = 0
        self._inendpos = 0
        self._size_num_list = []
        self._message_list = []

    def _parse(self):
        "Messages ae delimited by MSG_DELIM. The buffer could have grown by a maximum of BUF_SIZE bytes everytime this method is called. Retains state across method calls and if a byte has been read it will not be considered again."
        return self._parse10()

    def _parse10(self):
        """Messages are delimited by MSG_DELIM. The buffer could have grown by
        a maximum of BUF_SIZE bytes every time this method is called. Retains
        state across method calls and if a chunk has been read it will not be
        considered again."""

        logger.debug("parsing netconf v1.0")
        buf = self._buffer
        buf.seek(self._parsing_pos10)
        if MSG_DELIM in buf.read().decode('UTF-8'):
            buf.seek(0)
            msg, _, remaining = buf.read().decode('UTF-8').partition(MSG_DELIM)
            msg = msg.strip()
            if sys.version < '3':
                self._dispatch_message(msg.encode())
            else:
                self._dispatch_message(msg)
            # create new buffer which contains remaining of old buffer
            self._buffer = StringIO()
            self._buffer.write(remaining.encode())
            self._parsing_pos10 = 0
        else:
            # handle case that MSG_DELIM is split over two chunks
            self._parsing_pos10 = buf.tell() - MSG_DELIM_LEN
            if self._parsing_pos10 < 0:
                self._parsing_pos10 = 0

    def _parse11(self):
        logger.debug("parsing netconf v1.1")
        expchunksize = self._expchunksize
        curchunksize = self._curchunksize
        idle, instart, inmsg, inbetween, inend = range(5)
        state = self._parsing_state11
        inendpos = self._inendpos
        num_list = self._size_num_list
        MAX_STARTCHUNK_SIZE = 12  # \#+4294967295+\n
        pre = 'invalid base:1.1 frame'
        buf = self._buffer
        buf.seek(self._parsing_pos11)
        message_list = self._message_list  # a message is a list of chunks
        chunk_list = []  # a chunk is a list of characters

        while True:
            x = buf.read(1)
            if not x:
                logger.debug('No more data to read')
                # Store the current chunk to the message list
                chunk = b''.join(chunk_list)
                message_list.append(textify(chunk))
                break  # done reading
            logger.debug('x: %s', x)
            if state == idle:
                if x == b'\n':
                    state = instart
                    inendpos = 1
                else:
                    logger.debug('%s (%s: expect newline)' % (pre, state))
                    raise Exception
            elif state == instart:
                if inendpos == 1:
                    if x == b'#':
                        inendpos += 1
                    else:
                        logger.debug('%s (%s: expect "#")' % (pre, state))
                        raise Exception
                elif inendpos == 2:
                    if x.isdigit():
                        inendpos += 1  # == 3 now #
                        num_list.append(x)
                    else:
                        logger.debug('%s (%s: expect digit)' % (pre, state))
                        raise Exception
                else:
                    if inendpos == MAX_STARTCHUNK_SIZE:
                        logger.debug('%s (%s: no. too long)' % (pre, state))
                        raise Exception
                    elif x == b'\n':
                        num = b''.join(num_list)
                        num_list = []  # Reset num_list
                        try:
                            num = int(num)
                        except ValueError:
                            logger.debug('%s (%s: invalid no.)' % (pre, state))
                            raise Exception
                        else:
                            state = inmsg
                            expchunksize = num
                            logger.debug('response length: %d' % expchunksize)
                            curchunksize = 0
                            inendpos += 1
                    elif x.isdigit():
                        inendpos += 1  # > 3 now #
                        num_list.append(x)
                    else:
                        logger.debug('%s (%s: expect digit)' % (pre, state))
                        raise Exception
            elif state == inmsg:
                chunk_list.append(x)
                curchunksize += 1
                chunkleft = expchunksize - curchunksize
                if chunkleft == 0:
                    inendpos = 0
                    state = inbetween
                    chunk = b''.join(chunk_list)
                    message_list.append(textify(chunk))
                    chunk_list = []  # Reset chunk_list
                    logger.debug('parsed new chunk: %s' % (chunk))
            elif state == inbetween:
                if inendpos == 0:
                    if x == b'\n': inendpos += 1
                    else:
                        logger.debug('%s (%s: expect newline)' % (pre, state))
                        raise Exception
                elif inendpos == 1:
                    if x == b'#': inendpos += 1
                    else:
                        logger.debug('%s (%s: expect "#")' % (pre, state))
                        raise Exception
                else:
                    inendpos += 1  # == 3 now #
                    if x == b'#':
                        state = inend
                    elif x.isdigit():
                        # More chunks follow
                        state = instart
                        num_list = []
                        num_list.append(x)
                    else:
                        logger.debug('%s (%s: expect "#")' % (pre, state))
                        raise Exception
            elif state == inend:
                if inendpos == 3:
                    if x == b'\n':
                        inendpos = 0
                        state = idle
                        logger.debug('dispatching message')
                        self._dispatch_message(''.join(message_list))
                        # reset
                        rest = buf.read()
                        buf = BytesIO()
                        buf.write(rest)
                        buf.seek(0)
                        message_list = []
                        self._message_list = message_list
                        chunk_list = []
                        expchunksize = curchunksize = 0
                        inendpos = 0
                        break
                    else:
                        logger.debug('%s (%s: expect newline)' % (pre, state))
                        raise Exception
            else:
                logger.debug('%s (%s invalid state)' % (pre, state))
                raise Exception

        self._expchunksize = expchunksize
        self._curchunksize = curchunksize
        self._parsing_state11 = state
        self._inendpos = inendpos
        self._size_num_list = num_list
        self._buffer = buf
        self._parsing_pos11 = self._buffer.tell()
        logger.debug('parse11 ending ...')
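
For reference, the RFC 6242 chunked frame this state machine decodes is '\n#<size>\n<chunk>' repeated, closed by '\n##\n'. A compact, non-incremental sketch of the same decoding for a fully buffered frame (the method above, by contrast, is restartable across partial reads):

import re

def decode_chunked(frame):
    """Decode one complete NETCONF 1.1 chunked frame into message bytes."""
    chunks, pos = [], 0
    while True:
        m = re.match(rb'\n#(\d{1,10})\n', frame[pos:])
        if not m:  # no chunk header left, so expect the end-of-chunks marker
            assert frame[pos:] == b'\n##\n', 'invalid base:1.1 frame'
            return b''.join(chunks)
        size = int(m.group(1))
        pos += m.end()
        chunks.append(frame[pos:pos + size])
        pos += size

# e.g. decode_chunked(b'\n#4\n<rpc\n#1\n>\n##\n') == b'<rpc>'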

    def load_known_hosts(self, filename=None):
        """Load host keys from an openssh :file:`known_hosts`-style file. Can
        be called multiple times.

        If *filename* is not specified, looks in the default locations i.e. :file:`~/.ssh/known_hosts` and :file:`~/ssh/known_hosts` for Windows.
        """

        if filename is None:
            filename = os.path.expanduser('~/.ssh/known_hosts')
            try:
                self._host_keys.load(filename)
            except IOError:
                # for windows
                filename = os.path.expanduser('~/ssh/known_hosts')
                try:
                    self._host_keys.load(filename)
                except IOError:
                    pass
        else:
            self._host_keys.load(filename)
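
A hypothetical usage note (s stands for an already-constructed SSHSession):

s.load_known_hosts()                            # defaults: ~/.ssh/known_hosts, then ~/ssh/known_hosts
s.load_known_hosts('/etc/ssh/ssh_known_hosts')  # explicit file; safe to call multiple times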

    def close(self):
        if self._transport.is_active():
            self._transport.close()
        self._channel = None
        self._connected = False

    # REMEMBER to update transport.rst if the signature changes, since it is hardcoded there
    def connect(self,
                host,
                port=830,
                timeout=None,
                unknown_host_cb=default_unknown_host_cb,
                username=None,
                password=None,
                key_filename=None,
                allow_agent=True,
                hostkey_verify=True,
                look_for_keys=True,
                ssh_config=None):
        """Connect via SSH and initialize the NETCONF session. First attempts the publickey authentication method and then password authentication.

        To disable attempting publickey authentication altogether, call with *allow_agent* and *look_for_keys* as `False`.

        *host* is the hostname or IP address to connect to

        *port* is by default 830, but some devices use the default SSH port of 22 so this may need to be specified

        *timeout* is an optional timeout for socket connect

        *unknown_host_cb* is called when the server host key is not recognized. It takes two arguments, the hostname and the fingerprint (see the signature of :func:`default_unknown_host_cb`)

        *username* is the username to use for SSH authentication

        *password* is the password used if using password authentication, or the passphrase to use for unlocking keys that require it

        *key_filename* is a filename where the private key to be used can be found

        *allow_agent* enables querying SSH agent (if found) for keys

        *hostkey_verify* enables hostkey verification from ~/.ssh/known_hosts

        *look_for_keys* enables looking in the usual locations for ssh keys (e.g. :file:`~/.ssh/id_*`)

        *ssh_config* enables parsing of an OpenSSH configuration file, if set to its path, e.g. :file:`~/.ssh/config` or to True (in this case, use :file:`~/.ssh/config`).
        """
        # Optionally, parse .ssh/config
        config = {}
        if ssh_config is True:
            ssh_config = "~/.ssh/config" if sys.platform != "win32" else "~/ssh/config"
        if ssh_config is not None:
            config = paramiko.SSHConfig()
            config.parse(open(os.path.expanduser(ssh_config)))
            config = config.lookup(host)
            host = config.get("hostname", host)
            if username is None:
                username = config.get("user")
            if key_filename is None:
                key_filename = config.get("identityfile")

        if username is None:
            username = getpass.getuser()

        sock = None
        if config.get("proxycommand"):
            sock = paramiko.proxy.ProxyCommand(config.get("proxycommand"))
        else:
            for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC,
                                          socket.SOCK_STREAM):
                af, socktype, proto, canonname, sa = res
                try:
                    sock = socket.socket(af, socktype, proto)
                    sock.settimeout(timeout)
                except socket.error:
                    continue
                try:
                    sock.connect(sa)
                except socket.error:
                    sock.close()
                    continue
                break
            else:
                raise SSHError("Could not open socket to %s:%s" % (host, port))

        t = self._transport = paramiko.Transport(sock)
        t.set_log_channel(logger.name)
        if config.get("compression") == 'yes':
            t.use_compression()

        try:
            t.start_client()
        except paramiko.SSHException:
            raise SSHError('Negotiation failed')

        # host key verification
        server_key = t.get_remote_server_key()

        fingerprint = _colonify(hexlify(server_key.get_fingerprint()))

        if hostkey_verify:
            known_host = self._host_keys.check(host, server_key)
            if not known_host and not unknown_host_cb(host, fingerprint):
                raise SSHUnknownHostError(host, fingerprint)

        if key_filename is None:
            key_filenames = []
        elif isinstance(key_filename, (str, bytes)):
            key_filenames = [key_filename]
        else:
            key_filenames = key_filename

        self._auth(username, password, key_filenames, allow_agent,
                   look_for_keys)

        self._connected = True  # there was no error authenticating
        # TODO: leopoul: Review, test, and if needed rewrite this part
        subsystem_names = self._device_handler.get_ssh_subsystem_names()
        for subname in subsystem_names:
            c = self._channel = self._transport.open_session()
            self._channel_id = c.get_id()
            channel_name = "%s-subsystem-%s" % (subname, str(self._channel_id))
            c.set_name(channel_name)
            try:
                c.invoke_subsystem(subname)
            except paramiko.SSHException as e:
                logger.info("%s (subsystem request rejected)", e)
                handle_exception = self._device_handler.handle_connection_exceptions(
                    self)
                # Ignore the exception, since we continue to try the different
                # subsystem names until we find one that can connect.
                #have to handle exception for each vendor here
                if not handle_exception:
                    continue
            self._channel_name = c.get_name()
            self._post_connect()
            return
        raise SSHError(
            "Could not open connection, possibly due to unacceptable"
            " SSH subsystem name.")

    def _auth(self, username, password, key_filenames, allow_agent,
              look_for_keys):
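        # Fallback order: explicit key files, then SSH agent keys, then keys
        # discovered in the default locations, and finally password auth;
        # the last failure is re-raised as AuthenticationError at the end.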
        saved_exception = None

        for key_filename in key_filenames:
            for cls in (paramiko.RSAKey, paramiko.DSSKey):
                try:
                    key = cls.from_private_key_file(key_filename, password)
                    logger.debug(
                        "Trying key %s from %s" %
                        (hexlify(key.get_fingerprint()), key_filename))
                    self._transport.auth_publickey(username, key)
                    return
                except Exception as e:
                    saved_exception = e
                    logger.debug(e)

        if allow_agent:
            for key in paramiko.Agent().get_keys():
                try:
                    logger.debug("Trying SSH agent key %s" %
                                 hexlify(key.get_fingerprint()))
                    self._transport.auth_publickey(username, key)
                    return
                except Exception as e:
                    saved_exception = e
                    logger.debug(e)

        keyfiles = []
        if look_for_keys:
            rsa_key = os.path.expanduser("~/.ssh/id_rsa")
            dsa_key = os.path.expanduser("~/.ssh/id_dsa")
            if os.path.isfile(rsa_key):
                keyfiles.append((paramiko.RSAKey, rsa_key))
            if os.path.isfile(dsa_key):
                keyfiles.append((paramiko.DSSKey, dsa_key))
            # look in ~/ssh/ for windows users:
            rsa_key = os.path.expanduser("~/ssh/id_rsa")
            dsa_key = os.path.expanduser("~/ssh/id_dsa")
            if os.path.isfile(rsa_key):
                keyfiles.append((paramiko.RSAKey, rsa_key))
            if os.path.isfile(dsa_key):
                keyfiles.append((paramiko.DSSKey, dsa_key))

        for cls, filename in keyfiles:
            try:
                key = cls.from_private_key_file(filename, password)
                logger.debug("Trying discovered key %s in %s" %
                             (hexlify(key.get_fingerprint()), filename))
                self._transport.auth_publickey(username, key)
                return
            except Exception as e:
                saved_exception = e
                logger.debug(e)

        if password is not None:
            try:
                self._transport.auth_password(username, password)
                return
            except Exception as e:
                saved_exception = e
                logger.debug(e)

        if saved_exception is not None:
            # need pep-3134 to do this right
            raise AuthenticationError(repr(saved_exception))

        raise AuthenticationError("No authentication methods available")

    def run(self):
        chan = self._channel
        q = self._q

        def start_delim(data_len):
            return '\n#%s\n' % (data_len)

        try:
            while True:
                # select on a paramiko ssh channel object does not ever return it in the writable list, so channels don't exactly emulate the socket api
                r, w, e = select([chan], [], [], TICK)
                # will wake up every TICK seconds to check if there is something to send, sooner if there is something to read (due to select returning chan in the readable list)
                if r:
                    data = chan.recv(BUF_SIZE)
                    if data:
                        self._buffer.write(data)
                        if self._server_capabilities:
                            if 'urn:ietf:params:netconf:base:1.1' in self._server_capabilities and 'urn:ietf:params:netconf:base:1.1' in self._client_capabilities:
                                logger.debug(
                                    "Selecting netconf:base:1.1 for encoding")
                                self._parse11()
                            elif 'urn:ietf:params:netconf:base:1.0' in self._server_capabilities or 'urn:ietf:params:xml:ns:netconf:base:1.0' in self._server_capabilities or 'urn:ietf:params:netconf:base:1.0' in self._client_capabilities:
                                logger.debug(
                                    "Selecting netconf:base:1.0 for encoding")
                                self._parse10()
                            else:
                                raise Exception
                        else:
                            self._parse10()  # HELLO msg uses EOM markers.
                    else:
                        raise SessionCloseError(self._buffer.getvalue())
                if not q.empty() and chan.send_ready():
                    logger.debug("Sending message")
                    data = q.get()
                    try:
                        # send a HELLO msg using v1.0 EOM markers.
                        validated_element(
                            data,
                            tags=
                            '{urn:ietf:params:xml:ns:netconf:base:1.0}hello')
                        data = "%s%s" % (data, MSG_DELIM)
                    except XMLError:
                        # this is not a HELLO msg
                        # we publish v1.1 support
                        if 'urn:ietf:params:netconf:base:1.1' in self._client_capabilities:
                            if self._server_capabilities:
                                if 'urn:ietf:params:netconf:base:1.1' in self._server_capabilities:
                                    # send using v1.1 chunked framing
                                    data = "%s%s%s" % (start_delim(
                                        len(data)), data, END_DELIM)
                                elif 'urn:ietf:params:netconf:base:1.0' in self._server_capabilities or 'urn:ietf:params:xml:ns:netconf:base:1.0' in self._server_capabilities:
                                    # send using v1.0 EOM markers
                                    data = "%s%s" % (data, MSG_DELIM)
                                else:
                                    raise Exception
                            else:
                                logger.debug(
                                    'HELLO msg was sent, but server capabilities are still not known'
                                )
                                raise Exception
                        # we publish only v1.0 support
                        else:
                            # send using v1.0 EOM markers
                            data = "%s%s" % (data, MSG_DELIM)
                    finally:
                        logger.debug("Sending: %s", data)
                        while data:
                            n = chan.send(data)
                            if n <= 0:
                                raise SessionCloseError(
                                    self._buffer.getvalue(), data)
                            data = data[n:]
        except Exception as e:
            logger.debug("Broke out of main loop, error=%r", e)
            self._dispatch_error(e)
            self.close()
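
The outbound framing decision above can be summarized in a short sketch, assuming MSG_DELIM = ']]>]]>' and END_DELIM = '\n##\n' as in the module-level constants this method references:

def frame_outgoing(data, use_chunked):
    """Sketch of run()'s outbound framing (constants assumed as noted above)."""
    if use_chunked:             # negotiated base:1.1 -> chunked framing
        return '\n#%d\n%s\n##\n' % (len(data), data)
    return data + ']]>]]>'      # base:1.0 and HELLO -> EOM marker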

    @property
    def transport(self):
        "Underlying `paramiko.Transport <http://www.lag.net/paramiko/docs/paramiko.Transport-class.html>`_ object. This makes it possible to call methods like :meth:`~paramiko.Transport.set_keepalive` on it."
        return self._transport