Example #1
0
    def test_8_large_readv(self):
        """
        verify that a very large readv is broken up correctly and still
        returned as a single blob.
        """
        sftp = get_sftp()
        path = '%s/hongry.txt' % FOLDER
        kblob = bytes().join([struct.pack('>H', i) for i in range(512)])
        try:
            with sftp.open(path, 'wb') as f:
                f.set_pipelined(True)
                for i in range(1024):
                    f.write(kblob)
                    if i % 128 == 0:
                        sys.stderr.write('.')
            sys.stderr.write(' ')

            self.assertEqual(sftp.stat(path).st_size, 1024 * 1024)

            # one giant readv request; paramiko must split it into several
            # wire reads but hand back a single contiguous blob.
            with sftp.open(path, 'rb') as f:
                blobs = list(f.readv([(23 * 1024, 128 * 1024)]))
                self.assertEqual(1, len(blobs))
                self.assertEqual(128 * 1024, len(blobs[0]))

            sys.stderr.write(' ')
        finally:
            sftp.remove(path)
Example #2
0
    def test_1_lots_of_files(self):
        """
        create a bunch of files over the same session.
        """
        sftp = get_sftp()
        numfiles = 100
        try:
            for i in range(numfiles):
                with sftp.open('%s/file%d.txt' % (FOLDER, i), 'w', 1) as f:
                    f.write('this is file #%d.\n' % i)
                sftp.chmod('%s/file%d.txt' % (FOLDER, i), o660)

            # now make sure every file is there, by reading the filenames
            # back in random order.
            numlist = list(range(numfiles))
            random.shuffle(numlist)
            for r in numlist:
                with sftp.open('%s/file%d.txt' % (FOLDER, r)) as f:
                    self.assertEqual(f.readline(), 'this is file #%d.\n' % r)
        finally:
            for i in range(numfiles):
                # best-effort cleanup: a failure in the write loop may have
                # left some of the files uncreated.
                try:
                    sftp.remove('%s/file%d.txt' % (FOLDER, i))
                except (IOError, OSError):
                    pass
Example #3
0
    def test_2_big_file(self):
        """
        write a 1MB file with no buffering.
        """
        sftp = get_sftp()
        kblob = (1024 * 'x')
        start = time.time()
        try:
            with sftp.open('%s/hongry.txt' % FOLDER, 'w') as f:
                for i in range(1024):
                    f.write(kblob)
                    if i % 128 == 0:
                        sys.stderr.write('.')
            sys.stderr.write(' ')

            self.assertEqual(
                sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
            sys.stderr.write('%ds ' % round(time.time() - start))

            # now read it back and verify every 1KB chunk
            start = time.time()
            with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f:
                for _ in range(1024):
                    self.assertEqual(f.read(1024), kblob)

            sys.stderr.write('%ds ' % round(time.time() - start))
        finally:
            sftp.remove('%s/hongry.txt' % FOLDER)
Example #4
0
    def test_7_prefetch_readv(self):
        """
        verify that prefetch and readv don't conflict with each other.
        """
        sftp = get_sftp()
        kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
        try:
            with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
                f.set_pipelined(True)
                for n in range(1024):
                    f.write(kblob)
                    if n % 128 == 0:
                        sys.stderr.write('.')
            sys.stderr.write(' ')

            self.assertEqual(
                sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)

            with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
                # pass the explicit file size to prefetch, consistent with
                # the other prefetch tests in this file.
                file_size = f.stat().st_size
                f.prefetch(file_size)
                data = f.read(1024)
                self.assertEqual(data, kblob)

                chunk_size = 793
                base_offset = 512 * 1024
                # the file is kblob tiled; k2blob covers any 793-byte read
                # at an offset modulo 1024.
                k2blob = kblob + kblob
                chunks = [(base_offset + (chunk_size * i), chunk_size)
                          for i in range(20)]
                # readv while prefetch responses are still in flight
                for data in f.readv(chunks):
                    offset = base_offset % 1024
                    self.assertEqual(chunk_size, len(data))
                    self.assertEqual(k2blob[offset:offset + chunk_size], data)
                    base_offset += chunk_size

            sys.stderr.write(' ')
        finally:
            sftp.remove('%s/hongry.txt' % FOLDER)
Example #5
0
    def test_A_big_file_renegotiate(self):
        """
        write a 1MB file, forcing key renegotiation in the middle.
        """
        sftp = get_sftp()
        transport = sftp.sock.get_transport()
        transport.packetizer.REKEY_BYTES = 512 * 1024
        k32blob = (32 * 1024 * 'x')
        try:
            with sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024) as f:
                for _ in range(32):
                    f.write(k32blob)

            self.assertEqual(
                sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
            # a rekey must have happened: the current exchange hash now
            # differs from the original session id.
            self.assertNotEqual(transport.H, transport.session_id)

            # try to read it too.
            with sftp.open('%s/hongry.txt' % FOLDER, 'r', 128 * 1024) as f:
                f.prefetch()
                total = 0
                while total < 1024 * 1024:
                    total += len(f.read(32 * 1024))
        finally:
            sftp.remove('%s/hongry.txt' % FOLDER)
            transport.packetizer.REKEY_BYTES = pow(2, 30)
Example #6
0
    def test_8_large_readv(self):
        """
        verify that a very large readv is broken up correctly and still
        returned as a single blob.
        """
        sftp = get_sftp()
        kblob = bytes().join([struct.pack('>H', x) for x in range(512)])
        try:
            with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
                f.set_pipelined(True)
                for x in range(1024):
                    f.write(kblob)
                    if x % 128 == 0:
                        sys.stderr.write('.')
            sys.stderr.write(' ')

            self.assertEqual(
                sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)

            with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
                # single huge request: split internally, returned whole
                result = list(f.readv([(23 * 1024, 128 * 1024)]))
                self.assertEqual(1, len(result))
                self.assertEqual(128 * 1024, len(result[0]))

            sys.stderr.write(' ')
        finally:
            sftp.remove('%s/hongry.txt' % FOLDER)
Example #7
0
    def test_A_big_file_renegotiate(self):
        """
        write a 1MB file, forcing key renegotiation in the middle.
        """
        sftp = get_sftp()
        t = sftp.sock.get_transport()
        t.packetizer.REKEY_BYTES = 512 * 1024
        k32blob = (32 * 1024 * 'x')
        path = '%s/hongry.txt' % FOLDER
        try:
            with sftp.open(path, 'w', 128 * 1024) as f:
                for _ in range(32):
                    f.write(k32blob)

            self.assertEqual(sftp.stat(path).st_size, 1024 * 1024)
            # session_id stays fixed while H changes on rekey, so
            # inequality proves a renegotiation took place mid-transfer
            self.assertNotEqual(t.H, t.session_id)

            # try to read it too.
            with sftp.open(path, 'r', 128 * 1024) as f:
                f.prefetch()
                total = 0
                while total < 1024 * 1024:
                    total += len(f.read(32 * 1024))
        finally:
            sftp.remove(path)
            t.packetizer.REKEY_BYTES = pow(2, 30)
Example #8
0
    def test_6_lots_of_prefetching(self):
        """
        prefetch a 1MB file a bunch of times, discarding the file object
        without using it, to verify that paramiko doesn't get confused.
        """
        sftp = get_sftp()
        kblob = (1024 * 'x')
        path = '%s/hongry.txt' % FOLDER
        try:
            with sftp.open(path, 'w') as f:
                f.set_pipelined(True)
                for i in range(1024):
                    f.write(kblob)
                    if i % 128 == 0:
                        sys.stderr.write('.')
            sys.stderr.write(' ')

            self.assertEqual(sftp.stat(path).st_size, 1024 * 1024)

            # kick off prefetches and immediately drop the handles
            for _ in range(10):
                with sftp.open(path, 'r') as f:
                    f.prefetch()
            # a final prefetch whose data we actually consume in full
            with sftp.open(path, 'r') as f:
                f.prefetch()
                for i in range(1024):
                    self.assertEqual(f.read(1024), kblob)
                    if i % 128 == 0:
                        sys.stderr.write('.')
            sys.stderr.write(' ')
        finally:
            sftp.remove(path)
Example #9
0
 def test_4_prefetch_seek(self):
     """
     prefetch a 1MB file, then seek to random offsets within it and
     verify the bytes read back are correct.
     """
     sftp = get_sftp()
     kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
     try:
         with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
             f.set_pipelined(True)
             for n in range(1024):
                 f.write(kblob)
                 if n % 128 == 0:
                     sys.stderr.write('.')
         sys.stderr.write(' ')

         self.assertEqual(
             sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)

         start = time.time()
         # the file is kblob tiled end-to-end; doubling it lets us slice
         # an expected chunk at any offset modulo 1024.
         k2blob = kblob + kblob
         chunk = 793
         for i in range(10):
             with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
                 f.prefetch()
                 base_offset = (512 * 1024) + 17 * random.randint(1000, 2000)
                 offsets = [base_offset + j * chunk for j in range(100)]
                 # randomly seek around and read them out
                 for j in range(100):
                     offset = offsets[random.randint(0, len(offsets) - 1)]
                     offsets.remove(offset)
                     f.seek(offset)
                     data = f.read(chunk)
                     n_offset = offset % 1024
                     self.assertEqual(data, k2blob[n_offset:n_offset + chunk])
                     # (removed dead `offset += chunk`: offset is
                     # reassigned at the top of each iteration)
         end = time.time()
         sys.stderr.write('%ds ' % round(end - start))
     finally:
         sftp.remove('%s/hongry.txt' % FOLDER)
Example #10
0
    def test_1_lots_of_files(self):
        """
        create a bunch of files over the same session.
        """
        sftp = get_sftp()
        numfiles = 100
        try:
            for i in range(numfiles):
                with sftp.open('%s/file%d.txt' % (FOLDER, i), 'w', 1) as f:
                    f.write('this is file #%d.\n' % i)
                sftp.chmod('%s/file%d.txt' % (FOLDER, i), o660)

            # now make sure every file is there, by creating a list of
            # filenames and reading them in random order.
            order = list(range(numfiles))
            random.shuffle(order)
            for r in order:
                with sftp.open('%s/file%d.txt' % (FOLDER, r)) as f:
                    self.assertEqual(f.readline(), 'this is file #%d.\n' % r)
        finally:
            for i in range(numfiles):
                # cleanup is best-effort: only filesystem errors are
                # swallowed, anything else should surface.
                try:
                    sftp.remove('%s/file%d.txt' % (FOLDER, i))
                except (IOError, OSError):
                    pass
Example #11
0
    def test_6_lots_of_prefetching(self):
        """
        prefetch a 1MB file a bunch of times, discarding the file object
        without using it, to verify that paramiko doesn't get confused.
        """
        sftp = get_sftp()
        kblob = (1024 * 'x')
        try:
            with sftp.open('%s/hongry.txt' % FOLDER, 'w') as f:
                f.set_pipelined(True)
                for idx in range(1024):
                    f.write(kblob)
                    if idx % 128 == 0:
                        sys.stderr.write('.')
            sys.stderr.write(' ')

            self.assertEqual(
                sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)

            # abandoned prefetches: open, start prefetching, close
            for _ in range(10):
                with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f:
                    f.prefetch()
            # one more prefetch that we actually read to the end
            with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f:
                f.prefetch()
                for idx in range(1024):
                    chunk = f.read(1024)
                    self.assertEqual(chunk, kblob)
                    if idx % 128 == 0:
                        sys.stderr.write('.')
            sys.stderr.write(' ')
        finally:
            sftp.remove('%s/hongry.txt' % FOLDER)
Example #12
0
    def test_2_big_file(self):
        """
        write a 1MB file with no buffering.
        """
        sftp = get_sftp()
        kblob = (1024 * 'x')
        path = '%s/hongry.txt' % FOLDER
        start = time.time()
        try:
            with sftp.open(path, 'w') as f:
                for idx in range(1024):
                    f.write(kblob)
                    if idx % 128 == 0:
                        sys.stderr.write('.')
            sys.stderr.write(' ')

            self.assertEqual(sftp.stat(path).st_size, 1024 * 1024)
            end = time.time()
            sys.stderr.write('%ds ' % round(end - start))

            # read the file back, 1KB at a time, checking each chunk
            start = time.time()
            with sftp.open(path, 'r') as f:
                for idx in range(1024):
                    self.assertEqual(f.read(1024), kblob)

            end = time.time()
            sys.stderr.write('%ds ' % round(end - start))
        finally:
            sftp.remove(path)
Example #13
0
 def setUp(self):
     """
     pick a fresh remote folder name and create it for this test run.
     """
     global FOLDER
     sftp = get_sftp()
     # probe numbered suffixes until mkdir succeeds on an unused name
     for i in range(1000):
         FOLDER = FOLDER[:-3] + '%03d' % i
         try:
             sftp.mkdir(FOLDER)
         except (IOError, OSError):
             continue
         break
Example #14
0
 def setUp(self):
     """
     create a uniquely numbered scratch folder on the server.
     """
     global FOLDER
     sftp = get_sftp()
     # the last three characters of FOLDER are a numeric suffix; bump it
     # until we find a name the server lets us create
     for suffix in range(1000):
         FOLDER = FOLDER[:-3] + '%03d' % suffix
         try:
             sftp.mkdir(FOLDER)
             break
         except (IOError, OSError):
             pass
Example #15
0
    def test_9_big_file_big_buffer(self):
        """
        write a 1MB file, with no linefeeds, and a big buffer.
        """
        sftp = get_sftp()
        mblob = (1024 * 1024 * 'x')
        path = '%s/hongry.txt' % FOLDER
        try:
            # one single write() call through a 128KB-buffered handle
            with sftp.open(path, 'w', 128 * 1024) as f:
                f.write(mblob)

            self.assertEqual(sftp.stat(path).st_size, 1024 * 1024)
        finally:
            sftp.remove(path)
Example #16
0
    def test_9_big_file_big_buffer(self):
        """
        write a 1MB file, with no linefeeds, and a big buffer.
        """
        sftp = get_sftp()
        megabyte = 1024 * 1024
        mblob = megabyte * 'x'
        try:
            with sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024) as f:
                f.write(mblob)

            self.assertEqual(
                sftp.stat('%s/hongry.txt' % FOLDER).st_size, megabyte)
        finally:
            sftp.remove('%s/hongry.txt' % FOLDER)
Example #17
0
    def test_3_big_file_pipelined(self):
        """
        write a 1MB file, with no linefeeds, using pipelining.
        """
        sftp = get_sftp()
        kblob = bytes().join([struct.pack('>H', v) for v in range(512)])
        start = time.time()
        try:
            with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
                f.set_pipelined(True)
                for v in range(1024):
                    f.write(kblob)
                    if v % 128 == 0:
                        sys.stderr.write('.')
            sys.stderr.write(' ')

            self.assertEqual(
                sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
            sys.stderr.write('%ds ' % round(time.time() - start))

            start = time.time()
            with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
                file_size = f.stat().st_size
                f.prefetch(file_size)

                # read on odd boundaries to make sure the bytes aren't
                # getting scrambled
                pos = 0
                k2blob = kblob + kblob
                chunk = 629
                size = 1024 * 1024
                while pos < size:
                    chunk = min(chunk, size - pos)
                    data = f.read(chunk)
                    wrapped = pos % 1024
                    self.assertEqual(data, k2blob[wrapped:wrapped + chunk])
                    pos += chunk

            sys.stderr.write('%ds ' % round(time.time() - start))
        finally:
            sftp.remove('%s/hongry.txt' % FOLDER)
Example #18
0
    def test_3_big_file_pipelined(self):
        """
        write a 1MB file, with no linefeeds, using pipelining.
        """
        sftp = get_sftp()
        kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
        path = '%s/hongry.txt' % FOLDER
        start = time.time()
        try:
            with sftp.open(path, 'wb') as f:
                f.set_pipelined(True)
                for n in range(1024):
                    f.write(kblob)
                    if n % 128 == 0:
                        sys.stderr.write('.')
            sys.stderr.write(' ')

            self.assertEqual(sftp.stat(path).st_size, 1024 * 1024)
            end = time.time()
            sys.stderr.write('%ds ' % round(end - start))

            start = time.time()
            with sftp.open(path, 'rb') as f:
                file_size = f.stat().st_size
                f.prefetch(file_size)

                # read on odd boundaries to make sure the bytes aren't
                # getting scrambled
                n = 0
                k2blob = kblob + kblob
                chunk = 629
                size = 1024 * 1024
                while n < size:
                    if n + chunk > size:
                        chunk = size - n
                    self.assertEqual(f.read(chunk),
                                     k2blob[n % 1024:n % 1024 + chunk])
                    n += chunk

            end = time.time()
            sys.stderr.write('%ds ' % round(end - start))
        finally:
            sftp.remove(path)
Example #19
0
    def test_4_prefetch_seek(self):
        """
        prefetch a 1MB file, then seek to random positions in it and
        verify that reads return the expected bytes.
        """
        sftp = get_sftp()
        kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
        try:
            with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
                f.set_pipelined(True)
                for n in range(1024):
                    f.write(kblob)
                    if n % 128 == 0:
                        sys.stderr.write('.')
            sys.stderr.write(' ')

            self.assertEqual(
                sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)

            start = time.time()
            # the file is kblob tiled, so k2blob lets us slice the expected
            # bytes at any offset modulo 1024.
            k2blob = kblob + kblob
            chunk = 793
            for i in range(10):
                with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
                    file_size = f.stat().st_size
                    f.prefetch(file_size)
                    base_offset = (512 *
                                   1024) + 17 * random.randint(1000, 2000)
                    offsets = [base_offset + j * chunk for j in range(100)]
                    # randomly seek around and read them out
                    for j in range(100):
                        offset = offsets[random.randint(0, len(offsets) - 1)]
                        offsets.remove(offset)
                        f.seek(offset)
                        data = f.read(chunk)
                        n_offset = offset % 1024
                        self.assertEqual(data,
                                         k2blob[n_offset:n_offset + chunk])
                        # (removed dead `offset += chunk`: offset is
                        # reassigned at the top of each iteration)
            end = time.time()
            sys.stderr.write('%ds ' % round(end - start))
        finally:
            sftp.remove('%s/hongry.txt' % FOLDER)
Example #20
0
    def test_5_readv_seek(self):
        """
        issue a randomly ordered readv over the file and verify every
        returned chunk against the expected bytes.
        """
        sftp = get_sftp()
        kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
        try:
            with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
                f.set_pipelined(True)
                for n in range(1024):
                    f.write(kblob)
                    if n % 128 == 0:
                        sys.stderr.write('.')
            sys.stderr.write(' ')

            self.assertEqual(
                sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)

            start = time.time()
            # the file is kblob tiled; k2blob covers a read at any offset
            # modulo 1024.
            k2blob = kblob + kblob
            chunk = 793
            for i in range(10):
                with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
                    base_offset = (512 *
                                   1024) + 17 * random.randint(1000, 2000)
                    # make a bunch of offsets and put them in random order
                    offsets = [base_offset + j * chunk for j in range(100)]
                    readv_list = []
                    for j in range(100):
                        o = offsets[random.randint(0, len(offsets) - 1)]
                        offsets.remove(o)
                        readv_list.append((o, chunk))
                    # pair each result with its request instead of indexing;
                    # the old index loop also shadowed the outer `i`
                    results = list(f.readv(readv_list))
                    self.assertEqual(len(results), len(readv_list))
                    for (offset, _), data in zip(readv_list, results):
                        n_offset = offset % 1024
                        self.assertEqual(data,
                                         k2blob[n_offset:n_offset + chunk])
            end = time.time()
            sys.stderr.write('%ds ' % round(end - start))
        finally:
            sftp.remove('%s/hongry.txt' % FOLDER)
Example #21
0
    def test_7_prefetch_readv(self):
        """
        verify that prefetch and readv don't conflict with each other.
        """
        sftp = get_sftp()
        kblob = bytes().join([struct.pack('>H', v) for v in range(512)])
        path = '%s/hongry.txt' % FOLDER
        try:
            with sftp.open(path, 'wb') as f:
                f.set_pipelined(True)
                for v in range(1024):
                    f.write(kblob)
                    if v % 128 == 0:
                        sys.stderr.write('.')
            sys.stderr.write(' ')

            self.assertEqual(sftp.stat(path).st_size, 1024 * 1024)

            with sftp.open(path, 'rb') as f:
                f.prefetch(f.stat().st_size)
                self.assertEqual(f.read(1024), kblob)

                chunk_size = 793
                base_offset = 512 * 1024
                k2blob = kblob + kblob
                requests = [(base_offset + chunk_size * i, chunk_size)
                            for i in range(20)]
                # readv while the prefetch responses are still arriving
                for data in f.readv(requests):
                    offset = base_offset % 1024
                    self.assertEqual(chunk_size, len(data))
                    self.assertEqual(k2blob[offset:offset + chunk_size], data)
                    base_offset += chunk_size

            sys.stderr.write(' ')
        finally:
            sftp.remove(path)
Example #22
0
    def test_5_readv_seek(self):
        """
        build a randomized readv request list and check that each chunk
        comes back with the right bytes.
        """
        sftp = get_sftp()
        kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
        try:
            with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
                f.set_pipelined(True)
                for n in range(1024):
                    f.write(kblob)
                    if n % 128 == 0:
                        sys.stderr.write('.')
            sys.stderr.write(' ')

            self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)

            start = time.time()
            k2blob = kblob + kblob
            chunk = 793
            for i in range(10):
                with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
                    base_offset = (512 * 1024) + 17 * random.randint(1000, 2000)
                    # make a bunch of offsets and put them in random order
                    offsets = [base_offset + j * chunk for j in range(100)]
                    readv_list = []
                    for j in range(100):
                        o = offsets[random.randint(0, len(offsets) - 1)]
                        offsets.remove(o)
                        readv_list.append((o, chunk))
                    # walk request/response pairs together; the previous
                    # index loop reused (shadowed) the outer loop variable i
                    results = list(f.readv(readv_list))
                    self.assertEqual(len(results), len(readv_list))
                    for (offset, _), data in zip(readv_list, results):
                        n_offset = offset % 1024
                        self.assertEqual(data, k2blob[n_offset:n_offset + chunk])
            end = time.time()
            sys.stderr.write('%ds ' % round(end - start))
        finally:
            sftp.remove('%s/hongry.txt' % FOLDER)
Example #23
0
 def tearDown(self):
     """
     remove the scratch folder created for this test.
     """
     get_sftp().rmdir(FOLDER)
Example #24
0
 def tearDown(self):
     """
     clean up: delete the per-test remote folder.
     """
     session = get_sftp()
     session.rmdir(FOLDER)