Example 1
 def test_access_log_headers_only(self):
     """Only whitelisted headers appear in the access log.

     With ``log_headers`` on and ``access_log_headers_only`` naming two
     headers (matching is case-insensitive and whitespace-tolerant here),
     any other request header -- including ones WebOb adds, like Host --
     must be excluded from the logged header field.
     """
     app = proxy_logging.ProxyLoggingMiddleware(
         FakeApp(), {
             'log_headers': 'yes',
             'access_log_headers_only': 'FIRST, seCond'
         })
     app.access_logger = FakeLogger()
     req = Request.blank('/',
                         environ={'REQUEST_METHOD': 'GET'},
                         headers={
                             'First': '1',
                             'Second': '2',
                             'Third': '3'
                         })
     resp = app(req.environ, start_response)
     # Exhaust the response iterator so the middleware emits its log
     # line; a plain loop avoids building a throwaway list just for
     # the side effect.
     for _ in resp:
         pass
     log_parts = self._log_parts(app)
     # Field 14 holds the quoted, newline-joined request headers.
     headers = unquote(log_parts[14]).split('\n')
     self.assertIn('First: 1', headers)
     self.assertIn('Second: 2', headers)
     self.assertNotIn('Third: 3', headers)
     self.assertNotIn('Host: localhost:80', headers)
Example 2
 def setUp(self):
     """Build a throwaway single-device object layout plus a test ring."""
     utils.HASH_PATH_SUFFIX = 'endcap'
     utils.HASH_PATH_PREFIX = ''
     # Set up a test ring (borrowed from common/test_ring.py).
     self.testdir = tempfile.mkdtemp()
     self.devices = os.path.join(self.testdir, 'node')
     rmtree(self.testdir, ignore_errors=1)
     os.mkdir(self.testdir)
     os.mkdir(self.devices)
     os.mkdir(os.path.join(self.devices, 'sda'))
     self.objects = os.path.join(self.devices, 'sda', 'objects')
     os.mkdir(self.objects)
     # One directory per partition 0-3, tracked for later assertions.
     self.parts = dict(
         (part, os.path.join(self.objects, part)) for part in '0123')
     for part_dir in self.parts.values():
         os.mkdir(part_dir)
     self.ring = _create_test_ring(self.testdir)
     self.conf = dict(
         swift_dir=self.testdir, devices=self.devices, mount_check='false',
         timeout='300', stats_interval='1')
     self.replicator = object_replicator.ObjectReplicator(self.conf)
     self.replicator.logger = FakeLogger()
Example 3
    def test_hash_suffix_multi_file_two(self):
        """hash_suffix() prunes a hash dir down to one .meta + one .data.

        Seed the object's hash directory with files at several ages --
        .meta/.data at every age, plus a .ts for ages over 50s -- then
        run hash_suffix() and check only two files survive.
        """
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        for tdiff in [1, 50, 100, 500]:
            suffs = ['.meta', '.data']
            if tdiff > 50:
                suffs.append('.ts')
            for suff in suffs:
                fname = os.path.join(
                    df.datadir,
                    normalize_timestamp(int(time.time()) - tdiff) + suff)
                # 'with' guarantees the handle is closed even if the
                # write raises (the bare open/close pair leaked it).
                with open(fname, 'wb') as f:
                    f.write('1234567890')

        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        hsh_path = os.listdir(whole_path_from)[0]
        whole_hsh_path = os.path.join(whole_path_from, hsh_path)

        object_replicator.hash_suffix(whole_path_from, 99)
        # only the meta and data should be left
        self.assertEqual(len(os.listdir(whole_hsh_path)), 2)
Example 4
    def test_invalidate_hash(self):
        """invalidate_hash() resets a suffix's entry in the hashes pickle.

        Also checks that a missing hashes file is tolerated (returns
        None instead of raising).
        """
        def assertFileData(file_path, data):
            # Compare unpickled contents so pickle framing details don't
            # matter; 'rb' because pickles are binary data.
            with open(file_path, 'rb') as fp:
                fdata = fp.read()
                self.assertEqual(pickle.loads(fdata), pickle.loads(data))

        df = diskfile.DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o',
                               FakeLogger())
        mkdirs(df.datadir)
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        hashes_file = os.path.join(self.objects, '0', diskfile.HASH_FILE)
        # A nonexistent hashes file must be swallowed, not raised.
        self.assertEqual(diskfile.invalidate_hash(whole_path_from), None)
        # Whatever the stored suffix hash was, invalidation resets it
        # to None.
        check_pickle_data = pickle.dumps({data_dir: None},
                                         diskfile.PICKLE_PROTOCOL)
        for data_hash in [{data_dir: None}, {data_dir: 'abcdefg'}]:
            with open(hashes_file, 'wb') as fp:
                pickle.dump(data_hash, fp, diskfile.PICKLE_PROTOCOL)
            diskfile.invalidate_hash(whole_path_from)
            assertFileData(hashes_file, check_pickle_data)
 def test_log_request_stat_method_filtering_default(self):
     """With no config, only the standard HTTP verbs get their own
     stats bucket; anything else is lumped under BAD_METHOD."""
     method_map = {
         'foo': 'BAD_METHOD',
         '': 'BAD_METHOD',
         'PUTT': 'BAD_METHOD',
         'SPECIAL': 'BAD_METHOD',
         'GET': 'GET',
         'PUT': 'PUT',
         'COPY': 'COPY',
         'HEAD': 'HEAD',
         'POST': 'POST',
         'DELETE': 'DELETE',
         'OPTIONS': 'OPTIONS',
     }
     for method, exp_method in method_map.items():
         # Fresh middleware + logger per verb so counters don't bleed.
         app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
         app.access_logger = FakeLogger()
         req = Request.blank('/v1/a/', environ={'REQUEST_METHOD': method})
         app.log_request(req.environ, 299, 11, 3, 1.17, False)
         timing_stat = 'account.%s.299.timing' % exp_method
         xfer_stat = 'account.%s.299.xfer' % exp_method
         self.assertTiming(timing_stat, app, exp_timing=1.17 * 1000)
         self.assertUpdateStats(xfer_stat, 11 + 3, app)
Example 6
    def test_run_forever_catches_usual_exceptions(self):
        """run_forever() logs ordinary exceptions and keeps looping, but
        lets SystemExit propagate.

        run_once is stubbed to raise a plain Exception on the first call
        and SystemExit on the second; only the SystemExit escapes, and
        the first exception must have been logged.
        """
        raises = [0]

        def raise_exceptions():
            raises[0] += 1
            if raises[0] < 2:
                raise Exception('exception %d' % raises[0])
            raise SystemExit('exiting exception %d' % raises[0])

        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        orig_sleep = expirer.sleep
        # Capture the SystemExit explicitly: reading the 'as err' binding
        # after the except block is fragile (Python 3 unbinds it at the
        # end of the clause), and 'err' would be undefined entirely if
        # run_forever() failed to raise.
        caught = []
        try:
            expirer.sleep = not_sleep
            x.run_once = raise_exceptions
            x.run_forever()
        except SystemExit as err:
            caught.append(err)
        finally:
            expirer.sleep = orig_sleep
        self.assertEqual(len(caught), 1)
        self.assertEqual(str(caught[0]), 'exiting exception 2')
        self.assertEqual(x.logger.log_dict['exception'],
                         [(('Unhandled exception', ), {}, 'exception 1')])
Example 7
    def setUp(self):
        """Create a two-device (sda, sdb) on-disk layout for the auditor."""
        self.testdir = os.path.join(mkdtemp(), 'tmp_test_object_auditor')
        self.devices = os.path.join(self.testdir, 'node')
        self.rcache = os.path.join(self.testdir, 'object.recon')
        self.logger = FakeLogger()
        rmtree(self.testdir, ignore_errors=1)
        mkdirs(os.path.join(self.devices, 'sda'))
        self.objects = os.path.join(self.devices, 'sda', 'objects')

        os.mkdir(os.path.join(self.devices, 'sdb'))
        self.objects_2 = os.path.join(self.devices, 'sdb', 'objects')

        os.mkdir(self.objects)
        # Partition directories 0-3 under sda's objects dir.
        self.parts = dict(
            (part, os.path.join(self.objects, part)) for part in '0123')
        for part_dir in self.parts.values():
            os.mkdir(part_dir)

        self.conf = dict(devices=self.devices,
                         mount_check='false',
                         object_size_stats='10,100,1024,10240')
        self.df_mgr = DiskFileManager(self.conf, self.logger)
        self.disk_file = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o')
    def test_os_error(self):
        """An unreadable realms conf logs an error and leaves defaults."""
        fname = 'container-sync-realms.conf'
        fcontents = ''
        with temptree([fname], [fcontents]) as tempdir:
            logger = FakeLogger()
            fpath = os.path.join(tempdir, fname)

            def _fail_getmtime(path):
                # Simulate a permission failure on the mtime check.
                raise OSError(
                    errno.EACCES,
                    "%s: '%s'" % (os.strerror(errno.EACCES), fpath))

            with patch('os.path.getmtime', _fail_getmtime):
                csr = ContainerSyncRealms(fpath, logger)

            expected_error = (
                "Could not load '%s': [Errno 13] Permission denied: "
                "'%s'" % (fpath, fpath))
            self.assertEqual(logger.all_log_lines(),
                             {'error': [expected_error]})
            self.assertEqual(csr.mtime_check_interval, 300)
            self.assertEqual(csr.realms(), [])
Example 9
    def test_run_once_with_disk_unmounted(self, mock_ismount):
        """run_once() consults ismount only when mount_check is enabled.

        With mount_check false, ismount is never called and the async
        dir is processed; with mount_check true and the device reported
        unmounted, the device is skipped and one error is counted.
        """
        mock_ismount.return_value = False
        cu = object_updater.ObjectUpdater({
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
            'interval': '1',
            'concurrency': '1',
            'node_timeout': '15'
        })
        cu.run_once()
        async_dir = os.path.join(self.sda1, ASYNCDIR)
        os.mkdir(async_dir)
        cu.run_once()
        # assertTrue replaces the deprecated assert_ alias.
        self.assertTrue(os.path.exists(async_dir))
        # mount_check == False means no call to ismount
        self.assertEqual([], mock_ismount.mock_calls)

        cu = object_updater.ObjectUpdater({
            'devices': self.devices_dir,
            'mount_check': 'TrUe',
            'swift_dir': self.testdir,
            'interval': '1',
            'concurrency': '1',
            'node_timeout': '15'
        })
        odd_dir = os.path.join(async_dir, 'not really supposed to be here')
        os.mkdir(odd_dir)
        cu.logger = FakeLogger()
        cu.run_once()
        self.assertTrue(os.path.exists(async_dir))
        self.assertTrue(os.path.exists(odd_dir))  # skipped: not mounted!
        # mount_check == True means ismount was checked
        self.assertEqual([
            mock.call(self.sda1),
        ], mock_ismount.mock_calls)
        self.assertEqual(cu.logger.get_increment_counts(), {'errors': 1})
Example 10
    def test_success_gets_counted(self):
        """A successfully expired object bumps report_objects and shows
        up in the pass-completed info log line."""

        class InternalClient(object):
            # Minimal stand-in for the expirer's internal swift client.
            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects

            def get_account_info(self, *a, **kw):
                return 1, 2

            def iter_containers(self, *a, **kw):
                return self.containers

            def delete_container(self, *a, **kw):
                pass

            def delete_object(self, *a, **kw):
                pass

            def iter_objects(self, *a, **kw):
                return self.objects

        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        # Deleting the actual object is a no-op for this test.
        x.delete_actual_object = lambda o, t: None
        self.assertEqual(x.report_objects, 0)
        # One container and one object, both timestamped a day ago so
        # they are already due for expiration.
        x.swift = InternalClient(
            [{'name': str(int(time() - 86400))}],
            [{'name': '%d-actual-obj' % int(time() - 86400)}])
        x.run_once()
        self.assertEqual(x.report_objects, 1)
        self.assertEqual(
            x.logger.log_dict['info'],
            [(('Pass beginning; 1 possible containers; '
               '2 possible objects', ), {}),
             (('Pass completed in 0s; 1 objects expired', ), {})])
    def test_missing_from_realms_conf(self):
        """A 'current' setting absent from the realms conf still parses
        into realm/cluster, registers no current cluster, and logs the
        unknown-current error."""
        self.conf = {'swift_dir': self.tempdir, 'current': 'foo/bar'}
        self.sync = container_sync.ContainerSync(self.app,
                                                 self.conf,
                                                 logger=FakeLogger())
        self.assertEqual('FOO', self.sync.realm)
        self.assertEqual('BAR', self.sync.cluster)
        info = {}

        def capture_swift_info(key, **options):
            info[key] = options

        with mock.patch(
                'swift.common.middleware.container_sync.register_swift_info',
                new=capture_swift_info):
            self.sync.register_info()

        # No cluster in any realm may claim to be 'current'.
        realms = info['container_sync']['realms']
        for realm_info in realms.values():
            for options in realm_info['clusters'].values():
                self.assertEqual(options.get('current', False), False)

        for line in self.sync.logger.get_lines_for_level('error'):
            self.assertEqual(line, 'Unknown current '
                             '//REALM/CLUSTER (//FOO/BAR)')
 def test_log_request_stat_method_filtering_custom(self):
     """A custom valid-methods setting (with or without the access_
     prefix) controls which verbs get their own stats bucket."""
     method_map = {
         'foo': 'BAD_METHOD',
         '': 'BAD_METHOD',
         'PUTT': 'BAD_METHOD',
         'SPECIAL': 'SPECIAL',  # will be configured
         'GET': 'GET',
         'PUT': 'PUT',
         'COPY': 'BAD_METHOD',  # prove no one's special
     }
     # this conf var supports optional leading access_
     conf_keys = ('access_log_statsd_valid_http_methods',
                  'log_statsd_valid_http_methods')
     for conf_key in conf_keys:
         for method, exp_method in method_map.items():
             conf = {conf_key: 'SPECIAL,  GET,PUT '}  # crazy spaces ok
             app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), conf)
             app.access_logger = FakeLogger()
             req = Request.blank('/v1/a/c',
                                 environ={'REQUEST_METHOD': method})
             app.log_request(req.environ, 911, 4, 43, 1.01, False)
             self.assertTiming('container.%s.911.timing' % exp_method,
                               app, exp_timing=1.01 * 1000)
             self.assertUpdateStats('container.%s.911.xfer' % exp_method,
                                    4 + 43, app)
Example 13
 def test_delete_partition_with_handoff_delete_failures(self):
     """With handoff_delete = 2 and two failed rsync pushes, the local
     partition must NOT be removed after replication.

     Two of the rsync calls are forced to return nonzero, so fewer than
     handoff_delete nodes received the data and the local copy has to
     be kept.
     """
     with mock.patch('swift.obj.replicator.http_connect',
                     mock_http_connect(200)):
         self.replicator.handoff_delete = 2
         df = diskfile.DiskFile(self.devices, 'sda', '1', 'a', 'c', 'o',
                                FakeLogger())
         mkdirs(df.datadir)
         # 'with' replaces the leaky open/close pair; the stray debug
         # print of df.datadir is removed.
         with open(
                 os.path.join(df.datadir,
                              normalize_timestamp(time.time()) + '.data'),
                 'wb') as f:
             f.write('1234567890')
         ohash = hash_path('a', 'c', 'o')
         data_dir = ohash[-3:]
         whole_path_from = os.path.join(self.objects, '1', data_dir)
         part_path = os.path.join(self.objects, '1')
         self.assertTrue(os.access(part_path, os.F_OK))
         # Replication only pushes to remote nodes; skip local IPs.
         nodes = [
             node for node in self.ring.get_part_nodes(1)
             if node['ip'] not in _ips()
         ]
         process_arg_checker = []
         for i, node in enumerate(nodes):
             rsync_mod = '%s::object/sda/objects/%s' % (node['ip'], 1)
             if i in (0, 1):
                 # force two of the rsync calls to fail
                 ret_code = 1
             else:
                 ret_code = 0
             process_arg_checker.append(
                 (ret_code, '', ['rsync', whole_path_from, rsync_mod]))
         with _mock_process(process_arg_checker):
             self.replicator.replicate()
         # The file should still exist
         self.assertTrue(os.access(part_path, os.F_OK))
Example 14
    def test_ratelimit_max_rate_multiple_acc(self):
        """Hammer the ratelimiter from many threads, one account each,
        and check the total wall time matches the configured rate.

        15 accounts each issue 4 PUTs at account_ratelimit=2/s with a 2s
        max sleep; per-account sleeps overlap across threads, so the run
        is expected to take ~1.5s overall.
        """
        num_calls = 4
        current_rate = 2
        conf_dict = {'account_ratelimit': current_rate,
                     'max_sleep_time_seconds': 2}
        fake_memcache = FakeMemcache()

        the_app = ratelimit.RateLimitMiddleware(None, conf_dict,
                                                logger=FakeLogger())
        the_app.memcache_client = fake_memcache
        # Minimal stand-in for a request object; only .method is needed.
        req = lambda: None
        req.method = 'PUT'

        class rate_caller(Thread):

            def __init__(self, name):
                # 'name' doubles as the account being rate-limited.
                self.myname = name
                Thread.__init__(self)

            def run(self):
                for j in range(num_calls):
                    self.result = the_app.handle_ratelimit(req, self.myname,
                                                           'c', None)

        nt = 15
        begin = time.time()
        threads = []
        for i in range(nt):
            rc = rate_caller('a%s' % i)
            rc.start()
            threads.append(rc)
        for thread in threads:
            thread.join()

        time_took = time.time() - begin
        # NOTE(review): wall-clock assertion; may be flaky on slow or
        # heavily loaded machines.
        self.assertEquals(1.5, round(time_took, 1))
Example 15
 def setUp(self):
     """Patch diskfile/utils metadata I/O, fsync and fallocate with mock
     versions, then build a DiskFileManager on a temp directory.

     Every original callable is saved on self first so tearDown can
     restore it; the patch order mirrors the save order.
     """
     self._orig_tpool_exc = tpool.execute
     # Run tpool'd calls inline so tests execute deterministically.
     tpool.execute = lambda f, *args, **kwargs: f(*args, **kwargs)
     self.lg = FakeLogger()
     _initxattr()
     _mock_clear_metadata()
     # Swap metadata read/write in both diskfile and common.utils for
     # the mock versions (defined elsewhere in this module).
     self._saved_df_wm = swiftonfile.swift.obj.diskfile.write_metadata
     self._saved_df_rm = swiftonfile.swift.obj.diskfile.read_metadata
     swiftonfile.swift.obj.diskfile.write_metadata = _mock_write_metadata
     swiftonfile.swift.obj.diskfile.read_metadata = _mock_read_metadata
     self._saved_ut_wm = swiftonfile.swift.common.utils.write_metadata
     self._saved_ut_rm = swiftonfile.swift.common.utils.read_metadata
     swiftonfile.swift.common.utils.write_metadata = _mock_write_metadata
     swiftonfile.swift.common.utils.read_metadata = _mock_read_metadata
     # fsync/fallocate are also mocked -- presumably no-op fakes; see
     # their definitions elsewhere in this file.
     self._saved_do_fsync = swiftonfile.swift.obj.diskfile.do_fsync
     swiftonfile.swift.obj.diskfile.do_fsync = _mock_do_fsync
     self._saved_fallocate = swiftonfile.swift.obj.diskfile.fallocate
     swiftonfile.swift.obj.diskfile.fallocate = _mock_fallocate
     self.td = tempfile.mkdtemp()
     self.conf = dict(devices=self.td,
                      mb_per_sync=2,
                      keep_cache_size=(1024 * 1024),
                      mount_check=False)
     self.mgr = DiskFileManager(self.conf, self.lg)
Example 16
    def test_no_content_length_no_transfer_encoding_with_generator(self):
        """A generator body with neither Content-Length nor
        Transfer-Encoding still gets its byte count measured and
        logged."""

        class BodyGen(object):
            def __init__(self, data):
                self.data = data

            def __iter__(self):
                yield self.data

        app = proxy_logging.ProxyLoggingMiddleware(
            FakeAppNoContentLengthNoTransferEncoding(
                body=BodyGen('abc'),
            ), {})
        app.access_logger = FakeLogger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        resp = app(req.environ, start_response)
        resp_body = ''.join(resp)
        log_parts = self._log_parts(app)
        # Fields: 3=method, 4=path, 5=protocol, 6=status, 11=bytes sent.
        expected = {3: 'GET', 4: '/', 5: 'HTTP/1.0', 6: '200', 11: '3'}
        for idx in sorted(expected):
            self.assertEqual(log_parts[idx], expected[idx])
        self.assertEqual(resp_body, 'abc')
    def test_log_request_stat_type_good(self):
        """
        log_request() should send timing and byte-count counters for GET
        requests.  Also, __call__()'s iter_response() function should
        statsd-log time to first byte (calling the passed-in start_response
        function), but only for GET requests.
        """
        # Each stanza below primes stub_times with [start, end]; the
        # stub pops one value per time.time() call the middleware makes.
        stub_times = []

        def stub_time():
            return stub_times.pop(0)

        # Request path -> expected statsd metric prefix.
        path_types = {
            '/v1/a': 'account',
            '/v1/a/': 'account',
            '/v1/a/c': 'container',
            '/v1/a/c/': 'container',
            '/v1/a/c/o': 'object',
            '/v1/a/c/o/': 'object',
            '/v1/a/c/o/p': 'object',
            '/v1/a/c/o/p/': 'object',
            '/v1/a/c/o/p/p2': 'object',
        }
        for path, exp_type in path_types.iteritems():
            orig_time = time.time
            try:
                # Patch time.time directly; restored in the finally below.
                time.time = stub_time
                # GET
                app = proxy_logging.ProxyLoggingMiddleware(
                    FakeApp(body='7654321', response_str='321 Fubar'), {})
                app.access_logger = FakeLogger()
                req = Request.blank(path,
                                    environ={
                                        'REQUEST_METHOD': 'GET',
                                        'wsgi.input': StringIO.StringIO('4321')
                                    })
                # elapsed = 20.718... - 18.0 = 2.718...s
                stub_times = [18.0, 20.71828182846]
                iter_response = app(req.environ, lambda *_: None)
                self.assertEqual('7654321', ''.join(iter_response))
                self.assertTiming('%s.GET.321.timing' % exp_type,
                                  app,
                                  exp_timing=2.71828182846 * 1000)
                self.assertTimingSince('%s.GET.321.first-byte.timing' %
                                       exp_type,
                                       app,
                                       exp_start=18.0)
                # xfer counts request bytes (4) + response bytes (7).
                self.assertUpdateStats('%s.GET.321.xfer' % exp_type, 4 + 7,
                                       app)

                # GET with swift.proxy_access_log_made already set
                app = proxy_logging.ProxyLoggingMiddleware(
                    FakeApp(body='7654321', response_str='321 Fubar'), {})
                app.access_logger = FakeLogger()
                req = Request.blank(path,
                                    environ={
                                        'REQUEST_METHOD': 'GET',
                                        'swift.proxy_access_log_made': True,
                                        'wsgi.input': StringIO.StringIO('4321')
                                    })
                stub_times = [18.0, 20.71828182846]
                iter_response = app(req.environ, lambda *_: None)
                self.assertEqual('7654321', ''.join(iter_response))
                # Already-logged requests must emit no stats at all.
                self.assertEqual([], app.access_logger.log_dict['timing'])
                self.assertEqual([],
                                 app.access_logger.log_dict['timing_since'])
                self.assertEqual([],
                                 app.access_logger.log_dict['update_stats'])

                # PUT (no first-byte timing!)
                app = proxy_logging.ProxyLoggingMiddleware(
                    FakeApp(body='87654321', response_str='314 PiTown'), {})
                app.access_logger = FakeLogger()
                req = Request.blank(path,
                                    environ={
                                        'REQUEST_METHOD': 'PUT',
                                        'wsgi.input':
                                        StringIO.StringIO('654321')
                                    })
                # (it's not a GET, so time() doesn't have a 2nd call)
                stub_times = [58.2, 58.2 + 7.3321]
                iter_response = app(req.environ, lambda *_: None)
                self.assertEqual('87654321', ''.join(iter_response))
                self.assertTiming('%s.PUT.314.timing' % exp_type,
                                  app,
                                  exp_timing=7.3321 * 1000)
                self.assertNotTiming('%s.GET.314.first-byte.timing' % exp_type,
                                     app)
                self.assertNotTiming('%s.PUT.314.first-byte.timing' % exp_type,
                                     app)
                self.assertUpdateStats('%s.PUT.314.xfer' % exp_type, 6 + 8,
                                       app)
            finally:
                time.time = orig_time
Example 18
 def limit_filter(app):
     """Filter factory: wrap *app* in the ratelimit middleware, using
     the enclosing scope's ``conf`` and a fresh FakeLogger."""
     return ratelimit.RateLimitMiddleware(app, conf, logger=FakeLogger())
Example 19
    def test_log_auth_token(self):
        """reveal_sensitive_prefix controls how much of X-Auth-Token is
        logged.

        The default keeps the first 16 characters followed by '...'; a
        smaller value truncates further; a value >= the token length
        logs it whole; 0 logs only '...'.  Requests without a token
        always log '-'.  The eight near-identical stanzas of the
        original are folded into one helper.
        """
        auth_token = 'b05bf940-0464-4c0e-8c70-87717d2d73e8'

        def logged_token(conf, send_token):
            # Run one GET through a fresh middleware stack and return
            # the auth-token field (index 9) of the access-log line.
            app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), conf)
            app.access_logger = FakeLogger()
            environ = {'REQUEST_METHOD': 'GET'}
            if send_token:
                environ['HTTP_X_AUTH_TOKEN'] = auth_token
            req = Request.blank('/', environ=environ)
            resp = app(req.environ, start_response)
            ''.join(resp)  # exhaust the iterator so the log is written
            return self._log_parts(app)[9]

        # Default - reveal_sensitive_prefix is 16
        self.assertEqual(logged_token({}, False), '-')
        self.assertEqual(logged_token({}, True), 'b05bf940-0464-4c...')

        # Truncate to first 8 characters
        conf = {'reveal_sensitive_prefix': '8'}
        self.assertEqual(logged_token(conf, False), '-')
        self.assertEqual(logged_token(conf, True), 'b05bf940...')

        # Token length and reveal_sensitive_prefix are same (no truncate)
        conf = {'reveal_sensitive_prefix': str(len(auth_token))}
        self.assertEqual(logged_token(conf, True), auth_token)

        # No effective limit on auth token
        conf = {'reveal_sensitive_prefix': constraints.MAX_HEADER_SIZE}
        self.assertEqual(logged_token(conf, True), auth_token)

        # Don't log x-auth-token
        conf = {'reveal_sensitive_prefix': '0'}
        self.assertEqual(logged_token(conf, False), '-')
        self.assertEqual(logged_token(conf, True), '...')
Example 20
    def test_sweep_logs(self):
        """object_sweep() emits a start line, periodic progress lines and
        a completion line, each carrying cumulative per-device counters.

        Five async_pendings are written; with time() mocked to advance
        5s per call and report_interval at 10s, exactly four info lines
        are expected (start, two progress, complete).
        """
        asyncdir = os.path.join(self.sda1, ASYNCDIR_BASE)
        prefix_dir = os.path.join(asyncdir, 'abc')
        mkpath(prefix_dir)

        # Lay down five pending updates with distinct names/timestamps.
        for o, t in [('abc', 123), ('def', 234), ('ghi', 345), ('jkl', 456),
                     ('mno', 567)]:
            ohash = hash_path('account', 'container', o)
            o_path = os.path.join(prefix_dir,
                                  ohash + '-' + normalize_timestamp(t))
            write_pickle({}, o_path)

        class MockObjectUpdater(object_updater.ObjectUpdater):
            # Replace the real update with an unlink + stats bump so the
            # sweep's bookkeeping is observable without any networking.
            def process_object_update(self, update_path, device, policy):
                os.unlink(update_path)
                self.stats.successes += 1
                self.stats.unlinks += 1

        logger = FakeLogger()
        ou = MockObjectUpdater(
            {
                'devices': self.devices_dir,
                'mount_check': 'false',
                'swift_dir': self.testdir,
                'interval': '1',
                'concurrency': '1',
                'report_interval': '10.0',
                'node_timeout': '5'
            },
            logger=logger)

        now = [time()]

        def mock_time_function():
            # Fake clock: advances 5 seconds on every call.
            rv = now[0]
            now[0] += 5
            return rv

        # With 10s between updates, time() advancing 5s every time we look,
        # and 5 async_pendings on disk, we should get at least two progress
        # lines.
        with mock.patch('swift.obj.updater.time',
                        mock.MagicMock(time=mock_time_function)), \
                mock.patch.object(object_updater, 'ContextPool', MockPool):
            ou.object_sweep(self.sda1)

        info_lines = logger.get_lines_for_level('info')
        self.assertEqual(4, len(info_lines))
        self.assertIn("sweep starting", info_lines[0])
        self.assertIn(self.sda1, info_lines[0])

        self.assertIn("sweep progress", info_lines[1])
        # the space ensures it's a positive number
        self.assertIn(
            "2 successes, 0 failures, 0 quarantines, 2 unlinks, 0 errors, "
            "0 redirects", info_lines[1])
        self.assertIn(self.sda1, info_lines[1])

        self.assertIn("sweep progress", info_lines[2])
        self.assertIn(
            "4 successes, 0 failures, 0 quarantines, 4 unlinks, 0 errors, "
            "0 redirects", info_lines[2])
        self.assertIn(self.sda1, info_lines[2])

        self.assertIn("sweep complete", info_lines[3])
        self.assertIn(
            "5 successes, 0 failures, 0 quarantines, 5 unlinks, 0 errors, "
            "0 redirects", info_lines[3])
        self.assertIn(self.sda1, info_lines[3])
Example 21
    def test_log_request_stat_type_good(self):
        """
        log_request() should send timing and byte-count counters for GET
        requests.  Also, __call__()'s iter_response() function should
        statsd-log time to first byte (calling the passed-in start_response
        function), but only for GET requests.
        """
        stub_times = []

        def stub_time():
            return stub_times.pop(0)

        path_types = {
            '/v1/a': 'account',
            '/v1/a/': 'account',
            '/v1/a/c': 'container',
            '/v1/a/c/': 'container',
            '/v1/a/c/o': 'object',
            '/v1/a/c/o/': 'object',
            '/v1/a/c/o/p': 'object',
            '/v1/a/c/o/p/': 'object',
            '/v1/a/c/o/p/p2': 'object',
        }
        with mock.patch("time.time", stub_time):
            for path, exp_type in path_types.items():
                # GET
                app = proxy_logging.ProxyLoggingMiddleware(
                    FakeApp(body='7654321', response_str='321 Fubar'), {})
                app.access_logger = FakeLogger()
                req = Request.blank(path, environ={
                    'REQUEST_METHOD': 'GET',
                    'wsgi.input': BytesIO(b'4321')})
                stub_times = [18.0, 20.71828182846]
                iter_response = app(req.environ, lambda *_: None)

                self.assertEqual('7654321', ''.join(iter_response))
                self.assertTiming('%s.GET.321.timing' % exp_type, app,
                                  exp_timing=2.71828182846 * 1000)
                self.assertTimingSince(
                    '%s.GET.321.first-byte.timing' % exp_type, app,
                    exp_start=18.0)
                if exp_type == 'object':
                    # Object operations also return stats by policy
                    # In this case, the value needs to match the timing for GET
                    self.assertTiming('%s.policy.0.GET.321.timing' % exp_type,
                                      app, exp_timing=2.71828182846 * 1000)
                    self.assertUpdateStats([('%s.GET.321.xfer' % exp_type,
                                             4 + 7),
                                            ('object.policy.0.GET.321.xfer',
                                            4 + 7)],
                                           app)
                else:
                    self.assertUpdateStats([('%s.GET.321.xfer' % exp_type,
                                            4 + 7)],
                                           app)

                # GET Repeat the test above, but with a non-existent policy
                # Do this only for object types
                if exp_type == 'object':
                    app = proxy_logging.ProxyLoggingMiddleware(
                        FakeApp(body='7654321', response_str='321 Fubar',
                                policy_idx='-1'), {})
                    app.access_logger = FakeLogger()
                    req = Request.blank(path, environ={
                        'REQUEST_METHOD': 'GET',
                        'wsgi.input': BytesIO(b'4321')})
                    stub_times = [18.0, 20.71828182846]
                    iter_response = app(req.environ, lambda *_: None)

                    self.assertEqual('7654321', ''.join(iter_response))
                    self.assertTiming('%s.GET.321.timing' % exp_type, app,
                                      exp_timing=2.71828182846 * 1000)
                    self.assertTimingSince(
                        '%s.GET.321.first-byte.timing' % exp_type, app,
                        exp_start=18.0)
                    # No results returned for the non-existent policy
                    self.assertUpdateStats([('%s.GET.321.xfer' % exp_type,
                                            4 + 7)],
                                           app)

                # GET with swift.proxy_access_log_made already set
                app = proxy_logging.ProxyLoggingMiddleware(
                    FakeApp(body='7654321', response_str='321 Fubar'), {})
                app.access_logger = FakeLogger()
                req = Request.blank(path, environ={
                    'REQUEST_METHOD': 'GET',
                    'swift.proxy_access_log_made': True,
                    'wsgi.input': BytesIO(b'4321')})
                stub_times = [18.0, 20.71828182846]
                iter_response = app(req.environ, lambda *_: None)
                self.assertEqual('7654321', ''.join(iter_response))
                self.assertEqual([], app.access_logger.log_dict['timing'])
                self.assertEqual([],
                                 app.access_logger.log_dict['timing_since'])
                self.assertEqual([],
                                 app.access_logger.log_dict['update_stats'])

                # PUT (no first-byte timing!)
                app = proxy_logging.ProxyLoggingMiddleware(
                    FakeApp(body='87654321', response_str='314 PiTown'), {})
                app.access_logger = FakeLogger()
                req = Request.blank(path, environ={
                    'REQUEST_METHOD': 'PUT',
                    'wsgi.input': BytesIO(b'654321')})
                # (it's not a GET, so time() doesn't have a 2nd call)
                stub_times = [58.2, 58.2 + 7.3321]
                iter_response = app(req.environ, lambda *_: None)
                self.assertEqual('87654321', ''.join(iter_response))
                self.assertTiming('%s.PUT.314.timing' % exp_type, app,
                                  exp_timing=7.3321 * 1000)
                self.assertNotTiming(
                    '%s.GET.314.first-byte.timing' % exp_type, app)
                self.assertNotTiming(
                    '%s.PUT.314.first-byte.timing' % exp_type, app)
                if exp_type == 'object':
                    # Object operations also return stats by policy In this
                    # case, the value needs to match the timing for PUT.
                    self.assertTiming('%s.policy.0.PUT.314.timing' %
                                      exp_type, app,
                                      exp_timing=7.3321 * 1000)
                    self.assertUpdateStats(
                        [('object.PUT.314.xfer', 6 + 8),
                         ('object.policy.0.PUT.314.xfer', 6 + 8)], app)
                else:
                    self.assertUpdateStats(
                        [('%s.PUT.314.xfer' % exp_type, 6 + 8)], app)

                # PUT Repeat the test above, but with a non-existent policy
                # Do this only for object types
                if exp_type == 'object':
                    app = proxy_logging.ProxyLoggingMiddleware(
                        FakeApp(body='87654321', response_str='314 PiTown',
                                policy_idx='-1'), {})
                    app.access_logger = FakeLogger()
                    req = Request.blank(path, environ={
                        'REQUEST_METHOD': 'PUT',
                        'wsgi.input': BytesIO(b'654321')})
                    # (it's not a GET, so time() doesn't have a 2nd call)
                    stub_times = [58.2, 58.2 + 7.3321]
                    iter_response = app(req.environ, lambda *_: None)
                    self.assertEqual('87654321', ''.join(iter_response))
                    self.assertTiming('%s.PUT.314.timing' % exp_type, app,
                                      exp_timing=7.3321 * 1000)
                    self.assertNotTiming(
                        '%s.GET.314.first-byte.timing' % exp_type, app)
                    self.assertNotTiming(
                        '%s.PUT.314.first-byte.timing' % exp_type, app)
                    # No results returned for the non-existent policy
                    self.assertUpdateStats([('object.PUT.314.xfer', 6 + 8)],
                                           app)
Esempio n. 22
0
 def setUp(self):
     """Wire a Decrypter middleware to a fake Swift backend for each test."""
     fake_backend = FakeSwift()
     self.app = fake_backend
     self.decrypter = decrypter.Decrypter(fake_backend, {})
     self.decrypter.logger = FakeLogger()
Esempio n. 23
0
    def test_container_sync_row_put(self):
        """Exercise container_sync_row() for a row needing an object PUT.

        Covers the success path plus failure modes: unexpected exceptions
        and ClientExceptions from the source GET, and 401/404/503 errors
        from the destination PUT.  sync.shuffle, sync.put_object and
        sync.direct_get_object are monkeypatched for the duration of the
        test and restored in the ``finally`` block.
        """
        orig_shuffle = sync.shuffle
        orig_put_object = sync.put_object
        orig_direct_get_object = sync.direct_get_object
        try:
            # Make node ordering deterministic for the assertions below.
            sync.shuffle = lambda x: x

            # Stub PUT: asserts the sync request carries the expected
            # target, name, headers, body and proxy.
            def fake_put_object(sync_to,
                                name=None,
                                headers=None,
                                contents=None,
                                proxy=None):
                self.assertEquals(sync_to, 'http://sync/to/path')
                self.assertEquals(name, 'object')
                self.assertEquals(
                    headers, {
                        'x-container-sync-key': 'key',
                        'x-timestamp': '1.2',
                        'other-header': 'other header value',
                        'etag': 'etagvalue'
                    })
                self.assertEquals(contents.read(), 'contents')
                self.assertEquals(proxy, 'http://proxy')

            sync.put_object = fake_put_object

            cs = sync.ContainerSync({},
                                    container_ring=FakeRing(),
                                    object_ring=FakeRing())
            cs.proxy = 'http://proxy'

            # Stub GET: every node returns the same headers and body.
            def fake_direct_get_object(node,
                                       part,
                                       account,
                                       container,
                                       obj,
                                       resp_chunk_size=1):
                return ({
                    'other-header': 'other header value',
                    'etag': '"etagvalue"',
                    'x-timestamp': '1.2'
                }, iter('contents'))

            sync.direct_get_object = fake_direct_get_object
            # Success as everything says it worked
            self.assertTrue(
                cs.container_sync_row(
                    {
                        'deleted': False,
                        'name': 'object',
                        'created_at': '1.2'
                    }, 'http://sync/to/path', 'key',
                    FakeContainerBroker('broker'), {
                        'account': 'a',
                        'container': 'c'
                    }))
            self.assertEquals(cs.container_puts, 1)

            def fake_direct_get_object(node,
                                       part,
                                       account,
                                       container,
                                       obj,
                                       resp_chunk_size=1):
                return ({
                    'date': 'date value',
                    'last-modified': 'last modified value',
                    'x-timestamp': '1.2',
                    'other-header': 'other header value',
                    'etag': '"etagvalue"'
                }, iter('contents'))

            sync.direct_get_object = fake_direct_get_object
            # Success as everything says it worked, also checks 'date' and
            # 'last-modified' headers are removed and that 'etag' header is
            # stripped of double quotes.
            self.assertTrue(
                cs.container_sync_row(
                    {
                        'deleted': False,
                        'name': 'object',
                        'created_at': '1.2'
                    }, 'http://sync/to/path', 'key',
                    FakeContainerBroker('broker'), {
                        'account': 'a',
                        'container': 'c'
                    }))
            self.assertEquals(cs.container_puts, 2)

            # Track exceptions raised by the GET stub so we can assert how
            # many nodes were tried.
            exc = []

            def fake_direct_get_object(node,
                                       part,
                                       account,
                                       container,
                                       obj,
                                       resp_chunk_size=1):
                exc.append(Exception('test exception'))
                raise exc[-1]

            sync.direct_get_object = fake_direct_get_object
            # Fail due to completely unexpected exception
            self.assertFalse(
                cs.container_sync_row(
                    {
                        'deleted': False,
                        'name': 'object',
                        'created_at': '1.2'
                    }, 'http://sync/to/path', 'key',
                    FakeContainerBroker('broker'), {
                        'account': 'a',
                        'container': 'c'
                    }))
            self.assertEquals(cs.container_puts, 2)
            # All three replica nodes were attempted.
            self.assertEquals(len(exc), 3)
            self.assertEquals(str(exc[-1]), 'test exception')

            exc = []

            def fake_direct_get_object(node,
                                       part,
                                       account,
                                       container,
                                       obj,
                                       resp_chunk_size=1):
                exc.append(ClientException('test client exception'))
                raise exc[-1]

            sync.direct_get_object = fake_direct_get_object
            # Fail due to all direct_get_object calls failing
            self.assertFalse(
                cs.container_sync_row(
                    {
                        'deleted': False,
                        'name': 'object',
                        'created_at': '1.2'
                    }, 'http://sync/to/path', 'key',
                    FakeContainerBroker('broker'), {
                        'account': 'a',
                        'container': 'c'
                    }))
            self.assertEquals(cs.container_puts, 2)
            self.assertEquals(len(exc), 3)
            self.assertEquals(str(exc[-1]), 'test client exception')

            # From here on the GET succeeds; failures come from the PUT.
            def fake_direct_get_object(node,
                                       part,
                                       account,
                                       container,
                                       obj,
                                       resp_chunk_size=1):
                return ({
                    'other-header': 'other header value',
                    'x-timestamp': '1.2',
                    'etag': '"etagvalue"'
                }, iter('contents'))

            def fake_put_object(sync_to,
                                name=None,
                                headers=None,
                                contents=None,
                                proxy=None):
                raise ClientException('test client exception', http_status=401)

            sync.direct_get_object = fake_direct_get_object
            sync.put_object = fake_put_object
            cs.logger = FakeLogger()
            # Fail due to 401
            self.assertFalse(
                cs.container_sync_row(
                    {
                        'deleted': False,
                        'name': 'object',
                        'created_at': '1.2'
                    }, 'http://sync/to/path', 'key',
                    FakeContainerBroker('broker'), {
                        'account': 'a',
                        'container': 'c'
                    }))
            self.assertEquals(cs.container_puts, 2)
            # 401 is logged at info level with an 'Unauth' prefix.
            self.assert_(
                re.match('Unauth ', cs.logger.log_dict['info'][0][0][0]))

            def fake_put_object(sync_to,
                                name=None,
                                headers=None,
                                contents=None,
                                proxy=None):
                raise ClientException('test client exception', http_status=404)

            sync.put_object = fake_put_object
            # Fail due to 404
            cs.logger = FakeLogger()
            self.assertFalse(
                cs.container_sync_row(
                    {
                        'deleted': False,
                        'name': 'object',
                        'created_at': '1.2'
                    }, 'http://sync/to/path', 'key',
                    FakeContainerBroker('broker'), {
                        'account': 'a',
                        'container': 'c'
                    }))
            self.assertEquals(cs.container_puts, 2)
            # 404 is logged at info level with a 'Not found' prefix.
            self.assert_(
                re.match('Not found ', cs.logger.log_dict['info'][0][0][0]))

            def fake_put_object(sync_to,
                                name=None,
                                headers=None,
                                contents=None,
                                proxy=None):
                raise ClientException('test client exception', http_status=503)

            sync.put_object = fake_put_object
            # Fail due to 503
            self.assertFalse(
                cs.container_sync_row(
                    {
                        'deleted': False,
                        'name': 'object',
                        'created_at': '1.2'
                    }, 'http://sync/to/path', 'key',
                    FakeContainerBroker('broker'), {
                        'account': 'a',
                        'container': 'c'
                    }))
            self.assertEquals(cs.container_puts, 2)
            # 503 is logged as an exception with an 'ERROR Syncing ' prefix.
            self.assertTrue(cs.logger.log_dict['exception'][0][0]
                            [0].startswith('ERROR Syncing '))
        finally:
            # Restore the real module attributes patched above.
            sync.shuffle = orig_shuffle
            sync.put_object = orig_put_object
            sync.direct_get_object = orig_direct_get_object
Esempio n. 24
0
    def test_run_once_recover_from_timeout(self):
        """run_once() must recover when get_hashes raises a Timeout.

        A Timeout on the third get_hashes call must not surface as an
        'Error syncing partition' log line, and rsync must still be
        invoked with the expected arguments for each remote node.
        """
        replicator = object_replicator.ObjectReplicator(
            dict(swift_dir=self.testdir,
                 devices=self.devices,
                 mount_check='false',
                 timeout='300',
                 stats_interval='1'))
        # Save the real hooks so they can be restored in the finally block.
        was_connector = object_replicator.http_connect
        was_get_hashes = object_replicator.get_hashes
        was_execute = tpool.execute
        self.get_hash_count = 0
        try:

            def fake_get_hashes(*args, **kwargs):
                self.get_hash_count += 1
                if self.get_hash_count == 3:
                    # raise timeout on last call to get hashes
                    raise Timeout()
                return 2, {'abc': 'def'}

            # Installed as logger.exception below; flags the test as failed
            # if the replicator logs a partition sync error.
            def fake_exc(tester, *args, **kwargs):
                if 'Error syncing partition' in args[0]:
                    tester.i_failed = True

            self.i_failed = False
            object_replicator.http_connect = mock_http_connect(200)
            object_replicator.get_hashes = fake_get_hashes
            replicator.logger.exception = \
                lambda *args, **kwargs: fake_exc(self, *args, **kwargs)
            # Write some files into '1' and run replicate- they should be moved
            # to the other partitions and then node should get deleted.
            cur_part = '1'
            df = DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o',
                          FakeLogger())
            mkdirs(df.datadir)
            f = open(
                os.path.join(df.datadir,
                             normalize_timestamp(time.time()) + '.data'), 'wb')
            f.write('1234567890')
            f.close()
            ohash = hash_path('a', 'c', 'o')
            data_dir = ohash[-3:]
            whole_path_from = os.path.join(self.objects, cur_part, data_dir)
            # Expect one rsync invocation per remote node for this partition.
            process_arg_checker = []
            nodes = [node for node in
                     self.ring.get_part_nodes(int(cur_part)) \
                         if node['ip'] not in _ips()]
            for node in nodes:
                rsync_mod = '%s::object/sda/objects/%s' % (node['ip'],
                                                           cur_part)
                process_arg_checker.append(
                    (0, '', ['rsync', whole_path_from, rsync_mod]))
            # Sanity check: the object is on disk before replication runs.
            self.assertTrue(
                os.access(os.path.join(self.objects, '1', data_dir, ohash),
                          os.F_OK))
            with _mock_process(process_arg_checker):
                replicator.run_once()
            self.assertFalse(process_errors)
            # The Timeout must not have produced a sync-error log line.
            self.assertFalse(self.i_failed)
        finally:
            # Restore the real module attributes patched above.
            object_replicator.http_connect = was_connector
            object_replicator.get_hashes = was_get_hashes
            tpool.execute = was_execute
Esempio n. 25
0
    def _test_container_sync_row_put(self, realm, realm_key):
        """Common helper for container_sync_row() object-PUT tests.

        When ``realm`` is set, the outgoing PUT is expected to carry an
        x-container-sync-auth header (signed with ``realm_key``); otherwise
        an x-container-sync-key header.  sync.uuid, sync.shuffle,
        sync.put_object and sync.direct_get_object are monkeypatched for
        the duration of the test and restored in the ``finally`` block.
        """
        orig_uuid = sync.uuid
        orig_shuffle = sync.shuffle
        orig_put_object = sync.put_object
        orig_direct_get_object = sync.direct_get_object
        try:
            # Fixed uuid4().hex so the signed auth header is deterministic.
            class FakeUUID(object):
                class uuid4(object):
                    hex = 'abcdef'

            sync.uuid = FakeUUID
            # Make node ordering deterministic.
            sync.shuffle = lambda x: x
            fake_logger = FakeLogger()

            # Stub PUT: asserts the sync request carries the expected
            # target, name, headers, body, proxy and logger.
            def fake_put_object(sync_to, name=None, headers=None,
                                contents=None, proxy=None, logger=None):
                self.assertEquals(sync_to, 'http://sync/to/path')
                self.assertEquals(name, 'object')
                if realm:
                    self.assertEqual(headers, {
                        'x-container-sync-auth':
                        'US abcdef ef62c64bb88a33fa00722daa23d5d43253164962',
                        'x-timestamp': '1.2',
                        'etag': 'etagvalue',
                        'other-header': 'other header value',
                        'content-type': 'text/plain'})
                else:
                    self.assertEquals(headers, {
                        'x-container-sync-key': 'key',
                        'x-timestamp': '1.2',
                        'other-header': 'other header value',
                        'etag': 'etagvalue',
                        'content-type': 'text/plain'})
                self.assertEquals(contents.read(), 'contents')
                self.assertEquals(proxy, 'http://proxy')
                self.assertEqual(logger, fake_logger)

            sync.put_object = fake_put_object

            cs = sync.ContainerSync({}, container_ring=FakeRing(),
                                    object_ring=FakeRing())
            cs.logger = fake_logger
            cs.http_proxies = ['http://proxy']

            # Stub GET: note the swift_bytes content-type suffix, which the
            # sync code must strip before PUTting (checked in
            # fake_put_object above).
            def fake_direct_get_object(*args, **kwargs):
                return ({'other-header': 'other header value',
                         'etag': '"etagvalue"', 'x-timestamp': '1.2',
                         'content-type': 'text/plain; swift_bytes=123'},
                        iter('contents'))

            sync.direct_get_object = fake_direct_get_object
            # Success as everything says it worked
            self.assertTrue(cs.container_sync_row(
                {'deleted': False,
                 'name': 'object',
                 'created_at': '1.2'}, 'http://sync/to/path',
                'key', FakeContainerBroker('broker'), {
                    'account': 'a',
                    'container': 'c'}, realm, realm_key))
            self.assertEquals(cs.container_puts, 1)

            def fake_direct_get_object(*args, **kwargs):
                return ({'date': 'date value',
                         'last-modified': 'last modified value',
                         'x-timestamp': '1.2',
                         'other-header': 'other header value',
                         'etag': '"etagvalue"',
                         'content-type': 'text/plain; swift_bytes=123'},
                        iter('contents'))

            sync.direct_get_object = fake_direct_get_object
            # Success as everything says it worked, also checks 'date' and
            # 'last-modified' headers are removed and that 'etag' header is
            # stripped of double quotes.
            self.assertTrue(cs.container_sync_row(
                {'deleted': False,
                 'name': 'object',
                 'created_at': '1.2'}, 'http://sync/to/path',
                'key', FakeContainerBroker('broker'), {
                    'account': 'a',
                    'container': 'c'}, realm, realm_key))
            self.assertEquals(cs.container_puts, 2)

            # Track exceptions raised by the GET stub so we can assert how
            # many nodes were tried.
            exc = []

            def fake_direct_get_object(*args, **kwargs):
                exc.append(Exception('test exception'))
                raise exc[-1]

            sync.direct_get_object = fake_direct_get_object
            # Fail due to completely unexpected exception
            self.assertFalse(cs.container_sync_row(
                {'deleted': False,
                 'name': 'object',
                 'created_at': '1.2'}, 'http://sync/to/path',
                'key', FakeContainerBroker('broker'), {
                    'account': 'a',
                    'container': 'c'}, realm, realm_key))
            self.assertEquals(cs.container_puts, 2)
            # All three replica nodes were attempted.
            self.assertEquals(len(exc), 3)
            self.assertEquals(str(exc[-1]), 'test exception')

            exc = []

            # First node raises a generic Exception, later ones raise
            # ClientException; all nodes must still be tried.
            def fake_direct_get_object(*args, **kwargs):
                if len(exc) == 0:
                    exc.append(Exception('test other exception'))
                else:
                    exc.append(ClientException('test client exception'))
                raise exc[-1]

            sync.direct_get_object = fake_direct_get_object
            # Fail due to all direct_get_object calls failing
            self.assertFalse(cs.container_sync_row(
                {'deleted': False,
                 'name': 'object',
                 'created_at': '1.2'}, 'http://sync/to/path',
                'key', FakeContainerBroker('broker'), {
                    'account': 'a',
                    'container': 'c'}, realm, realm_key))
            self.assertEquals(cs.container_puts, 2)
            self.assertEquals(len(exc), 3)
            self.assertEquals(str(exc[-3]), 'test other exception')
            self.assertEquals(str(exc[-2]), 'test client exception')
            self.assertEquals(str(exc[-1]), 'test client exception')

            # From here on the GET succeeds; failures come from the PUT.
            def fake_direct_get_object(*args, **kwargs):
                return ({'other-header': 'other header value',
                         'x-timestamp': '1.2', 'etag': '"etagvalue"'},
                        iter('contents'))

            def fake_put_object(*args, **kwargs):
                raise ClientException('test client exception', http_status=401)

            sync.direct_get_object = fake_direct_get_object
            sync.put_object = fake_put_object
            cs.logger = FakeLogger()
            # Fail due to 401
            self.assertFalse(cs.container_sync_row(
                {'deleted': False,
                 'name': 'object',
                 'created_at': '1.2'}, 'http://sync/to/path',
                'key', FakeContainerBroker('broker'), {
                    'account': 'a',
                    'container': 'c'}, realm, realm_key))
            self.assertEquals(cs.container_puts, 2)
            # 401 is logged at info level with an 'Unauth' prefix.
            self.assert_(re.match('Unauth ',
                                  cs.logger.log_dict['info'][0][0][0]))

            def fake_put_object(*args, **kwargs):
                raise ClientException('test client exception', http_status=404)

            sync.put_object = fake_put_object
            # Fail due to 404
            cs.logger = FakeLogger()
            self.assertFalse(cs.container_sync_row(
                {'deleted': False,
                 'name': 'object',
                 'created_at': '1.2'}, 'http://sync/to/path',
                'key', FakeContainerBroker('broker'), {
                    'account': 'a',
                    'container': 'c'}, realm, realm_key))
            self.assertEquals(cs.container_puts, 2)
            # 404 is logged at info level with a 'Not found' prefix.
            self.assert_(re.match('Not found ',
                                  cs.logger.log_dict['info'][0][0][0]))

            def fake_put_object(*args, **kwargs):
                raise ClientException('test client exception', http_status=503)

            sync.put_object = fake_put_object
            # Fail due to 503
            self.assertFalse(cs.container_sync_row(
                {'deleted': False,
                 'name': 'object',
                 'created_at': '1.2'}, 'http://sync/to/path',
                'key', FakeContainerBroker('broker'), {
                    'account': 'a',
                    'container': 'c'}, realm, realm_key))
            self.assertEquals(cs.container_puts, 2)
            # 503 is logged as an exception with an 'ERROR Syncing ' prefix.
            self.assertTrue(
                cs.logger.log_dict['exception'][0][0][0].startswith(
                    'ERROR Syncing '))
        finally:
            # Restore the real module attributes patched above.
            sync.uuid = orig_uuid
            sync.shuffle = orig_shuffle
            sync.put_object = orig_put_object
            sync.direct_get_object = orig_direct_get_object
Esempio n. 26
0
    def _test_container_sync_row_delete(self, realm, realm_key):
        """Common helper for container_sync_row() object-DELETE tests.

        When ``realm`` is set, the outgoing DELETE is expected to carry an
        x-container-sync-auth header (signed with ``realm_key``); otherwise
        an x-container-sync-key header.  sync.uuid and sync.delete_object
        are monkeypatched for the duration of the test and restored in the
        ``finally`` block.
        """
        orig_uuid = sync.uuid
        orig_delete_object = sync.delete_object
        try:
            # Fixed uuid4().hex so the signed auth header is deterministic.
            class FakeUUID(object):
                class uuid4(object):
                    hex = 'abcdef'

            sync.uuid = FakeUUID
            fake_logger = FakeLogger()

            # Stub DELETE: asserts the sync request carries the expected
            # target, name, headers, proxy and logger.
            def fake_delete_object(path, name=None, headers=None, proxy=None,
                                   logger=None):
                self.assertEquals(path, 'http://sync/to/path')
                self.assertEquals(name, 'object')
                if realm:
                    self.assertEquals(headers, {
                        'x-container-sync-auth':
                        'US abcdef 90e95aabb45a6cdc0892a3db5535e7f918428c90',
                        'x-timestamp': '1.2'})
                else:
                    self.assertEquals(
                        headers,
                        {'x-container-sync-key': 'key', 'x-timestamp': '1.2'})
                self.assertEquals(proxy, 'http://proxy')
                self.assertEqual(logger, fake_logger)

            sync.delete_object = fake_delete_object
            cs = sync.ContainerSync({}, container_ring=FakeRing(),
                                    object_ring=FakeRing())
            cs.logger = fake_logger
            cs.http_proxies = ['http://proxy']
            # Success
            self.assertTrue(cs.container_sync_row(
                {'deleted': True,
                 'name': 'object',
                 'created_at': '1.2'}, 'http://sync/to/path',
                'key', FakeContainerBroker('broker'), 'info', realm,
                realm_key))
            self.assertEquals(cs.container_deletes, 1)

            # Track exceptions raised by the DELETE stub.
            exc = []

            def fake_delete_object(*args, **kwargs):
                exc.append(Exception('test exception'))
                raise exc[-1]

            sync.delete_object = fake_delete_object
            # Failure because of delete_object exception
            self.assertFalse(cs.container_sync_row(
                {'deleted': True,
                 'name': 'object',
                 'created_at': '1.2'}, 'http://sync/to/path',
                'key', FakeContainerBroker('broker'), 'info', realm,
                realm_key))
            self.assertEquals(cs.container_deletes, 1)
            self.assertEquals(len(exc), 1)
            self.assertEquals(str(exc[-1]), 'test exception')

            def fake_delete_object(*args, **kwargs):
                exc.append(ClientException('test client exception'))
                raise exc[-1]

            sync.delete_object = fake_delete_object
            # Failure because of delete_object exception
            self.assertFalse(cs.container_sync_row(
                {'deleted': True,
                 'name': 'object',
                 'created_at': '1.2'}, 'http://sync/to/path',
                'key', FakeContainerBroker('broker'), 'info', realm,
                realm_key))
            self.assertEquals(cs.container_deletes, 1)
            self.assertEquals(len(exc), 2)
            self.assertEquals(str(exc[-1]), 'test client exception')

            # A 404 from the destination counts as success: the object to
            # delete is already gone.
            def fake_delete_object(*args, **kwargs):
                exc.append(ClientException('test client exception',
                                           http_status=404))
                raise exc[-1]

            sync.delete_object = fake_delete_object
            # Success because the object wasn't even found
            self.assertTrue(cs.container_sync_row(
                {'deleted': True,
                 'name': 'object',
                 'created_at': '1.2'}, 'http://sync/to/path',
                'key', FakeContainerBroker('broker'), 'info', realm,
                realm_key))
            self.assertEquals(cs.container_deletes, 2)
            self.assertEquals(len(exc), 3)
            self.assertEquals(str(exc[-1]), 'test client exception: 404')
        finally:
            # Restore the real module attributes patched above.
            sync.uuid = orig_uuid
            sync.delete_object = orig_delete_object
Esempio n. 27
0
    def _get_disk_file(self,
                       invalid_type=None,
                       obj_name='o',
                       fsize=1024,
                       csize=8,
                       mark_deleted=False,
                       ts=None,
                       iter_hook=None,
                       mount_check=False,
                       extra_metadata=None):
        '''returns a DiskFile'''
        # Write the object data and metadata through a fresh DiskFile.
        df = diskfile.DiskFile(self.testdir, 'sda1', '0', 'a', 'c', obj_name,
                               FakeLogger())
        body = '0' * fsize
        hasher = md5()
        timestamp = ts if ts else str(normalize_timestamp(time()))
        with df.writer() as wr:
            wr.write(body)
            hasher.update(body)
            etag = hasher.hexdigest()
            metadata = {
                'ETag': etag,
                'X-Timestamp': timestamp,
                'Content-Length': str(os.fstat(wr.fd).st_size),
            }
            metadata.update(extra_metadata or {})
            wr.put(metadata)
            if invalid_type == 'ETag':
                # Record an etag that does not match the written body.
                bad_hasher = md5()
                bad_hasher.update('1' + '0' * (fsize - 1))
                metadata['ETag'] = bad_hasher.hexdigest()
                diskfile.write_metadata(wr.fd, metadata)
            if invalid_type == 'Content-Length':
                # Record a length one byte short of the actual file size.
                metadata['Content-Length'] = fsize - 1
                diskfile.write_metadata(wr.fd, metadata)

        if mark_deleted:
            # Overwrite the metadata with a tombstone marker.
            metadata = {'X-Timestamp': timestamp, 'deleted': True}
            df.put_metadata(metadata, tombstone=True)

        # Re-open so the returned DiskFile holds the data file pointer and
        # the caller's chunk-size/hook/mount options.
        df = diskfile.DiskFile(self.testdir,
                               'sda1',
                               '0',
                               'a',
                               'c',
                               obj_name,
                               FakeLogger(),
                               keep_data_fp=True,
                               disk_chunk_size=csize,
                               iter_hook=iter_hook,
                               mount_check=mount_check)
        if invalid_type == 'Zero-Byte':
            # Swap the data file for an empty one.
            os.remove(df.data_file)
            open(df.data_file, 'w').close()
        df.unit_test_len = fsize
        return df
Esempio n. 28
0
    def _test_ondisk_data_after_write_with_crypto(self, policy_name):
        """Write an object through the crypto-enabled proxy and verify that
        the data actually persisted on disk is encrypted.

        Checks, for every node holding the object: the container listing
        etag, the object body, user metadata, the stored etag, the etag
        HMAC and the container-update etag override.

        :param policy_name: name of the storage policy to exercise
        :returns: tuple of (expected encrypted body, list of
                  (node, on-disk contents) pairs) for further checks
        """
        policy = storage_policy.POLICIES.get_by_name(policy_name)
        self._create_container(self.proxy_app, policy_name=policy_name)
        self._put_object(self.crypto_app, self.plaintext)
        self._post_object(self.crypto_app)

        # Verify container listing etag is encrypted by direct GET to container
        # server. We can use any server for all nodes since they all share same
        # devices dir.
        cont_server = self._test_context['test_servers'][3]
        cont_ring = Ring(self._test_context['testdir'], ring_name='container')
        part, nodes = cont_ring.get_nodes('a', self.container_name)
        for node in nodes:
            req = Request.blank('/%s/%s/a/%s' %
                                (node['device'], part, self.container_name),
                                method='GET',
                                query_string='format=json')
            resp = req.get_response(cont_server)
            listing = json.loads(resp.body)
            # sanity checks...
            self.assertEqual(1, len(listing))
            self.assertEqual('o', listing[0]['name'])
            self.assertEqual('application/test', listing[0]['content_type'])
            # verify encrypted etag value
            # listing hash has the form '<enc etag>; swift_meta=<crypto meta>'
            parts = listing[0]['hash'].rsplit(';', 1)
            crypto_meta_param = parts[1].strip()
            crypto_meta = crypto_meta_param[len('swift_meta='):]
            listing_etag_iv = load_crypto_meta(crypto_meta)['iv']
            # re-encrypt the known plaintext etag with the recovered IV and
            # the container key; it must match what the listing reported
            exp_enc_listing_etag = base64.b64encode(
                encrypt(self.plaintext_etag.encode('ascii'),
                        self.km.create_key('/a/%s' % self.container_name),
                        listing_etag_iv)).decode('ascii')
            self.assertEqual(exp_enc_listing_etag, parts[0])

        # Verify diskfile data and metadata is encrypted
        ring_object = self.proxy_app.get_object_ring(int(policy))
        partition, nodes = ring_object.get_nodes('a', self.container_name, 'o')
        conf = {
            'devices': self._test_context["testdir"],
            'mount_check': 'false'
        }
        df_mgr = diskfile.DiskFileRouter(conf, FakeLogger())[policy]
        ondisk_data = []
        exp_enc_body = None
        for node_index, node in enumerate(nodes):
            df = df_mgr.get_diskfile(node['device'],
                                     partition,
                                     'a',
                                     self.container_name,
                                     'o',
                                     policy=policy)
            with df.open():
                meta = df.get_metadata()
                contents = b''.join(df.reader())
                # lower-case keys so header-name casing can't break lookups
                metadata = dict((k.lower(), v) for k, v in meta.items())
                # verify on disk data - body
                body_iv = load_crypto_meta(
                    metadata['x-object-sysmeta-crypto-body-meta'])['iv']
                body_key_meta = load_crypto_meta(
                    metadata['x-object-sysmeta-crypto-body-meta'])['body_key']
                obj_key = self.km.create_key('/a/%s/o' % self.container_name)
                # the body is encrypted with a wrapped per-object body key,
                # not with the object key directly
                body_key = Crypto().unwrap_key(obj_key, body_key_meta)
                exp_enc_body = encrypt(self.plaintext, body_key, body_iv)
                ondisk_data.append((node, contents))

                # verify on disk user metadata
                enc_val, meta = metadata[
                    'x-object-transient-sysmeta-crypto-meta-fruit'].split(';')
                meta = meta.strip()[len('swift_meta='):]
                metadata_iv = load_crypto_meta(meta)['iv']
                exp_enc_meta = base64.b64encode(
                    encrypt(b'Kiwi', obj_key, metadata_iv)).decode('ascii')
                self.assertEqual(exp_enc_meta, enc_val)
                # plaintext user metadata must not be stored on disk
                self.assertNotIn('x-object-meta-fruit', metadata)

                self.assertIn('x-object-transient-sysmeta-crypto-meta',
                              metadata)
                meta = load_crypto_meta(
                    metadata['x-object-transient-sysmeta-crypto-meta'])
                self.assertIn('key_id', meta)
                self.assertIn('path', meta['key_id'])
                self.assertEqual(
                    '/a/%s/%s' % (self.container_name, self.object_name),
                    meta['key_id']['path'])
                self.assertIn('v', meta['key_id'])
                self.assertEqual('2', meta['key_id']['v'])
                self.assertIn('cipher', meta)
                self.assertEqual(Crypto.cipher, meta['cipher'])

                # verify etag
                actual_enc_etag, _junk, actual_etag_meta = metadata[
                    'x-object-sysmeta-crypto-etag'].partition('; swift_meta=')
                etag_iv = load_crypto_meta(actual_etag_meta)['iv']
                exp_enc_etag = base64.b64encode(
                    encrypt(self.plaintext_etag.encode('ascii'), obj_key,
                            etag_iv)).decode('ascii')
                self.assertEqual(exp_enc_etag, actual_enc_etag)

                # verify etag hmac
                exp_etag_mac = hmac.new(obj_key,
                                        self.plaintext_etag.encode('ascii'),
                                        digestmod=hashlib.sha256).digest()
                exp_etag_mac = base64.b64encode(exp_etag_mac).decode('ascii')
                self.assertEqual(exp_etag_mac,
                                 metadata['x-object-sysmeta-crypto-etag-mac'])

                # verify etag override for container updates
                override = 'x-object-sysmeta-container-update-override-etag'
                parts = metadata[override].rsplit(';', 1)
                crypto_meta_param = parts[1].strip()
                crypto_meta = crypto_meta_param[len('swift_meta='):]
                listing_etag_iv = load_crypto_meta(crypto_meta)['iv']
                # the override etag is encrypted with the *container* key so
                # the container server can be given an opaque listing value
                cont_key = self.km.create_key('/a/%s' % self.container_name)
                exp_enc_listing_etag = base64.b64encode(
                    encrypt(self.plaintext_etag.encode('ascii'), cont_key,
                            listing_etag_iv)).decode('ascii')
                self.assertEqual(exp_enc_listing_etag, parts[0])

        self._check_GET_and_HEAD(self.crypto_app)
        return exp_enc_body, ondisk_data
Esempio n. 29
0
    def setUp(self):
        """Build a two-device (sda/sdb) object tree with partitions for
        policies 0-2 on sda, and create diskfile managers plus one sample
        diskfile per policy (EC for policy 2).
        """
        self.testdir = os.path.join(mkdtemp(), 'tmp_test_object_auditor')
        self.devices = os.path.join(self.testdir, 'node')
        self.rcache = os.path.join(self.testdir, 'object.recon')
        self.logger = FakeLogger()
        rmtree(self.testdir, ignore_errors=1)
        mkdirs(os.path.join(self.devices, 'sda'))
        os.mkdir(os.path.join(self.devices, 'sdb'))

        def data_path(device, policy_index):
            # objects dir for a given device / policy
            return os.path.join(self.devices, device,
                                get_data_dir(POLICIES[policy_index]))

        # policy 0; only the sda dirs are created, sdb paths are just
        # computed for the tests to use
        self.objects = data_path('sda', 0)
        self.objects_2 = data_path('sdb', 0)
        os.mkdir(self.objects)
        # policy 1
        self.objects_p1 = data_path('sda', 1)
        self.objects_2_p1 = data_path('sdb', 1)
        os.mkdir(self.objects_p1)
        # policy 2
        self.objects_p2 = data_path('sda', 2)
        self.objects_2_p2 = data_path('sdb', 2)
        os.mkdir(self.objects_p2)

        # partition dirs 0-3 under each sda policy dir
        self.parts = {}
        self.parts_p1 = {}
        self.parts_p2 = {}
        for part in ('0', '1', '2', '3'):
            for objects, parts in ((self.objects, self.parts),
                                   (self.objects_p1, self.parts_p1),
                                   (self.objects_p2, self.parts_p2)):
                parts[part] = os.path.join(objects, part)
                os.mkdir(parts[part])

        self.conf = dict(devices=self.devices,
                         mount_check='false',
                         object_size_stats='10,100,1024,10240')
        self.df_mgr = DiskFileManager(self.conf, self.logger)
        self.ec_df_mgr = ECDiskFileManager(self.conf, self.logger)

        # diskfiles for policy 0, 1, 2
        self.disk_file = self.df_mgr.get_diskfile(
            'sda', '0', 'a', 'c', 'o', policy=POLICIES[0])
        self.disk_file_p1 = self.df_mgr.get_diskfile(
            'sda', '0', 'a', 'c', 'o', policy=POLICIES[1])
        self.disk_file_ec = self.ec_df_mgr.get_diskfile(
            'sda', '0', 'a', 'c', 'o', policy=POLICIES[2], frag_index=1)
Esempio n. 30
0
 def test_disk_file_mkstemp_creates_dir(self):
     """Entering writer() must recreate the device tmp dir if missing."""
     tmpdir = os.path.join(self.testdir, 'sda1', 'tmp')
     # remove the tmp dir so the writer has to recreate it
     os.rmdir(tmpdir)
     with diskfile.DiskFile(self.testdir, 'sda1', '0', 'a', 'c', 'o',
                            FakeLogger()).writer():
         # self.assert_ is a deprecated alias (removed in Python 3.12);
         # use assertTrue instead
         self.assertTrue(os.path.exists(tmpdir))