Example #1
    def RunCommand(self):
        """Command entry point for the version command."""
        long_form = False
        if self.sub_opts:
            for o, _ in self.sub_opts:
                if o == '-l':
                    long_form = True

        config_paths = ', '.join(GetFriendlyConfigFilePaths())

        shipped_checksum = gslib.CHECKSUM
        try:
            cur_checksum = self._ComputeCodeChecksum()
        except IOError:
            cur_checksum = 'MISSING FILES'
        if shipped_checksum == cur_checksum:
            checksum_ok_str = 'OK'
        else:
            checksum_ok_str = '!= %s' % shipped_checksum

        sys.stdout.write('gsutil version: %s\n' % gslib.VERSION)

        if long_form:

            long_form_output = (
                'checksum: {checksum} ({checksum_ok})\n'
                'boto version: {boto_version}\n'
                'python version: {python_version}\n'
                'OS: {os_version}\n'
                'multiprocessing available: {multiprocessing_available}\n'
                'using cloud sdk: {cloud_sdk}\n'
                'pass cloud sdk credentials to gsutil: {cloud_sdk_credentials}\n'
                'config path(s): {config_paths}\n'
                'gsutil path: {gsutil_path}\n'
                'compiled crcmod: {compiled_crcmod}\n'
                'installed via package manager: {is_package_install}\n'
                'editable install: {is_editable_install}\n')

            sys.stdout.write(
                long_form_output.format(
                    checksum=cur_checksum,
                    checksum_ok=checksum_ok_str,
                    boto_version=boto.__version__,
                    python_version=sys.version.replace('\n', ''),
                    os_version='%s %s' % (platform.system(),
                                          platform.release()),
                    multiprocessing_available=(
                        CheckMultiprocessingAvailableAndInit().is_available),
                    cloud_sdk=system_util.InvokedViaCloudSdk(),
                    cloud_sdk_credentials=(
                        system_util.CloudSdkCredPassingEnabled()),
                    config_paths=config_paths,
                    gsutil_path=(GetCloudSdkGsutilWrapperScriptPath() or
                                 gslib.GSUTIL_PATH),
                    compiled_crcmod=UsingCrcmodExtension(),
                    is_package_install=gslib.IS_PACKAGE_INSTALL,
                    is_editable_install=gslib.IS_EDITABLE_INSTALL,
                ))

        return 0
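
# Illustrative sketch (not gsutil's actual _ComputeCodeChecksum): one plausible
# way to derive the "checksum" compared above is to hash every shipped source
# file under the package root in a deterministic order, so the digest changes
# whenever any file is modified or missing. The package-root argument and the
# .py filter here are assumptions made for the sketch.
import hashlib
import os


def ComputeCodeChecksumSketch(package_root):
    """Returns an MD5 hex digest over all .py files under package_root."""
    md5_hash = hashlib.md5()
    for dirpath, _, filenames in sorted(os.walk(package_root)):
        for filename in sorted(filenames):
            if filename.endswith('.py'):
                with open(os.path.join(dirpath, filename), 'rb') as fp:
                    md5_hash.update(fp.read())
    return md5_hash.hexdigest()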
Example #2
def SequentialAndParallelTransfer(func):
  """Runs the decorated transfer test normally, then forcing parallelism."""

  @functools.wraps(func)
  def Wrapper(*args, **kwargs):
    # Run the test normally once.
    func(*args, **kwargs)

    if not RUN_S3_TESTS and UsingCrcmodExtension():
      # Try again, forcing parallel upload and sliced download.
      with SetBotoConfigForTest([
          ('GSUtil', 'parallel_composite_upload_threshold', '1'),
          ('GSUtil', 'sliced_object_download_threshold', '1'),
          ('GSUtil', 'sliced_object_download_max_components', '3'),
          ('GSUtil', 'check_hashes', 'always')]):
        func(*args, **kwargs)

  return Wrapper
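
# A simplified, self-contained sketch of the pattern SetBotoConfigForTest
# provides above: apply (section, option, value) overrides for the duration of
# a `with` block and restore prior state afterwards, even if the body raises.
# The dict-based config below is a stand-in, not boto's real config object.
import contextlib


@contextlib.contextmanager
def SetConfigForTest(config, overrides):
  """Temporarily applies (section, option, value) overrides to config."""
  saved = {(section, option): config.get((section, option))
           for section, option, _ in overrides}
  try:
    for section, option, value in overrides:
      config[(section, option)] = value
    yield
  finally:
    # Restore prior values; drop options that did not exist before.
    for key, old_value in saved.items():
      if old_value is None:
        config.pop(key, None)
      else:
        config[key] = old_value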
def GetDownloadHashAlgs(logger, consider_md5=False, consider_crc32c=False):
    """Returns a dict of hash algorithms for validating an object.

  Args:
    logger: logging.Logger for outputting log messages.
    consider_md5: If True, consider using a md5 hash.
    consider_crc32c: If True, consider using a crc32c hash.

  Returns:
    Dict of (string, hash algorithm).

  Raises:
    CommandException if hash algorithms satisfying the boto config file
    cannot be returned.
  """
    check_hashes_config = config.get('GSUtil', 'check_hashes',
                                     CHECK_HASH_IF_FAST_ELSE_FAIL)
    if check_hashes_config == CHECK_HASH_NEVER:
        return {}

    hash_algs = {}
    if consider_md5:
        hash_algs['md5'] = md5
    elif consider_crc32c:
        # If the cloud provider supplies a CRC, we'll compute a checksum to
        # validate if we're using a native crcmod installation and MD5 isn't
        # offered as an alternative.
        if UsingCrcmodExtension():
            hash_algs['crc32c'] = lambda: crcmod.predefined.Crc('crc-32c')
        elif not hash_algs:
            if check_hashes_config == CHECK_HASH_IF_FAST_ELSE_FAIL:
                raise CommandException(_SLOW_CRC_EXCEPTION_TEXT)
            elif check_hashes_config == CHECK_HASH_IF_FAST_ELSE_SKIP:
                logger.warning(_NO_HASH_CHECK_WARNING)
            elif check_hashes_config == CHECK_HASH_ALWAYS:
                logger.warning(_SLOW_CRCMOD_DOWNLOAD_WARNING)
                hash_algs['crc32c'] = lambda: crcmod.predefined.Crc('crc-32c')
            else:
                raise CommandException(
                    'Your boto config \'check_hashes\' option is misconfigured.'
                )

    return hash_algs
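
# A hedged usage sketch of the function above: instantiate each returned hash
# algorithm, feed it the downloaded bytes, and compare digests against the
# server-reported values. The logger, data chunks, and final comparison are
# illustrative assumptions, not gsutil's real download plumbing.
import logging

hash_algs = GetDownloadHashAlgs(logging.getLogger(), consider_md5=True)
digesters = {name: alg() for name, alg in hash_algs.items()}
for chunk in (b'first chunk', b'second chunk'):  # Stand-in for streamed data.
    for digester in digesters.values():
        digester.update(chunk)
local_digests = {name: d.hexdigest() for name, d in digesters.items()}
# Each local digest would then be checked against the object's metadata; a
# mismatch indicates a corrupted download.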
class TestMvE2ETests(testcase.GsUtilIntegrationTestCase):
    """Integration tests for mv command."""
    def test_moving(self):
        """Tests moving two buckets, one with 2 objects and one with 0 objects."""
        bucket1_uri = self.CreateBucket(test_objects=2)
        self.AssertNObjectsInBucket(bucket1_uri, 2)
        bucket2_uri = self.CreateBucket()
        self.AssertNObjectsInBucket(bucket2_uri, 0)

        # Move two objects from bucket1 to bucket2.
        objs = [
            self.StorageUriCloneReplaceKey(bucket1_uri, key).versionless_uri
            for key in bucket1_uri.list_bucket()
        ]
        cmd = (['-m', 'mv'] + objs + [suri(bucket2_uri)])
        stderr = self.RunGsUtil(cmd, return_stderr=True)
        # Rewrite API may output an additional 'Copying' progress notification.
        self.assertGreaterEqual(
            stderr.count('Copying'), 2,
            'stderr did not contain >= 2 "Copying" lines:\n%s' % stderr)
        self.assertLessEqual(
            stderr.count('Copying'), 4,
            'stderr did not contain <= 4 "Copying" lines:\n%s' % stderr)
        self.assertEqual(
            stderr.count('Copying') % 2, 0,
            'stderr did not contain even number of "Copying" lines:\n%s' %
            stderr)
        self.assertEqual(
            stderr.count('Removing'), 2,
            'stderr did not contain 2 "Removing" lines:\n%s' % stderr)

        self.AssertNObjectsInBucket(bucket1_uri, 0)
        self.AssertNObjectsInBucket(bucket2_uri, 2)

        # Remove one of the objects.
        objs = [
            self.StorageUriCloneReplaceKey(bucket2_uri, key).versionless_uri
            for key in bucket2_uri.list_bucket()
        ]
        obj1 = objs[0]
        self.RunGsUtil(['rm', obj1])

        self.AssertNObjectsInBucket(bucket1_uri, 0)
        self.AssertNObjectsInBucket(bucket2_uri, 1)

        # Move the 1 remaining object back.
        objs = [
            suri(self.StorageUriCloneReplaceKey(bucket2_uri, key))
            for key in bucket2_uri.list_bucket()
        ]
        cmd = (['-m', 'mv'] + objs + [suri(bucket1_uri)])
        stderr = self.RunGsUtil(cmd, return_stderr=True)
        # Rewrite API may output an additional 'Copying' progress notification.
        self.assertGreaterEqual(
            stderr.count('Copying'), 1,
            'stderr did not contain >= 1 "Copying" lines:\n%s' % stderr)
        self.assertLessEqual(
            stderr.count('Copying'), 2,
            'stderr did not contain <= 2 "Copying" lines:\n%s' % stderr)
        self.assertEqual(stderr.count('Removing'), 1)

        self.AssertNObjectsInBucket(bucket1_uri, 1)
        self.AssertNObjectsInBucket(bucket2_uri, 0)

    def test_move_bucket_to_dir(self):
        """Tests moving a local directory to a bucket."""
        bucket_uri = self.CreateBucket(test_objects=2)
        self.AssertNObjectsInBucket(bucket_uri, 2)
        tmpdir = self.CreateTempDir()
        self.RunGsUtil(['mv', suri(bucket_uri, '*'), tmpdir])
        dir_list = []
        for dirname, _, filenames in os.walk(tmpdir):
            for filename in filenames:
                dir_list.append(os.path.join(dirname, filename))
        self.assertEqual(len(dir_list), 2)
        self.AssertNObjectsInBucket(bucket_uri, 0)

    def test_move_dir_to_bucket(self):
        """Tests moving a local directory to a bucket."""
        bucket_uri = self.CreateBucket()
        dir_to_move = self.CreateTempDir(test_files=2)
        self.RunGsUtil(['mv', dir_to_move, suri(bucket_uri)])
        self.AssertNObjectsInBucket(bucket_uri, 2)

    @SequentialAndParallelTransfer
    def test_stdin_args(self):
        """Tests mv with the -I option."""
        tmpdir = self.CreateTempDir()
        fpath1 = self.CreateTempFile(tmpdir=tmpdir, contents=b'data1')
        fpath2 = self.CreateTempFile(tmpdir=tmpdir, contents=b'data2')
        bucket_uri = self.CreateBucket()
        self.RunGsUtil(['mv', '-I', suri(bucket_uri)],
                       stdin='\n'.join((fpath1, fpath2)))

        # Use @Retry as hedge against bucket listing eventual consistency.
        @Retry(AssertionError, tries=3, timeout_secs=1)
        def _Check1():
            stdout = self.RunGsUtil(['ls', suri(bucket_uri)],
                                    return_stdout=True)
            self.assertIn(os.path.basename(fpath1), stdout)
            self.assertIn(os.path.basename(fpath2), stdout)
            self.assertNumLines(stdout, 2)

        _Check1()

    def test_mv_no_clobber(self):
        """Tests mv with the -n option."""
        fpath1 = self.CreateTempFile(contents=b'data1')
        bucket_uri = self.CreateBucket()
        object_uri = self.CreateObject(bucket_uri=bucket_uri,
                                       contents=b'data2')
        stderr = self.RunGsUtil(
            ['mv', '-n', fpath1, suri(object_uri)], return_stderr=True)
        # Copy should be skipped and source file should not be removed.
        self.assertIn('Skipping existing item: %s' % suri(object_uri), stderr)
        self.assertNotIn('Removing %s' % suri(fpath1), stderr)
        # Object content should be unchanged.
        contents = self.RunGsUtil(['cat', suri(object_uri)],
                                  return_stdout=True)
        self.assertEqual(contents, 'data2')

    @unittest.skipIf(IS_WINDOWS, 'POSIX attributes not available on Windows.')
    @unittest.skipUnless(UsingCrcmodExtension(), 'Test requires fast crcmod.')
    def test_mv_preserve_posix_bucket_to_dir_no_errors(self):
        """Tests use of the -P flag with mv from a bucket to a local dir.

    Specifically tests combinations of POSIX attributes in metadata that will
    pass validation.
    """
        bucket_uri = self.CreateBucket()
        tmpdir = self.CreateTempDir()
        TestCpMvPOSIXBucketToLocalNoErrors(self,
                                           bucket_uri,
                                           tmpdir,
                                           is_cp=False)

    @unittest.skipIf(IS_WINDOWS, 'POSIX attributes not available on Windows.')
    def test_mv_preserve_posix_bucket_to_dir_errors(self):
        """Tests use of the -P flag with mv from a bucket to a local dir.

    Specifically, combinations of POSIX attributes in metadata that will fail
    validation.
    """
        bucket_uri = self.CreateBucket()
        tmpdir = self.CreateTempDir()

        obj = self.CreateObject(bucket_uri=bucket_uri,
                                object_name='obj',
                                contents=b'obj')
        TestCpMvPOSIXBucketToLocalErrors(self,
                                         bucket_uri,
                                         obj,
                                         tmpdir,
                                         is_cp=False)

    @unittest.skipIf(IS_WINDOWS, 'POSIX attributes not available on Windows.')
    def test_mv_preserve_posix_dir_to_bucket_no_errors(self):
        """Tests use of the -P flag with mv from a local dir to a bucket."""
        bucket_uri = self.CreateBucket()
        TestCpMvPOSIXLocalToBucketNoErrors(self, bucket_uri, is_cp=False)

    @SkipForS3('Test is only relevant for gs storage classes.')
    def test_mv_early_deletion_warning(self):
        """Tests that mv on a recent nearline object warns about early deletion."""
        if self.test_api == ApiSelector.XML:
            raise unittest.SkipTest('boto does not return object storage class')

        bucket_uri = self.CreateBucket(storage_class='NEARLINE')
        object_uri = self.CreateObject(bucket_uri=bucket_uri, contents=b'obj')
        stderr = self.RunGsUtil(
            ['mv', suri(object_uri),
             suri(bucket_uri, 'foo')],
            return_stderr=True)
        self.assertIn(
            'Warning: moving nearline object %s may incur an early deletion '
            'charge, because the original object is less than 30 days old '
            'according to the local system time.' % suri(object_uri), stderr)

    def test_move_bucket_objects_with_duplicate_names_to_dir(self):
        """Tests moving multiple top-level items to a bucket."""
        bucket_uri = self.CreateBucket()
        self.CreateObject(bucket_uri=bucket_uri,
                          object_name='dir1/file.txt',
                          contents=b'data')
        self.CreateObject(bucket_uri=bucket_uri,
                          object_name='dir2/file.txt',
                          contents=b'data')
        self.AssertNObjectsInBucket(bucket_uri, 2)

        tmpdir = self.CreateTempDir()
        self.RunGsUtil(['mv', suri(bucket_uri, '*'), tmpdir])

        file_list = []
        for dirname, _, filenames in os.walk(tmpdir):
            for filename in filenames:
                file_list.append(os.path.join(dirname, filename))
        self.assertEqual(len(file_list), 2)
        self.assertIn('{}{}dir1{}file.txt'.format(tmpdir, os.sep, os.sep),
                      file_list)
        self.assertIn('{}{}dir2{}file.txt'.format(tmpdir, os.sep, os.sep),
                      file_list)
        self.AssertNObjectsInBucket(bucket_uri, 0)
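
# A minimal sketch of the retry pattern used by the @Retry-decorated _Check1
# helper in test_stdin_args above: re-run a flaky check a few times, sleeping
# between attempts and re-raising on the last one. This is an illustrative
# stand-in for the retry decorator the tests actually import, not its real
# implementation.
import functools
import time


def RetrySketch(exc_type, tries=3, timeout_secs=1):
    def Decorator(func):
        @functools.wraps(func)
        def Wrapper(*args, **kwargs):
            for attempt in range(tries):
                try:
                    return func(*args, **kwargs)
                except exc_type:
                    if attempt == tries - 1:
                        raise  # Out of attempts; surface the failure.
                    time.sleep(timeout_secs)
        return Wrapper
    return Decorator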