def test_local_directory(self):
    """
    Generate an entire local directory.

    Files must come back in the same byte-sorted order an S3 object
    listing would produce: the file inside ``another_directory`` sorts
    before the top-level ``text1.txt``, hence ``[file_info2, file_info]``.
    """
    input_local_dir = {'src': {'path': self.local_dir,
                               'type': 'local'},
                       'dest': {'path': 'bucket/',
                                'type': 's3'},
                       'dir_op': True, 'use_src_name': True}
    params = {'region': 'us-east-1'}
    # BUG FIX: ``params`` was built but never passed to FileGenerator;
    # every other copy of this test passes it as the fourth argument.
    files = FileGenerator(self.service, self.endpoint,
                          '', params).call(input_local_dir)
    result_list = []
    for filename in files:
        result_list.append(filename)
    size, last_update = get_file_stat(self.local_file)
    file_info = FileInfo(src=self.local_file, dest='bucket/text1.txt',
                         compare_key='text1.txt', size=size,
                         last_update=last_update, src_type='local',
                         dest_type='s3', operation_name='',
                         service=None, endpoint=None)
    path = self.local_dir + 'another_directory' + os.sep \
        + 'text2.txt'
    size, last_update = get_file_stat(path)
    file_info2 = FileInfo(src=path,
                          dest='bucket/another_directory/text2.txt',
                          compare_key='another_directory/text2.txt',
                          size=size, last_update=last_update,
                          src_type='local',
                          dest_type='s3', operation_name='',
                          service=None, endpoint=None)
    ref_list = [file_info2, file_info]
    self.assertEqual(len(result_list), len(ref_list))
    for i in range(len(result_list)):
        compare_files(self, result_list[i], ref_list[i])
 def test_local_directory(self):
     """
     Generate an entire local directory.

     The nested file is expected before the top-level file, matching
     the sorted order the generator emits.
     """
     input_local_dir = {
         'src': {'path': self.local_dir, 'type': 'local'},
         'dest': {'path': 'bucket/', 'type': 's3'},
         'dir_op': True,
         'use_src_name': True,
     }
     params = {'region': 'us-east-1'}
     generator = FileGenerator(self.service, self.endpoint, '', params)
     result_list = list(generator.call(input_local_dir))
     size, last_update = get_file_stat(self.local_file)
     top_level = FileInfo(src=self.local_file, dest='bucket/text1.txt',
                          compare_key='text1.txt', size=size,
                          last_update=last_update, src_type='local',
                          dest_type='s3', operation_name='',
                          service=None, endpoint=None)
     path = self.local_dir + 'another_directory' + os.sep + 'text2.txt'
     size, last_update = get_file_stat(path)
     nested = FileInfo(src=path,
                       dest='bucket/another_directory/text2.txt',
                       compare_key='another_directory/text2.txt',
                       size=size, last_update=last_update,
                       src_type='local', dest_type='s3',
                       operation_name='', service=None, endpoint=None)
     ref_list = [nested, top_level]
     self.assertEqual(len(result_list), len(ref_list))
     for actual, expected in zip(result_list, ref_list):
         compare_files(self, actual, expected)
Example #3
0
 def test_get_file_stat_error_message(self):
     """A ValueError raised by datetime.fromtimestamp() must be re-raised
     with the offending filename in the error message."""
     patch_attribute = 'awscli.customizations.s3.utils.datetime'
     with mock.patch(patch_attribute) as f:
         with mock.patch('os.stat'):
             f.fromtimestamp.side_effect = ValueError(
                 "timestamp out of range for platform "
                 "localtime()/gmtime() function")
             # Raw string: '\.' is an invalid escape in a plain literal
             # (SyntaxWarning on modern Python).  assertRaisesRegex: the
             # *Regexp alias was removed in Python 3.12.
             with self.assertRaisesRegex(ValueError, r'myfilename\.txt'):
                 get_file_stat('myfilename.txt')
Example #4
0
 def test_get_file_stat_error_message(self):
     """The ValueError from an out-of-range timestamp propagates with the
     filename included in the message."""
     patch_attribute = 'awscli.customizations.s3.utils.datetime'
     with mock.patch(patch_attribute) as f:
         with mock.patch('os.stat'):
             f.fromtimestamp.side_effect = ValueError(
                 "timestamp out of range for platform "
                 "localtime()/gmtime() function")
             # Use a raw string for the regex ('\.' is otherwise an
             # invalid escape) and assertRaisesRegex (the deprecated
             # assertRaisesRegexp alias is gone in Python 3.12).
             with self.assertRaisesRegex(ValueError, r'myfilename\.txt'):
                 get_file_stat('myfilename.txt')
Example #5
0
    def list_files(self, path, dir_op):
        """
        Yield the local file(s) for this operation.

        For a single-file operation (``dir_op`` is False) one
        ``(path, {'Size': ..., 'LastModified': ...})`` tuple is yielded.
        For a directory operation a depth-first walk is performed so the
        files come out in the same byte-sorted order an S3 list-objects
        call would produce.
        """
        join, isdir, isfile = os.path.join, os.path.isdir, os.path.isfile
        error, listdir = os.error, os.listdir
        if not self.should_ignore_file(path):
            if not dir_op:
                size, last_update = get_file_stat(path)
                last_update = self._validate_update_time(last_update, path)
                yield path, {'Size': size, 'LastModified': last_update}

            else:
                # We need to list files in byte order based on the full
                # expanded path of the key: 'test/1/2/3.txt'.  However,
                # listdir() only gives us a single directory's contents
                # at a time, and we don't want the entire tree in memory.
                # Appending the directory separator to subdirectory names
                # before sorting makes this level's sort order match the
                # byte order of the fully expanded keys.
                listdir_names = listdir(path)
                names = []
                for name in listdir_names:
                    if not self.should_ignore_file_with_decoding_warnings(
                            path, name):
                        file_path = join(path, name)
                        if isdir(file_path):
                            name = name + os.path.sep
                        names.append(name)
                self.normalize_sort(names, os.sep, '/')
                for name in names:
                    file_path = join(path, name)
                    if isdir(file_path):
                        # Everything under this subdirectory shares its
                        # prefix and therefore sorts before the remaining
                        # entries at this level, so recurse first.
                        for x in self.list_files(file_path, dir_op):
                            yield x
                    else:
                        size, last_update = get_file_stat(file_path)
                        # BUG FIX: validate against the file actually
                        # being statted (file_path), not its parent
                        # directory (path), so any warning names the
                        # right file -- consistent with the single-file
                        # branch above.
                        last_update = self._validate_update_time(
                            last_update, file_path)
                        yield (file_path, {
                            'Size': size,
                            'LastModified': last_update
                        })
Example #6
0
    def list_files(self, path, dir_op):
        """
        Yield the appropriate local file, or files under a directory,
        depending on whether the operation targets a directory.

        Directories are walked depth first so the yielded order follows
        the same sorted pattern as an S3 list-objects response.  Each
        item is ``(source_path, {'Size': ..., 'LastModified': ...})``.
        """
        join, isdir, isfile = os.path.join, os.path.isdir, os.path.isfile
        error, listdir = os.error, os.listdir
        if not self.should_ignore_file(path):
            if not dir_op:
                size, last_update = get_file_stat(path)
                last_update = self._validate_update_time(last_update, path)
                yield path, {'Size': size, 'LastModified': last_update}

            else:
                # Keys must sort by their full expanded path
                # ('test/1/2/3.txt'), but listdir() only exposes one
                # directory level at a time and we avoid materializing
                # the whole tree.  Adding the separator to directory
                # names before sorting reproduces the byte order of the
                # expanded keys.
                listdir_names = listdir(path)
                names = []
                for name in listdir_names:
                    if not self.should_ignore_file_with_decoding_warnings(
                            path, name):
                        file_path = join(path, name)
                        if isdir(file_path):
                            name = name + os.path.sep
                        names.append(name)
                self.normalize_sort(names, os.sep, '/')
                for name in names:
                    file_path = join(path, name)
                    if isdir(file_path):
                        # A subdirectory's contents share its prefix and
                        # sort before the remaining entries here, so
                        # recurse before yielding the rest.
                        for x in self.list_files(file_path, dir_op):
                            yield x
                    else:
                        size, last_update = get_file_stat(file_path)
                        # BUG FIX: pass file_path (the file just statted),
                        # not the parent directory, so warnings reference
                        # the correct file -- matching the single-file
                        # branch above.
                        last_update = self._validate_update_time(
                            last_update, file_path)
                        yield (
                            file_path,
                            {'Size': size, 'LastModified': last_update}
                        )
 def test_successfully_sets_utime(self):
     """set_file_utime() should stamp the file with the given epoch time."""
     current = datetime.datetime.now(tzlocal())
     epoch_now = time.mktime(current.timetuple())
     with temporary_file('w') as tmp:
         set_file_utime(tmp.name, epoch_now)
         _, observed = get_file_stat(tmp.name)
         self.assertEqual(time.mktime(observed.timetuple()), epoch_now)
Example #8
0
 def test_local_file(self):
     """
     Generate a single local file.
     """
     input_local_file = {
         "src": {"path": self.local_file, "type": "local"},
         "dest": {"path": "bucket/text1.txt", "type": "s3"},
         "dir_op": False,
         "use_src_name": False,
     }
     params = {"region": "us-east-1"}
     generator = FileGenerator(self.service, self.endpoint, "", params)
     result_list = list(generator.call(input_local_file))
     size, last_update = get_file_stat(self.local_file)
     expected = FileInfo(
         src=self.local_file,
         dest="bucket/text1.txt",
         compare_key="text1.txt",
         size=size,
         last_update=last_update,
         src_type="local",
         dest_type="s3",
         operation_name="",
         service=None,
         endpoint=None,
     )
     ref_list = [expected]
     self.assertEqual(len(result_list), len(ref_list))
     for actual, reference in zip(result_list, ref_list):
         compare_files(self, actual, reference)
Example #9
0
 def test_local_file(self):
     """
     Generate a single local file.
     """
     input_local_file = {
         'src': {'path': self.local_file, 'type': 'local'},
         'dest': {'path': 'bucket/text1.txt', 'type': 's3'},
         'dir_op': False,
         'use_src_name': False,
     }
     params = {'region': 'us-east-1'}
     result_list = list(
         FileGenerator(self.client, '').call(input_local_file))
     size, last_update = get_file_stat(self.local_file)
     expected = FileStat(src=self.local_file,
                         dest='bucket/text1.txt',
                         compare_key='text1.txt',
                         size=size,
                         last_update=last_update,
                         src_type='local',
                         dest_type='s3',
                         operation_name='')
     ref_list = [expected]
     self.assertEqual(len(result_list), len(ref_list))
     for observed, reference in zip(result_list, ref_list):
         compare_files(self, observed, reference)
Example #10
0
File: test_utils.py  Project: aws/aws-cli
 def test_successfully_sets_utime(self):
     """After set_file_utime() the stat'd mtime equals the requested epoch."""
     moment = datetime.datetime.now(tzlocal())
     epoch_now = time.mktime(moment.timetuple())
     with temporary_file("w") as handle:
         set_file_utime(handle.name, epoch_now)
         _, stamped = get_file_stat(handle.name)
         self.assertEqual(epoch_now, time.mktime(stamped.timetuple()))
 def test_local_file(self):
     """
     Generate a single local file.
     """
     input_local_file = {
         'src': {'path': self.local_file, 'type': 'local'},
         'dest': {'path': 'bucket/text1.txt', 'type': 's3'},
         'dir_op': False,
         'use_src_name': False,
     }
     params = {'region': 'us-east-1'}
     file_gen = FileGenerator(self.service, self.endpoint, '', params)
     result_list = [f for f in file_gen.call(input_local_file)]
     size, last_update = get_file_stat(self.local_file)
     expected_info = FileInfo(
         src=self.local_file, dest='bucket/text1.txt',
         compare_key='text1.txt', size=size,
         last_update=last_update, src_type='local',
         dest_type='s3', operation_name='',
         service=None, endpoint=None)
     ref_list = [expected_info]
     self.assertEqual(len(result_list), len(ref_list))
     for got, want in zip(result_list, ref_list):
         compare_files(self, got, want)
Example #12
0
 def test_local_directory(self):
     """
     Generate an entire local directory.

     The nested file is expected ahead of the top-level file in the
     generator's output.
     """
     input_local_dir = {
         "src": {"path": self.local_dir, "type": "local"},
         "dest": {"path": "bucket/", "type": "s3"},
         "dir_op": True,
         "use_src_name": True,
     }
     params = {"region": "us-east-1"}
     generator = FileGenerator(self.service, self.endpoint, "", params)
     result_list = list(generator.call(input_local_dir))
     size, last_update = get_file_stat(self.local_file)
     top_level_file = FileInfo(
         src=self.local_file,
         dest="bucket/text1.txt",
         compare_key="text1.txt",
         size=size,
         last_update=last_update,
         src_type="local",
         dest_type="s3",
         operation_name="",
         service=None,
         endpoint=None,
     )
     nested_path = self.local_dir + "another_directory" + os.sep + "text2.txt"
     size, last_update = get_file_stat(nested_path)
     nested_file = FileInfo(
         src=nested_path,
         dest="bucket/another_directory/text2.txt",
         compare_key="another_directory/text2.txt",
         size=size,
         last_update=last_update,
         src_type="local",
         dest_type="s3",
         operation_name="",
         service=None,
         endpoint=None,
     )
     ref_list = [nested_file, top_level_file]
     self.assertEqual(len(result_list), len(ref_list))
     for actual, expected in zip(result_list, ref_list):
         compare_files(self, actual, expected)
Example #13
0
 def _safely_get_file_stats(self, file_path):
     """Stat *file_path*; on failure emit a warning and return None.

     Returns ``(file_path, {'Size': ..., 'LastModified': ...})`` on
     success.
     """
     try:
         size, last_update = get_file_stat(file_path)
     except (OSError, ValueError):
         # Unreadable file or invalid timestamp: warn and skip the file
         # by returning None.
         self.triggers_warning(file_path)
         return None
     last_update = self._validate_update_time(last_update, file_path)
     return file_path, {'Size': size, 'LastModified': last_update}
 def assert_handles_fromtimestamp_error(self, error):
     """Assert get_file_stat() reports a None modification time when
     datetime.fromtimestamp() raises *error*."""
     target = 'awscli.customizations.s3.utils.datetime'
     with mock.patch(target) as fake_datetime, temporary_file('w') as tmp:
         tmp.write('foo')
         tmp.flush()
         fake_datetime.fromtimestamp.side_effect = error
         _, update_time = get_file_stat(tmp.name)
         self.assertIsNone(update_time)
Example #15
0
File: test_utils.py  Project: aws/aws-cli
 def test_get_file_stat_returns_epoch_on_invalid_timestamp_os_error(self):
     """An OSError from datetime.fromtimestamp() yields a None mtime."""
     with mock.patch("awscli.customizations.s3.utils.datetime") as dt_mock:
         with temporary_file("w") as tmp:
             tmp.write("foo")
             tmp.flush()
             dt_mock.fromtimestamp.side_effect = OSError()
             _, update_time = get_file_stat(tmp.name)
             self.assertIsNone(update_time)
Example #16
0
 def test_get_file_stat_returns_epoch_on_invalid_timestamp(self):
     """A ValueError from datetime.fromtimestamp() yields a None mtime."""
     target = 'awscli.customizations.s3.utils.datetime'
     with mock.patch(target) as dt_mock:
         with temporary_file('w') as tmp:
             tmp.write('foo')
             tmp.flush()
             dt_mock.fromtimestamp.side_effect = ValueError()
             _, update_time = get_file_stat(tmp.name)
             self.assertIsNone(update_time)
Example #17
0
 def assert_handles_fromtimestamp_error(self, error):
     """Verify get_file_stat() degrades to a None update time when
     datetime.fromtimestamp() raises *error*."""
     patched = 'awscli.customizations.s3.utils.datetime'
     with mock.patch(patched) as mocked_datetime:
         with temporary_file('w') as scratch:
             scratch.write('foo')
             scratch.flush()
             mocked_datetime.fromtimestamp.side_effect = error
             _, update_time = get_file_stat(scratch.name)
             self.assertIsNone(update_time)
 def test_get_file_stat(self):
     """get_file_stat() reports both the byte size and the mtime."""
     reference = datetime.datetime.now(tzlocal())
     epoch_now = time.mktime(reference.timetuple())
     with temporary_file('w') as tmp:
         tmp.write('foo')
         tmp.flush()
         os.utime(tmp.name, (epoch_now, epoch_now))
         size, update_time = get_file_stat(tmp.name)
         self.assertEqual(size, 3)
         self.assertEqual(time.mktime(update_time.timetuple()), epoch_now)
Example #19
0
 def list_files(self, path, dir_op):
     """
     Yield the local file, or every file under a directory, for this
     operation.  Directories are walked depth first so the yielded order
     matches the sorted order of an S3 list-objects response.  Each item
     is a ``(source_path, size, last_update)`` tuple.
     """
     join, isdir, isfile = os.path.join, os.path.isdir, os.path.isfile
     error, listdir = os.error, os.listdir
     if not dir_op:
         size, last_update = get_file_stat(path)
         yield path, size, last_update
         return
     # Sorting must reflect the fully expanded key ('test/1/2/3.txt'),
     # but listdir() only shows one level at a time and we don't want
     # the whole tree in memory.  Appending the path separator to
     # directory entries before sorting makes this level's order match
     # the byte order of the expanded keys.
     entries = listdir(path)
     self._check_paths_decoded(path, entries)
     decorated = []
     for entry in entries:
         if isdir(join(path, entry)):
             entry += os.path.sep
         decorated.append(entry)
     for entry in sorted(decorated):
         full = join(path, entry)
         if isdir(full):
             # A subdirectory's contents share its prefix and sort
             # before this directory's remaining entries: recurse first.
             for item in self.list_files(full, dir_op):
                 yield item
         else:
             size, last_update = get_file_stat(full)
             yield full, size, last_update
Example #20
0
File: test_utils.py  Project: aws/aws-cli
 def test_get_file_stat(self):
     """Size and modification time both come back from get_file_stat()."""
     moment = datetime.datetime.now(tzlocal())
     epoch_now = time.mktime(moment.timetuple())
     with temporary_file("w") as handle:
         handle.write("foo")
         handle.flush()
         os.utime(handle.name, (epoch_now, epoch_now))
         size, update_time = get_file_stat(handle.name)
         self.assertEqual(3, size)
         self.assertEqual(epoch_now, time.mktime(update_time.timetuple()))
Example #21
0
 def list_files(self, path, dir_op):
     """
     Yield local file(s) for the operation as
     ``(source_path, size, last_update)`` tuples.  For a directory
     operation the tree is walked depth first so the output follows the
     same sorted pattern as an S3 list-objects call.
     """
     join, isdir, isfile = os.path.join, os.path.isdir, os.path.isfile
     error, listdir = os.error, os.listdir
     if not dir_op:
         size, last_update = get_file_stat(path)
         yield path, size, last_update
     else:
         # Keys must sort by their full expanded path, but listdir()
         # exposes one level at a time; marking directories with a
         # trailing separator before sorting reproduces that order
         # without materializing the whole tree.
         raw_names = listdir(path)
         self._check_paths_decoded(path, raw_names)
         names = [
             name + os.path.sep if isdir(join(path, name)) else name
             for name in raw_names
         ]
         names.sort()
         for name in names:
             child = join(path, name)
             if isdir(child):
                 # Recurse first: nested files share this directory's
                 # prefix and sort before the remaining siblings.
                 for result in self.list_files(child, dir_op):
                     yield result
             else:
                 size, last_update = get_file_stat(child)
                 yield child, size, last_update
Example #22
0
 def list_files(self, path, dir_op):
     """
     Yield the single local file, or (for a directory operation) every
     file beneath *path* via a depth-first, name-sorted walk that
     mirrors the sorted output of an S3 list-objects call.  Yields
     ``(source_path, size, last_update)`` tuples.
     """
     join, isdir, isfile = os.path.join, os.path.isdir, os.path.isfile
     error, listdir = os.error, os.listdir
     if not dir_op:
         size, last_update = get_file_stat(path)
         yield path, size, last_update
     else:
         for name in sorted(listdir(path)):
             file_path = join(path, name)
             if isdir(file_path):
                 # Depth-first: emit a subdirectory's contents before
                 # moving on to this directory's remaining entries.
                 for entry in self.list_files(file_path, dir_op):
                     yield entry
             else:
                 size, last_update = get_file_stat(file_path)
                 yield file_path, size, last_update
Example #23
0
 def list_files(self, path, dir_op):
     """
     Yield local files for this operation as
     ``(path, size, last_update)`` tuples.  A non-directory operation
     yields just *path*; a directory operation walks the tree depth
     first in sorted name order, matching the ordering of an S3
     list-objects response.
     """
     join, isdir, isfile = os.path.join, os.path.isdir, os.path.isfile
     error, listdir = os.error, os.listdir
     if not dir_op:
         size, last_update = get_file_stat(path)
         yield path, size, last_update
         return
     for child_name in sorted(listdir(path)):
         child = join(path, child_name)
         if isdir(child):
             # Recurse so nested files appear before later siblings.
             for item in self.list_files(child, dir_op):
                 yield item
         else:
             size, last_update = get_file_stat(child)
             yield child, size, last_update
Example #24
0
File: test_utils.py  Project: aws/aws-cli
 def test_on_success_modifies_utime(self):
     """on_done() should leave the file stamped with the desired utime."""
     self.subscriber.on_done(self.future)
     _, actual_utime = get_file_stat(self.filename)
     self.assertEqual(actual_utime, self.desired_utime)
Example #25
0
 def test_error_message(self):
     """get_file_stat() wraps an os.stat() failure in a ValueError that
     names the file."""
     with mock.patch('os.stat', mock.Mock(side_effect=IOError('msg'))):
         # Raw string + assertRaisesRegex: '\.' is an invalid escape in
         # a plain literal, and the assertRaisesRegexp alias was removed
         # in Python 3.12.
         with self.assertRaisesRegex(ValueError, r'myfilename\.txt'):
             get_file_stat('myfilename.txt')
Example #26
0
File: test_utils.py  Project: aws/aws-cli
 def test_get_file_stat_error_message(self):
     """An os.stat() IOError surfaces as a ValueError naming the file."""
     with mock.patch("os.stat", mock.Mock(side_effect=IOError("msg"))):
         # Raw string regex and assertRaisesRegex (the *Regexp alias is
         # gone in Python 3.12; '\.' is an invalid plain-string escape).
         with self.assertRaisesRegex(ValueError, r"myfilename\.txt"):
             get_file_stat("myfilename.txt")
 def test_error_message(self):
     """A failing os.stat() is reported as a ValueError containing the
     filename."""
     with mock.patch('os.stat', mock.Mock(side_effect=IOError('msg'))):
         # assertRaisesRegex with a raw-string pattern: the Regexp alias
         # was removed in Python 3.12 and '\.' is an invalid escape in a
         # non-raw literal.
         with self.assertRaisesRegex(ValueError, r'myfilename\.txt'):
             get_file_stat('myfilename.txt')
 def test_on_success_modifies_utime(self):
     """After on_done() completes, the file carries the desired utime."""
     self.subscriber.on_done(self.future)
     stat_result = get_file_stat(self.filename)
     self.assertEqual(stat_result[1], self.desired_utime)