Example 1
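A test fixture for sync tests: it creates files that match on both sides, differ between local and remote, or exist on only one side, then snapshots the expected contents of each side with s3.ls.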
    def setUp(self):
        super(TestSync, self).setUp()

        # Files that are the same on both sides:
        self.create_random_file_at((self.tmp_dir, self.s3_test_location), 'foo/a.txt')
        self.create_random_file_at((self.tmp_dir, self.s3_test_location), 'foo/b.txt')
        self.create_random_file_at((self.tmp_dir, self.s3_test_location), 'foo/bar/c.txt')
        self.create_random_file_at((self.tmp_dir, self.s3_test_location), 'foo/bar/baz/d.txt')
        # Files that are different locally and remotely:
        self.create_random_file_at((self.tmp_dir,), 'foo/x.txt')
        self.create_random_file_at((self.s3_test_location,), 'foo/x.txt')
        self.create_random_file_at((self.tmp_dir,), 'foo/bar/x.txt')
        self.create_random_file_at((self.s3_test_location,), 'foo/bar/x.txt')
        # Files that are only local:
        self.create_random_file_at((self.tmp_dir,), 'foo/loc.txt')
        self.create_random_file_at((self.tmp_dir,), 'foo/bar/loc.txt')
        # Files that are only remote:
        self.create_random_file_at((self.s3_test_location,), 'foo/rem.txt')
        self.create_random_file_at((self.s3_test_location,), 'foo/bar/rem.txt')

        self.local_dir_to_sync = s3.path.join(self.tmp_dir, 'foo')
        self.remote_dir_to_sync = s3.path.join(self.s3_test_location, 'foo')

        self.expected_local_contents = s3.ls(self.local_dir_to_sync)
        self.expected_remote_contents = [
            x.replace(self.s3_path + 'foo/', '')
            for x in s3.ls(self.remote_dir_to_sync)
        ]

        # Directory marker that some clients create; added after the contents
        # lists are built so it does not appear in them.
        s3.touch(s3.path.join(self.s3_test_location, 'foo/'))
Example 2
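Covers the three listing modes of s3.ls: bucket-relative keys by default, full URLs with return_full_urls, and immediate children only with shallow.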
 def test_s3_ls(self):
     files = ["foo", "bar.baz", "quack/foo.foo"]
     for f in files:
         s3.cp(self.local_file, self.remote_file(f))
     self.assertEqual(set(s3.ls(self.s3_test_location)),
                      set(map(lambda x: self.s3_path + x, files)))
     self.assertEqual(
         set(s3.ls(self.s3_test_location, return_full_urls=True)),
         set([self.remote_file(x) for x in files]))
     self.assertEqual(
         set(s3.ls(self.s3_test_location, shallow=True)),
         set([self.s3_path + x for x in ['foo', 'bar.baz', 'quack/']]))
Example 3
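An assertion helper that checks the local directory and the remote prefix against the same expected set, stripping the prefix and ignoring the directory-marker key.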
 def assertContentsAre(self, expected_contents):
     self.assertSetEqual(set(s3.ls(self.local_dir_to_sync)),
                         set(expected_contents))
     self.assertSetEqual(
         set([
             x.replace(self.s3_path + 'foo/', '')
             for x in s3.ls(self.remote_dir_to_sync)
             if x != self.s3_path + 'foo/'
         ]), set(expected_contents))
Example 4
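Tests s3.path.gettmpdir with a deterministic stand-in UUID generator, verifying that each call yields a fresh prefix containing exactly one key, the .tempdir marker.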
 def test_s3_tmpdir(self):
     def fake_uuid():
         fake_uuid.counter += 1
         return "FAKE-UUID-%d" % fake_uuid.counter
     fake_uuid.counter = 0
     self.assertEqual(
         s3.path.gettmpdir(bucket=TEST_BUCKET, prefix=self.s3_path, uuid_generator=fake_uuid),
         's3://%s/%sFAKE-UUID-1/' % (TEST_BUCKET, self.s3_path))
     self.assertEqual(
         len(list(s3.ls('s3://%s/%sFAKE-UUID-1' % (TEST_BUCKET, self.s3_path)))),
         1)
     self.assertTrue(
         s3.exists('s3://%s/%sFAKE-UUID-1/.tempdir' % (TEST_BUCKET, self.s3_path)))
     self.assertEqual(
         s3.path.gettmpdir(bucket=TEST_BUCKET, prefix=self.s3_path, uuid_generator=fake_uuid),
         's3://%s/%sFAKE-UUID-2/' % (TEST_BUCKET, self.s3_path))
     self.assertEqual(
         len(list(s3.ls('s3://%s/%sFAKE-UUID-2' % (TEST_BUCKET, self.s3_path)))),
         1)
     self.assertTrue(
         s3.exists('s3://%s/%sFAKE-UUID-2/.tempdir' % (TEST_BUCKET, self.s3_path)))
Example 5
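Filters a listing down to the keys that share the file's extension and returns their semantic versions in sorted order.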
    def versions_available(self, path):
        import semantic_version

        path = self.normalize_path(path)
        base_path, ext = os.path.splitext(path)

        # Keep only the keys that share the original extension, then sort
        # them by semantic version.
        versions = [v for v in s3.ls('s3://' + self.bucket + base_path)
                    if os.path.splitext(v)[1] == ext]
        versions = sorted(semantic_version.Version(self.extract_version(v))
                          for v in versions)
        versions = [str(v) for v in versions]
        return versions
Example 6
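A regression test: a key containing double slashes must survive s3.cp and come back unchanged from s3.ls.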
 def test_s3_with_double_slashes_in_key(self):
     '''
     boto has a nasty default behavior of collapsing `//` to `/` in keys
     '''
     s3.cp(self.local_file,
           self.remote_file('double//slashes//bork//boto.foo'))
     self.assertEqual([self.remote_file('double//slashes//bork//boto.foo')],
                      list(
                          s3.ls(self.remote_file(''),
                                return_full_urls=True)))
Example 7
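A CLI command that iterates over every version under a prefix and restores any key that no longer exists.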
 def main(self, prefix):
     from baiji.util.console import LabeledSpinner
     if s3.path.islocal(prefix):
         raise ValueError("restore command only works on s3")
     spin = LabeledSpinner()
     for key in s3.ls(prefix,
                      return_full_urls=True,
                      require_s3_scheme=True,
                      list_versions=True):
         if not s3.exists(key):
             spin.drop("Restoring deleted file {}".format(key))
             s3.restore(key)
         else:
             spin.spin(key)
Example 8
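Lists an entire bucket, parses each path into a (key, version) pair, and returns the unique keys in sorted order.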
    def ls_remote(self):
        '''
        Return a list of keys on the remote server.

        TODO This could return the versions and creation dates too.

        '''
        paths = s3.ls('s3://' + self.bucket)
        parsed = [self.parse(path) for path in paths]
        # parsed is a list of (key, version) tuples.
        return sorted(set([key for key, _ in parsed]))
Example 9
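The ls command-line entry point: with the detail option it prints each key's size, last-modified time, encryption flag, and version ID; otherwise it prints one key per line.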
 def main(self, key):
     if self.uri:
         print "-B and --uri are deprecated options"
     try:
         keys = s3.ls(key,
                      return_full_urls=True,
                      require_s3_scheme=True,
                      shallow=self.shallow,
                      list_versions=self.list_versions)
         if self.detail:
             from baiji.util.console import sizeof_format_human_readable
             for key in keys:
                 info = s3.info(key)
                 enc = " enc" if info['encrypted'] else "    "
                 print "%s\t%s%s\t%s\t%s" % (sizeof_format_human_readable(info['size']), info['last_modified'], enc, key.encode('utf-8'), info['version_id'])
         else:
             print u"\n".join(keys).encode('utf-8')
     except s3.InvalidSchemeException as e:
         print e
         return 1
Example 10
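Verifies that listing a bucket that does not exist raises s3.KeyNotFound.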
 def test_raises_keyerror_for_nonexistent_bucket(self):
     with self.assertRaises(s3.KeyNotFound):
         s3.ls('s3://foo-bar-baz-please-this-is-not-a-bucket-amirite')
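Taken together, these examples exercise every s3.ls option that appears in the sources above. The following is a minimal sketch of the common call patterns; it is not taken from the test suite, and my-bucket and prefix are placeholder names for a readable S3 location.

from baiji import s3

# Default listing: bucket-relative key paths under the prefix.
keys = s3.ls('s3://my-bucket/prefix/')

# Full s3:// URLs instead of bare key paths (Example 2).
urls = s3.ls('s3://my-bucket/prefix/', return_full_urls=True)

# Immediate children only; sub-prefixes come back with a trailing slash (Example 2).
children = s3.ls('s3://my-bucket/prefix/', shallow=True)

# Every stored version of every key, as the restore command in Example 7 uses.
all_versions = s3.ls('s3://my-bucket/prefix/',
                     return_full_urls=True,
                     require_s3_scheme=True,
                     list_versions=True)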