def test_setting_filename_suffix_works(self):
    """A custom filename_suffix should end up on the written event file."""
    logdir = s3_temp_dir()
    writer = EventFileWriter(logdir, filename_suffix='.event_horizon')
    writer.close()
    written_files = sorted(gfile.glob(s3_join(logdir, '*')))
    # The suffix (minus its leading dot) must be the file's last extension.
    self.assertEqual(written_files[0].split('.')[-1], 'event_horizon')
def testGlob(self):
    """Glob over an S3 directory returns the full recursive listing.

    S3 glob includes subdirectory content, which standard
    filesystem does not. However, this is good for perf.
    """
    temp_dir = self._CreateDeepS3Structure()
    expected = [
        "a.tfevents.1",
        "bar/b.tfevents.1",
        "bar/baz/c.tfevents.1",
        "bar/baz/d.tfevents.1",
        "bar/quux/some_flume_output.txt",
        "bar/quux/some_more_flume_output.txt",
        "bar/red_herring.txt",
        "model.ckpt",
        "quuz/e.tfevents.1",
        "quuz/garply/corge/g.tfevents.1",
        "quuz/garply/f.tfevents.1",
        "quuz/garply/grault/h.tfevents.1",
        "waldo/fred/i.tfevents.1",
    ]
    expected_listing = [self._PathJoin(temp_dir, f) for f in expected]
    gotten_listing = gfile.glob(self._PathJoin(temp_dir, "*"))
    # Use the native Python 3 unittest assertion instead of the six
    # compatibility shim, consistent with the other tests in this file.
    self.assertCountEqual(
        expected_listing,
        gotten_listing,
        "Files must match. Expected %r. Got %r."
        % (expected_listing, gotten_listing),
    )
def testGlobChaining(self):
    """This tests glob with chained file systems."""
    temp_dir = self.get_temp_dir()
    # Strip the scheme to get the on-disk path backing the temp dir.
    on_disk = temp_dir.split("://")[1]
    for name, payload in (("foo.txt", b"foo"), ("bar.txt", b"bar")):
        with open(posixpath.join(on_disk, name), "wb") as handle:
            handle.write(payload)
    foo_raw = posixpath.join(temp_dir, "foo.txt")
    foo_cached = "simplecache::" + foo_raw
    # The file must be visible both directly and through the cache layer.
    self.assertTrue(gfile.exists(foo_raw))
    self.assertTrue(gfile.exists(foo_cached))
    cache_dir = "simplecache::" + temp_dir
    matches = gfile.glob(posixpath.join(cache_dir, "*.txt"))
    self.assertCountEqual(
        matches,
        [
            posixpath.join(cache_dir, "foo.txt"),
            posixpath.join(cache_dir, "bar.txt"),
        ],
    )
def test_event_file_writer_roundtrip(self):
    """An event written via EventFileWriter can be read back verbatim."""
    _TAGNAME = 'dummy'
    _DUMMY_VALUE = 42
    logdir = s3_temp_dir()
    writer = EventFileWriter(logdir)
    fakeevent = event_pb2.Event(
        summary=Summary(
            value=[Summary.Value(tag=_TAGNAME, simple_value=_DUMMY_VALUE)]
        )
    )
    writer.add_event(fakeevent)
    writer.close()
    written_files = sorted(gfile.glob(s3_join(logdir, '*')))
    # Exactly one event file should have been produced.
    self.assertEqual(len(written_files), 1)
    reader = PyRecordReader_New(written_files[0])
    reader.GetNext()  # meta data, so skip
    reader.GetNext()
    self.assertEqual(fakeevent.SerializeToString(), reader.record())
def testGlob(self):
    """Glob over a local temp dir returns only the top-level entries."""
    temp_dir = tempfile.mkdtemp(prefix=self.base_temp_dir)
    self._CreateDeepDirectoryStructure(temp_dir)
    expected = [
        'foo',
        'bar',
        'a.tfevents.1',
        'model.ckpt',
        'quuz',
        'waldo',
    ]
    expected_listing = [os.path.join(temp_dir, f) for f in expected]
    gotten_listing = gfile.glob(os.path.join(temp_dir, "*"))
    # Use the native Python 3 unittest assertion instead of the six
    # compatibility shim, consistent with the other tests in this file.
    self.assertCountEqual(
        expected_listing,
        gotten_listing,
        'Files must match. Expected %r. Got %r.'
        % (expected_listing, gotten_listing))
def testGlob(self):
    """Globbing the temp dir yields exactly the expected top-level names."""
    temp_dir = self.get_temp_dir()
    self._CreateDeepDirectoryStructure(temp_dir)
    top_level = [
        "foo",
        "bar",
        "a.tfevents.1",
        "model.ckpt",
        "quuz",
        "waldo",
    ]
    expected_listing = [os.path.join(temp_dir, name) for name in top_level]
    gotten_listing = gfile.glob(os.path.join(temp_dir, "*"))
    self.assertCountEqual(
        expected_listing,
        gotten_listing,
        "Files must match. Expected %r. Got %r."
        % (expected_listing, gotten_listing),
    )
def testGlobNonAbsolute(self):
    """This tests glob with in memory file system which does not
    return absolute paths from glob."""
    memfs = fsspec.filesystem("memory")
    memfs.mkdir("dir")
    for name in ("foo.txt", "bar.txt"):
        memfs.touch("dir/" + name)
    root = "memory://dir"
    matches = gfile.glob(posixpath.join(root, "*.txt"))
    self.assertCountEqual(
        matches,
        [
            posixpath.join(root, "foo.txt"),
            posixpath.join(root, "bar.txt"),
        ],
    )