def test_maybe_nsfw_spoilers_flags_reporting(self):
    self.rdict['flags'] = linkcache.result.F_MAYBE_NSFW | \
        linkcache.result.F_SPOILERS
    result = linkcache.result.LinkCacheResult(self.rdict)
    flags = result.pretty_flags()
    self.assertIsInstance(flags, str)
    self.assertTrue('SPOILERS' in flags.split(','))
    self.assertTrue('~NSFW' in flags.split(','))

def test_private_nsfw_flags_reporting(self):
    self.rdict['flags'] = linkcache.result.F_NSFW
    self.rdict['private'] = True
    result = linkcache.result.LinkCacheResult(self.rdict)
    flags = result.pretty_flags()
    self.assertIsInstance(flags, str)
    self.assertTrue('P' in flags.split(','))
    self.assertTrue('NSFW' in flags.split(','))

def test_private_spoilers_flags_reporting(self):
    self.rdict['flags'] = linkcache.result.F_SPOILERS
    self.rdict['private'] = True
    result = linkcache.result.LinkCacheResult(self.rdict)
    flags = result.pretty_flags()
    self.assertIsInstance(flags, str)
    self.assertTrue('SPOILERS' in flags.split(','))
    self.assertTrue('P' in flags.split(','))
def test_private_flag_reporting(self):
    self.rdict['private'] = True
    result = linkcache.result.LinkCacheResult(self.rdict)
    flags = result.pretty_flags()
    self.assertIsInstance(flags, str)
    self.assertTrue('P' in flags.split(','))