def test_regex_compile(self):
    """Verify that Site.output() honors a regex filter: only posts whose
    path matches are written, and an unfiltered output() writes them all.
    """
    project_dir, output_dir = get_dirs({
        'input/foo/post1.md': testdata.get_unicode_words(),
        'input/foo2/post2.md': testdata.get_unicode_words(),
        'input/bar/post3.md': testdata.get_unicode_words(),
        'input/bar/fake.jpg': "",
    })

    s = Site(project_dir, output_dir)

    def output_count():
        # count the posts whose output directory actually exists on disk
        # (the original repeated this loop three times verbatim)
        return sum(1 for p in s.posts if p.output_dir.exists())

    # filtered output should write only the single "bar" post
    s.output(r"bar")
    self.assertEqual(1, output_count())

    # running the same filtered output again should be idempotent
    s.output(r"bar")
    self.assertEqual(1, output_count())

    # a full, unfiltered output writes all three posts
    s.output()
    self.assertEqual(3, output_count())
def test_unicode(self):
    """
    Jarid was having encoding issues, so I'm finally making sure prom
    only ever returns unicode strings
    """
    orm_class = self.get_orm_class()
    table_name = self.get_table_name()
    # use the table_name fetched above -- the original called
    # self.get_table_name() a second time here, leaving this local unused
    # (and, if the helper generates names, naming a different table)
    orm_class.schema = self.get_schema(
        table_name,
        foo=Field(unicode, True),
        bar=Field(str, True),
        che=Field(str, False),
        baz=Field(int, False),
    )

    t = orm_class.create(
        foo=testdata.get_unicode_name(),
        bar=testdata.get_unicode_words(),
        che=testdata.get_unicode_words().encode('utf-8'),
        baz=testdata.get_int(1, 100000)
    )

    t2 = orm_class.query.get_pk(t.pk)

    self.assertEqual(t.foo, t2.foo)
    self.assertEqual(t.bar, t2.bar)
    # che was stored as utf-8 bytes but should come back as text
    self.assertEqual(t.che.decode("utf-8"), t2.che)
    self.assertTrue(isinstance(t.baz, int))
def test_crud(self):
    """Exercise the create/update round trip on a Notebook: fields start
    out empty, save() populates the server-assigned fields, and a second
    save() after a rename bumps the updated timestamp."""
    nb = Notebook()

    # a fresh, unsaved notebook has no server-assigned fields yet
    for attr in ("guid", "name", "created", "updated"):
        self.assertIsNone(getattr(nb, attr))

    name = testdata.get_unicode_words(1)
    nb.name = name
    self.assertEqual(name, nb.name)

    nb.save()

    self.assertIsNotNone(nb.guid)
    self.assertEqual(name, nb.name)
    self.assertIsNotNone(nb.created)
    self.assertIsNotNone(nb.updated)

    first_updated = nb.updated
    renamed = testdata.get_words(1)
    nb.name = renamed
    self.assertNotEqual(name, nb.name)

    # let enough wall-clock time pass that the updated stamp must differ
    time.sleep(1)
    nb.save()

    self.assertEqual(renamed, nb.name)
    self.assertNotEqual(first_updated, nb.updated)
def test_get_unicode_words(self):
    """get_unicode_words() should return a non-empty string containing
    characters outside the ascii range."""
    words = testdata.get_unicode_words()
    self.assertGreater(len(words), 0)

    # prove the string is genuinely non-ascii: forcing it through an
    # ascii codec has to blow up on both major python versions
    with self.assertRaises(UnicodeEncodeError):
        if is_py2:
            words.decode('utf-8')
        elif is_py3:
            bytes(words, encoding="ascii").decode('utf-8')
def test_close(self):
    """A file server used as a context manager should start up and shut
    down cleanly even when no requests are ever made against it."""
    body = testdata.get_unicode_words()
    server = testdata.create_fileserver({"foo.txt": body})
    with server:
        pass
def test_unicode_output(self):
    """A post whose markdown body is unicode should render to the
    expected html file on disk."""
    # NOTE(review): "aux" is a reserved device name on Windows --
    # presumably chosen on purpose; confirm if Windows support matters
    project_dir, output_dir = get_dirs({
        'input/aux/index.md': testdata.get_unicode_words(),
    })

    site = Site(project_dir, output_dir)
    site.output()

    expected = os.path.join(str(output_dir), 'aux', 'index.html')
    self.assertTrue(os.path.isfile(expected))
def test_private_post(self):
    """Directories whose names start with an underscore are private and
    must be excluded from both posts and other output files."""
    project_dir, output_dir = get_dirs({
        'input/_foo/post1.md': testdata.get_unicode_words(),
        'input/_foo/fake.jpg': "",
        'input/_bar/other/something.jpg': "",
    })

    site = Site(project_dir, output_dir)
    site.output()

    # nothing should have been picked up from the underscored directories
    self.assertIsNone(site.posts.first_post)
    self.assertIsNone(site.others.first_post)
def test_sitemap(self):
    """The sitemap plugin should emit a sitemap.xml that references every
    generated post."""
    # imported for effect -- presumably importing the plugin registers it
    # with the site machinery; TODO confirm
    from bang.plugins import sitemap

    project_dir, output_dir = get_dirs({
        'input/1/one.md': u'1. {}'.format(testdata.get_unicode_words()),
        'input/2/two.md': u'2. {}'.format(testdata.get_unicode_words()),
        'input/3/three.md': u'3. {}'.format(testdata.get_unicode_words()),
        'bangfile.py': "\n".join([
            "host = 'example.com'",
            "",
        ]),
    })

    site = Site(project_dir, output_dir)
    site.output()

    sitemap_path = os.path.join(str(site.output_dir), 'sitemap.xml')
    self.assertTrue(os.path.isfile(sitemap_path))

    # every post should appear as a host-qualified url in the sitemap
    body = get_body(sitemap_path)
    for slug in ('1', '2', '3'):
        self.assertTrue('example.com/{}'.format(slug) in body)
def test_sitemap(self):
    """sitemap.xml should be generated and should contain a url for
    every page of the site."""
    site = self.get_site({
        '1/one.md': '1. {}'.format(testdata.get_unicode_words()),
        '2/two.md': '2. {}'.format(testdata.get_unicode_words()),
        '3/three.md': '3. {}'.format(testdata.get_unicode_words()),
        'bangfile.py': [
            "from bang import event",
            "@event('config')",
            "def global_config(event_name, config):",
            " config.host = 'example.com'",
        ],
    })
    site.output()

    sitemap_path = os.path.join(str(site.output_dir), 'sitemap.xml')
    self.assertTrue(os.path.isfile(sitemap_path))

    # each page should show up as a host-qualified url
    body = get_body(sitemap_path)
    for slug in ('1', '2', '3'):
        self.assertTrue('example.com/{}'.format(slug) in body)
def test_param_unicode(self):
    """A unicode value passed through the param decorator should survive
    the round trip intact."""
    controller = create_controller()

    request = endpoints.Request()
    request.set_header("content-type", "application/json;charset=UTF-8")
    # NOTE(review): charset assertion was already disabled upstream --
    # the encoding read is kept to preserve the original flow; confirm
    # whether it can be dropped entirely
    charset = request.encoding
    controller.request = request
    #self.assertEqual("UTF-8", charset)

    @endpoints.decorators.param('foo', type=str)
    def foo(self, *args, **kwargs):
        return kwargs.get('foo')

    words = testdata.get_unicode_words()
    ret = foo(controller, foo=words)
    self.assertEqual(String(ret), String(words))
def test_server_encoding(self):
    """The file server should serve content in the environment's default
    encoding, and honor an explicit encoding override."""
    content = testdata.get_unicode_words()

    # default encoding: response should report the environment's encoding
    server = testdata.create_fileserver({
        "foo.txt": content,
    })
    with server:
        res = testdata.fetch(server.url("foo.txt"))
        self.assertEqual(environ.ENCODING.upper(), res.encoding.upper())
        self.assertEqual(content, res.body)

    # explicit override: the response should no longer be utf-8, but the
    # body must still round-trip intact
    server = testdata.create_fileserver({
        "foo.txt": content,
    }, encoding="UTF-16")
    with server:
        res = testdata.fetch(server.url("foo.txt"))
        self.assertNotEqual("UTF-8", res.encoding.upper())
        self.assertEqual(content, res.body)
def test_unicode(self):
    """String() should yield the same value whether it is fed text
    directly or that text's ByteString form."""
    text = testdata.get_unicode_words()
    from_bytes = String(ByteString(text))
    from_text = String(text)
    self.assertEqual(from_bytes, from_text)
def test_unicode(self):
    """Q should round-trip a query string containing unicode unchanged."""
    query = "yt {}".format(testdata.get_unicode_words())
    self.assertEqual(query, Q(query))
def test_unicode(self):
    """A fetch whose query contains unicode words should still come back
    with a 200."""
    server = Server()
    query = "yt {}".format(testdata.get_unicode_words())
    response = server.fetch(query)
    self.assertEqual(200, response.code)
def test_concat(self):
    """Adding a command and then finding it with extra unicode words
    appended should not raise."""
    cmd = testdata.get_ascii()
    commands.add(cmd, b"{}")

    # success here means find() completes without raising
    query = "{} {}".format(cmd, testdata.get_unicode_words())
    url = commands.find(query)