def add_page(self, num, fname, mname, w, h):
    """Create a Page, attach it to this document, and bump the ``done`` counter.

    Args:
        num: page number.
        fname: page image filename.
        mname: thumbnail (mini) filename.
        w, h: page width and height in pixels.
    """
    p = Page(num=num, filename=fname, mininame=mname, width=w, height=h)
    p.save()
    self.pages.add(p)
    cursor = connection.cursor()
    try:
        # Use a DB-API parameter instead of Python %-interpolation: lets the
        # backend handle quoting and avoids malformed SQL / injection.
        cursor.execute(
            'UPDATE documents_document SET done = done + 1 WHERE id = %s',
            [self.id])
        connection.commit_unless_managed()
    finally:
        # Close the cursor even if execute/commit raises (original leaked it).
        cursor.close()
def test_commit_unless_managed(self):
    """commit_unless_managed() commits immediately when not in managed mode.

    Walks the connection's dirty flag through a raw INSERT, a commit, an ORM
    read, and a second commit, checking is_dirty() at each step.
    """
    cursor = connection.cursor()
    # Raw INSERT marks the connection dirty.
    cursor.execute("INSERT into transactions_regress_mod (fld) values (2)")
    # Outside managed mode this commits right away and clears the dirty flag.
    connection.commit_unless_managed()
    self.assertFalse(connection.is_dirty())
    # The ORM query both confirms the row was committed and, as a side
    # effect, dirties the connection again.
    self.assertEqual(len(Mod.objects.all()), 1)
    self.assertTrue(connection.is_dirty())
    # A second commit_unless_managed() clears the flag again.
    connection.commit_unless_managed()
    self.assertFalse(connection.is_dirty())
def add_page(self, num, fname, mname, w, h):
    """Create a Page, attach it to this document, and bump the ``done`` counter.

    Args:
        num: page number.
        fname: page image filename.
        mname: thumbnail (mini) filename.
        w, h: page width and height in pixels.
    """
    p = Page(num=num, filename=fname, mininame=mname, width=w, height=h)
    p.save()
    self.pages.add(p)
    cursor = connection.cursor()
    try:
        # Parameterized query instead of '%d' % self.id string formatting:
        # the backend quotes the value, and the cursor is no longer fed
        # hand-built SQL.
        cursor.execute(
            'UPDATE documents_document SET done = done + 1 WHERE id = %s',
            [self.id])
        connection.commit_unless_managed()
    finally:
        # Guarantee the cursor is released even on error (original leaked it).
        cursor.close()
def test_commit_unless_managed_in_managed(self):
    """commit_unless_managed() must NOT commit while in managed mode.

    Inside managed transaction management the call only marks the connection
    dirty; an explicit rollback() then discards the INSERT, so the row never
    becomes visible.
    """
    cursor = connection.cursor()
    # Enter managed mode: commits must now be explicit.
    connection.enter_transaction_management()
    transaction.managed(True)
    cursor.execute("INSERT into transactions_regress_mod (fld) values (2)")
    # In managed mode this is a no-op commit-wise; the connection stays dirty.
    connection.commit_unless_managed()
    self.assertTrue(connection.is_dirty())
    # Roll back the pending INSERT; the dirty flag is cleared.
    connection.rollback()
    self.assertFalse(connection.is_dirty())
    # The rolled-back row must not be visible.
    self.assertEqual(len(Mod.objects.all()), 0)
    connection.commit()
    connection.leave_transaction_management()
    self.assertFalse(connection.is_dirty())
    self.assertEqual(len(Mod.objects.all()), 0)
    # The ORM read above dirtied the connection again (query side effect).
    self.assertTrue(connection.is_dirty())
    # Back in unmanaged mode, commit_unless_managed() commits and cleans up.
    connection.commit_unless_managed()
    self.assertFalse(connection.is_dirty())
    self.assertEqual(len(Mod.objects.all()), 0)
def extract_words(doc, content):
    """Tokenize *content* and insert per-word counts for *doc* into
    search_wordentry via raw SQL.

    Args:
        doc: document model instance; only ``doc.id`` is used.
        content: raw text to index (untrusted input).

    Returns:
        Total number of tokens kept (before de-duplication).
    """
    # Split on non-word characters; keep lower-cased tokens longer than 2 chars.
    words = [x.lower() for x in sub(r'\W', ' ', content).split() if len(x) > 2]
    terms_count = dict()
    for word in words:
        terms_count[word] = terms_count.get(word, 0) + 1
    cursor = connection.cursor()
    try:
        for word, count in terms_count.iteritems():
            # Raw SQL because WordEntry.objects.create() was too slow here.
            try:
                # Parameterized query: `word` comes from untrusted content,
                # so interpolating it with '%s' was an SQL injection hole
                # (and broke on words containing quotes).
                cursor.execute(
                    "insert into search_wordentry (word, document_id, count) "
                    "values (%s, %s, %s)",
                    [word, doc.id, count])
            except Exception:
                # Deliberate best-effort: skip rows that fail (e.g. duplicate
                # key).  Narrowed from a bare except so KeyboardInterrupt /
                # SystemExit are no longer swallowed.
                pass
        connection.commit_unless_managed()
    finally:
        # Release the cursor even if something above raises.
        cursor.close()
    return len(words)
def extract_words(doc, content):
    """Tokenize *content* and insert per-word counts for *doc* into
    search_wordentry via raw SQL.

    Args:
        doc: document model instance; only ``doc.id`` is used.
        content: raw text to index (untrusted input).

    Returns:
        Total number of tokens kept (before de-duplication).
    """
    # Split on non-word characters; keep lower-cased tokens longer than 2 chars.
    words = [x.lower() for x in sub(r"\W", " ", content).split() if len(x) > 2]
    terms_count = dict()
    for word in words:
        terms_count[word] = terms_count.get(word, 0) + 1
    cursor = connection.cursor()
    try:
        for word, count in terms_count.iteritems():
            # Raw SQL because WordEntry.objects.create() was too slow here.
            try:
                # Parameterized query: interpolating untrusted `word` with
                # "'%s'" was an SQL injection vector and failed on words
                # containing quote characters.
                cursor.execute(
                    "insert into search_wordentry (word, document_id, count) "
                    "values (%s, %s, %s)",
                    [word, doc.id, count],
                )
            except Exception:
                # Deliberate best-effort insert: ignore constraint failures
                # (e.g. duplicates).  Narrowed from a bare except clause.
                pass
        connection.commit_unless_managed()
    finally:
        # Always release the cursor, even on error.
        cursor.close()
    return len(words)
def parse_words(doc, content):
    """Tokenize *content* and insert per-word counts for *doc* into
    search_wordentry, allocating row ids by hand.

    Args:
        doc: document model instance; only ``doc.id`` is used.
        content: raw text to index (untrusted input).

    Returns:
        Total number of tokens kept (before de-duplication).
    """
    # Split on non-word characters; keep lower-cased tokens longer than 2 chars.
    words = [x.lower() for x in sub(r"\W", " ", content).split() if len(x) > 2]
    terms_count = dict()
    for word in words:
        terms_count[word] = terms_count.get(word, 0) + 1
    cursor = connection.cursor()
    try:
        # NOTE(review): allocating ids as max(id)+1 is racy under concurrent
        # writers -- an autoincrement/serial column would be safer. Preserved
        # here because the table schema is outside this function's control.
        cursor.execute("select max(id) from search_wordentry;")
        next_id = cursor.fetchone()[0] or 0  # max(id) is NULL on empty table
        for word, count in terms_count.iteritems():
            # Raw SQL because WordEntry.objects.create() was too slow here.
            next_id += 1
            # Parameterized query: untrusted `word` was previously
            # interpolated into the SQL string (injection + quoting bugs).
            cursor.execute(
                "insert into search_wordentry values (%s, %s, %s, %s);",
                [next_id, word, doc.id, count])
        connection.commit_unless_managed()
    finally:
        # Release the cursor even if an insert raises.
        cursor.close()
    return len(words)