def get_macro():
    """Return the current text of the /macros/BookCount macro.

    Reads the latest revision of the macro straight from the database and
    returns the macro's string value.
    """
    rows = list(web.query("select data from data, thing where thing_id=thing.id and key='/macros/BookCount' and revision=latest_revision"))
    return cjson.decode(rows[0].data)['macro']['value']


# --- script body: recount books and rewrite the BookCount macro ---

rc = read_rc()
web.config.db_parameters = dict(dbn='postgres', db=rc['db'], user=rc['user'],
                                pw=rc['pw'], host=rc['host'])
web.config.db_printing = False
web.ctx.ip = '127.0.0.1'
web.load()

book_count = count_books()
# Append a "<unix-time> <count>" sample to the running log.
# (Fixed: use `with` so the file handle is closed deterministically.)
with open('/home/edward/book_count', 'a') as log:
    log.write("%d %d\n" % (time(), book_count))

infogami = Infogami(rc['infogami'])
infogami.login('edward', rc['edward'])

macro = get_macro()
# Replace the hard-coded book total inside the macro text.
re_books = re.compile(r'books = "<strong>[\d,]+</strong>"')
books = commify(book_count)
macro = re_books.sub('books = "<strong>' + books + '</strong>"', macro)

# full text count is disabled so that the number stays about 1 million
# fulltext = count_fulltext()
# re_fulltext = re.compile(r'fulltext = "<strong>[\d,]+</strong>"')
# macro = re_fulltext.sub('fulltext = "<strong>' + fulltext + '</strong>"', macro)

q = {
    'key': '/macros/BookCount',
    'macro': {
        'connect': 'update',
        'type': '/type/text',
        'value': macro,
    }
}
# NOTE(review): this copy of the script is truncated here in the paste; the
# duplicate copy later in the file follows with an infogami.write(q, ...) call.
from olwrite import Infogami
from load import build_query
from merge import try_merge
from db_read import get_things
from catalog.get_ia import get_ia, urlopen_keep_trying
from catalog.merge.merge_marc import build_marc
import pool
import sys
import urllib2

archive_url = "http://archive.org/download/"

rc = read_rc()
infogami = Infogami('pharosdb.us.archive.org:7070')
infogami.login('ImportBot', rc['ImportBot'])

# Connection to the archive.org item metadata database.
conn = MySQLdb.connect(host=rc['ia_db_host'], user=rc['ia_db_user'],
                       passwd=rc['ia_db_pass'], db='archive')
cur = conn.cursor()

#collection = sys.argv[1]
#print 'loading from collection: %s' % collection


def read_short_title(title):
    # First 25 characters of the normalized title, as a plain byte string;
    # used as a short match key.
    return str(fast_parse.normalize_str(title)[:25])


def make_index_fields(rec):
    # NOTE(review): the body of this function is missing from this copy in the
    # mangled paste — restore it from the original source before running.
    pass
def get_macro():
    """Return the current text of the /macros/BookCount macro.

    NOTE(review): this copy began mid-expression in the mangled paste; the
    `def get_macro(): rows = list(web.query(` header was restored from the
    duplicate copy of this script earlier in the file.
    """
    rows = list(
        web.query(
            "select data from data, thing where thing_id=thing.id and key='/macros/BookCount' and revision=latest_revision"
        )
    )
    return cjson.decode(rows[0].data)["macro"]["value"]


# --- script body: recount books and rewrite the BookCount macro ---

rc = read_rc()
web.config.db_parameters = dict(dbn="postgres", db=rc["db"], user=rc["user"],
                                pw=rc["pw"], host=rc["host"])
web.config.db_printing = False
web.ctx.ip = "127.0.0.1"
web.load()

book_count = count_books()
# Append a "<unix-time> <count>" sample to the running log.
# (Fixed: use `with` so the file handle is closed deterministically.)
with open("/home/edward/book_count", "a") as log:
    log.write("%d %d\n" % (time(), book_count))

infogami = Infogami(rc["infogami"])
infogami.login("edward", rc["edward"])

macro = get_macro()
# Replace the hard-coded book total inside the macro text.
re_books = re.compile(r'books = "<strong>[\d,]+</strong>"')
books = commify(book_count)
macro = re_books.sub('books = "<strong>' + books + '</strong>"', macro)

# full text count is disabled so that the number stays about 1 million
# fulltext = count_fulltext()
# re_fulltext = re.compile(r'fulltext = "<strong>[\d,]+</strong>"')
# macro = re_fulltext.sub('fulltext = "<strong>' + fulltext + '</strong>"', macro)

q = {"key": "/macros/BookCount",
     "macro": {"connect": "update", "type": "/type/text", "value": macro}}
infogami.write(q, comment="update book count")
# --- script body: recount books and rewrite the BookCount macro ---
# (duplicate copy of the script above; relies on get_macro() defined earlier)

rc = read_rc()
web.config.db_parameters = dict(dbn='postgres', db=rc['db'], user=rc['user'],
                                pw=rc['pw'], host=rc['host'])
web.config.db_printing = False
web.ctx.ip = '127.0.0.1'
web.load()

book_count = count_books()
# Append a "<unix-time> <count>" sample to the running log.
# (Fixed: use `with` so the file handle is closed deterministically.)
with open('/home/edward/book_count', 'a') as log:
    log.write("%d %d\n" % (time(), book_count))

infogami = Infogami(rc['infogami'])
infogami.login('edward', rc['edward'])

macro = get_macro()
# Replace the hard-coded book total inside the macro text.
re_books = re.compile(r'books = "<strong>[\d,]+</strong>"')
books = commify(book_count)
macro = re_books.sub('books = "<strong>' + books + '</strong>"', macro)

# full text count is disabled so that the number stays about 1 million
# fulltext = count_fulltext()
# re_fulltext = re.compile(r'fulltext = "<strong>[\d,]+</strong>"')
# macro = re_fulltext.sub('fulltext = "<strong>' + fulltext + '</strong>"', macro)

q = {
    'key': '/macros/BookCount',
    'macro': {
        'connect': 'update',
        # NOTE(review): this copy was cut off mid-dict in the mangled paste;
        # the remaining keys below were restored from the duplicate copies of
        # this script elsewhere in the file.
        'type': '/type/text',
        'value': macro,
    }
}
from olwrite import Infogami
from load import build_query
from merge import try_merge
from db_read import get_things
from catalog.get_ia import get_ia, urlopen_keep_trying
from catalog.merge.merge_marc import build_marc
import pool
import sys
import urllib2

archive_url = "http://archive.org/download/"

rc = read_rc()
infogami = Infogami('pharosdb.us.archive.org:7070')
infogami.login('ImportBot', rc['ImportBot'])

# Connection to the archive.org item metadata database.
conn = MySQLdb.connect(host=rc['ia_db_host'], user=rc['ia_db_user'],
                       passwd=rc['ia_db_pass'], db='archive')
cur = conn.cursor()

#collection = sys.argv[1]
#print 'loading from collection: %s' % collection


def read_short_title(title):
    # First 25 characters of the normalized title, as a plain byte string;
    # used as a short match key.
    return str(fast_parse.normalize_str(title)[:25])


def make_index_fields(rec):
    fields = {}
    for k, v in rec.iteritems():
        # NOTE(review): the loop body (and the rest of this function) was
        # truncated in the mangled paste — presumably it populates `fields`
        # from each record field; restore from the original source before use.
        pass