import os

# External helpers (sources, files, read_marc_file, rc, fast_parse, pool,
# withKey, build_marc, try_merge, add_source_records, is_loaded, progress,
# chunk) are assumed to be defined elsewhere in this module/package.

def iter_marc(sources):
    """Yield (rec_no, pos, loc, data) for every record in the given archives."""
    rec_no = 0
    for ia in sources:
        for part, size in files(ia):
            full_part = ia + "/" + part
            filename = rc['marc_path'] + full_part
            assert os.path.exists(filename)
            f = open(filename, 'rb')  # MARC records are binary
            for pos, loc, data in read_marc_file(full_part, f):
                rec_no += 1
                yield rec_no, pos, loc, data
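# Both iterators lean on read_marc_file to split a raw dump into records.
# Below is a minimal sketch of what that reader could look like, not the
# real implementation: it assumes MARC-21 binary transmission format, where
# bytes 0-4 of each record's leader hold the record length as ASCII digits,
# and it assumes loc is encoded as "part:offset:length".

def read_marc_file(part, f, pos=0):
    # Sketch only; the actual reader may differ.
    while True:
        length_bytes = f.read(5)
        if len(length_bytes) < 5:
            break  # end of file
        length = int(length_bytes)  # record length includes the leader
        data = length_bytes + f.read(length - 5)
        # loc identifies the record by file, byte offset and length
        yield pos, "%s:%d:%d" % (part, pos, length), data
        pos += length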
def iter_marc():
    """Like iter_marc(sources), but discovers archives via sources() and logs progress."""
    rec_no = 0
    for ia in sources():
        print(ia)
        for part, size in files(ia):
            full_part = ia + "/" + part
            filename = base + full_part  # `base`: module-level MARC path prefix (assumed)
            assert os.path.exists(filename)
            print(filename)
            f = open(filename, 'rb')  # MARC records are binary
            for pos, loc, data in read_marc_file(full_part, f):
                rec_no += 1
                yield rec_no, pos, loc, data
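# A minimal sketch of driving the no-argument iterator from a script; the
# progress interval is arbitrary, and sources()/files() are assumed to exist
# alongside the code above.

if __name__ == '__main__':
    # Walk every record in every source archive, logging periodic progress.
    for rec_no, pos, loc, data in iter_marc():
        if rec_no % 100000 == 0:
            print('%d records so far; currently at %s' % (rec_no, loc))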
def load_part(archive_id, part, start_pos=0):
    print('load_part:', archive_id, part)
    global rec_no, t_prev, load_count
    full_part = archive_id + "/" + part
    f = open(rc['marc_path'] + "/" + full_part, 'rb')  # MARC records are binary
    if start_pos:
        f.seek(start_pos)
    for pos, loc, data in read_marc_file(full_part, f, pos=start_pos):
        rec_no += 1
        if rec_no % chunk == 0:
            progress(archive_id, rec_no, start_pos, pos)
        if is_loaded(loc):
            continue  # already in the database
        # Control fields and identifiers used to find candidate editions
        want = ['001', '003', '010', '020', '035', '245']
        try:
            index_fields = fast_parse.index_fields(data, want)
        except KeyError:
            print(loc)
            print(fast_parse.get_tag_lines(data, ['245']))
            raise
        except AssertionError:
            print(loc)
            raise
        except fast_parse.NotBook:
            continue
        if not index_fields or 'title' not in index_fields:
            continue

        print(loc)
        edition_pool = pool.build(index_fields)
        if not edition_pool:
            yield loc, data  # no candidates: caller should create a new edition
            continue

        rec = fast_parse.read_edition(data)
        e1 = build_marc(rec)

        match = False
        seen = set()
        for k, v in edition_pool.items():
            for edition_key in v:
                if edition_key in seen:
                    continue
                # Resolve redirects until we reach a real edition
                thing = None
                while not thing or thing['type']['key'] == '/type/redirect':
                    seen.add(edition_key)
                    thing = withKey(edition_key)
                    assert thing
                    if thing['type']['key'] == '/type/redirect':
                        print('following redirect %s => %s' % (edition_key, thing['location']))
                        edition_key = thing['location']
                if try_merge(e1, edition_key, thing):
                    add_source_records(edition_key, loc, thing, data)
                    match = True
                    break
            if match:
                break
        if not match:
            yield loc, data  # nothing merged: caller should create a new edition
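# Note that load_part is a generator: records it merges into an existing
# edition are handled in place via add_source_records, and only unmatched
# (loc, data) pairs are yielded back. A sketch of a caller follows, where
# load_book is a hypothetical helper that would create a fresh edition from
# the raw MARC data; the real loader may differ.

def load(archive_id, part, start_pos=0):
    # Create new editions for the records load_part failed to match.
    new_count = 0
    for loc, data in load_part(archive_id, part, start_pos=start_pos):
        load_book(loc, data)  # load_book is hypothetical
        new_count += 1
    print('%d new records loaded from %s/%s' % (new_count, archive_id, part))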