def read_fmt(bib_name, bib_file):
    """
    Read the cached, pre-formatted entries for a bibliography file.

    If the bibliography file has been modified after the cache was
    written, or the cache cannot be read at all, this raises a CacheMiss.
    The returned entries are pre-formatted and compatible with cite
    completions.

    Arguments:
    bib_name -- the (unique) name of the bibliography
    bib_file -- the bibliography file, which resulted in the entries

    Returns:
    The cached pre-formatted entries, which should be passed to the
    cite completions

    Raises:
    cache.CacheMiss -- if the cache is unreadable or stale
    """
    cache_name, formatted_cache_name = _cache_name(bib_name, bib_file)
    try:
        meta_data, formatted_entries = cache.read_global(formatted_cache_name)
    except Exception:
        # any failure reading/unpacking the cache is treated as a miss;
        # narrowed from a bare except so KeyboardInterrupt/SystemExit
        # are not swallowed
        raise cache.CacheMiss()

    # raise a cache miss if the modification took place after the caching
    modified_time = os.path.getmtime(bib_file)
    if modified_time > meta_data["cache_time"]:
        raise cache.CacheMiss()

    # validate the version and format strings are still valid
    if (meta_data["version"] != _VERSION or
            any(meta_data[s] != get_setting("cite_" + s)
                for s in ["panel_format", "autocomplete_format"])):
        print("Formatting string has changed, updating cache...")
        # read the base information from the unformatted cache
        current_time, bib_entries = cache.read_global(cache_name)
        # format and cache the entries
        formatted_entries = _create_formatted_entries(
            formatted_cache_name, bib_entries, current_time)

    return formatted_entries
def get_entries(self, *bib_files):
    """
    Parse the given bibliography files with the new-style Parser and
    return the accumulated entries, wrapped in EntryWrapper objects.

    Results are cached per file (keyed on an md5 of the file name) and
    reused as long as the file's mtime is not newer than the cache.

    Arguments:
    *bib_files -- paths of the bibliography files to load

    Returns:
    A list of EntryWrapper objects for all readable files
    """
    entries = []
    parser = Parser()
    for bibfname in bib_files:
        # per-file cache key derived from the file name
        cache_name = "bib_" + hashlib.md5(
            bibfname.encode("utf8")).hexdigest()
        try:
            modified_time = os.path.getmtime(bibfname)
            cached_time, cached_entries = cache.read_global(cache_name)
            if modified_time <= cached_time:
                entries.extend(cached_entries)
                continue
        except Exception:
            # missing or unreadable cache -> fall through and re-parse
            pass

        try:
            bibf = codecs.open(bibfname, 'r', 'UTF-8', 'ignore')  # 'ignore' to be safe
        except IOError:
            print("Cannot open bibliography file %s !" % (bibfname, ))
            sublime.status_message(
                "Cannot open bibliography file %s !" % (bibfname, ))
            continue

        try:
            bib_data = parser.parse(bibf.read())
        finally:
            bibf.close()

        print('Loaded %d bibitems' % (len(bib_data)))

        # BUG FIX: collect this file's entries separately so the
        # per-file cache only stores this file's entries; previously the
        # cumulative `entries` list (including earlier files) was cached,
        # causing duplicated entries on later cache hits
        file_entries = []
        for key in bib_data:
            entry = bib_data[key]
            if entry.entry_type in ('xdata', 'comment', 'string'):
                continue
            file_entries.append(EntryWrapper(entry))
        entries.extend(file_entries)

        try:
            current_time = time.time()
            cache.write_global(cache_name, (current_time, file_entries))
        except Exception:
            print('Error occurred while trying to write to cache {0}'.
                  format(cache_name))
            traceback.print_exc()

    print("Found %d total bib entries" % (len(entries), ))
    return entries
def get_entries(self, *bib_files):
    """
    Parse the given bibliography files with the new-style Parser and
    return the accumulated entries, wrapped in EntryWrapper objects.

    Results are cached per file (keyed on an md5 of the file name) and
    reused as long as the file's mtime is not newer than the cache.

    Arguments:
    *bib_files -- paths of the bibliography files to load

    Returns:
    A list of EntryWrapper objects for all readable files
    """
    entries = []
    parser = Parser()
    for bibfname in bib_files:
        # per-file cache key derived from the file name
        cache_name = "bib_" + hashlib.md5(bibfname.encode("utf8")).hexdigest()
        try:
            modified_time = os.path.getmtime(bibfname)
            cached_time, cached_entries = cache.read_global(cache_name)
            if modified_time <= cached_time:
                entries.extend(cached_entries)
                continue
        except Exception:
            # missing or unreadable cache -> fall through and re-parse
            pass

        try:
            bibf = codecs.open(bibfname, 'r', 'UTF-8', 'ignore')  # 'ignore' to be safe
        except IOError:
            print("Cannot open bibliography file %s !" % (bibfname,))
            sublime.status_message("Cannot open bibliography file %s !" % (bibfname,))
            continue

        try:
            bib_data = parser.parse(bibf.read())
        finally:
            bibf.close()

        print('Loaded %d bibitems' % (len(bib_data)))

        # BUG FIX: collect this file's entries separately so the
        # per-file cache only stores this file's entries; previously the
        # cumulative `entries` list (including earlier files) was cached,
        # causing duplicated entries on later cache hits
        file_entries = []
        for key in bib_data:
            entry = bib_data[key]
            if entry.entry_type in ('xdata', 'comment', 'string'):
                continue
            file_entries.append(EntryWrapper(entry))
        entries.extend(file_entries)

        try:
            current_time = time.time()
            cache.write_global(cache_name, (current_time, file_entries))
        except Exception:
            print('Error occurred while trying to write to cache {0}'.format(
                cache_name
            ))
            traceback.print_exc()

    print("Found %d total bib entries" % (len(entries),))
    return entries
def get_entries(self, *bib_files):
    """
    Parse the given bibliography files with the traditional line-based
    scanner and return the accumulated entries as dicts.

    Each record is a dict with a 'keyword' key (the cite key, extracted
    via the module-level `kp` regex) plus any fields matched by the
    module-level `multip` regex (only a field's first line is captured).
    Results are cached per file (keyed on an md5 of the file name) and
    reused as long as the file's mtime is not newer than the cache.

    Arguments:
    *bib_files -- paths of the bibliography files to load

    Returns:
    A list of entry dicts for all readable files
    """
    entries = []
    for bibfname in bib_files:
        # per-file cache key derived from the file name
        cache_name = "tradbib_" + hashlib.md5(
            bibfname.encode("utf8")).hexdigest()
        try:
            modified_time = os.path.getmtime(bibfname)
            cached_time, cached_entries = cache.read_global(cache_name)
            if modified_time <= cached_time:
                entries.extend(cached_entries)
                continue
        except Exception:
            # missing or unreadable cache -> fall through and re-parse
            pass

        try:
            bibf = codecs.open(bibfname, 'r', 'UTF-8', 'ignore')  # 'ignore' to be safe
        except IOError:
            print("Cannot open bibliography file %s !" % (bibfname,))
            sublime.status_message("Cannot open bibliography file %s !" % (bibfname,))
            continue

        try:
            bib_data = bibf.readlines()
        finally:
            bibf.close()

        # BUG FIX: collect this file's entries separately so the
        # per-file cache (and the 'Loaded N bibitems' report) covers only
        # this file; previously the cumulative `entries` list was cached
        # and counted, duplicating earlier files' entries on cache hits
        file_entries = []
        entry = {}
        for line in bib_data:
            line = line.strip()
            # Let's get rid of irrelevant lines first
            if line == "" or line[0] == '%':
                continue
            if line.lower().startswith(("@comment", "@string", "@preamble")):
                continue
            if line[0] == "@":
                # a new record starts; flush the previous one if complete
                if 'keyword' in entry:
                    file_entries.append(entry)
                    entry = {}
                kp_match = kp.search(line)
                if kp_match:
                    entry['keyword'] = kp_match.group(1)
                else:
                    print("Cannot process this @ line: " + line)
                    print(
                        "Previous keyword (if any): " +
                        entry.get('keyword', '')
                    )
                continue
            # Now test for title, author, etc.
            # Note: we capture only the first line, but that's OK for our purposes
            multip_match = multip.search(line)
            if multip_match:
                key = multip_match.group(1).lower()
                value = multip_match.group(2)
                if key == 'title':
                    value = value.replace(
                        '{\\textquoteright}', ''
                    ).replace('{', '').replace('}', '')
                entry[key] = value
                continue
        # at the end, we have a single record
        if 'keyword' in entry:
            file_entries.append(entry)

        print('Loaded %d bibitems' % (len(file_entries)))
        entries.extend(file_entries)

        try:
            current_time = time.time()
            cache.write_global(cache_name, (current_time, file_entries))
        except Exception:
            print('Error occurred while trying to write to cache {0}'.format(
                cache_name
            ))
            traceback.print_exc()

    print("Found %d total bib entries" % (len(entries),))
    return entries
def get_entries(self, *bib_files):
    """
    Parse the given bibliography files with the traditional line-based
    scanner and return the accumulated entries as dicts.

    Each record is a dict with a 'keyword' key (the cite key, extracted
    via the module-level `kp` regex) plus any fields matched by the
    module-level `multip` regex (only a field's first line is captured).
    Results are cached per file (keyed on an md5 of the file name) and
    reused as long as the file's mtime is not newer than the cache.

    Arguments:
    *bib_files -- paths of the bibliography files to load

    Returns:
    A list of entry dicts for all readable files
    """
    entries = []
    for bibfname in bib_files:
        # per-file cache key derived from the file name
        cache_name = "tradbib_" + hashlib.md5(
            bibfname.encode("utf8")).hexdigest()
        try:
            modified_time = os.path.getmtime(bibfname)
            cached_time, cached_entries = cache.read_global(cache_name)
            if modified_time <= cached_time:
                entries.extend(cached_entries)
                continue
        except Exception:
            # missing or unreadable cache -> fall through and re-parse
            pass

        try:
            bibf = codecs.open(bibfname, 'r', 'UTF-8', 'ignore')  # 'ignore' to be safe
        except IOError:
            print("Cannot open bibliography file %s !" % (bibfname, ))
            sublime.status_message("Cannot open bibliography file %s !" %
                                   (bibfname, ))
            continue

        try:
            bib_data = bibf.readlines()
        finally:
            bibf.close()

        # BUG FIX: collect this file's entries separately so the
        # per-file cache (and the 'Loaded N bibitems' report) covers only
        # this file; previously the cumulative `entries` list was cached
        # and counted, duplicating earlier files' entries on cache hits
        file_entries = []
        entry = {}
        for line in bib_data:
            line = line.strip()
            # Let's get rid of irrelevant lines first
            if line == "" or line[0] == '%':
                continue
            if line.lower().startswith(("@comment", "@string", "@preamble")):
                continue
            if line[0] == "@":
                # a new record starts; flush the previous one if complete
                if 'keyword' in entry:
                    file_entries.append(entry)
                    entry = {}
                kp_match = kp.search(line)
                if kp_match:
                    entry['keyword'] = kp_match.group(1)
                else:
                    print("Cannot process this @ line: " + line)
                    print("Previous keyword (if any): " +
                          entry.get('keyword', ''))
                continue
            # Now test for title, author, etc.
            # Note: we capture only the first line, but that's OK for our purposes
            multip_match = multip.search(line)
            if multip_match:
                key = multip_match.group(1).lower()
                value = multip_match.group(2)
                if key == 'title':
                    value = value.replace('{\\textquoteright}',
                                          '').replace('{', '').replace(
                                              '}', '')
                entry[key] = value
                continue
        # at the end, we have a single record
        if 'keyword' in entry:
            file_entries.append(entry)

        print('Loaded %d bibitems' % (len(file_entries)))
        entries.extend(file_entries)

        try:
            current_time = time.time()
            cache.write_global(cache_name, (current_time, file_entries))
        except Exception:
            print('Error occurred while trying to write to cache {0}'.
                  format(cache_name))
            traceback.print_exc()

    print("Found %d total bib entries" % (len(entries), ))
    return entries