def update(self, sorting=None):
    ''' Updates the Entries stored in the database '''

    if self.key.url[0] != 'file':
        raise IOError, "can't update the remote database `%s'" % self.key

    name = self.key.url[2]

    if Config.get('base/directsave').data:
        if Config.get('base/backup').data:
            copyfile(name, name + '.bak')

        namefile = open(name, 'w')
        iterator = Selection.Selection(sort=sorting).iterator(self.iterator())
        Open.bibwrite(iterator, out=namefile, how=self.id, database=self)
        namefile.close()
    else:
        # create a temporary file for the new version
        tmp = os.path.join(os.path.dirname(name),
                           '.#' + os.path.basename(name))

        tmpfile = open(tmp, 'w')
        iterator = Selection.Selection(sort=sorting).iterator(self.iterator())
        Open.bibwrite(iterator, out=tmpfile, how=self.id, database=self)
        tmpfile.close()

        # if we succeeded, and backup is set, back up the original file
        if Config.get('base/backup').data:
            os.rename(name, name + '.bak')

        # ...and bring the new version online
        os.rename(tmp, name)

    return
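A minimal usage sketch of this method (hedged: the file name 'refs.bib' is hypothetical, and it assumes Open.bibopen returns a database object exposing the update() method above):

# Hedged usage sketch: 'refs.bib' is a hypothetical local BibTeX file, and we
# assume Open.bibopen returns a database exposing the update() method above.
db = Open.bibopen('refs.bib')
# ... add, remove or edit entries in db ...
db.update()    # rewrites the file, honoring base/directsave and base/backup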
def open_document (self, url, how = None, no_name = False):
    Utils.set_cursor (self.w, 'clock')

    orig_url = Fields.URL (url)
    url = orig_url.get_url ()

    restore = False

    if orig_url.url [0] == 'file':
        name = orig_url.url [2]
        auto_save = os.path.join (os.path.dirname (name),
                                  'x-pyblio-save-' + os.path.basename (name))

        if os.path.exists (auto_save):
            mod_date = os.stat (name) [stat.ST_MTIME]
            mod_date_auto = os.stat (auto_save) [stat.ST_MTIME]

            if mod_date < mod_date_auto:
                restore = Utils.Callback (
                    _("An autosave file was found which is newer than the original file.\nDo you want to restore it?"),
                    self.w).answer ()
                if restore:
                    url = auto_save

    try:
        data = Open.bibopen (url, how = how)
    except (Exceptions.ParserError,
            Exceptions.FormatError,
            Exceptions.FileError), error:
        Utils.set_cursor (self.w, 'normal')
        Utils.error_dialog (_("Open error"), error, parent = self.w)
        return
def __parsefile__(self):
    self.dict = {}

    # Open the associated file
    self.parser = _bibtex.open_file(Open.url_to_local(self.key),
                                    Config.get('bibtex/strict').data)

    # Incorporate the user's macro definitions
    if not Config.get('bibtex+/override').data:
        user = Config.get('bibtex/macros').data
        valid = re.compile(r'^\w+$')

        for k in user.keys():
            if not valid.match(k):
                raise TypeError, _("key `%s' is malformed") % k

            _bibtex.set_string(self.parser, k,
                               _bibtex.reverse(_base_fieldtype[Text],
                                               Config.get('bibtex+/braces').data,
                                               user[k][0]))

    finished = 0
    errors = []

    # Create the key database
    iter = BibtexIterator(self, self.parser)

    try:
        entry = iter.first()

        if entry is not None:
            if entry.key is None:
                self.add(entry)
            else:
                if self.dict.has_key(entry.key):
                    errors.append(_("%s:%d: key `%s' already defined") % (
                        str(self.key), entry.line, entry.key.key))
                else:
                    self.dict[entry.key] = entry
    except Exceptions.ParserError, err:
        errors.append(str(err))
def iterator (url, check):
    ''' This method returns an iterator that will parse the database on
    the fly (useful for merging or for parsing broken databases) '''

    if check and url.url [2] [-4:] != '.isi':
        return

    return IsifileIterator (open (Open.url_to_local (url), 'r'))
def find_entries (auxfile, bibtex):
    """ Parse an auxiliary file and extract the cited entries from the
    given BibTeX databases """

    entries, data, style = list_entries (auxfile)

    if not bibtex:
        bibtex = data

    # we have to create a reference database to hold the entries
    # contained in the current database.
    r = Base.DataBase (None)

    keys = copy.copy (entries)

    # is there something to do ?
    if len (entries) == 0:
        return r, keys, style, entries

    # use the bibliographic databases in order of declaration
    # to resolve the references
    for bib in bibtex:
        (root, ext) = os.path.splitext (bib)
        if not ext:
            ext = '.bib'

        # open the database
        db = Open.bibopen (root + ext)

        # as we are modifying the list of entries in this loop, we make
        # a copy of it in order to avoid strange behaviors
        orig = copy.copy (entries)

        # loop over the expected entries
        for e in orig:
            # create a key in the current database
            key = Key.Key (db, e)

            # does the database provide the key ?
            if db.has_key (key):
                # yes, add it to the reference...
                r [Key.Key (None, e)] = db [key]
                # ...and remove it from the list
                entries.remove (e)

        # is it finished ?
        if len (entries) == 0:
            break

    # return the reference on all the entries, plus the missing ones
    keys = filter (lambda x, entries = entries: not entries.count (x), keys)
    keys = map (lambda x, r = r: Key.Key (r, x), keys)

    return r, keys, style, entries
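For illustration, a hedged sketch of how this might be driven; the .aux and .bib file names are hypothetical, and 'bibtex' is assumed to be a registered output format name:

# Hedged usage sketch: 'paper.aux' and 'refs.bib' are hypothetical file names.
refs, keys, style, missing = find_entries ('paper.aux', ['refs.bib'])

if missing:
    # citations present in the .aux file but found in none of the databases
    print "unresolved citations:", ', '.join (map (str, missing))
else:
    # write the extracted subset as a BibTeX database on stdout
    Open.bibwrite (refs.iterator (), how = 'bibtex',
                   out = sys.stdout, database = refs)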
def iterator (url, check):
    ''' This method returns an iterator that will parse the database on
    the fly (useful for merging or for parsing broken databases) '''

    if check and url.url [2] [-5:] != '.ovid':
        return

    file = open (Open.url_to_local (url))

    return OvidLike.OvidLike (file,
                              Config.get ('ovid/mapping').data,
                              Config.get ('ovid/deftype').data)
def autosave (self, url, how):
    ''' autosave the file as x-pyblio-save-filename '''

    if self.data.key.url [0] != 'file':
        return False

    name = self.data.key.url [2]

    # create an autosave file
    save = os.path.join (os.path.dirname (name),
                         'x-pyblio-save-' + os.path.basename (name))

    if self.changed:
        try:
            savefile = open (save, 'w')
        except (IOError, OSError), error:
            self.w.error (_("Error during autosaving:\n%s") % error [1])
            return False

        iterator = Selection.Selection (sort = self.selection.sort)
        Open.bibwrite (iterator.iterator (self.data.iterator ()),
                       out = savefile, how = how, database = self.data)
        savefile.close ()
def iterator (url, check):
    ''' This method returns an iterator that will parse the database on
    the fly (useful for merging or for parsing broken databases) '''

    if check and url.url [2] [-4:] != '.bib':
        return None

    # open the associated file
    parser = _bibtex.open_file (Open.url_to_local (url),
                                Config.get ('bibtex/strict').data)

    # create a database to generate correct keys
    db = Base.DataBase (url)

    return BibtexIterator (db, parser)
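A hedged sketch of driving this iterator directly; 'refs.bib' and the Fields.URL wrapping are illustrative, and the first()/next() protocol is an assumption (only first() appears in __parsefile__ above):

# Hedged sketch: 'refs.bib' is a hypothetical file; the first()/next()
# iteration protocol is assumed rather than confirmed by this section.
it = iterator (Fields.URL ('refs.bib'), check = True)

entry = it.first ()
while entry is not None:
    print entry.key        # keys are generated against the temporary database
    entry = it.next ()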
def merge_database (self, * arg):
    ''' add all the entries of another database to the current one '''

    # get a new file name
    (url, how) = FileSelector.URLFileSelection (_("Merge file"),
                                                has_auto = True).run ()
    if url is None:
        return

    try:
        iterator = Open.bibiter (url, how = how)
    except (Exceptions.ParserError,
            Exceptions.FormatError,
            Exceptions.FileError), error:
        Utils.error_dialog (_("Open error"), error, parent = self.w)
        return
def __init__ (self, url):
    # each hash value contains a 2-tuple (opener, closer)
    self.format = None
    self.top = []
    self.data = []
    self.config = []
    self.methods = {}

    fh = open (Open.url_to_local (url))

    parser = sax.make_parser ()
    parser.setFeature (sax.handler.feature_validation, False)
    parser.setContentHandler (self)
    parser.parse (fh)

    fh.close ()
    return
if os.path.exists (url):
    if not Utils.Callback (
        _("The file `%s' already exists.\nOverwrite it ?") % url,
        parent = self.w).answer ():
        return

try:
    file = open (url, 'w')
except IOError, error:
    self.w.error (_("During opening:\n%s") % error [1])
    return

Utils.set_cursor (self.w, 'clock')

iterator = Selection.Selection (sort = self.selection.sort)
Open.bibwrite (iterator.iterator (self.data.iterator ()),
               out = file, how = how, database = self.data)
file.close ()

# remove the old autosave object
if self.data.key is not None and self.source_id:
    gobject.source_remove (self.source_id)

# remove the old autosave file
if self.data.key:
    if self.data.key.url [0] == 'file':
        old_file = self.data.key.url [2]
        old_auto_save = os.path.join (os.path.dirname (old_file),
                                      'x-pyblio-save-' + os.path.basename (old_file))

        if os.path.exists (old_auto_save):
            try:
import locale

charset = locale.getlocale () [1] or 'ascii'

if len (sys.argv) < 4 or len (sys.argv) > 5:
    print _("usage: pybconvert <source>..<target> <input> [output]").encode (charset)
    sys.exit (1)

format = sys.argv [2]

try:
    source, target = string.split (format, '..')
except:
    print _("pybconvert: error: bad conversion format").encode (charset)
    sys.exit (1)

from Legacy import Open

f_in = sys.argv [3]

if len (sys.argv) == 4:
    f_out = sys.stdout
else:
    f_out = open (sys.argv [4], 'w')

database = Open.bibopen (f_in, source)

Open.bibwrite (database.iterator (), how = target, out = f_out)
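An assumed example invocation (hedged: the format names and file names below are illustrative, not taken from the source):

# Hedged example: 'bibtex' and 'refer' are assumed format names, and foo.bib
# is a hypothetical input file; output goes to stdout when the last argument
# is omitted.
#
#   pybconvert bibtex..refer foo.bib foo.refer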
def iterator (url, check):
    if check and url.url [2] [-6:] != '.refer':
        return

    file = open (Open.url_to_local (url))

    return ReferIterator (file)