def commit_file(self, fobj, digest, desc=""):
    """See FileCacherBackend.commit_file().

    """
    # The large object has been fully written; release the handle
    # before registering it in the database.
    fobj.close()
    try:
        with SessionGen() as session:
            fs_object = FSObject(description=desc)
            fs_object.loid = fobj.loid
            fs_object.digest = digest
            session.add(fs_object)
            session.commit()
            logger.info("File %s (%s) stored on the database.",
                        digest, desc)
    except IntegrityError:
        # Another writer committed the same digest first: drop our
        # now-orphaned large object so it does not leak storage.
        LargeObject.unlink(fobj.loid)
        logger.warning("File %s (%s) caused an IntegrityError, ignoring.",
                       digest, desc)
        return False
    return True
def commit_file(self, fobj, digest, desc=""):
    """See FileCacherBackend.commit_file().

    Register the already-written large object `fobj` in the database
    under `digest`.  Return True on success; return False if another
    process committed the same digest first (the duplicate large
    object is unlinked in that case).
    """
    # Close the large-object handle before touching the database.
    fobj.close()
    try:
        with SessionGen() as session:
            fso = FSObject(description=desc)
            fso.digest = digest
            # Point the FSObject row at the large object just written.
            fso.loid = fobj.loid
            session.add(fso)
            session.commit()
            logger.info("File %s (%s) stored on the database.",
                        digest, desc)
    except IntegrityError:
        # If someone beat us to adding the same object to the database, we
        # should at least drop the large object.
        LargeObject.unlink(fobj.loid)
        logger.warning("File %s (%s) caused an IntegrityError, ignoring.",
                       digest, desc)
        return False
    return True
def put_file(self, digest, desc=""):
    """See FileCacherBackend.put_file().

    """
    try:
        with SessionGen() as session:
            existing = FSObject.get_from_digest(digest, session)
            # Check digest uniqueness
            if existing is not None:
                logger.debug(
                    "File %s already stored on database, not "
                    "sending it again.", digest)
                session.rollback()
                return None
            # If it is not already present, copy the file into the
            # lobject
            new_fso = FSObject(description=desc)
            new_fso.digest = digest
            session.add(new_fso)
            logger.debug("File %s stored on the database.", digest)
            # FIXME There is a remote possibility that someone
            # will try to access this file, believing it has
            # already been stored (since its FSObject exists),
            # while we're still sending its content.
            large_object = new_fso.get_lobject(mode='wb')
            session.commit()
            return large_object
    except IntegrityError:
        logger.warning("File %s caused an IntegrityError, ignoring...",
                       digest)
def put_file(self, digest, origin, description=""):
    """See FileCacherBackend.put_file().

    Copy the local file at path `origin` into the database as a
    large object associated with `digest`.  If an FSObject with the
    same digest is already present, nothing is sent.

    digest (string): digest the content is stored under.
    origin (string): path of the local file to send.
    description (string): human-readable description of the file.

    """
    try:
        with SessionGen() as session:
            # Check digest uniqueness
            if FSObject.get_from_digest(digest, session) is not None:
                logger.debug("File %s already on database, "
                             "dropping this one.", digest)
                session.rollback()
            # If it is not already present, copy the file into the
            # lobject
            else:
                fso = FSObject(description=description)
                logger.debug("Sending file %s to the database.", digest)
                with open(origin, 'rb') as temp_file:
                    with fso.get_lobject(session, mode='wb') \
                            as lobject:
                        logger.debug("Large object created.")
                        buf = temp_file.read(self.CHUNK_SIZE)
                        # BUGFIX: the file is open in binary mode, so
                        # read() returns bytes; the previous test
                        # `buf != ''` compared bytes to str, which is
                        # always True on Python 3 and spun forever at
                        # EOF.  Truthiness works for bytes and str.
                        while buf:
                            # A single write() may be partial; keep
                            # writing until this chunk is flushed.
                            while len(buf) > 0:
                                written = lobject.write(buf)
                                buf = buf[written:]
                                # Cooperative yield
                                gevent.sleep(0)
                            buf = temp_file.read(self.CHUNK_SIZE)
                fso.digest = digest
                session.add(fso)
                session.commit()
                logger.debug("File %s sent to the database.", digest)
    except IntegrityError:
        logger.warning("File %s caused an IntegrityError, ignoring...",
                       digest)
def put_file(self, digest, desc=""):
    """See FileCacherBackend.put_file().

    Create an FSObject for `digest` and return a writable large
    object for its content, or None if the digest is already stored.

    digest (string): digest the content will be stored under.
    desc (string): human-readable description of the file.

    return: a writable lobject, or None if already present.

    """
    try:
        with SessionGen() as session:
            fso = FSObject.get_from_digest(digest, session)
            # Check digest uniqueness
            if fso is not None:
                # Lazy logger args instead of eager "%" formatting,
                # consistent with the other backend methods.
                logger.debug("File %s already stored on database, not "
                             "sending it again.", digest)
                session.rollback()
                return None
            # If it is not already present, copy the file into the
            # lobject
            else:
                fso = FSObject(description=desc)
                fso.digest = digest
                session.add(fso)
                logger.debug("File %s stored on the database.", digest)
                # FIXME There is a remote possibility that someone
                # will try to access this file, believing it has
                # already been stored (since its FSObject exists),
                # while we're still sending its content.
                lobject = fso.get_lobject(mode='wb')
                session.commit()
                return lobject
    except IntegrityError:
        logger.warning("File %s caused an IntegrityError, ignoring...",
                       digest)