def CheckChanges(self, filecheck, dirpath, remotedir, rowInfo):
    '''
    Given a file (with metadata) check if it has changed.
    If changes have been made, send them to the server and
    update the database.

    @param filecheck: The name of the file to check
    @param dirpath: Must be a correct relative path where the file is
    @param remotedir: The remote counterpart of dirpath
    @param rowInfo: A database row with metainfo of the file
    '''
    localfile = posixpath.join(dirpath, filecheck)
    modifiedTime = os.stat(localfile).st_mtime
    computedSize = os.stat(localfile).st_size

    if (rowInfo['localtime'] != modifiedTime or
            rowInfo['size'] != computedSize):
        # The file has been modified and needs care
        self._logger.info('Detected modified file: %s', localfile)

        hashes = Hashes.open(posixpath.join(self._hashesdir,
                                            str(rowInfo['idfile'])))

        with open(localfile, "rb") as f:
            # Get checksum
            computedChksum = adler32(f.read())
            # Get delta against the stored hashes
            f.seek(0)
            delta = hashes.computeDelta(f)

        self._logger.debug("Last revision: %s", repr(rowInfo['lastrev']))
        self._logger.debug("Checksum: %s", repr(computedChksum))
        self._logger.debug("Size: %s", repr(computedSize))

        ret = self._RemoteCaller.SendDelta(rowInfo['lastrev'],
                                           delta.getXMLRPCBinary(),
                                           computedChksum,
                                           computedSize)
        self._logger.debug("Sent, response: %s", repr(ret))

        with self._db as c:
            c.execute('''update files set
                         lastrev=?, timestamp=?, localtime=?, chksum=?, size=?
                         where idfile=?''',
                      (ret[0], ret[1], modifiedTime, computedChksum,
                       computedSize, rowInfo['idfile']))

        # Update the stored hashes so the next delta is computed
        # against the revision we just sent
        self._logger.debug("Updating hash file %s", str(rowInfo['idfile']))
        newhashes = Hashes.eval(localfile)
        newhashes.save(posixpath.join(self._hashesdir, str(rowInfo['idfile'])))
    else:
        self._logger.debug('No changes found for file: %s (path: %s)',
                           filecheck, dirpath)
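# --- Illustrative sketch (not part of the class above) ----------------------------
# Hashes.open()/computeDelta() belong to this project and their internals are not
# shown here. As a rough, hypothetical picture of the rsync-style idea behind them,
# block_signatures() and naive_delta() below are invented names that show how
# per-block adler32 signatures of the old revision let a client send only the
# changed blocks of the new one. This is a sketch, not the project's implementation.
from zlib import adler32

SKETCH_BLOCK_SIZE = 4096  # hypothetical block size; the real one lives in Hashes

def block_signatures(old_data, block_size=SKETCH_BLOCK_SIZE):
    # One weak adler32 checksum per fixed-size block of the old file.
    return [adler32(old_data[off:off + block_size]) & 0xffffffff
            for off in range(0, len(old_data), block_size)]

def naive_delta(old_signatures, new_data, block_size=SKETCH_BLOCK_SIZE):
    # Emit ('copy', block_index) when a block of the new file matches a known
    # signature, otherwise ('literal', raw_bytes). A real implementation also
    # uses a rolling checksum plus a strong hash so matches are found at any
    # offset and false positives are rejected.
    known = {sig: idx for idx, sig in enumerate(old_signatures)}
    ops = []
    for off in range(0, len(new_data), block_size):
        block = new_data[off:off + block_size]
        sig = adler32(block) & 0xffffffff
        if sig in known:
            ops.append(('copy', known[sig]))
        else:
            ops.append(('literal', block))
    return ops
# -----------------------------------------------------------------------------------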
def NewFile(self, filecheck, dirpath, remotedir):
    '''
    Function used when a potential new file is found.

    @param filecheck: The name of the new file
    @param dirpath: Must be a correct relative path where the file is
    @param remotedir: The remote counterpart of dirpath
    '''
    localfile = posixpath.join(dirpath, filecheck)
    modifiedTime = os.stat(localfile).st_mtime
    computedSize = os.stat(localfile).st_size

    self._logger.info('Detected new file: %s', localfile)

    with open(localfile, "rb") as f:
        # Get checksum
        computedChksum = adler32(f.read())
        # Get data
        f.seek(0)
        dataToSend = Binary(f.read())

    # Check whether the server already knows content with this size and checksum
    ret = self._RemoteCaller.CheckFileMetadata(computedSize, computedChksum)

    if ret > 0:
        # Ask the server to make a copy of the revision it already stores
        self._logger.debug("Creating a copy of revision %s", str(ret))
        retcp = self._RemoteCaller.CopyFile(remotedir, filecheck, ret)
        self._logger.debug("Sent, response: %s", repr(retcp))
        lastrev, timestamp = retcp
    else:
        # Send the whole (new) file to the server
        self._logger.debug("Transferring new file to server")
        ret = self._RemoteCaller.SendNewFile(remotedir, filecheck, dataToSend,
                                             computedChksum, computedSize)
        self._logger.debug("Sent, response: %s", repr(ret))
        lastrev, timestamp = ret

    # Record the new file locally, keyed by the revision id returned by the server
    with self._db as c:
        cur = c.execute('''insert into files
                           (path, file, lastrev, timestamp, localtime, chksum, size)
                           values (?,?,?,?,?,?,?)''',
                        (dirpath, filecheck, lastrev, timestamp, modifiedTime,
                         computedChksum, computedSize))
        fileId = cur.lastrowid

    # Save the hashes for later use (delta computation in CheckChanges)
    hashes = Hashes.eval(localfile)
    hashes.save(posixpath.join(self._hashesdir, str(fileId)))
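# --- Illustrative sketch (not part of the class above) ----------------------------
# CheckFileMetadata() lets the server answer "I already store content with this size
# and checksum", so NewFile() can request a cheap server-side CopyFile() instead of
# uploading the data again. The dictionary below is a hypothetical stand-in for the
# server's lookup; the real call goes through self._RemoteCaller over XML-RPC.
from zlib import adler32

known_revisions = {}  # hypothetical map: (size, checksum) -> revision id

def offer_content(data):
    # Returns an existing revision id, or 0 when the content must be uploaded,
    # mirroring the "ret > 0" decision in NewFile() above.
    key = (len(data), adler32(data) & 0xffffffff)
    return known_revisions.get(key, 0)

def remember_content(data, revision_id):
    # Called after an upload so identical content is deduplicated next time.
    known_revisions[(len(data), adler32(data) & 0xffffffff)] = revision_id
# -----------------------------------------------------------------------------------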
def SendNewFile(self, path, newfile, bindata, chksum=None, size=None):
    '''
    Create a *non-existing* file on the server.

    @param path: String of an *existing* valid path
    @param newfile: String of the file to create inside path
    @param bindata: Binary contents of the file
    @param chksum: Checksum of the file
    @param size: Size of the file
    @return: An error code (or raise) if something goes wrong.
             A pair (idrev, timestamp) of the file if everything is ok.
    '''
    filepath = self._basicNewChecks(path, newfile)
    if isinstance(filepath, int):
        return filepath

    self._logger.debug("Receiving file %s, saving at %s", newfile, filepath)
    try:
        with open(filepath, "wb") as f:
            f.write(bindata.data)
    except (IOError, OSError):
        self._seterrormsg('Internal filesystem error when opening ' + filepath)
        return ERR_FS

    # The file is now created locally: check that it matches what the client sent.
    # First the checksum...
    with open(filepath, "rb") as f:
        computedChecksum = adler32(f.read())
    if chksum and (chksum != computedChecksum):
        self._seterrormsg('Checksums do not match -- rolled back')
        os.remove(filepath)
        return ERR_CHKSUM

    # ...then the size
    computedSize = os.stat(filepath).st_size
    if size and (computedSize != size):
        self._seterrormsg('Sizes do not match. Local size: ' + str(computedSize) +
                          ' -- rolled back')
        os.remove(filepath)
        return ERR_SIZE

    tsnow = datetime.fromtimestamp(int(time.time()))

    with self._conn as c:
        cursor = c.execute('''insert into files (path, file, deleted)
                              values (?,?,0)''', (path, newfile))
        idfile = cursor.lastrowid
        cursor = c.execute('''insert into revisions
                              (idfile, uid, timestamp, fromrev, typefrom,
                               chksum, size, hardexist)
                              values (?,?,?,NULL,?,?,?,1)''',
                           (idfile, self._getUID(), tsnow, REV_NEWFILE,
                            computedChecksum, computedSize))
        idrev = cursor.lastrowid
        c.execute("update files set lastrev=? where idfile=?", (idrev, idfile))

    # Keep a hard-linked snapshot of this revision
    revPath = os.path.join(self._hardsdir, str(idrev))
    self._logger.info("Proceeding to link %s and %s", filepath, revPath)
    os.link(filepath, revPath)

    # Compute hashes here for the rsync-like algorithm (used by later deltas)
    hashes = Hashes.eval(filepath)
    savehash = os.path.join(self._hashesdir, str(idrev))
    self._logger.debug("Saving hash in %s", savehash)
    hashes.save(savehash)

    return idrev, tsnow
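# --- Illustrative sketch (not part of the class above) ----------------------------
# SendNewFile() ends by hard-linking the freshly written file into self._hardsdir, so
# the revision snapshot shares its data blocks with the live file instead of copying
# them. A minimal standalone demonstration of that property follows; the paths and the
# function name are made up for the example.
import os
import tempfile

def demo_hardlink_revision():
    workdir = tempfile.mkdtemp()
    livefile = os.path.join(workdir, 'document.txt')
    revsnapshot = os.path.join(workdir, '1')  # e.g. named after the revision id

    with open(livefile, 'wb') as f:
        f.write(b'first revision contents')

    os.link(livefile, revsnapshot)  # two directory entries, one inode
    assert os.stat(livefile).st_ino == os.stat(revsnapshot).st_ino
    assert os.stat(livefile).st_nlink == 2
# -----------------------------------------------------------------------------------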