Beispiel #1
0
 # Fragment (truncated): verifies one backed-up file. A cheap "fast" hash of
 # source metadata is tried first; only on mismatch is the full content hash
 # recomputed. NOTE(review): this example is cut off after the final 'if'
 # and is not valid Python on its own.
 path = Path(wrt.getDestinationFilePathToContent(op), False)
 path.size = np_size
 
 hashMatches = False
 
 if args['fast']:
     # Build a Path for the source file, carrying timestamps from the
     # np_* variables (defined outside this fragment).
     pathSrc = Path(wrt.getSourceFilePath(op), False)
     pathSrc.ctime = np_ctime
     pathSrc.mtime = np_mtime
     pathSrc.atime = np_atime
     try:
         pathSrc.size = getsize(op)
     except (OSError) as e:
         # Size unreadable (e.g. file missing) -> sentinel value.
         pathSrc.size = -1
     
     logging.info("    fast check: %s" % (pfmt.format(op).ljust(120)))
     
     try:
         fastHash = fh._hash(pathSrc)
     except (OSError, IOError) as e:
         # Any I/O failure is treated as a guaranteed mismatch below.
         fastHash = 'exception'
     #print("    fastHash: %s" % (fastHash))
     #print("    fhash:    %s" % (fhash))
     if fastHash==fhash:
         hashMatches = True
     else:
         # do a "slow" hashing also... to be able to log stuff correctly, leave hashMatches=False as-is
         logging.info("    full check: %s" % (pfmt.format(op).ljust(120)))
         hash = hh.hash(path)
         hashMatches = False
         # NOTE(review): source is truncated here; the 'if' body is missing.
         if hash==fullHash:
Beispiel #2
0
# Initialize the cache at the user-supplied location and compute full
# content hashes for a random sample of files that do not have one yet.
cache.setCacheLocation(args['cache'])
cache.initialize()

analyzer = BackupAnalyzer()
analyzer.setCache(cache)
analyzer.initialize()

logging.info("files with full hashes: %s files" % (analyzer.getFilesWithFullHashesCount()))
logging.info("files without full hashes: %s files" % (analyzer.getFilesWithoutFullHashesCount()))

hh = FullContentHashV1.FullContentHashV1()
hh.initialize()

# Sample size: at least args['min'] files, otherwise args['percent'] percent
# of all known files (rounded up).
files = analyzer.getFilesWithoutFullHashes('random', 
    max(args['min'], math.ceil(analyzer.getFilesCount()*(args['percent']/100)))
)
for (p, fhash, sz) in files:
    logging.info("    hash: %s" % (pfmt.format(p).ljust(120)))
    
    path = Path(p, False)
    path.size = sz
    
    # Renamed from 'hash' so the builtin hash() is not shadowed.
    contentHash = hh.hash(path)
    cache.updateFileFullHashIntoFiles(path, contentHash)
cache.commit()

hh.destroy()
analyzer.destroy()
cache.destroy()
Beispiel #3
0
# Same sampling/hashing pass as the logging-based variant, but reporting
# progress directly on stdout with carriage-return updates.
analyzer = BackupAnalyzer()
analyzer.setCache(cache)
analyzer.initialize()

# print() call form works on both Python 2 and Python 3; the original
# Python-2-only 'print expr' statement form is a SyntaxError on Python 3
# and inconsistent with the other snippets in this file.
print("files with full hashes: %s files" % (analyzer.getFilesWithFullHashesCount()))
print("files without full hashes: %s files" % (analyzer.getFilesWithoutFullHashesCount()))

hh = FullContentHashV1.FullContentHashV1()
hh.initialize()

# Sample size: at least args['min'] files, otherwise args['percent'] percent
# of all known files (rounded up).
files = analyzer.getFilesWithoutFullHashes('random', 
    max(args['min'], math.ceil(analyzer.getFilesCount()*(args['percent']/100)))
)
sys.stdout.write("\n")
for (p, fhash, sz) in files:
    sys.stdout.write("\r    hash: %s" % (pfmt.format(p).ljust(120)))
    
    path = Path(p, False)
    path.size = sz
    
    # Renamed from 'hash' so the builtin hash() is not shadowed.
    contentHash = hh.hash(path)
    cache.updateFileFullHashIntoFiles(path, contentHash)
cache.commit()

sys.stdout.write("\n")

hh.destroy()
analyzer.destroy()
cache.destroy()
Beispiel #4
0
# Spot-check: recompute the full content hash for a random sample of
# already-hashed files in the destination and compare against the cached
# value, logging an error block on any mismatch.
hh = FullContentHashV1.FullContentHashV1()
hh.initialize()

wrt = Writer(args['destination'], args['source'])
wrt.initialize()

# Sample size: at least args['min'] files, otherwise args['percent'] percent
# of the already-hashed files (rounded up).
files = analyzer.getFilesWithFullHashes('random', 
    max(args['min'], math.ceil(analyzer.getFilesWithFullHashesCount()*(args['percent']/100)))
)

sys.stdout.write("\n")
for (np, fhash, sz, fullHash) in files:
    op = wrt.getDestinationFilePath(np)

    sys.stdout.write("\r    check: %s" % (pfmt.format(op).ljust(120)))
    path = Path(wrt.getDestinationFilePathToContent(op), False)
    path.size = sz

    # Renamed from 'hash' so the builtin hash() is not shadowed.
    computedHash = hh.hash(path)
    if computedHash != fullHash:
        logging.error("!"*80)
        logging.error("!   fullHash check failed!")
        logging.error("!   fullHash: %s, expected: %s" % (computedHash, fullHash))
        logging.error("!"*80)

# NOTE(review): nothing is written to the cache in this loop, so this
# commit looks like a no-op -- confirm it is intentional.
cache.commit()

hh.destroy()
analyzer.destroy()
Beispiel #5
0
# Verify a random sample of backed-up files against their stored full
# hashes; optionally abort on the first mismatch.
hh = FullContentHashV1.FullContentHashV1()
hh.initialize()

wrt = Writer(args['destination'], args['source'])
wrt.initialize()

# Sample at least args['min'] entries, or args['percent'] percent of the
# already-hashed files, whichever is larger.
sampleSize = max(
    args['min'],
    math.ceil(analyzer.getFilesWithFullHashesCount() * (args['percent'] / 100)),
)
files = analyzer.getFilesWithFullHashes('random', sampleSize)

failedChecks = []
for (srcPath, fhash, fileSize, fullHash) in files:
    destPath = wrt.getDestinationFilePath(srcPath)

    logging.info("    check: %s" % (pfmt.format(destPath).ljust(120)))
    contentPath = Path(wrt.getDestinationFilePathToContent(destPath), False)
    contentPath.size = fileSize

    digest = hh.hash(contentPath)
    if digest == fullHash:
        continue

    # Mismatch: either abort immediately or just record it in the log.
    if args['stopOnFirstFail']:
        logging.error("!" * 80)
        logging.error("!   fullHash check failed!")
        logging.error("!   path: %s" % (srcPath))
        logging.error("!   fullHash: %s" % (digest))
        logging.error("!   expected: %s" % (fullHash))
        logging.error("!" * 80)
        sys.exit(1)

    logging.error("       fullHash check failed!")
Beispiel #6
0
class ScriptStatusTracker(object):
    """Tracks per-event statistics for a long-running script and prints
    throttled progress lines to stdout, depending on verbosity.

    Verbosity levels: 0 disables all tracking and output; 1 rate-limits
    output; 2 and 3 print every event (3 uses one line per event).
    """

    def __init__(self, verbosity):
        self.verbosity = verbosity
        self.pfmt = PathFormatter(120)
        t = time.time()
        # Rolling statistics; evtps = events/second, evtpg = progress
        # fraction, both updated externally / by calcStats.
        self.stats = {
            'startTime':    t,
            'resetTime':    t,
            'flushTime':    t,
            'statsTime':    t,
            'evtps':        0.0, 
            'evtpg':        0.0, 
            'totalEvents':  0,
            'expectedEvents':None,
            'isWarmingUp': True, 
        }

        # Per-window and lifetime event counters, keyed by event name.
        self.events = {}
        self.totalEvents = {}

        # Seconds between counter-window resets / stdout flushes.
        self.settings = {
            'resetTime':    30,
            'flushTime':    1,
        }

    def storeEvent(self, tm, event, data):
        """Count one occurrence of *event* (window, lifetime, grand total)."""
        if self.verbosity == 0:
            return

        if event not in self.events:
            self.events[event] = 0
        self.events[event] += 1

        if event not in self.totalEvents:
            self.totalEvents[event] = 0
        self.totalEvents[event] += 1

        self.stats['totalEvents'] += 1

    def resetStats(self, tm):
        """Start a new measurement window at time *tm*."""
        self.stats['resetTime'] = tm
        self.stats['isWarmingUp'] = True
        self.events = {}

    def calcStats(self, tm):
        # Placeholder: rate/progress recalculation is not implemented here.
        pass

    def composeOutputStr(self, statusStr, event, data):
        """Format one status line; data['p'] must expose a .path attribute."""
        return "%s %8s: %s" % (statusStr, event, self.pfmt.format(data['p'].path).ljust(120))

    def logEvent(self, tm, event, data):
        """Maybe print a status line for *event*, throttled by verbosity."""
        if self.verbosity == 0:
            return

        doHandle = False
        if self.verbosity >= 2:
            # Verbosity 2 and 3 both handle every event (the original had
            # two identical branches); they differ only in printEvent's
            # newline behaviour.
            doHandle = True
        elif self.verbosity >= 1:
            # At verbosity 1, rate-limit output to ~10 lines per second.
            if tm - self.stats['statsTime'] > 0.1:
                doHandle = True

        if doHandle:
            self.stats['statsTime'] = tm
            evtps = " ....e/s,"
            if not self.stats['isWarmingUp']:
                evtps = "%5.1fe/s," % (min(self.stats['evtps'], 9999.9))

            pgpc = "--.-%,"
            if not self.stats['expectedEvents'] is None:
                # NOTE(review): 99.9 (not 100) caps the display below 100%
                # before completion -- presumably intentional; confirm.
                pgpc = "%4.1f%%," % (99.9*self.stats['evtpg'])

            self.calcStats(tm)

            # BUGFIX: composeOutputStr takes (statusStr, event, data); the
            # original additionally passed *tm*, which raised a TypeError
            # on every handled event.
            self.printEvent(tm, self.composeOutputStr("%s%s" % (pgpc, evtps), event, data))

            if tm - self.stats['resetTime'] > self.settings['resetTime']:
                self.resetStats(tm)

    def printEvent(self, tm, str):
        """Write *str*: verbosity>=3 gets one line per event; otherwise a
        carriage-return overwrite with throttled flushing."""
        if self.verbosity >= 3:
            sys.stdout.write("\n%s" % (str))
            sys.stdout.flush()
        else:
            sys.stdout.write("\r%s" % (str))
            if tm - self.stats['flushTime'] > self.settings['flushTime']:
                self.stats['flushTime'] = tm
                sys.stdout.flush()

    def printStr(self, str):
        """Write *str* on a fresh line (no explicit flush)."""
        sys.stdout.write("\n%s" % (str))
        
Beispiel #7
0
    # Fragment (truncated): same fast/full verification pass as Beispiel #1,
    # reformatted. NOTE(review): this example is cut off after the final
    # 'if' and is not valid Python on its own.
    path = Path(wrt.getDestinationFilePathToContent(op), False)
    path.size = np_size

    hashMatches = False

    if args['fast']:
        # Build a Path for the source file, carrying timestamps from the
        # np_* variables (defined outside this fragment).
        pathSrc = Path(wrt.getSourceFilePath(op), False)
        pathSrc.ctime = np_ctime
        pathSrc.mtime = np_mtime
        pathSrc.atime = np_atime
        try:
            pathSrc.size = getsize(op)
        except (OSError) as e:
            # Size unreadable (e.g. file missing) -> sentinel value.
            pathSrc.size = -1

        logging.info("    fast check: %s" % (pfmt.format(op).ljust(120)))

        try:
            fastHash = fh._hash(pathSrc)
        except (OSError, IOError) as e:
            # Any I/O failure is treated as a guaranteed mismatch below.
            fastHash = 'exception'
        #print("    fastHash: %s" % (fastHash))
        #print("    fhash:    %s" % (fhash))
        if fastHash == fhash:
            hashMatches = True
        else:
            # do a "slow" hashing also... to be able to log stuff correctly, leave hashMatches=False as-is
            logging.info("    full check: %s" % (pfmt.format(op).ljust(120)))
            hash = hh.hash(path)
            hashMatches = False
            # NOTE(review): source is truncated here; the 'if' body is missing.
            if hash == fullHash:
Beispiel #8
0
# Hash a random sample of files that are still missing a full content hash
# and store the results back into the cache.
cache.setCacheLocation(args['cache'])
cache.initialize()

analyzer = BackupAnalyzer()
analyzer.setCache(cache)
analyzer.initialize()

logging.info("files with full hashes: %s files" % (analyzer.getFilesWithFullHashesCount()))
logging.info("files without full hashes: %s files" % (analyzer.getFilesWithoutFullHashesCount()))

hh = Hasher.FullContentHashV1.Base()
hh.initialize()

# At least args['min'] files, else args['percent'] percent of all files.
sampleSize = max(args['min'], math.ceil(analyzer.getFilesCount() * (args['percent'] / 100)))
files = analyzer.getFilesWithoutFullHashes('random', sampleSize)

for (filePath, fhash, fileSize) in files:
    logging.info("    hash: %s" % (pfmt.format(filePath).ljust(120)))

    entry = Path(filePath, False)
    entry.size = fileSize

    digest = hh.hash(entry)
    cache.updateFileFullHashIntoFiles(entry, digest)
cache.commit()

hh.destroy()
analyzer.destroy()
cache.destroy()