def check_delete(filelist, source):
    """Split a concatenated file list into (protected, todelete).

    Entries whose path is not strictly longer than the relevant sync root
    are treated as protected and must never be handed to the delete helper.
    """
    root = options.dstroot if source == "L" else options.srcroot
    protected, todelete = "", ""
    for entry in utils.deconcat(filelist):
        # Be extra careful on what we delete: refuse anything whose path
        # is as short as (or shorter than) the root itself.
        if len(entry) > len(root):
            todelete = utils.concat(todelete, entry)
        else:
            protected = utils.concat(protected, entry)
    return (protected, todelete)
def move(action):
    """Execute a MOVE event through the helper binary, with rsync fallback."""
    src = action['source']
    eid = action['eventid']
    log(utils.DEBUG1, src, "MOVE action", eventid=eid)
    # Command selection: local-side events drive the helper on the remote
    # host through ssh; remote-side events run the helper locally.
    itemtype = action['itemtype']
    if src == "L":
        cmd = ["ssh"] + config.ssh_options + [
            options.dsthost, "xargs", "-d", "'\n'",
            config.helperbin, "-a", "MOVE"]
    else:
        cmd = ["xargs", "-d", "\n", config.helperbin, "-a", "MOVE"]
    # Append one "-f" per force level.
    cmd += ["-f"] * options.force
    # Source and target of the move.
    srcfile = action['filelist']
    dstfile = action['dstfile']
    # Checksum lets the helper validate the payload it receives.
    tohash = utils.concat("MOVE", utils.concat(srcfile, dstfile))
    checksum = hashlib.md5(tohash).hexdigest()
    cmd = cmd + ["-c", checksum]
    # Run the helper and report the outcome.
    log(utils.DEBUG2, src,
        "Preparing to move: \n" + srcfile + " -> " + dstfile, eventid=eid)
    (process, output, error) = execute(
        cmd, src, utils.concat(srcfile, dstfile), warn=False, eventid=eid)
    if process.returncode:
        log(utils.INFO, src, error, eventid=eid)
        log(utils.INFO, src, "MOVE failed. Retrying with RSYNC", eventid=eid)
    # On failure, or when recursive move checking is configured, re-check
    # the destination with a recursive, update-only rsync.
    if config.move_event_recurse or process.returncode:
        action['method'] = "RSYNC"
        action['filelist'] = action['dstfile']
        action['recurse'] = True
        action['updateonly'] = True
        rsync(action, acl=True)
def delete(action): log(utils.DEBUG1, action['source'], "DELETE action", eventid=action['eventid']) # Command selection if action['source'] == "L": cmd = (["ssh"] + config.ssh_options + [options.dsthost, "xargs", "-d", "'\n'", config.helperbin, "-a", "DELETE"]) else: cmd = ["xargs", "-d", "\n", config.helperbin, "-a", "DELETE"] # Forced? for i in range(options.force): cmd.append("-f") # Check if we can delete the required files (protected, todelete) = check_delete(action['filelist'], action['source']) # Calculate and append checksum tohash = utils.concat("DELETE", todelete) checksum = hashlib.md5(tohash).hexdigest() cmd = cmd + ["-c", checksum] # Execute and report if todelete: log(utils.DEBUG2, action['source'], "Preparing to delete: \n" + todelete, eventid=action['eventid']) execute(cmd, action['source'], todelete, eventid=action['eventid']) if protected: log(utils.INFO, action['source'], "Refusing to delete: \n" + protected, eventid=action['eventid']) action['method'] = "RSYNC" action['filelist'] = protected rsync(action)
def move(action):
    """Perform a MOVE event by invoking the helper binary on the peer.

    action: event dict carrying 'source' ("L" = local side), 'filelist'
    (source path), 'dstfile' (target path) and 'eventid'. When the helper
    exits non-zero, or when config.move_event_recurse is set, the target
    is re-checked with a recursive, update-only rsync.

    Improvement over the previous version: the dead local `itemtype`
    (assigned, never used) is removed, and the force-flag loop is
    replaced by an idiomatic list extension.
    """
    log(utils.DEBUG1, action['source'], "MOVE action",
        eventid=action['eventid'])
    # Command selection: local events run the helper remotely over ssh.
    # NOTE(review): the ssh branch quotes the xargs delimiter ("'\n'"),
    # presumably because the remote shell strips one quoting level — confirm.
    if action['source'] == "L":
        cmd = (["ssh"] + config.ssh_options +
               [options.dsthost, "xargs", "-d", "'\n'",
                config.helperbin, "-a", "MOVE"])
    else:
        cmd = ["xargs", "-d", "\n", config.helperbin, "-a", "MOVE"]
    # Forced? One "-f" flag per force level.
    cmd.extend(["-f"] * options.force)
    # Define source and target.
    srcfile = action['filelist']
    dstfile = action['dstfile']
    # Calculate and append a checksum of the payload for the helper.
    tohash = utils.concat("MOVE", utils.concat(srcfile, dstfile))
    checksum = hashlib.md5(tohash).hexdigest()
    cmd = cmd + ["-c", checksum]
    # Execute and report.
    log(utils.DEBUG2, action['source'],
        "Preparing to move: \n" + srcfile + " -> " + dstfile,
        eventid=action['eventid'])
    (process, output, error) = execute(
        cmd, action['source'], utils.concat(srcfile, dstfile),
        warn=False, eventid=action['eventid'])
    if process.returncode:
        log(utils.INFO, action['source'], error, eventid=action['eventid'])
        log(utils.INFO, action['source'], "MOVE failed. Retrying with RSYNC",
            eventid=action['eventid'])
    # If recursive move is enabled or the move failed,
    # do a recursive check with rsync.
    if config.move_event_recurse or process.returncode:
        action['method'] = "RSYNC"
        action['filelist'] = action['dstfile']
        action['recurse'] = True
        action['updateonly'] = True
        rsync(action, acl=True)
def dosidebackup(changed, side):
    """Back up changed files from `side` into backupdir using rsync.

    Entries whose third flag character is "+" are skipped (presumably
    newly created files, which need no backup — verify against the
    itemize format used by the caller). Returns (process, output, error),
    or (None, None, None) when there is nothing to back up.
    """
    if not changed:
        return (None, None, None)
    # Collect the filenames (flags stripped) of the remaining entries.
    filelist = ""
    for entry in utils.deconcat(changed):
        if entry[2] != "+":
            filelist = utils.concat(filelist, entry[FLAGLEN:])
    if not filelist:
        return (None, None, None)
    cmd = (["rsync"] + options.extra + ["-avAX"] +
           ["--files-from=-", side, backupdir])
    process, output, error = execute(cmd, filelist)
    return (process, output, error)
def parse_output(output, strip=False, checksum=False):
    """Parse rsync itemized-changes output.

    output: raw rsync stdout (one itemized line per file).
    strip: when True, the returned lines contain the filename only
    (itemize flags removed via FLAGLEN).
    checksum: when True, only lines whose checksum flag ("c") is set
    are considered.
    Returns (count, changed, alert): number of interesting lines, their
    concatenation, and whether a suspicious change was seen.
    """
    # Initial values
    count = 0
    changed = ""
    alert = False
    # Count changed files
    for line in output.split("\n"):
        # If empty, ignore
        if len(line) <= 0:
            continue
        # If not transfered ("<" sent / ">" received), ignore
        if line[0] != "<" and line[0] != ">":
            continue
        # If local checksum, ignore any matching files:
        if checksum and line[2] != "c":
            continue
        # If checksum or modified_only, ignore new files
        elif (options.checksum or options.modified_only) and line[3] == "+":
            continue
        # Lite checks ignore existing files with same size
        elif options.lite and line[3] != "s" and line[3] != "+":
            continue
        # If we arrived here, the line is interesting.
        # Count changed lines
        count = count+1
        # If strip, grep the filename only
        if strip:
            line = line[FLAGLEN:]
        # Append the changed line
        changed = utils.concat(changed, line)
        # Alerts
        # NOTE(review): when strip=True the checks below inspect the
        # stripped filename, not the itemize flags — this looks like a
        # bug; confirm whether callers ever pass strip=True here.
        # For checksum, raise an alert for a non-matching file
        if checksum and line[2] == "c":
            alert = True
        # For full checks, raise an alert if size OR time
        # of an existing file changed. Otherwise, continue
        elif not options.lite and (line[3] == "s" or line[4] == "t"):
            alert = True
    # Return
    return (count, changed, alert)
def parse_output(output, strip=False, checksum=False):
    """Parse rsync itemized-changes output.

    output: raw rsync stdout (one itemized line per file).
    strip: when True, the returned lines contain the filename only
    (itemize flags removed via FLAGLEN).
    checksum: when True, only lines whose checksum flag ("c") is set
    are considered.
    Returns (count, changed, alert): number of interesting lines, their
    concatenation, and whether a suspicious change was seen.

    Bug fixed: the alert tests previously ran after the itemize flags had
    been stripped from the line, so with strip=True they inspected
    filename characters instead of the flags. The flags are now saved
    before stripping.
    """
    # Initial values
    count = 0
    changed = ""
    alert = False
    # Count changed files
    for line in output.split("\n"):
        # If empty, ignore
        if len(line) <= 0:
            continue
        # If not transfered ("<" sent / ">" received), ignore
        if line[0] != "<" and line[0] != ">":
            continue
        # If local checksum, ignore any matching files:
        if checksum and line[2] != "c":
            continue
        # If checksum or modified_only, ignore new files
        elif (options.checksum or options.modified_only) and line[3] == "+":
            continue
        # Lite checks ignore existing files with same size
        elif options.lite and line[3] != "s" and line[3] != "+":
            continue
        # If we arrived here, the line is interesting.
        # Count changed lines
        count = count + 1
        # Keep the raw itemize flags for the alert checks below, even when
        # the returned line is stripped down to the filename only.
        flags = line
        # If strip, grep the filename only
        if strip:
            line = line[FLAGLEN:]
        # Append the changed line
        changed = utils.concat(changed, line)
        # Alerts
        # For checksum, raise an alert for a non-matching file
        if checksum and flags[2] == "c":
            alert = True
        # For full checks, raise an alert if size OR time
        # of an existing file changed. Otherwise, continue
        elif not options.lite and (flags[3] == "s" or flags[4] == "t"):
            alert = True
    # Return
    return (count, changed, alert)
def delete(action):
    """Delete the files named by a DELETE event, re-syncing protected ones."""
    src = action['source']
    eid = action['eventid']
    log(utils.DEBUG1, src, "DELETE action", eventid=eid)
    # Build the helper invocation: over ssh for local-side events,
    # locally otherwise.
    if src == "L":
        cmd = ["ssh"] + config.ssh_options + [
            options.dsthost, "xargs", "-d", "'\n'",
            config.helperbin, "-a", "DELETE"]
    else:
        cmd = ["xargs", "-d", "\n", config.helperbin, "-a", "DELETE"]
    # One "-f" flag per force level.
    cmd += ["-f"] * options.force
    # Partition the request into deletable and protected entries.
    protected, todelete = check_delete(action['filelist'], src)
    # The helper validates its input against this checksum.
    checksum = hashlib.md5(utils.concat("DELETE", todelete)).hexdigest()
    cmd = cmd + ["-c", checksum]
    if todelete:
        log(utils.DEBUG2, src, "Preparing to delete: \n" + todelete,
            eventid=eid)
        execute(cmd, src, todelete, eventid=eid)
    if protected:
        # Protected paths are never deleted; hand them to rsync instead.
        log(utils.INFO, src, "Refusing to delete: \n" + protected,
            eventid=eid)
        action['method'] = "RSYNC"
        action['filelist'] = protected
        rsync(action)
def reader(process, source="B"):
    """Read and dispatch events from the inotify helper's stdout, forever.

    process: subprocess whose stdout interleaves log lines and event lines.
    source: "L" for local-side events, anything else (default "B") otherwise.
    Valid events are merged when possible and appended to the module-level
    `actions` queue. Returns only after 5 consecutive malformed lines.
    """
    # Counter of consecutive malformed ("rogue") lines.
    rogue = 0
    while True:
        # Select variables based on event source
        if source == "L":
            psyncdir = options.lpsyncdir
        else:
            psyncdir = options.rpsyncdir
        # Read line
        line = process.stdout.readline()
        line = line.strip(" \n")
        # If it is a log, print it
        match = re.match("^\[(.*?)\] \[(.*?):(.*?)\] \[(.*?)\]", line)
        if match:
            severity = match.group(3)
            line = line[len(match.group()) + 1:]
            # If it is an heartbeat-related log line, take note and continue
            if line.find(psyncdir + config.heartfile) >= 0:
                beat_inotify(source)
                log(utils.DEBUG3, source, line, 1)
            else:
                # Otherwise, simply print it
                log(severity, source, line, 1)
            continue
        # If HEART, take note and continue
        if line.find(psyncdir + config.heartfile) >= 0:
            beat_inotify(source)
            log(utils.DEBUG2, source, "heartbeat")
            continue
        # Check if connected
        if not are_ready():
            if len(line) > 0:
                log(utils.ERROR, source,
                    "Not connected, ignoring event: " + line)
            continue
        # Be sure to process a good formed line: a known method name
        # followed by exactly nfields separators.
        nfields = 6
        match = re.match("^(RSYNC|MOVE|DELETE|NONE)", line, flags=re.I)
        if not match or line.count(config.separator) != nfields:
            log(utils.WARNING, source,
                "Rogue line (n." + str(rogue) + "): " + line)
            rogue = rogue + 1
            # Too many consecutive rogue lines: give up on this stream.
            if rogue >= 5:
                return
            else:
                continue
        else:
            rogue = 0
        # Split the event line into its fields.
        entry = utils.deconcat(line, config.separator)
        method = entry[0]
        itemtype = entry[1]
        # Parent directory of the event (currently unused below).
        parent = utils.normalize_dir(entry[2])
        srcfile = entry[3]
        dstfile = entry[4]
        flags = entry[5]
        checksum = entry[6]
        # Validate checksum: hash everything before the trailing
        # separator+checksum and compare.
        computed = line[:-len(config.separator + checksum)]
        computed = hashlib.md5(computed).hexdigest()
        if checksum != computed:
            log(utils.ERROR, source,
                "Ignoring event due to invalid checksum for line: " + line)
            log(utils.ERROR, source,
                "Received: " + checksum + " - Computed: " + computed)
            continue
        else:
            log(utils.DEBUG2, source, "Checksum ok. " +
                "Received: " + checksum + " - Computed: " + computed)
        # Beat the heart
        beat_inotify(source)
        # If method is NONE, continue reading
        if method == "NONE":
            log(utils.INFO, source,
                "Ignoring event NONE for file: " + srcfile)
            continue
        # Parse event
        log(utils.DEBUG1, source, "Read event: " + line)
        # Pending checks
        if method in config.pending_events:
            backfired = check_pendings(source, srcfile, method)
        else:
            backfired = False
        if backfired:
            if method == "RSYNC" and config.rsync_style > 1:
                log(utils.DEBUG1, source,
                    "Ignoring backfired event " + method +
                    config.separator + srcfile)
                continue
            # If batched rsync is true, continue to the next event
            if config.rsync_style == 3:
                continue
        # Normalize dir
        if itemtype == "DIR":
            srcfile = utils.normalize_dir(srcfile)
            if method == "MOVE":
                dstfile = utils.normalize_dir(dstfile)
        # Build filelist: try to merge this event into the previous queued
        # action when source/method (and, for RSYNC, flags) match.
        try:
            prev = actions.pop()
            # Is mergeable?
            if (
                    # is not a symlink
                    not os.path.islink(srcfile) and
                    # source and method are same than previous
                    source == prev['source'] and
                    method == prev['method'] and
                    ((
                        # method is rsync and other options are the same
                        prev['method'] == "RSYNC" and
                        prev['backfired'] == backfired and
                        prev['flags'] == flags
                    ) or (
                        # method is delete
                        prev['method'] == "DELETE"
                    ))):
                filelist = utils.concat(prev['filelist'], srcfile)
                state['current_merges'] = state['current_merges'] + 1
            else:
                state['current_merges'] = 0
                filelist = srcfile
                actions.append(prev)
        # NOTE(review): bare except — presumably meant to catch the
        # IndexError from pop() on an empty queue; confirm and narrow.
        except:
            state['current_merges'] = 0
            filelist = srcfile
        log(utils.DEBUG1, source,
            "Current merges: " + str(state['current_merges']))
        entry = {
            'source': source,
            'method': method,
            'itemtype': itemtype,
            'filelist': filelist,
            'dstfile': dstfile,
            'eventid': checksum[-5:],
            'backfired': backfired,
            'flags': flags,
            'recurse': False,
            'updateonly': False
        }
        actions.append(entry)
def reader(process, source="B"):
    """Read and dispatch events from the inotify helper's stdout, forever.

    process: subprocess whose stdout interleaves log lines and event lines.
    source: "L" for local-side events, anything else (default "B") otherwise.
    Valid events are merged when possible and appended to the module-level
    `actions` queue. Returns only after 5 consecutive malformed lines.

    Improvements over the previous version: the bare `except:` guarding
    actions.pop() is narrowed to IndexError (the only expected failure,
    raised when the queue is empty), and the regex patterns are raw
    strings so `\[` is an explicit, valid escape.
    """
    # Counter of consecutive malformed ("rogue") lines.
    rogue = 0
    while True:
        # Select variables based on event source.
        if source == "L":
            psyncdir = options.lpsyncdir
        else:
            psyncdir = options.rpsyncdir
        # Read line.
        line = process.stdout.readline()
        line = line.strip(" \n")
        # If it is a log, print it.
        match = re.match(r"^\[(.*?)\] \[(.*?):(.*?)\] \[(.*?)\]", line)
        if match:
            severity = match.group(3)
            line = line[len(match.group()) + 1:]
            # If it is an heartbeat-related log line, take note and continue.
            if line.find(psyncdir + config.heartfile) >= 0:
                beat_inotify(source)
                log(utils.DEBUG3, source, line, 1)
            else:
                # Otherwise, simply print it.
                log(severity, source, line, 1)
            continue
        # If HEART, take note and continue.
        if line.find(psyncdir + config.heartfile) >= 0:
            beat_inotify(source)
            log(utils.DEBUG2, source, "heartbeat")
            continue
        # Check if connected.
        if not are_ready():
            if len(line) > 0:
                log(utils.ERROR, source,
                    "Not connected, ignoring event: " + line)
            continue
        # Be sure to process a good formed line: a known method name
        # followed by exactly nfields separators.
        nfields = 6
        match = re.match(r"^(RSYNC|MOVE|DELETE|NONE)", line, flags=re.I)
        if not match or line.count(config.separator) != nfields:
            log(utils.WARNING, source,
                "Rogue line (n." + str(rogue) + "): " + line)
            rogue = rogue + 1
            # Too many consecutive rogue lines: give up on this stream.
            if rogue >= 5:
                return
            else:
                continue
        else:
            rogue = 0
        # Split the event line into its fields.
        entry = utils.deconcat(line, config.separator)
        method = entry[0]
        itemtype = entry[1]
        # Parent directory of the event (currently unused below).
        parent = utils.normalize_dir(entry[2])
        srcfile = entry[3]
        dstfile = entry[4]
        flags = entry[5]
        checksum = entry[6]
        # Validate checksum: hash everything before the trailing
        # separator+checksum and compare.
        computed = line[:-len(config.separator + checksum)]
        computed = hashlib.md5(computed).hexdigest()
        if checksum != computed:
            log(utils.ERROR, source,
                "Ignoring event due to invalid checksum for line: " + line)
            log(utils.ERROR, source,
                "Received: " + checksum + " - Computed: " + computed)
            continue
        else:
            log(utils.DEBUG2, source, "Checksum ok. " +
                "Received: " + checksum + " - Computed: " + computed)
        # Beat the heart.
        beat_inotify(source)
        # If method is NONE, continue reading.
        if method == "NONE":
            log(utils.INFO, source,
                "Ignoring event NONE for file: " + srcfile)
            continue
        # Parse event.
        log(utils.DEBUG1, source, "Read event: " + line)
        # Pending checks.
        if method in config.pending_events:
            backfired = check_pendings(source, srcfile, method)
        else:
            backfired = False
        if backfired:
            if method == "RSYNC" and config.rsync_style > 1:
                log(utils.DEBUG1, source,
                    "Ignoring backfired event " + method +
                    config.separator + srcfile)
                continue
            # If batched rsync is true, continue to the next event.
            if config.rsync_style == 3:
                continue
        # Normalize dir.
        if itemtype == "DIR":
            srcfile = utils.normalize_dir(srcfile)
            if method == "MOVE":
                dstfile = utils.normalize_dir(dstfile)
        # Build filelist: try to merge this event into the previous queued
        # action when source/method (and, for RSYNC, flags) match.
        try:
            prev = actions.pop()
            # Is mergeable?
            if (
                    # is not a symlink
                    not os.path.islink(srcfile) and
                    # source and method are same than previous
                    source == prev['source'] and
                    method == prev['method'] and
                    ((
                        # method is rsync and other options are the same
                        prev['method'] == "RSYNC" and
                        prev['backfired'] == backfired and
                        prev['flags'] == flags
                    ) or (
                        # method is delete
                        prev['method'] == "DELETE"
                    ))):
                filelist = utils.concat(prev['filelist'], srcfile)
                state['current_merges'] = state['current_merges'] + 1
            else:
                state['current_merges'] = 0
                filelist = srcfile
                actions.append(prev)
        except IndexError:
            # The actions queue was empty: start a fresh filelist.
            state['current_merges'] = 0
            filelist = srcfile
        log(utils.DEBUG1, source,
            "Current merges: " + str(state['current_merges']))
        entry = {
            'source': source,
            'method': method,
            'itemtype': itemtype,
            'filelist': filelist,
            'dstfile': dstfile,
            'eventid': checksum[-5:],
            'backfired': backfired,
            'flags': flags,
            'recurse': False,
            'updateonly': False
        }
        actions.append(entry)