def dequeue():
    """Worker loop: consume the global actions queue forever.

    Pops one action at a time and dispatches it to the matching sync
    handler (rsync / delete / move). Sleeps one second when the queue
    is empty; when the peers are not connected, drops the event with
    an ERROR log for each file it referenced.
    """
    while True:
        beat("dequeue")
        # EAFP: popleft() raises IndexError when the deque is empty.
        # A bare except here would also swallow KeyboardInterrupt and
        # real bugs, so catch only the empty-queue case.
        try:
            action = actions.popleft()
        except IndexError:
            time.sleep(1)
            continue
        # Print queue length (+1 accounts for the entry just popped)
        log(utils.DEBUG1, "B",
            "Actions queue length: " + str(len(actions) + 1),
            eventid=action['eventid'])
        # Refresh pendings
        log(utils.DEBUG1, action['source'],
            "Dequeue, using method " + action['method'],
            eventid=action['eventid'])
        log(utils.DEBUG3, action['source'], "LV2 action: " + str(action),
            eventid=action['eventid'])
        if are_ready():
            # Select appropriate command
            if action['method'] == "RSYNC":
                rsync(action, acl=True)
            if action['method'] == "DELETE":
                # If full sync is running and locks are enabled,
                # skip DELETE events
                if config.full_sync_lock:
                    if locks['global'].acquire(False):
                        # Lock was free: no full sync running
                        locks['global'].release()
                        delete(action)
                    elif not full_syncher.is_alive():
                        delete(action)
                    else:
                        for filename in utils.deconcat(action['filelist']):
                            log(utils.INFO, action['source'],
                                "FULL SYNC in progress. Skipping DELETE for " + filename,
                                eventid=action['eventid'])
                else:
                    delete(action)
            if action['method'] == "MOVE":
                move(action)
        else:
            # Not connected: discard the event, logging each file
            for filename in utils.deconcat(action['filelist']):
                log(utils.ERROR, action['source'],
                    "Not connected, removing from queue event " +
                    action['method'] + config.separator + filename,
                    eventid=action['eventid'])
def dequeue():
    # Worker loop: forever pop actions from the global queue and
    # dispatch them to the matching sync handler (rsync/delete/move).
    while True:
        beat("dequeue")
        try:
            action = actions.popleft()
        except:
            # Queue empty: wait a little before polling again
            time.sleep(1)
            continue
        # Print queue length
        log(utils.DEBUG1, "B",
            "Actions queue length: " + str(len(actions) + 1),
            eventid=action['eventid'])
        # Refresh pendings
        log(utils.DEBUG1, action['source'],
            "Dequeue, using method " + action['method'],
            eventid=action['eventid'])
        log(utils.DEBUG3, action['source'], "LV2 action: " + str(action),
            eventid=action['eventid'])
        if are_ready():
            # Select appropriate command
            if action['method'] == "RSYNC":
                rsync(action, acl=True)
            if action['method'] == "DELETE":
                # If full sync is running and locks are enabled,
                # skip DELETE events
                if config.full_sync_lock:
                    if locks['global'].acquire(False):
                        # Lock was free: no full sync running
                        locks['global'].release()
                        delete(action)
                    elif not full_syncher.is_alive():
                        delete(action)
                    else:
                        for filename in utils.deconcat(action['filelist']):
                            log(utils.INFO, action['source'],
                                "FULL SYNC in progress. Skipping DELETE for " + filename,
                                eventid=action['eventid'])
                else:
                    delete(action)
            if action['method'] == "MOVE":
                move(action)
        else:
            # Not connected: discard the event, logging each file
            for filename in utils.deconcat(action['filelist']):
                log(utils.ERROR, action['source'],
                    "Not connected, removing from queue event " +
                    action['method'] + config.separator + filename,
                    eventid=action['eventid'])
def rsync(action, recurse=config.rsync_event_recurse, acl=False, warn=True, updateonly=False): log(utils.DEBUG1, action['source'], "RSYNC action", eventid=action['eventid']) # Options selection rsync_options = [] if action['backfired']: rsync_options.append("--existing") if recurse or action['recurse']: rsync_options.append("-r") if acl and (action['source'] == "L" or not config.acl_from_left_only): rsync_options.append("-AX") if action['flags'] == utils.FFORCE and config.maxsize: rsync_options.append(config.maxsize) if action['updateonly'] or updateonly: rsync_options.append("-u") # Command selection if action['source'] == "L": left = options.srcroot right = options.dsthost + ":" + options.dstroot filelist = action['filelist'].replace(left, "") else: left = options.dsthost + ":" + options.dstroot right = options.srcroot filelist = action['filelist'].replace(right, "") # Filelist mangling to remove duplicate if len(filelist) == 0: filelist = "/" else: fileset = set(utils.deconcat(filelist)) filelist = "\n".join(fileset) # Generating exclude list excludelist = utils.gen_exclude(options.rsync_excludes) # Execute and report log(utils.DEBUG2, action['source'], "Preparing to sync: \n" + filelist, eventid=action['eventid']) cmd = (["rsync", "-ai"] + options.rsync_extra + rsync_options + ["--files-from=-"] + excludelist + [left, right]) (process, output, error) = execute(cmd, action['source'], filelist, warn=warn, eventid=action['eventid']) if process.returncode == utils.RSYNC_TERMINATED: log(utils.INFO, action['source'], "Rescheduling files: \n" + filelist, eventid=action['eventid']) actions.append(action)
def translate(line):
    """Apply the configured path translation to *line*.

    Returns a (translated, original, line) triple: whether any
    substitution happened, the untouched input, and the possibly
    rewritten line. A target path of the literal string "None" is
    treated as an empty replacement.
    """
    original = line
    frompath, topath = utils.deconcat(options.translate, config.separator)
    if topath == "None":
        topath = ""
    translated = frompath in line
    if translated:
        line = line.replace(frompath, topath)
        log(utils.DEBUG2, "Translate: " + original + " -> " + line)
    return translated, original, line
def safeline(line):
    """Validate an inotify event line before parsing.

    A well-formed line carries exactly four separators (five fields)
    and sane directory/file names; anything else is logged with a
    WARNING and rejected.
    """
    # Malformed line: wrong number of separator-delimited fields
    if line.count(config.separator) != 4:
        log(utils.WARNING, "Strange line (type S1): " + line)
        return False
    fields = utils.deconcat(line, config.separator, False)
    event, dirname, filename, dstfile, end = fields
    # Both the parent directory and the file name must be sane paths
    if not (sanitize_path(dirname) and sanitize_path(filename)):
        log(utils.WARNING, "Strange line (type S2): " + line)
        return False
    # All checks passed
    return True
def safeline(line):
    # Validate an inotify event line before parsing: correct field
    # count and sane path names. Returns True when the line is safe.
    # Check for bad formed line
    if line.count(config.separator) != 4:
        log(utils.WARNING, "Strange line (type S1): " + line)
        return False
    # Check for sane path/file names
    event, dirname, filename, dstfile, end = utils.deconcat(
        line, config.separator, False)
    if not sanitize_path(dirname) or not sanitize_path(filename):
        log(utils.WARNING, "Strange line (type S2): " + line)
        return False
    # If all it's ok, return success
    return True
def check_delete(filelist, source):
    """Split *filelist* into (protected, todelete) concatenated lists.

    Entries whose path is not strictly longer than the peer root are
    considered protected — deleting them could wipe the replica root
    itself — while everything else is cleared for deletion.
    """
    root = options.dstroot if source == "L" else options.srcroot
    protected, todelete = "", ""
    for entry in utils.deconcat(filelist):
        # Be extra careful on what we delete: anything no longer than
        # the root path must never be removed.
        if len(entry) > len(root):
            todelete = utils.concat(todelete, entry)
        else:
            protected = utils.concat(protected, entry)
    return (protected, todelete)
def dosidebackup(changed, side):
    # Back up, via rsync, the files reported as changed on one side
    # before they are overwritten. Returns the (process, output, error)
    # triple from execute(), or (None, None, None) when nothing to do.
    if not changed:
        return (None, None, None)
    rsync_args = ["-avAX"]
    filelist = ""
    for line in utils.deconcat(changed):
        # NOTE(review): lines look like rsync itemized-changes output;
        # a '+' in the third column presumably marks newly-created
        # entries, which are skipped — TODO confirm against execute()
        if line[2] == "+":
            continue
        # Strip the fixed-width flag prefix to obtain the bare path
        line = line[FLAGLEN:]
        filelist = utils.concat(filelist, line)
    if not filelist:
        return (None, None, None)
    # File list is fed on stdin via --files-from=-
    cmd = (["rsync"] + options.extra + rsync_args +
           ["--files-from=-", side, backupdir])
    (process, output, error) = execute(cmd, filelist)
    return (process, output, error)
def reader(process, source="B"):
    """Read event lines from an inotify helper's stdout forever.

    Handles three kinds of input: forwarded log lines, heartbeat
    notifications, and checksummed sync events. Valid events are
    merged with the previous queued action when compatible (same
    source/method, RSYNC with identical options, or DELETE) and
    appended to the global actions queue. Returns only after five
    consecutive malformed ("rogue") lines.
    """
    rogue = 0
    while True:
        # Select variables based on event source
        if source == "L":
            psyncdir = options.lpsyncdir
        else:
            psyncdir = options.rpsyncdir
        # Read line
        line = process.stdout.readline()
        line = line.strip(" \n")
        # If it is a log, print it (raw string avoids invalid-escape
        # warnings for \[ in newer Python versions)
        match = re.match(r"^\[(.*?)\] \[(.*?):(.*?)\] \[(.*?)\]", line)
        if match:
            severity = match.group(3)
            line = line[len(match.group()) + 1:]
            # If it is an heartbeat-related log line, take note and continue
            if line.find(psyncdir + config.heartfile) >= 0:
                beat_inotify(source)
                log(utils.DEBUG3, source, line, 1)
            else:
                # Otherwise, simply print it
                log(severity, source, line, 1)
            continue
        # If HEART, take note and continue
        if line.find(psyncdir + config.heartfile) >= 0:
            beat_inotify(source)
            log(utils.DEBUG2, source, "heartbeat")
            continue
        # Check if connected
        if not are_ready():
            if len(line) > 0:
                log(utils.ERROR, source,
                    "Not connected, ignoring event: " + line)
            continue
        # Be sure to process a good formed line; bail out after five
        # consecutive rogue lines (the helper is probably broken)
        nfields = 6
        match = re.match("^(RSYNC|MOVE|DELETE|NONE)", line, flags=re.I)
        if not match or line.count(config.separator) != nfields:
            log(utils.WARNING, source,
                "Rogue line (n." + str(rogue) + "): " + line)
            rogue = rogue + 1
            if rogue >= 5:
                return
            else:
                continue
        else:
            rogue = 0
        entry = utils.deconcat(line, config.separator)
        method = entry[0]
        itemtype = entry[1]
        parent = utils.normalize_dir(entry[2])
        srcfile = entry[3]
        dstfile = entry[4]
        flags = entry[5]
        checksum = entry[6]
        # Validate checksum: md5 of the line minus its trailing checksum
        computed = line[:-len(config.separator + checksum)]
        computed = hashlib.md5(computed).hexdigest()
        if checksum != computed:
            log(utils.ERROR, source,
                "Ignoring event due to invalid checksum for line: " + line)
            log(utils.ERROR, source,
                "Received: " + checksum + " - Computed: " + computed)
            continue
        else:
            log(utils.DEBUG2, source,
                "Checksum ok. Received: " + checksum +
                " - Computed: " + computed)
        # Beat the heart
        beat_inotify(source)
        # If method is NONE, continue reading
        if method == "NONE":
            log(utils.INFO, source,
                "Ignoring event NONE for file: " + srcfile)
            continue
        # Parse event
        log(utils.DEBUG1, source, "Read event: " + line)
        # Pending checks
        if method in config.pending_events:
            backfired = check_pendings(source, srcfile, method)
        else:
            backfired = False
        if backfired:
            if method == "RSYNC" and config.rsync_style > 1:
                log(utils.DEBUG1, source,
                    "Ignoring backfired event " + method +
                    config.separator + srcfile)
                continue
            # If batched rsync is true, continue to the next event
            if config.rsync_style == 3:
                continue
        # Normalize dir
        if itemtype == "DIR":
            srcfile = utils.normalize_dir(srcfile)
            if method == "MOVE":
                dstfile = utils.normalize_dir(dstfile)
        # Build filelist: try to merge with the previously queued action.
        # pop() raises IndexError when the queue is empty; catching only
        # that keeps unexpected errors from silently dropping the popped
        # action (the old bare except lost it).
        try:
            prev = actions.pop()
            # Is mergeable?
            if (
                    # is not a symlink
                    not os.path.islink(srcfile) and
                    # source and method are same than previous
                    source == prev['source'] and
                    method == prev['method'] and
                    ((
                        # method is rsync and other options are the same
                        prev['method'] == "RSYNC" and
                        prev['backfired'] == backfired and
                        prev['flags'] == flags) or (
                        # method is delete
                        prev['method'] == "DELETE"))):
                filelist = utils.concat(prev['filelist'], srcfile)
                state['current_merges'] = state['current_merges'] + 1
            else:
                # Not mergeable: put the previous action back untouched
                state['current_merges'] = 0
                filelist = srcfile
                actions.append(prev)
        except IndexError:
            # Queue was empty: nothing to merge with
            state['current_merges'] = 0
            filelist = srcfile
        log(utils.DEBUG1, source,
            "Current merges: " + str(state['current_merges']))
        entry = {'source': source, 'method': method, 'itemtype': itemtype,
                 'filelist': filelist, 'dstfile': dstfile,
                 'eventid': checksum[-5:], 'backfired': backfired,
                 'flags': flags, 'recurse': False, 'updateonly': False}
        actions.append(entry)
def parse_line(line):
    """Parse one raw inotify event line into an action and queue it.

    Filters out log lines, malformed lines, uninteresting events,
    tempfiles, excluded paths and the safesuffix; selects the sync
    method (RSYNC/MOVE/DELETE); coalesces a DELETE immediately
    followed by an RSYNC of the same file; appends the resulting
    action dict to the global actions queue.
    """
    line = line.rstrip("\n")
    # Check if it's an inotify logline
    if inotifylog(line):
        return
    # Check for safety
    if not safeline(line):
        return
    log(utils.DEBUG2, "Raw EVENT: " + line)
    # Translate and re-check for safety
    if options.translate:
        translated, original, line = translate(line)
        if not safeline(line):
            return
    else:
        translated = False
    # If safe, go ahead
    event, dirname, filename, dstfile, end = utils.deconcat(
        line, config.separator, False)
    # Item identification
    dirname = utils.normalize_dir(dirname)
    if event.find(",ISDIR") >= 0:
        itemtype = "DIR"
        filename = utils.normalize_dir(filename)
        dstfile = utils.normalize_dir(dstfile)
    else:
        itemtype = "FILE"
    # Keep only the first comma-separated event token
    event = utils.deconcat(event, ",")[0]
    # Flags - by default, they are empty
    flags = utils.FNORMAL
    # Select sync method and skip unwanted events
    # On directories, CREATE is skipped to avoid backfire from rsync
    # On files, CREATE is skipped because we want to sync only
    # closed/CLOSE_WRITE (ie: complete) files.
    # To expand: when files are CREATED but not CLOSED, the mtime
    # attribute can be 'wrong' (ie: newer) then what it should be
    # Example: a file which need 60 seconds to be uploaded, will have
    # a constantly-changing mtime until the upload complete, when the mtime
    # will be rolled back to the original value.
    # This behavior is application dependent, but we can't risk: a wrong
    # mtime can led to wrong replication direction and truncated file.
    if event == "CREATE":
        log(utils.DEBUG2, "Skipping uninteresting event for " + filename)
        return
    if event.find("SELF") >= 0:
        log(utils.DEBUG2, "Skipping uninteresting event for " + filename)
        return
    # Method selection
    if event == "ATTRIB" or event == "CLOSE_WRITE" or event == "MODIFY":
        method = "RSYNC"
    # MOVE handling
    elif event == "MOVED_FROM" or event == "MOVED_TO":
        return
    elif event == "MOVE":
        method = "MOVE"
    # DELETE and undefined method
    elif event == "DELETE":
        method = "DELETE"
    else:
        log(utils.DEBUG2, "Skipping uninteresting event for " + filename)
        return
    # If event is for a tempfile, ignore it
    if re.search(options.tempfiles, dstfile, re.I):
        log(utils.DEBUG2, "Skipping event for tempfile " + dstfile)
        return
    else:
        # If source was a tempfile but destination is a normal file, use RSYNC
        if re.search(options.tempfiles, filename, re.I):
            method = "RSYNC"
            filename = dstfile
            flags = utils.FFORCE
            log(utils.DEBUG2, "Changing method from MOVE to RSYNC " +
                "for tempfile " + filename)
    # If event is from/to excluded files, ignore it
    if (re.search(options.excludes, filename.rstrip("/"), re.I) or
            re.search(options.excludes, dstfile.rstrip("/"), re.I)):
        log(utils.DEBUG2, "Skipping event for excluded path " + filename)
        return
    # Be EXTRA CAREFUL to skip the safesuffix
    if (re.search(config.safesuffix, filename.rstrip("/"), re.I) or
            re.search(config.safesuffix, dstfile.rstrip("/"), re.I)):
        log(utils.DEBUG2, "Skipping event for excluded path " + filename)
        return
    # If it was a translated line, only allow RSYNC method
    if translated and not method == "RSYNC":
        log(utils.DEBUG2, "Skipping non-rsync method for translated line")
        return
    # Construct action
    entry = {'method': method, 'itemtype': itemtype, 'dir': dirname,
             'file': filename, 'dstfile': dstfile,
             'timestamp': time.time(), 'flags': flags}
    # Rsync checks
    if method == "RSYNC":
        if not rsync_early_checks(entry):
            return
    # Move checks
    if method == "MOVE":
        if not move_early_checks(entry):
            return
    # Coalesce and append actions: pop() raises IndexError on an empty
    # queue — catch only that, not every exception.
    try:
        prev = actions.pop()
    except IndexError:
        prev = False
    if prev:
        # A DELETE immediately followed by an RSYNC of the same file is
        # redundant: drop the previous DELETE.
        if (method == "RSYNC" and prev['method'] == "DELETE" and
                filename == prev['file']):
            pass
        else:
            actions.append(prev)
    actions.append(entry)
def parse_line(line):
    # Parse one raw inotify event line into an action dict and queue it,
    # after filtering uninteresting events, tempfiles, excluded paths and
    # the safesuffix, and coalescing DELETE+RSYNC pairs on the same file.
    line = line.rstrip("\n")
    # Check if it's an inotify logline
    if inotifylog(line):
        return
    # Check for safety
    if not safeline(line):
        return
    log(utils.DEBUG2, "Raw EVENT: " + line)
    # Translate and re-check for safety
    if options.translate:
        translated, original, line = translate(line)
        if not safeline(line):
            return
    else:
        translated = False
    # If safe, go ahead
    event, dirname, filename, dstfile, end = utils.deconcat(
        line, config.separator, False)
    # Item identification
    dirname = utils.normalize_dir(dirname)
    if event.find(",ISDIR") >= 0:
        itemtype = "DIR"
        filename = utils.normalize_dir(filename)
        dstfile = utils.normalize_dir(dstfile)
    else:
        itemtype = "FILE"
    # Keep only the first comma-separated event token
    event = utils.deconcat(event, ",")[0]
    # Flags - by default, they are empty
    flags = utils.FNORMAL
    # Select sync method and skip unwanted events
    # On directories, CREATE is skipped to avoid backfire from rsync
    # On files, CREATE is skipped because we want to sync only
    # closed/CLOSE_WRITE (ie: complete) files.
    # To expand: when files are CREATED but not CLOSED, the mtime
    # attribute can be 'wrong' (ie: newer) then what it should be
    # Example: a file which need 60 seconds to be uploaded, will have
    # a constantly-changing mtime until the upload complete, when the mtime
    # will be rolled back to the original value.
    # This behavior is application dependent, but we can't risk: a wrong
    # mtime can led to wrong replication direction and truncated file.
    if event == "CREATE":
        log(utils.DEBUG2, "Skipping uninteresting event for " + filename)
        return
    if event.find("SELF") >= 0:
        log(utils.DEBUG2, "Skipping uninteresting event for " + filename)
        return
    # Method selection
    if event == "ATTRIB" or event == "CLOSE_WRITE" or event == "MODIFY":
        method = "RSYNC"
    # MOVE handling
    elif event == "MOVED_FROM" or event == "MOVED_TO":
        return
    elif event == "MOVE":
        method = "MOVE"
    # DELETE and undefined method
    elif event == "DELETE":
        method = "DELETE"
    else:
        log(utils.DEBUG2, "Skipping uninteresting event for " + filename)
        return
    # If event if for tempfile, ignore it
    if re.search(options.tempfiles, dstfile, re.I):
        log(utils.DEBUG2, "Skipping event for tempfile " + dstfile)
        return
    else:
        # If source was a tempfile but destination is a normal file, use RSYNC
        if re.search(options.tempfiles, filename, re.I):
            method = "RSYNC"
            filename = dstfile
            flags = utils.FFORCE
            log(
                utils.DEBUG2, "Changing method from MOVE to RSYNC " +
                "for tempfile " + filename)
    # If event is from/to excluded files, ignore it
    if (re.search(options.excludes, filename.rstrip("/"), re.I) or
            re.search(options.excludes, dstfile.rstrip("/"), re.I)):
        log(utils.DEBUG2, "Skipping event for excluded path " + filename)
        return
    # Be EXTRA CAREFUL to skip the safesuffix
    if (re.search(config.safesuffix, filename.rstrip("/"), re.I) or
            re.search(config.safesuffix, dstfile.rstrip("/"), re.I)):
        log(utils.DEBUG2, "Skipping event for excluded path " + filename)
        return
    # If it was a translated line, only allow RSYNC method
    if translated and not method == "RSYNC":
        log(utils.DEBUG2, "Skipping non-rsync method for translated line")
        return
    # Construct action
    entry = {
        'method': method,
        'itemtype': itemtype,
        'dir': dirname,
        'file': filename,
        'dstfile': dstfile,
        'timestamp': time.time(),
        'flags': flags
    }
    # Rsync checks
    if method == "RSYNC":
        if not rsync_early_checks(entry):
            return
    # Move checks
    if method == "MOVE":
        if not move_early_checks(entry):
            return
    # Coalesce and append actions
    try:
        prev = actions.pop()
    except:
        # Queue was empty: nothing to coalesce with
        prev = False
    if prev:
        # Drop a previous DELETE that is immediately followed by an
        # RSYNC of the same file
        if (method == "RSYNC" and prev['method'] == "DELETE" and
                filename == prev['file']):
            pass
        else:
            actions.append(prev)
    actions.append(entry)
def reader(process, source="B"):
    # Read event lines from an inotify helper's stdout forever, handling
    # forwarded log lines, heartbeats and checksummed sync events.
    # Compatible consecutive events are merged; the function returns only
    # after five consecutive malformed ("rogue") lines.
    rogue = 0
    while True:
        # Select variables based on event source
        if source == "L":
            psyncdir = options.lpsyncdir
        else:
            psyncdir = options.rpsyncdir
        # Read line
        line = process.stdout.readline()
        line = line.strip(" \n")
        # If it is a log, print it
        match = re.match("^\[(.*?)\] \[(.*?):(.*?)\] \[(.*?)\]", line)
        if match:
            severity = match.group(3)
            line = line[len(match.group()) + 1:]
            # If it is an heartbeat-related log line, take note and continue
            if line.find(psyncdir + config.heartfile) >= 0:
                beat_inotify(source)
                log(utils.DEBUG3, source, line, 1)
            else:
                # Otherwise, simply print it
                log(severity, source, line, 1)
            continue
        # If HEART, take note and continue
        if line.find(psyncdir + config.heartfile) >= 0:
            beat_inotify(source)
            log(utils.DEBUG2, source, "heartbeat")
            continue
        # Check if connected
        if not are_ready():
            if len(line) > 0:
                log(utils.ERROR, source,
                    "Not connected, ignoring event: " + line)
            continue
        # Be sure to process a good formed line
        nfields = 6
        match = re.match("^(RSYNC|MOVE|DELETE|NONE)", line, flags=re.I)
        if not match or line.count(config.separator) != nfields:
            log(utils.WARNING, source,
                "Rogue line (n." + str(rogue) + "): " + line)
            rogue = rogue + 1
            # Give up on this stream after five rogue lines in a row
            if rogue >= 5:
                return
            else:
                continue
        else:
            rogue = 0
        entry = utils.deconcat(line, config.separator)
        method = entry[0]
        itemtype = entry[1]
        parent = utils.normalize_dir(entry[2])
        srcfile = entry[3]
        dstfile = entry[4]
        flags = entry[5]
        checksum = entry[6]
        # Validate checksum: md5 of the line minus its trailing checksum
        computed = line[:-len(config.separator + checksum)]
        computed = hashlib.md5(computed).hexdigest()
        if checksum != computed:
            log(utils.ERROR, source,
                "Ignoring event due to invalid checksum for line: " + line)
            log(utils.ERROR, source,
                "Received: " + checksum + " - Computed: " + computed)
            continue
        else:
            log(utils.DEBUG2, source,
                "Checksum ok. Received: " + checksum +
                " - Computed: " + computed)
        # Beat the heart
        beat_inotify(source)
        # If method is NONE, continue reading
        if method == "NONE":
            log(utils.INFO, source,
                "Ignoring event NONE for file: " + srcfile)
            continue
        # Parse event
        log(utils.DEBUG1, source, "Read event: " + line)
        # Pending checks
        if method in config.pending_events:
            backfired = check_pendings(source, srcfile, method)
        else:
            backfired = False
        if backfired:
            if method == "RSYNC" and config.rsync_style > 1:
                log(utils.DEBUG1, source,
                    "Ignoring backfired event " + method +
                    config.separator + srcfile)
                continue
            # If batched rsync is true, continue to the next event
            if config.rsync_style == 3:
                continue
        # Normalize dir
        if itemtype == "DIR":
            srcfile = utils.normalize_dir(srcfile)
            if method == "MOVE":
                dstfile = utils.normalize_dir(dstfile)
        # Build filelist: merge with the previously queued action when
        # compatible, otherwise put it back untouched
        try:
            prev = actions.pop()
            # Is mergeable?
            if (
                    # is not a symlink
                    not os.path.islink(srcfile) and
                    # source and method are same than previous
                    source == prev['source'] and
                    method == prev['method'] and
                    (
                        (
                            # method is rsync and other options are the same
                            prev['method'] == "RSYNC" and
                            prev['backfired'] == backfired and
                            prev['flags'] == flags
                        ) or (
                            # method is delete
                            prev['method'] == "DELETE"
                        )
                    )):
                filelist = utils.concat(prev['filelist'], srcfile)
                state['current_merges'] = state['current_merges'] + 1
            else:
                state['current_merges'] = 0
                filelist = srcfile
                actions.append(prev)
        except:
            # Queue was empty: nothing to merge with
            state['current_merges'] = 0
            filelist = srcfile
        log(utils.DEBUG1, source,
            "Current merges: " + str(state['current_merges']))
        entry = {'source': source, 'method': method, 'itemtype': itemtype,
                 'filelist': filelist, 'dstfile': dstfile,
                 'eventid': checksum[-5:], 'backfired': backfired,
                 'flags': flags, 'recurse': False, 'updateonly': False}
        actions.append(entry)