def get_task_content(task: Task):
    """Return a SHA-512 hex digest of the task's serialized content.

    Tries ``task.get_ds()`` first, then falls back to ``task.dump_attrs()``
    (needed on ansible 2.8).  Returns ``None`` — after logging an error —
    when neither yields any data, in which case caching cannot work.
    """
    raw = task.get_ds()
    if not raw:
        # ansible 2.8
        raw = task.dump_attrs()
    if not raw:
        logger.error(
            "unable to obtain task content from ansible: caching will not work"
        )
        return
    # sort_keys=True makes the serialization stable, so equal task content
    # always produces the same digest.
    payload = json.dumps(raw, sort_keys=True).encode("utf-8")
    logger.debug("content = %s", payload)
    return hashlib.sha512(payload).hexdigest()
def get_task_content(task: Task):
    """Return a SHA-512 hex digest identifying the task's content.

    The digest covers the task's stable JSON serialization (via
    ``task.get_ds()``, falling back to ``task.dump_attrs()`` for
    ansible 2.8).  For file actions (``args.src`` present) it also folds
    in the src's modification time (single file) or a directory
    fingerprint (directory), so touching the src invalidates the cached
    layer.  Returns ``None`` — after logging an error — when no task
    content can be obtained.
    """
    digest = hashlib.sha512()
    data = task.get_ds()
    if not data:
        # ansible 2.8
        data = task.dump_attrs()
    if not data:
        logger.error(
            "unable to obtain task content from ansible: caching will not work"
        )
        return
    # sort_keys=True makes the serialization order-independent and stable.
    text = json.dumps(data, sort_keys=True)
    logger.debug("content = %s", text)
    digest.update(text.encode("utf-8"))

    # If task is a file action, cache the src.
    #
    # Take the file stats of the src (if directory, get the stats
    # of every file within) and concatenate it with the task config.
    #
    # The idea is that, if a file is changed, so will its modification
    # time, which will force the layer to be reloaded.  Otherwise, load
    # from cache.
    attrs = task.dump_attrs()
    if 'args' in attrs and 'src' in attrs['args']:
        src = attrs['args']['src']
        # Resolve src the way ansible does: prefer the role's "files/"
        # subdirectory, then fall back to the search path root.
        candidate = os.path.join(task.get_search_path()[0], "files", src)
        if not os.path.exists(candidate):
            candidate = os.path.join(task.get_search_path()[0], src)
        if os.path.isdir(candidate):
            fingerprint = CallbackModule.get_dir_fingerprint(candidate)
            digest.update(fingerprint.encode("utf-8"))
        elif os.path.isfile(candidate):
            mtime = str(pathlib.Path(candidate).stat().st_mtime)
            digest.update(mtime.encode("utf-8"))
    return digest.hexdigest()