def test_logpath_last_chunk():
    """A partial trailing chunk lost to rotation is skipped transparently."""
    utils.cleanup(PATH)
    # Leave an unterminated chunk behind, then rotate the file away.
    with open(PATH, "w") as f:
        f.write("lost last chunk")
    reader = LogPath(PATH)
    os.remove(PATH)
    with open(PATH, "w") as f:
        f.write("Hello Again\n")
    # The lost chunk never surfaces; reading resumes on the new file.
    assert reader.readline() == "Hello Again"
    reader.commit()
    assert reader.offsetfile.offset == 12
def test_logpath_existing():
    """Reopening an already-consumed path restores the committed offset."""
    utils.cleanup(PATH)
    with open(PATH, "w") as handle:
        print("Hello World!", file=handle)
    reader = LogPath(PATH)
    # Drain every line, then persist the position.
    while reader.readline():
        pass
    reader.commit()
    # A fresh instance resumes where the previous one committed.
    reader = LogPath(PATH)
    assert reader.offsetfile.offset == 13
def test_logpath_basic():
    """readline() strips trailing whitespace and yields None for blank lines."""
    utils.cleanup(PATH)
    with open(PATH, "w") as out:
        print("Hello World ", file=out)
        print("", file=out)
    reader = LogPath(PATH)
    assert reader.readline() == "Hello World"
    assert reader.readline() is None
    reader.commit()
    # The committed position is persisted to the sidecar offset file.
    saved = OffsetFile.read(PATH)
    assert saved.offset == 16
    reader.close()
def test_logpath_lost_rotation():
    """A rotation that happens with no live reader restarts at offset 0."""
    utils.cleanup(PATH)
    with open(PATH, "w") as handle:
        print("Hello World", file=handle)
    reader = LogPath(PATH)
    while reader.readline():
        pass
    reader.commit()
    # Rotate while no LogPath instance is watching the file.
    os.remove(PATH)
    with open(PATH, "w") as handle:
        print("Hello Again", file=handle)
    # The stored offset no longer matches the new file, so it resets.
    reader = LogPath(PATH)
    assert reader.offsetfile.offset == 0
def test_logpath_empty_rotation():
    """Rotation after the old file is fully consumed continues seamlessly."""
    utils.cleanup(PATH)
    with open(PATH, "w") as handle:
        print("line1", file=handle)
    reader = LogPath(PATH)
    assert reader.readline() == "line1"
    reader.commit()
    # Rotate: remove the consumed file and start a fresh one.
    os.remove(PATH)
    with open(PATH, "w") as handle:
        print("line2", file=handle)
    assert reader.readline() == "line2"
def test_logpath_rotation():
    """Complete lines written before rotation are read; a partial line is dropped."""
    utils.cleanup(PATH)
    with open(PATH, "w") as handle:
        print("line1", file=handle)
    reader = LogPath(PATH)
    assert reader.readline() == "line1"
    reader.commit()
    # Append one full line plus an unterminated fragment, then rotate.
    with open(PATH, "a") as handle:
        print("line2", file=handle)
        handle.write("partial line to be lost")
    os.remove(PATH)
    with open(PATH, "w") as handle:
        print("line3", file=handle)
    # line2 survives from the rotated file; the fragment is silently lost.
    assert reader.readline() == "line2"
    assert reader.readline() == "line3"
def main(args: dict):
    """
    The main entry point for the child subprocess
    that is run for a given log file.

    Attributes read from ``args`` (NOTE(review): used via ``getattr``/dot
    access, so this looks like an argparse.Namespace rather than a dict —
    confirm the annotation):
      id         -- container id whose JSON log file is followed
      debug      -- enables DEBUG logging plus per-row refresh/echo
      reset      -- if truthy, delete any existing offset sidecar first
      interval   -- seconds to sleep when no new lines are available
      run_once   -- optional; return after one read/commit cycle
      batch_size -- optional; max lines per DB commit (DEFAULT_BATCH_SIZE)

    Returns the LogPath instance when ``run_once`` is set (for tests),
    otherwise 0 once the log file disappears (container removed).
    """
    client = docker.from_env()
    info = client.api.inspect_container(args.id)
    run_once = getattr(args, "run_once", False)
    # Fix: supply a default so a missing attribute falls through to
    # DEFAULT_BATCH_SIZE instead of raising AttributeError (consistent
    # with the run_once lookup above).
    batch_size = getattr(args, "batch_size", None)
    if not batch_size:
        batch_size = DEFAULT_BATCH_SIZE
    done = False
    # Container names can be arbitrarily long; keep the log prefix bounded.
    name = info["Name"]
    if len(name) > 128:
        name = name[0:128]
    logging.basicConfig(
        format="%(levelname)s:" + name + ":%(message)s",
        level=logging.DEBUG if args.debug else logging.INFO,
    )
    path = info["LogPath"]
    logging.info("%s started: %s", args.id, path)
    if args.reset:
        # Drop the persisted offset so reading restarts from the beginning.
        offset_file_path = info["LogPath"] + OffsetFile.DEFAULT_SUFFIX
        if os.path.exists(offset_file_path):
            logging.warning("Removing offsetfile: %s", offset_file_path)
            os.remove(offset_file_path)
    # NullPool: short-lived child process, no benefit from pooled connections.
    engine = sqlalchemy.create_engine(settings.DATABASE_URL, poolclass=NullPool)
    models.Session.configure(bind=engine)
    session = models.Session()
    utils.containers_chown(path)
    log_path = LogPath(path)
    while not done:
        # Accumulate up to batch_size parsed lines per DB commit.
        lines = []
        while True:
            line = None
            try:
                line = log_path.readline()
            except FileNotFoundError:
                logging.info("FileNotFound %s, container likely removed", path)
                done = True
            if not line:
                break
            # assumes json formatting
            data = transform(info["Name"], json.loads(line))
            if not data:
                continue
            lines.append(data)
            logging.debug("LINE: %s", str(data))
            if len(lines) >= batch_size:
                break
        if not lines:
            if run_once:
                return log_path  # pragma: no coverage
            logging.debug("sleep")
            time.sleep(args.interval)
            continue
        logs = []
        for data in lines:
            log = models.Log(container_id=args.id, container_name=name, json=data)
            session.add(log)
            logs.append(log)
        session.commit()
        if args.debug:
            for log in logs:
                # careful, this can hang during same-process tests
                session.refresh(log)
                logging.debug("LOG: %s", log.as_dict())
        # Only advance the on-disk offset once the batch is safely committed.
        log_path.commit()
        if run_once:
            return log_path
    return 0