def test_open_exclusive():
    """open_exclusive() must refuse to open a path that already exists."""
    fpath = os.path.join(tempfile.mkdtemp(), "yeet.exclusive")
    # Write a bytes literal: the file is opened in binary mode, so a str
    # literal raises TypeError on Python 3 (b"" is str on Python 2, so
    # this stays compatible either way).
    with open(fpath, "wb") as fp:
        fp.write(b"42421337Test")
    # A second exclusive open of the same path must fail with OSError
    # (EEXIST from O_CREAT|O_EXCL).
    with pytest.raises(OSError):
        open_exclusive(fpath, bufsize=1)
def init(self):
    """Exclusively open this task's live analysis.log.

    If another handler already holds the log open, the OSError from the
    exclusive open is treated as "already initialized" and we bail out
    without touching ``self.fd``.
    """
    self.logpath = os.path.join(self.handler.storagepath, "analysis.log")
    try:
        # Line-buffered so live readers see log lines promptly.
        self.fd = open_exclusive(self.logpath, bufsize=1)
    except OSError:
        log.debug("Task #%s: attempted to reopen live log analysis.log.", self.task_id)
    else:
        log.debug("Task #%s: live log analysis.log initialized.", self.task_id)
def handle(self):
    """Receive one uploaded file from the analyzer and store it on disk.

    Supports three header formats: version 1 (store_as only), version 2
    (store_as, path, pids), and a dict header carrying a response id.
    The upload body is streamed into an exclusively-created file under
    the task's storage path, and the upload is recorded in ``self.filelog``.
    """
    # Read until newline for file path, e.g.,
    # shots/0001.jpg or files/9498687557/libcurl-4.dll.bin
    self.handler.sock.settimeout(30)
    if self.header is None:
        # Backwards compatibility, version 1
        self.header = {
            "store_as": self.handler.read_newline(),
        }
    elif self.header == 2:
        # Backwards compatibility, version 2
        # NOTE(review): comparing the header *dict* attribute against the
        # integer 2 looks suspicious — presumably this was meant to test a
        # protocol *version* field. Confirm against the caller that sets
        # self.header before relying on this branch.
        self.header = {
            "store_as": self.handler.read_newline(),
            "path": self.handler.read_newline(),
            "pids": map(int, self.handler.read_newline().split(",")),
        }
    else:
        # Header already parsed elsewhere; just remember the response id.
        self.response_id = self.header.get("rid")

    dump_path = self.header.get("store_as")
    if not dump_path:
        raise CuckooOperationalError(
            "No dump path specified for file in task #%s" % self.task_id)

    # Strip directory-traversal attempts etc. from the analyzer-supplied name.
    dump_path = netlog_sanitize_fname(dump_path)
    path = self.header.get("path")
    pids = self.header.get("pids", [])

    log.debug("Task #%s: File upload for %r", self.task_id, dump_path)
    # dump_path is bytes here (it is .decode()d) — assumes the wire
    # protocol delivers bytes; verify against read_newline().
    file_path = os.path.join(self.storagepath, dump_path.decode("utf-8"))
    try:
        # Exclusive create: an analyzer must never overwrite an upload.
        self.fd = open_exclusive(file_path)
    except OSError as e:
        if e.errno == errno.EEXIST:
            raise CuckooOperationalError("Analyzer for task #%s tried to "
                                         "overwrite an existing file" % self.task_id)
        raise

    # Append-writes are atomic
    # NOTE(review): the file is opened binary ("a+b") but print() emits
    # str — fine under Python 2, would TypeError on Python 3; confirm
    # the target interpreter.
    with open(self.filelog, "a+b") as f:
        print(json.dumps({
            "path": dump_path,
            "filepath": path,
            "pids": pids,
        }), file=f)

    # Body length is unknown; stream without a timeout until done.
    self.handler.sock.settimeout(None)
    try:
        return self.handler.copy_to_fd(self.fd, self.upload_max_size)
    finally:
        log.debug("Task #%s uploaded file length: %s",
                  self.task_id, self.fd.tell())
def init(self):
    """Open an exclusive log file for the JSON stream announced by the agent.

    The agent first sends the stream type ("XNUMON" or "DTRACE") on a
    single line; the matching log file under the task's storage path is
    then opened exclusively so the same stream cannot be opened twice.

    Raises:
        CuckooOperationalError: if the agent announces an unknown stream
            type (previously this crashed with a NameError on file_name).
    """
    stream_type = self.handler.read_newline()
    # Map each supported stream type to its on-disk log file.
    stream_files = {
        'XNUMON': "logs/logs.xnumon",
        'DTRACE': "logs/logs.dtrace",
    }
    file_name = stream_files.get(stream_type)
    if file_name is None:
        raise CuckooOperationalError(
            "Unknown stream type %r for task #%s" % (stream_type, self.task_id)
        )

    self.logpath = os.path.join(self.handler.storagepath, file_name)
    log.debug("Agent is streaming JSON data. Storing them to log.%s",
              stream_type.lower())
    try:
        # open_exclusive() takes a bufsize, not a mode: the previous
        # code passed "wb" into the bufsize parameter. Line-buffer the
        # live stream, consistent with the analysis.log handler.
        self.fd = open_exclusive(self.logpath, bufsize=1)
    except OSError:
        # Catch only the exclusive-open failure (was a bare except:),
        # and bail out so the success message below is not logged.
        log.error("Task %s attempted to open the log file more than once",
                  self.task_id)
        return
    log.debug("Task #%s, live stream initalized", self.task_id)
def handle(self):
    """Receive one uploaded file from the analyzer and store it on disk.

    Protocol version >= 2 sends two extra metadata lines (original file
    path and the PIDs that touched the file). The body is streamed into
    an exclusively-created file under the task's storage path and the
    upload is recorded in ``self.filelog``.
    """
    # Read until newline for file path, e.g.,
    # shots/0001.jpg or files/9498687557/libcurl-4.dll.bin
    self.handler.sock.settimeout(30)
    # Strip directory-traversal attempts etc. from the analyzer-supplied name.
    dump_path = netlog_sanitize_fname(self.handler.read_newline())

    if self.version and self.version >= 2:
        # NB: filepath is only used as metadata
        filepath = self.handler.read_newline()
        # Whitespace-separated PID list; note the sibling header-based
        # handler splits on "," instead — presumably a protocol quirk.
        pids = map(int, self.handler.read_newline().split())
    else:
        filepath, pids = None, []

    log.debug("Task #%s: File upload for %r", self.task_id, dump_path)
    # dump_path is bytes here (it is .decode()d) — assumes the wire
    # protocol delivers bytes; verify against read_newline().
    file_path = os.path.join(self.storagepath, dump_path.decode("utf-8"))
    try:
        # Exclusive create: an analyzer must never overwrite an upload.
        self.fd = open_exclusive(file_path)
    except OSError as e:
        if e.errno == errno.EEXIST:
            raise CuckooOperationalError("Analyzer for task #%s tried to "
                                         "overwrite an existing file" % self.task_id)
        raise

    # Append-writes are atomic
    # NOTE(review): the file is opened binary ("a+b") but print() emits
    # str — fine under Python 2, would TypeError on Python 3; confirm
    # the target interpreter.
    with open(self.filelog, "a+b") as f:
        print(
            json.dumps({
                "path": dump_path,
                "filepath": filepath,
                "pids": pids
            }),
            file=f,
        )

    # Body length is unknown; stream without a timeout until done.
    self.handler.sock.settimeout(None)
    try:
        return self.handler.copy_to_fd(self.fd, self.upload_max_size)
    finally:
        log.debug("Task #%s uploaded file length: %s",
                  self.task_id, self.fd.tell())