def __init__(self, dsn=None):
    """Create or connect to the results database.

    @param dsn: database connection string.
    @raise CuckooDatabaseError: if the db directory or schema cannot be created.
    """
    cfg = Config()
    # Pick the connection string: an explicit dsn wins over the config
    # file, which wins over the default on-disk SQLite database.
    if dsn:
        self.engine = create_engine(dsn, poolclass=NullPool)
    elif cfg.database.connection:
        self.engine = create_engine(cfg.database.connection, poolclass=NullPool)
    else:
        db_file = os.path.join(CUCKOO_ROOT, "db", "cuckoo.db")
        if not os.path.exists(db_file):
            db_dir = os.path.dirname(db_file)
            if not os.path.exists(db_dir):
                try:
                    create_folder(folder=db_dir)
                except CuckooOperationalError as e:
                    raise CuckooDatabaseError("Unable to create database directory: {0}".format(e))
        self.engine = create_engine("sqlite:///{0}".format(db_file), poolclass=NullPool)
    # Disable SQL logging. Turn it on for debugging.
    self.engine.echo = False
    # Connection timeout (seconds); fall back to 60 when unset.
    if cfg.database.timeout:
        self.engine.pool_timeout = cfg.database.timeout
    else:
        self.engine.pool_timeout = 60
    # Create schema.
    try:
        Base.metadata.create_all(self.engine)
    except SQLAlchemyError as e:
        raise CuckooDatabaseError("Unable to create or connect to database: {0}".format(e))
    # Get db session factory bound to this engine.
    self.Session = sessionmaker(bind=self.engine)
def read_next_message(self):
    """Receive one uploaded file: a newline-terminated relative path,
    then the raw file content, stored under self.storagepath.

    @return: False if the destination folder could not be created.
    @raise CuckooOperationalError: if the requested path is banned.
    """
    # Read until newline for file path,
    # e.g. shots/0001.jpg or files/9498687557/libcurl-4.dll.bin
    buf = self.handler.read_newline().strip().replace("\\", "/")
    log.debug("File upload request for {0}".format(buf))
    # Reject any attempt to escape the storage directory.
    if "../" in buf:
        raise CuckooOperationalError("FileUpload failure, banned path.")
    dir_part, filename = os.path.split(buf)
    if dir_part:
        try:
            create_folder(self.storagepath, dir_part)
        except CuckooOperationalError:
            log.error("Unable to create folder %s" % dir_part)
            return False
    file_path = os.path.join(self.storagepath, buf.strip())
    # Context manager so the descriptor is closed even when a read or
    # write below raises (the original leaked fd on error paths).
    with open(file_path, "wb") as fd:
        chunk = self.handler.read_any()
        while chunk:
            fd.write(chunk)
            # Cap the stored size; mark truncation explicitly.
            # Bytes literal: the file is opened in binary mode.
            if fd.tell() >= self.upload_max_size:
                fd.write(b"... (truncated)")
                break
            chunk = self.handler.read_any()
        log.debug("Uploaded file length: {0}".format(fd.tell()))
def read_next_message(self):
    """Receive one uploaded file: a newline-terminated relative path,
    then the raw file content, stored under self.storagepath.

    @return: False if the destination folder could not be created.
    @raise CuckooOperationalError: if the requested path is banned.
    """
    # Read until newline for file path,
    # e.g. shots/0001.jpg or files/9498687557/libcurl-4.dll.bin
    buf = self.handler.read_newline().strip().replace("\\", "/")
    log.debug("File upload request for {0}".format(buf))
    # Reject any attempt to escape the storage directory.
    if "../" in buf:
        raise CuckooOperationalError("FileUpload failure, banned path.")
    dir_part, filename = os.path.split(buf)
    if dir_part:
        try:
            create_folder(self.storagepath, dir_part)
        except CuckooOperationalError:
            # BUGFIX: the original logged an undefined name `folder`,
            # which raised NameError inside the handler; log dir_part.
            log.error("Unable to create folder %s" % dir_part)
            return False
    file_path = os.path.join(self.storagepath, buf.strip())
    # Context manager guarantees the descriptor is closed on errors.
    with open(file_path, "wb") as fd:
        chunk = self.handler.read_any()
        while chunk:
            fd.write(chunk)
            # Cap the stored size; bytes literal for the binary stream.
            if fd.tell() >= self.upload_max_size:
                fd.write(b"... (truncated)")
                break
            chunk = self.handler.read_any()
        log.debug("Uploaded file length: {0}".format(fd.tell()))
def __init__(self, dsn=None, schema_check=True):
    """Create or connect to the results database.

    @param dsn: database connection string.
    @param schema_check: disable or enable the db schema version check.
    @raise CuckooDatabaseError: on connection, schema or version problems.
    """
    cfg = Config()
    # Explicit dsn wins over the config file, which wins over the
    # default on-disk SQLite database.
    if dsn:
        self._connect_database(dsn)
    elif cfg.database.connection:
        self._connect_database(cfg.database.connection)
    else:
        db_file = os.path.join(CUCKOO_ROOT, "db", "cuckoo.db")
        if not os.path.exists(db_file):
            db_dir = os.path.dirname(db_file)
            if not os.path.exists(db_dir):
                try:
                    create_folder(folder=db_dir)
                except CuckooOperationalError as e:
                    raise CuckooDatabaseError("Unable to create database directory: {0}".format(e))
        self._connect_database("sqlite:///%s" % db_file)
    # Disable SQL logging. Turn it on for debugging.
    self.engine.echo = False
    # Connection timeout (seconds); fall back to 60 when unset.
    if cfg.database.timeout:
        self.engine.pool_timeout = cfg.database.timeout
    else:
        self.engine.pool_timeout = 60
    # Create schema.
    try:
        Base.metadata.create_all(self.engine)
    except SQLAlchemyError as e:
        raise CuckooDatabaseError("Unable to create or connect to database: {0}".format(e))
    # Get db session factory.
    self.Session = sessionmaker(bind=self.engine)
    # Deal with schema versioning.
    # TODO: it's a little bit dirty, needs refactoring.
    tmp_session = self.Session()
    if not tmp_session.query(AlembicVersion).count():
        # Set database schema version.
        tmp_session.add(AlembicVersion(version_num=SCHEMA_VERSION))
        try:
            tmp_session.commit()
        except SQLAlchemyError as e:
            # BUGFIX: roll back *before* raising. The original placed
            # rollback() after the raise statement, so it never executed.
            tmp_session.rollback()
            raise CuckooDatabaseError("Unable to set schema version: {0}".format(e))
        finally:
            tmp_session.close()
    else:
        # Check if db version is the expected one.
        last = tmp_session.query(AlembicVersion).first()
        tmp_session.close()
        if last.version_num != SCHEMA_VERSION and schema_check:
            raise CuckooDatabaseError("DB schema version mismatch: found "
                                      "{0}, expected {1}. Try to apply all "
                                      "migrations.".format(last.version_num, SCHEMA_VERSION))
def read_next_message(self):
    """Handle one file-upload request from the guest.

    Protocol: a newline-terminated destination path, then (for binary
    uploads) a newline-terminated guest-side origin path, then the raw
    file content until the stream is drained.

    @return: False on folder-creation failure or disallowed overwrite.
    @raise CuckooOperationalError: banned path or failed sanitization.
    """
    # Read until newline for file path, e.g.,
    # shots/0001.jpg or files/9498687557/libcurl-4.dll.bin
    buf = self.handler.read_newline().strip().replace("\\", "/")
    guest_path = ""
    if self.is_binary:
        # Origin path on the guest; capped at 32 KiB to bound memory use.
        guest_path = self.handler.read_newline().strip()[:32768]
    log.debug("File upload request for {0}".format(buf))
    dir_part, filename = os.path.split(buf)
    # Reject traversal attempts, bare filenames and absolute paths.
    if "./" in buf or not dir_part or buf.startswith("/"):
        raise CuckooOperationalError("FileUpload failure, banned path.")
    for restricted in self.RESTRICTED_DIRECTORIES:
        if restricted in dir_part:
            raise CuckooOperationalError("FileUpload failure, banned path.")
    try:
        create_folder(self.storagepath, dir_part)
    except CuckooOperationalError:
        log.error("Unable to create folder %s" % dir_part)
        return False
    file_path = os.path.join(self.storagepath, buf.strip())
    # Belt-and-braces: the joined path must remain inside storagepath.
    if not file_path.startswith(self.storagepath):
        raise CuckooOperationalError("FileUpload failure, path sanitization failed.")
    if guest_path != "":
        # Record each distinct guest-side origin path in a sidecar file.
        guest_paths = []
        if os.path.exists(file_path + "_info.txt"):
            guest_paths = [line.strip() for line in open(file_path + "_info.txt")]
        if guest_path not in guest_paths:
            infofd = open(file_path + "_info.txt", "a")
            infofd.write(guest_path + "\n")
            infofd.close()
    if not self.duplicate:
        # Refuse to clobber an already-uploaded file.
        if os.path.exists(file_path):
            log.warning("Analyzer tried to overwrite an existing file, closing connection.")
            return False
    # NOTE(review): self.fd is deliberately left open here — presumably
    # closed when the connection is torn down; confirm against the caller.
    self.fd = open(file_path, "wb")
    chunk = self.handler.read_any()
    while chunk:
        self.fd.write(chunk)
        # Cap the stored size at upload_max_size.
        if self.fd.tell() >= self.upload_max_size:
            log.warning("Uploaded file length larger than upload_max_size, stopping upload.")
            # NOTE(review): writes a str marker to a binary-mode file —
            # fine on Python 2, would raise TypeError on Python 3.
            self.fd.write("... (truncated)")
            break
        try:
            chunk = self.handler.read_any()
        except:
            # Best effort: a read error ends the upload.
            break
    log.debug("Uploaded file length: {0}".format(self.fd.tell()))
def create_folders(self):
    """Create the per-analysis result subdirectories under storagepath."""
    subdirs = [
        "CAPE", "aux", "curtain", "files", "logs", "memory",
        "shots", "sysmon", "stap", "procdump", "debugger",
    ]
    for name in subdirs:
        try:
            create_folder(self.storagepath, folder=name)
        except Exception as err:
            # Folder creation is best effort; record and carry on.
            log.error(err, exc_info=True)
def create_folders(self):
    """Create the per-analysis result subdirectories under storagepath.

    Fixes: removed the duplicated 'aux' entry and replaced the bare
    print() with logging, matching the sibling create_folders
    implementations in this codebase.
    """
    folders = ('CAPE', 'aux', 'curtain', 'files', 'logs',
               'memory', 'shots', 'sysmon')
    for folder in folders:
        try:
            create_folder(self.storagepath, folder=folder)
        except Exception as e:
            # Best effort: record the failure and continue.
            log.error(e, exc_info=True)
def store_file(self, sha256):
    """Store a copy of the file being analyzed.

    @param sha256: hex digest used as the stored file name.
    @return: True on success, False if the analysis must be aborted.
    """
    if not os.path.exists(self.task.target):
        log.error(
            "Task #%s: The file to analyze does not exist at path '%s', analysis aborted",
            self.task.id,
            convert_to_printable(self.task.target),
        )
        return False
    # Per-task copy plus a shared, deduplicated copy keyed by hash.
    self.binary = os.path.join(CUCKOO_ROOT, "storage", "binaries", str(self.task.id), sha256)
    copy_path = os.path.join(CUCKOO_ROOT, "storage", "binaries", sha256)
    if os.path.exists(self.binary):
        log.info("Task #%s: File already exists at '%s'", self.task.id, self.binary)
    else:
        # TODO: do we really need to abort the analysis in case we are not able to store a copy of the file?
        try:
            create_folder(folder=os.path.join(
                CUCKOO_ROOT, "storage", "binaries", str(self.task.id)))
            shutil.copy(self.task.target, self.binary)
        except (IOError, shutil.Error) as e:
            # FIX: include the underlying error — it was captured as `e`
            # but never logged, hiding the actual cause.
            log.error(
                "Task #%s: Unable to store file from '%s' to '%s', analysis aborted: %s",
                self.task.id, self.task.target, self.binary, e,
            )
            return False
    if os.path.exists(copy_path):
        log.info("Task #%s: File already exists at '%s'", self.task.id, copy_path)
    else:
        # TODO: do we really need to abort the analysis in case we are not able to store a copy of the file?
        try:
            shutil.copy(self.task.target, copy_path)
        except (IOError, shutil.Error) as e:
            # FIX: same as above — surface the captured exception.
            log.error(
                "Task #%s: Unable to store file from '%s' to '%s', analysis aborted: %s",
                self.task.id, self.task.target, copy_path, e)
            return False
    # Expose the stored binary inside the analysis folder; fall back to
    # a plain copy on platforms without symlink support.
    try:
        new_binary_path = os.path.join(self.storage, "binary")
        if hasattr(os, "symlink"):
            os.symlink(self.binary, new_binary_path)
        else:
            shutil.copy(self.binary, new_binary_path)
    except (AttributeError, OSError) as e:
        log.error(
            "Task #%s: Unable to create symlink/copy from '%s' to '%s': %s",
            self.task.id, self.binary, self.storage, e)
    return True
def create_folders(self):
    """Create the shots/files/logs result folders under storagepath.

    @return: False if any folder could not be created.
    """
    for name in ("shots", "files", "logs"):
        try:
            create_folder(self.storagepath, folder=name)
        except CuckooOperationalError:
            # Stop at the first failure; the caller aborts the analysis.
            log.error("Unable to create folder %s" % name)
            return False
def create_folders(self):
    """Create every result subdirectory this analysis expects."""
    for subdir in ('CAPE', 'aux', 'curtain', 'files', 'logs', 'memory',
                   'shots', 'sysmon', 'stap', 'procdump', 'debugger'):
        try:
            create_folder(self.storagepath, folder=subdir)
        except Exception as exc:
            # Best effort: log with traceback and keep going.
            log.error(exc, exc_info=True)
def __init__(self, dsn=None):
    """Create or connect to the results database.

    @param dsn: database connection string.
    @raise CuckooDatabaseError: on connection or schema problems.
    """
    cfg = Config()
    # Explicit dsn wins over the config file, which wins over the
    # default on-disk SQLite database.
    if dsn:
        self.engine = create_engine(dsn, poolclass=NullPool)
    elif cfg.database.connection:
        self.engine = create_engine(cfg.database.connection,
                                    poolclass=NullPool)
    else:
        db_file = os.path.join(CUCKOO_ROOT, "db", "cuckoo.db")
        if not os.path.exists(db_file):
            db_dir = os.path.dirname(db_file)
            if not os.path.exists(db_dir):
                try:
                    create_folder(folder=db_dir)
                except CuckooOperationalError as e:
                    raise CuckooDatabaseError(
                        "Unable to create database directory: {0}".format(
                            e))
        self.engine = create_engine("sqlite:///{0}".format(db_file),
                                    poolclass=NullPool)
    # Disable SQL logging. Turn it on for debugging.
    self.engine.echo = False
    # Connection timeout (seconds); fall back to 60 when unset.
    if cfg.database.timeout:
        self.engine.pool_timeout = cfg.database.timeout
    else:
        self.engine.pool_timeout = 60
    # Create schema.
    try:
        Base.metadata.create_all(self.engine)
    except SQLAlchemyError as e:
        raise CuckooDatabaseError(
            "Unable to create or connect to database: {0}".format(e))
    # Get db session factory.
    self.Session = sessionmaker(bind=self.engine)
    # Set database schema version.
    # TODO: it's a little bit dirty, needs refactoring.
    tmp_session = self.Session()
    if not tmp_session.query(AlembicVersion).count():
        tmp_session.add(AlembicVersion(version_num=SCHEMA_VERSION))
        try:
            tmp_session.commit()
        except SQLAlchemyError as e:
            # BUGFIX: roll back *before* raising; the original called
            # rollback() after the raise, so it could never execute.
            tmp_session.rollback()
            raise CuckooDatabaseError(
                "Unable to set schema version: {0}".format(e))
        finally:
            tmp_session.close()
    else:
        tmp_session.close()
def createDirBaseline():
    """Create the 'baseline' folder under BASELINE_ROOT.

    @return: True if the folder was created, False otherwise.
    """
    result = False
    try:
        create_folder(root=BASELINE_ROOT, folder='baseline')
        result = True
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; still best-effort by design.
        pass
    return result
def __iter__(self):
    """Generator-style handler for one file upload.

    Reads the destination path, validates it, then streams the content
    into self.fd.  The trailing `return` before `yield` makes this
    function a generator that performs all work up front and yields
    nothing.
    """
    # Read until newline for file path, e.g.,
    # shots/0001.jpg or files/9498687557/libcurl-4.dll.bin
    buf = self.handler.read_newline().strip().replace("\\", "/")
    log.debug("File upload request for %s", buf)
    dir_part, filename = os.path.split(buf)
    # Reject traversal attempts, bare filenames and absolute paths.
    if "./" in buf or not dir_part or buf.startswith("/"):
        raise CuckooOperationalError("FileUpload failure, banned path.")
    for restricted in self.RESTRICTED_DIRECTORIES:
        if restricted in dir_part:
            raise CuckooOperationalError(
                "FileUpload failure, banned path.")
    try:
        create_folder(self.storagepath, dir_part)
    except CuckooOperationalError:
        log.error("Unable to create folder %s", dir_part)
        return
    file_path = os.path.join(self.storagepath, buf.strip())
    # The joined path must stay inside the storage directory.
    if not file_path.startswith(self.storagepath):
        raise CuckooOperationalError(
            "FileUpload failure, path sanitization failed.")
    # Never overwrite previously uploaded results.
    if os.path.exists(file_path):
        log.warning(
            "Analyzer tried to overwrite an existing file, closing connection."
        )
        return
    # NOTE(review): self.fd stays open on purpose — presumably closed by
    # the connection teardown; confirm against the caller.
    self.fd = open(file_path, "wb")
    chunk = self.handler.read_any()
    while chunk:
        self.fd.write(chunk)
        # Cap the stored size at upload_max_size.
        if self.fd.tell() >= self.upload_max_size:
            log.warning(
                "Uploaded file length larger than upload_max_size, stopping upload."
            )
            self.fd.write("... (truncated)")
            break
        try:
            chunk = self.handler.read_any()
        except:
            # Best effort: a read error ends the upload.
            break
    log.debug("Uploaded file length: %s", self.fd.tell())
    return
    yield
def set_path(self, analysis_path):
    """Set analysis folder path.

    @param analysis_path: analysis folder path.
    @raise CuckooReportError: if the reports folder cannot be created.
    """
    self.analysis_path = analysis_path
    self.conf_path = os.path.join(self.analysis_path, "analysis.conf")
    self.reports_path = os.path.join(self.analysis_path, "reports")
    try:
        create_folder(folder=self.reports_path)
    except CuckooOperationalError as e:
        # BUGFIX: the exception was instantiated but never raised,
        # silently ignoring folder-creation failures.
        raise CuckooReportError(e)
def init_storage(self):
    """Initialize analyses storage folder.

    @raise CuckooAnalysisError: if the storage folder already exists or
        cannot be created.
    """
    base = os.path.join(CUCKOO_ROOT, "storage", "analyses")
    results = os.path.join(base, str(self.task.id))
    self.analysis.results_folder = results
    # Never clobber the results of a previous run.
    if os.path.exists(results):
        raise CuckooAnalysisError("Analysis results folder already exists at path \"%s\", analysis aborted" % results)
    try:
        create_folder(folder=results)
    except CuckooOperationalError:
        raise CuckooAnalysisError("Unable to create analysis folder %s" % results)
def __init__(self, dsn=None):
    """Create or connect to the results database.

    @param dsn: database connection string.
    @raise CuckooDatabaseError: on connection or schema problems.
    """
    cfg = Config()
    # Explicit dsn wins over the config file, which wins over the
    # default on-disk SQLite database.
    if dsn:
        self.engine = create_engine(dsn, poolclass=NullPool)
    elif cfg.database.connection:
        self.engine = create_engine(cfg.database.connection, poolclass=NullPool)
    else:
        db_file = os.path.join(CUCKOO_ROOT, "db", "cuckoo.db")
        if not os.path.exists(db_file):
            db_dir = os.path.dirname(db_file)
            if not os.path.exists(db_dir):
                try:
                    create_folder(folder=db_dir)
                except CuckooOperationalError as e:
                    raise CuckooDatabaseError("Unable to create database directory: {0}".format(e))
        self.engine = create_engine("sqlite:///{0}".format(db_file), poolclass=NullPool)
    # Disable SQL logging. Turn it on for debugging.
    self.engine.echo = False
    # Connection timeout (seconds); fall back to 60 when unset.
    if cfg.database.timeout:
        self.engine.pool_timeout = cfg.database.timeout
    else:
        self.engine.pool_timeout = 60
    # Create schema.
    try:
        Base.metadata.create_all(self.engine)
    except SQLAlchemyError as e:
        raise CuckooDatabaseError("Unable to create or connect to database: {0}".format(e))
    # Get db session factory.
    self.Session = sessionmaker(bind=self.engine)
    # Set database schema version.
    # TODO: it's a little bit dirty, needs refactoring.
    tmp_session = self.Session()
    if not tmp_session.query(AlembicVersion).count():
        tmp_session.add(AlembicVersion(version_num=SCHEMA_VERSION))
        try:
            tmp_session.commit()
        except SQLAlchemyError as e:
            # BUGFIX: rollback moved before the raise; it previously
            # followed the raise statement and could never execute.
            tmp_session.rollback()
            raise CuckooDatabaseError("Unable to set schema version: {0}".format(e))
        finally:
            tmp_session.close()
    else:
        tmp_session.close()
def __iter__(self):
    """Generator-style handler for one file upload.

    Reads the destination path, validates it, then streams the content
    into self.fd.  The trailing `return` before `yield` makes this a
    generator that performs all work up front and yields nothing.
    """
    # Read until newline for file path, e.g.,
    # shots/0001.jpg or files/9498687557/libcurl-4.dll.bin
    buf = self.handler.read_newline().strip().replace("\\", "/")
    log.debug("File upload request for %s", buf)
    dir_part, filename = os.path.split(buf)
    # Reject traversal attempts, bare filenames and absolute paths.
    if "./" in buf or not dir_part or buf.startswith("/"):
        raise CuckooOperationalError("FileUpload failure, banned path.")
    for restricted in self.RESTRICTED_DIRECTORIES:
        if restricted in dir_part:
            raise CuckooOperationalError("FileUpload failure, banned path.")
    try:
        create_folder(self.storagepath, dir_part)
    except CuckooOperationalError:
        log.error("Unable to create folder %s", dir_part)
        return
    file_path = os.path.join(self.storagepath, buf.strip())
    # The joined path must stay inside the storage directory.
    if not file_path.startswith(self.storagepath):
        raise CuckooOperationalError("FileUpload failure, path sanitization failed.")
    # Never overwrite previously uploaded results.
    if os.path.exists(file_path):
        log.warning("Analyzer tried to overwrite an existing file, closing connection.")
        return
    # NOTE(review): self.fd stays open on purpose — presumably closed by
    # the connection teardown; confirm against the caller.
    self.fd = open(file_path, "wb")
    chunk = self.handler.read_any()
    while chunk:
        self.fd.write(chunk)
        # Cap the stored size at upload_max_size.
        if self.fd.tell() >= self.upload_max_size:
            log.warning("Uploaded file length larger than upload_max_size, stopping upload.")
            self.fd.write("... (truncated)")
            break
        try:
            chunk = self.handler.read_any()
        except:
            # Best effort: a read error ends the upload.
            break
    log.debug("Uploaded file length: %s", self.fd.tell())
    return
    yield
def set_path(self, analysis_path):
    """Set analysis folder path and derived result paths.

    @param analysis_path: analysis folder path.
    @raise CuckooReportError: if the reports folder cannot be created.
    """
    self.analysis_path = analysis_path
    self.conf_path = self._get_analysis_path("analysis.conf")
    self.file_path = os.path.realpath(self._get_analysis_path("binary"))
    self.reports_path = self._get_analysis_path("reports")
    self.shots_path = self._get_analysis_path("shots")
    self.pcap_path = self._get_analysis_path("dump.pcap")
    try:
        create_folder(folder=self.reports_path)
    except CuckooOperationalError as e:
        # BUGFIX: add the missing `raise` — the exception object was
        # constructed and then discarded, hiding the failure.
        raise CuckooReportError(e)
def generate(self):
    """Create database.

    @return: operation status — False if the db file already exists,
        True after the tasks table has been created.
    @raise CuckooDatabaseError: if the directory or table creation fails.
    """
    if os.path.exists(self.db_file):
        return False
    db_dir = os.path.dirname(self.db_file)
    if not os.path.exists(db_dir):
        try:
            create_folder(folder=db_dir)
        except CuckooOperationalError as e:
            raise CuckooDatabaseError(
                "Unable to create database directory: %s" % e)
    conn = sqlite3.connect(self.db_file)
    cursor = conn.cursor()
    try:
        cursor.execute("CREATE TABLE tasks (\n"
                       " id INTEGER PRIMARY KEY,\n"
                       " md5 TEXT DEFAULT NULL,\n"
                       " file_path TEXT NOT NULL,\n"
                       " timeout INTEGER DEFAULT NULL,\n"
                       " priority INTEGER DEFAULT 0,\n"
                       " custom TEXT DEFAULT NULL,\n"
                       " machine TEXT DEFAULT NULL,\n"
                       " package TEXT DEFAULT NULL,\n"
                       " options TEXT DEFAULT NULL,\n"
                       " platform TEXT DEFAULT NULL,\n"
                       " added_on DATE DEFAULT CURRENT_TIMESTAMP,\n"
                       " completed_on DATE DEFAULT NULL,\n"
                       " lock INTEGER DEFAULT 0,\n"
                       # Status possible values:
                       #   0 = not completed
                       #   1 = error occurred
                       #   2 = completed successfully.
                       " status INTEGER DEFAULT 0\n"
                       ");")
    except sqlite3.OperationalError as e:
        raise CuckooDatabaseError("Unable to create database: %s" % e)
    # NOTE(review): conn is neither committed nor closed here; the
    # sqlite3 module effectively auto-commits DDL, but an explicit
    # conn.commit()/conn.close() would be safer — confirm.
    return True
def init_storage(self):
    """Prepare this task's results folder under storage/analyses.

    @raise CuckooAnalysisError: if the folder already exists or cannot
        be created.
    """
    results = os.path.join(
        os.path.join(CUCKOO_ROOT, "storage", "analyses"),
        str(self.task.id))
    self.analysis.results_folder = results
    # Abort rather than overwrite results from a previous run.
    if os.path.exists(results):
        raise CuckooAnalysisError(
            "Analysis results folder already exists at path \"%s\", analysis aborted" % results)
    try:
        create_folder(folder=results)
    except CuckooOperationalError:
        raise CuckooAnalysisError(
            "Unable to create analysis folder %s" % results)
def read_next_message(self):
    """Handle a single file-upload request.

    Reads the newline-terminated destination path, validates it, then
    streams the file content from the guest into self.fd.

    @return: False if the destination folder cannot be created.
    @raise CuckooOperationalError: banned path or failed sanitization.
    """
    # Read until newline for file path, e.g.,
    # shots/0001.jpg or files/9498687557/libcurl-4.dll.bin
    buf = self.handler.read_newline().strip().replace("\\", "/")
    log.debug("File upload request for {0}".format(buf))
    dir_part, filename = os.path.split(buf)
    # Reject traversal attempts, bare filenames and absolute paths.
    if "./" in buf or not dir_part or buf.startswith("/"):
        raise CuckooOperationalError("FileUpload failure, banned path.")
    for restricted in self.RESTRICTED_DIRECTORIES:
        if restricted in dir_part:
            raise CuckooOperationalError(
                "FileUpload failure, banned path.")
    try:
        create_folder(self.storagepath, dir_part)
    except CuckooOperationalError:
        log.error("Unable to create folder %s" % dir_part)
        return False
    file_path = os.path.join(self.storagepath, buf.strip())
    # The joined path must remain inside the storage directory.
    if not file_path.startswith(self.storagepath):
        raise CuckooOperationalError(
            "FileUpload failure, path sanitization failed.")
    # NOTE(review): self.fd is left open here — presumably closed on
    # connection teardown; confirm against the caller.
    self.fd = open(file_path, "wb")
    chunk = self.handler.read_any()
    while chunk:
        self.fd.write(chunk)
        # Cap the stored size; mark truncation in the output file.
        if self.fd.tell() >= self.upload_max_size:
            self.fd.write("... (truncated)")
            break
        try:
            chunk = self.handler.read_any()
        except:
            # Best effort: a read error ends the upload.
            break
    log.debug("Uploaded file length: {0}".format(self.fd.tell()))
def init_storage(self):
    """Create the analysis storage folder for this task.

    @return: True on success, False if the folder already exists or
        cannot be created (the analysis must then be aborted).
    """
    storage = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(self.task.id))
    self.storage = storage
    # Abort rather than clobber results from a previous run.
    if os.path.exists(storage):
        log.error("Task #%s: Analysis results folder already exists at path '%s', analysis aborted",
                  self.task.id, storage)
        return False
    # Abort as well when the folder cannot be created.
    try:
        create_folder(folder=storage)
    except CuckooOperationalError:
        log.error("Task #%s: Unable to create analysis folder %s",
                  self.task.id, storage)
        return False
    return True
def init_storage(self):
    """Create the analysis storage folder for this task.

    @return: True on success, False if the folder already exists or
        cannot be created.
    """
    storage = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(self.task.id))
    self.storage = storage
    # Existing results must never be overwritten.
    if os.path.exists(storage):
        log.error('Analysis results folder already exists at path "%s",'
                  " analysis aborted", storage)
        return False
    # A folder we cannot create also aborts the analysis.
    try:
        create_folder(folder=storage)
    except CuckooOperationalError:
        log.error("Unable to create analysis folder %s", storage)
        return False
    return True
def read_next_message(self):
    """Handle a single file-upload request.

    Reads the newline-terminated destination path, validates it, then
    streams the file content from the guest into self.fd.

    @return: False if the destination folder cannot be created.
    @raise CuckooOperationalError: banned path or failed sanitization.
    """
    # Read until newline for file path, e.g.,
    # shots/0001.jpg or files/9498687557/libcurl-4.dll.bin
    buf = self.handler.read_newline().strip().replace("\\", "/")
    log.debug("File upload request for {0}".format(buf))
    dir_part, filename = os.path.split(buf)
    # Reject traversal attempts, bare filenames and absolute paths.
    if "./" in buf or not dir_part or buf.startswith("/"):
        raise CuckooOperationalError("FileUpload failure, banned path.")
    for restricted in self.RESTRICTED_DIRECTORIES:
        if restricted in dir_part:
            raise CuckooOperationalError("FileUpload failure, banned path.")
    try:
        create_folder(self.storagepath, dir_part)
    except CuckooOperationalError:
        log.error("Unable to create folder %s" % dir_part)
        return False
    file_path = os.path.join(self.storagepath, buf.strip())
    # The joined path must remain inside the storage directory.
    if not file_path.startswith(self.storagepath):
        raise CuckooOperationalError("FileUpload failure, path sanitization failed.")
    # NOTE(review): self.fd is left open here — presumably closed on
    # connection teardown; confirm against the caller.
    self.fd = open(file_path, "wb")
    chunk = self.handler.read_any()
    while chunk:
        self.fd.write(chunk)
        # Cap the stored size; mark truncation in the output file.
        if self.fd.tell() >= self.upload_max_size:
            self.fd.write("... (truncated)")
            break
        try:
            chunk = self.handler.read_any()
        except:
            # Best effort: a read error ends the upload.
            break
    log.debug("Uploaded file length: {0}".format(self.fd.tell()))
def generate(self):
    """Create database.

    @return: operation status — False if the db file already exists,
        True after the tasks table has been created.
    @raise CuckooDatabaseError: if the directory or table creation fails.
    """
    if os.path.exists(self.db_file):
        return False
    db_dir = os.path.dirname(self.db_file)
    if not os.path.exists(db_dir):
        try:
            create_folder(folder=db_dir)
        except CuckooOperationalError as e:
            raise CuckooDatabaseError("Unable to create database directory: %s" % e)
    conn = sqlite3.connect(self.db_file)
    cursor = conn.cursor()
    try:
        cursor.execute("CREATE TABLE tasks (\n"
                       " id INTEGER PRIMARY KEY,\n"
                       " md5 TEXT DEFAULT NULL,\n"
                       " file_path TEXT NOT NULL,\n"
                       " timeout INTEGER DEFAULT NULL,\n"
                       " priority INTEGER DEFAULT 0,\n"
                       " custom TEXT DEFAULT NULL,\n"
                       " machine TEXT DEFAULT NULL,\n"
                       " package TEXT DEFAULT NULL,\n"
                       " options TEXT DEFAULT NULL,\n"
                       " platform TEXT DEFAULT NULL,\n"
                       " added_on DATE DEFAULT CURRENT_TIMESTAMP,\n"
                       " completed_on DATE DEFAULT NULL,\n"
                       " lock INTEGER DEFAULT 0,\n"
                       # Status possible values:
                       #   0 = not completed
                       #   1 = error occurred
                       #   2 = completed successfully.
                       " status INTEGER DEFAULT 0\n"
                       ");")
    except sqlite3.OperationalError as e:
        raise CuckooDatabaseError("Unable to create database: %s" % e)
    # NOTE(review): conn is neither committed nor closed here; the
    # sqlite3 module effectively auto-commits DDL, but an explicit
    # conn.commit()/conn.close() would be safer — confirm.
    return True
def __init__(self, dsn=None, schema_check=True):
    """Create or connect to the results database.

    @param dsn: database connection string.
    @param schema_check: disable or enable the db schema version check.
    @raise CuckooDatabaseError: on connection, schema or version problems.
    """
    self._lock = SuperLock()
    self.cfg = Config()
    # Explicit dsn wins over the config file, which wins over the
    # default on-disk SQLite database.
    if dsn:
        self._connect_database(dsn)
    elif self.cfg.database.connection:
        self._connect_database(self.cfg.database.connection)
    else:
        db_file = os.path.join(CUCKOO_ROOT, "db", "cuckoo.db")
        if not os.path.exists(db_file):
            db_dir = os.path.dirname(db_file)
            if not os.path.exists(db_dir):
                try:
                    create_folder(folder=db_dir)
                except CuckooOperationalError as e:
                    raise CuckooDatabaseError(
                        "Unable to create database directory: {0}".format(
                            e))
        self._connect_database("sqlite:///%s" % db_file)
    # Disable SQL logging. Turn it on for debugging.
    self.engine.echo = False
    # Connection timeout (seconds); fall back to 60 when unset.
    if self.cfg.database.timeout:
        self.engine.pool_timeout = self.cfg.database.timeout
    else:
        self.engine.pool_timeout = 60
    # Create schema.
    try:
        Base.metadata.create_all(self.engine)
    except SQLAlchemyError as e:
        raise CuckooDatabaseError(
            "Unable to create or connect to database: {0}".format(e))
    # Get db session factory.
    self.Session = sessionmaker(bind=self.engine)

    # After every flush, delete tags that no longer reference any task
    # or machine (orphan cleanup).
    @event.listens_for(self.Session, 'after_flush')
    def delete_tag_orphans(session, ctx):
        session.query(Tag).filter(~Tag.tasks.any()).filter(
            ~Tag.machines.any()).delete(synchronize_session=False)

    # Deal with schema versioning.
    # TODO: it's a little bit dirty, needs refactoring.
    tmp_session = self.Session()
    if not tmp_session.query(AlembicVersion).count():
        # Set database schema version.
        tmp_session.add(AlembicVersion(version_num=SCHEMA_VERSION))
        try:
            tmp_session.commit()
        except SQLAlchemyError as e:
            # Roll back the failed insert before surfacing the error.
            tmp_session.rollback()
            raise CuckooDatabaseError(
                "Unable to set schema version: {0}".format(e))
        finally:
            tmp_session.close()
    else:
        # Check if db version is the expected one.
        last = tmp_session.query(AlembicVersion).first()
        tmp_session.close()
        if last.version_num != SCHEMA_VERSION and schema_check:
            raise CuckooDatabaseError(
                "DB schema version mismatch: found {0}, expected {1}. "
                "Try to apply all migrations (cd utils/db_migration/ && "
                "alembic upgrade head).".format(last.version_num,
                                               SCHEMA_VERSION))
def create_folders(self):
    """Create every uploadable result folder plus "logs"."""
    wanted = list(RESULT_UPLOADABLE) + [b"logs"]
    for raw in wanted:
        try:
            # Folder names are bytes; decode before creating.
            create_folder(self.storagepath, folder=raw.decode())
        except Exception as exc:
            # Best effort: log with traceback and continue.
            log.error(exc, exc_info=True)
def test_single_folder(self):
    """Tests a single folder creation."""
    target = os.path.join(self.tmp_dir, "foo")
    utils.create_folder(self.tmp_dir, "foo")
    assert os.path.exists(target)
    # Creating an already-existing folder must not raise.
    utils.create_folder(self.tmp_dir, "foo")
    os.rmdir(target)
def __init__(self, dsn=None, schema_check=True, echo=False):
    """Create or connect to the results database.

    @param dsn: database connection string.
    @param schema_check: disable or enable the db schema version check.
    @param echo: echo sql queries.
    @raise CuckooDatabaseError: on connection, schema or version problems.
    """
    self._lock = SuperLock()
    cfg = Config()
    # Explicit dsn wins over the config file, which wins over the
    # default on-disk SQLite database.
    if dsn:
        self._connect_database(dsn)
    elif hasattr(cfg, "database") and cfg.database.connection:
        self._connect_database(cfg.database.connection)
    else:
        db_file = os.path.join(CUCKOO_ROOT, "db", "cuckoo.db")
        if not os.path.exists(db_file):
            db_dir = os.path.dirname(db_file)
            if not os.path.exists(db_dir):
                try:
                    create_folder(folder=db_dir)
                except CuckooOperationalError as e:
                    raise CuckooDatabaseError("Unable to create database directory: {0}".format(e))
        self._connect_database("sqlite:///%s" % db_file)
    # Disable SQL logging. Turn it on for debugging.
    self.engine.echo = echo
    # Connection timeout (seconds); fall back to 60 when unset.
    if hasattr(cfg, "database") and cfg.database.timeout:
        self.engine.pool_timeout = cfg.database.timeout
    else:
        self.engine.pool_timeout = 60
    # Let's emit a warning just in case.
    if not hasattr(cfg, "database"):
        log.warning("It appears you don't have a valid `database` "
                    "section in conf/cuckoo.conf, using sqlite3 instead.")
    # Create schema.
    try:
        Base.metadata.create_all(self.engine)
    except SQLAlchemyError as e:
        raise CuckooDatabaseError("Unable to create or connect to database: {0}".format(e))
    # Get db session factory.
    self.Session = sessionmaker(bind=self.engine)
    # Deal with schema versioning.
    # TODO: it's a little bit dirty, needs refactoring.
    tmp_session = self.Session()
    if not tmp_session.query(AlembicVersion).count():
        # Set database schema version.
        tmp_session.add(AlembicVersion(version_num=SCHEMA_VERSION))
        try:
            tmp_session.commit()
        except SQLAlchemyError as e:
            # BUGFIX: roll back *before* raising; the original invoked
            # rollback() after the raise statement, dead code that
            # never ran.
            tmp_session.rollback()
            raise CuckooDatabaseError("Unable to set schema version: {0}".format(e))
        finally:
            tmp_session.close()
    else:
        # Check if db version is the expected one.
        last = tmp_session.query(AlembicVersion).first()
        tmp_session.close()
        if last.version_num != SCHEMA_VERSION and schema_check:
            raise CuckooDatabaseError(
                "DB schema version mismatch: found %s, expected %s. "
                "Try to apply all migrations (cd utils/db_migration/ && "
                "alembic upgrade head)." % (last.version_num, SCHEMA_VERSION)
            )
def test_create_folder_err(self, rnd_tmp_folder, mocker):
    """An OSError from os.makedirs must surface as CuckooOperationalError."""
    # Simulate a filesystem failure inside create_folder.
    mocker.patch("os.makedirs", side_effect=OSError)
    with pytest.raises(CuckooOperationalError):
        utils.create_folder(root="/tmp", folder=rnd_tmp_folder)
def __init__(self, dsn=None, schema_check=True):
    """Create or connect to the results database.

    @param dsn: database connection string.
    @param schema_check: disable or enable the db schema version check.
    @raise CuckooDatabaseError: on connection, schema or version problems.
    """
    self._lock = SuperLock()
    self.cfg = Config()
    # Explicit dsn wins over the config file, which wins over the
    # default on-disk SQLite database.
    if dsn:
        self._connect_database(dsn)
    elif self.cfg.database.connection:
        self._connect_database(self.cfg.database.connection)
    else:
        db_file = os.path.join(CUCKOO_ROOT, "db", "cuckoo.db")
        if not os.path.exists(db_file):
            db_dir = os.path.dirname(db_file)
            if not os.path.exists(db_dir):
                try:
                    create_folder(folder=db_dir)
                except CuckooOperationalError as e:
                    raise CuckooDatabaseError("Unable to create database directory: {0}".format(e))
        self._connect_database("sqlite:///%s" % db_file)
    # Disable SQL logging. Turn it on for debugging.
    self.engine.echo = False
    # Connection timeout (seconds); fall back to 60 when unset.
    if self.cfg.database.timeout:
        self.engine.pool_timeout = self.cfg.database.timeout
    else:
        self.engine.pool_timeout = 60
    # Create schema.
    try:
        Base.metadata.create_all(self.engine)
    except SQLAlchemyError as e:
        raise CuckooDatabaseError("Unable to create or connect to database: {0}".format(e))
    # Get db session factory.
    self.Session = sessionmaker(bind=self.engine)

    # After every flush, delete tags that no longer reference any task
    # or machine (orphan cleanup).
    @event.listens_for(self.Session, 'after_flush')
    def delete_tag_orphans(session, ctx):
        session.query(Tag).filter(~Tag.tasks.any()).filter(~Tag.machines.any()).delete(synchronize_session=False)

    # Deal with schema versioning.
    # TODO: it's a little bit dirty, needs refactoring.
    tmp_session = self.Session()
    if not tmp_session.query(AlembicVersion).count():
        # Set database schema version.
        tmp_session.add(AlembicVersion(version_num=SCHEMA_VERSION))
        try:
            tmp_session.commit()
        except SQLAlchemyError as e:
            # Roll back the failed insert before surfacing the error.
            tmp_session.rollback()
            raise CuckooDatabaseError("Unable to set schema version: {0}".format(e))
        finally:
            tmp_session.close()
    else:
        # Check if db version is the expected one.
        last = tmp_session.query(AlembicVersion).first()
        tmp_session.close()
        if last.version_num != SCHEMA_VERSION and schema_check:
            raise CuckooDatabaseError(
                "DB schema version mismatch: found {0}, expected {1}. "
                "Try to apply all migrations (cd utils/db_migration/ && "
                "alembic upgrade head).".format(last.version_num, SCHEMA_VERSION))
def read_next_message(self):
    """Handle a single file-upload request with filename sanitization.

    Protocol: a newline-terminated destination path; for binary uploads,
    a newline-terminated guest-side origin path; then the raw content.

    @return: False on folder-creation failure or disallowed overwrite.
    @raise CuckooOperationalError: banned path or failed sanitization.
    """
    # Read until newline for file path, e.g.,
    # shots/0001.jpg or files/9498687557/libcurl-4.dll.bin
    buf = self.handler.read_newline().strip().replace("\\", "/")
    guest_path = ""
    if self.is_binary:
        # Origin path on the guest, capped at 32 KiB and sanitized.
        guest_path = sanitize_pathname(self.handler.read_newline().strip()[:32768])
    dir_part, filename = os.path.split(buf)
    # Sanitize only the filename component; the directory part is
    # checked against the ban rules below.
    filename = sanitize_pathname(filename)
    buf = os.path.join(dir_part, filename)
    log.debug("File upload request for {0}".format(buf))
    # Reject traversal attempts, bare filenames and absolute paths.
    if "./" in buf or not dir_part or buf.startswith("/"):
        raise CuckooOperationalError("FileUpload failure, banned path.")
    for restricted in self.RESTRICTED_DIRECTORIES:
        if restricted in dir_part:
            raise CuckooOperationalError("FileUpload failure, banned path.")
    try:
        create_folder(self.storagepath, dir_part)
    except CuckooOperationalError:
        log.error("Unable to create folder %s" % dir_part)
        return False
    file_path = os.path.join(self.storagepath, buf)
    # The joined path must remain inside the storage directory.
    if not file_path.startswith(self.storagepath):
        raise CuckooOperationalError("FileUpload failure, path sanitization failed.")
    if guest_path != "":
        # Record each distinct guest-side origin path in a sidecar file.
        guest_paths = []
        if os.path.exists(file_path + "_info.txt"):
            guest_paths = [line.strip() for line in open(file_path + "_info.txt")]
        if guest_path not in guest_paths:
            infofd = open(file_path + "_info.txt", "a")
            infofd.write(guest_path + "\n")
            infofd.close()
    if not self.duplicate:
        # Refuse to clobber an already-uploaded file.
        if os.path.exists(file_path):
            log.warning("Analyzer tried to overwrite an existing file, closing connection.")
            return False
    # NOTE(review): self.fd is left open — presumably closed on
    # connection teardown; confirm against the caller.
    self.fd = open(file_path, "wb")
    chunk = self.handler.read_any()
    while chunk:
        self.fd.write(chunk)
        # Cap the stored size at upload_max_size.
        if self.fd.tell() >= self.upload_max_size:
            log.warning("Uploaded file length ({0}) larger than upload_max_size ({1}), stopping upload.".format(self.fd.tell(), self.upload_max_size))
            self.fd.write("... (truncated)")
            break
        try:
            chunk = self.handler.read_any()
        except:
            # Best effort: a read error ends the upload.
            break
    log.debug("Uploaded file length: {0}".format(self.fd.tell()))
def __iter__(self):
    """Receive one uploaded file from the guest and store it under the
    analysis storage folder, logging the upload into self.filelog.

    Protocol: the first newline-terminated line is the dump path relative
    to storage (e.g. shots/0001.jpg); protocol version >= 2 additionally
    sends the original guest-side filepath and a space-separated PID list.
    The rest of the stream is raw file content.

    @raise CuckooOperationalError: on a banned or non-sanitizable path.
    """
    # Read until newline for file path, e.g.,
    # shots/0001.jpg or files/9498687557/libcurl-4.dll.bin
    dump_path = self.handler.read_newline(strip=True).replace("\\", "/")

    if self.version >= 2:
        filepath = self.handler.read_newline(strip=True)
        pids = map(int, self.handler.read_newline(strip=True).split())
    else:
        filepath, pids = None, []

    log.debug("File upload request for %s", dump_path)

    dir_part, filename = os.path.split(dump_path)

    # Reject traversal attempts ("./" also catches "../"), bare filenames
    # (no directory part) and absolute paths.
    if "./" in dump_path or not dir_part or dump_path.startswith("/"):
        raise CuckooOperationalError(
            "FileUpload failure, banned path: %s" % dump_path)

    for restricted in self.RESTRICTED_DIRECTORIES:
        if restricted in dir_part:
            raise CuckooOperationalError(
                "FileUpload failure, banned path.")

    try:
        create_folder(self.storagepath, dir_part)
    except CuckooOperationalError:
        # Best-effort: log and drop this upload rather than kill the
        # result server connection loop.
        log.error("Unable to create folder %s", dir_part)
        return

    file_path = os.path.join(self.storagepath, dump_path.strip())

    # Defense in depth: after joining, the path must still live inside
    # the storage folder.
    if not file_path.startswith(self.storagepath):
        raise CuckooOperationalError(
            "FileUpload failure, path sanitization failed.")

    if os.path.exists(file_path):
        log.warning("Analyzer tried to overwrite an existing file, "
                    "closing connection.")
        return

    self.fd = open(file_path, "wb")
    chunk = self.handler.read_any()
    while chunk:
        self.fd.write(chunk)

        # Enforce the configured upload size cap; mark the file as cut.
        if self.fd.tell() >= self.upload_max_size:
            log.warning(
                "Uploaded file length larger than upload_max_size, "
                "stopping upload.")
            self.fd.write("... (truncated)")
            break

        try:
            chunk = self.handler.read_any()
        # FIX: the bare "except:" also swallowed KeyboardInterrupt and
        # SystemExit; only treat genuine runtime errors as end-of-stream.
        except Exception:
            break

    # FIX: the lock was previously released only on the success path, so
    # an exception while opening or writing the file log left it held
    # forever and deadlocked every later upload. try/finally guarantees
    # the release.
    self.lock.acquire()
    try:
        with open(self.filelog, "a+b") as f:
            f.write("%s\n" % json.dumps({
                "path": dump_path,
                "filepath": filepath,
                "pids": pids,
            }))
    finally:
        self.lock.release()

    log.debug("Uploaded file length: %s", self.fd.tell())
    return
    yield
def __iter__(self):
    """Receive one uploaded file from the guest and store it under the
    analysis storage folder, logging the upload into self.filelog.

    Protocol: the first newline-terminated line is the dump path relative
    to storage (e.g. shots/0001.jpg); protocol version >= 2 additionally
    sends the original guest-side filepath and a space-separated PID list.
    The rest of the stream is raw file content.

    @raise CuckooOperationalError: on a banned or non-sanitizable path.
    """
    # Read until newline for file path, e.g.,
    # shots/0001.jpg or files/9498687557/libcurl-4.dll.bin
    dump_path = self.handler.read_newline(strip=True).replace("\\", "/")

    if self.version >= 2:
        filepath = self.handler.read_newline(strip=True)
        pids = map(int, self.handler.read_newline(strip=True).split())
    else:
        filepath, pids = None, []

    log.debug("File upload request for %s", dump_path)

    dir_part, filename = os.path.split(dump_path)

    # Reject traversal attempts ("./" also catches "../"), bare filenames
    # (no directory part) and absolute paths.
    if "./" in dump_path or not dir_part or dump_path.startswith("/"):
        raise CuckooOperationalError(
            "FileUpload failure, banned path: %s" % dump_path
        )

    for restricted in self.RESTRICTED_DIRECTORIES:
        if restricted in dir_part:
            raise CuckooOperationalError(
                "FileUpload failure, banned path."
            )

    try:
        create_folder(self.storagepath, dir_part)
    except CuckooOperationalError:
        # Best-effort: log and drop this upload rather than kill the
        # result server connection loop.
        log.error("Unable to create folder %s", dir_part)
        return

    file_path = os.path.join(self.storagepath, dump_path)

    # Defense in depth: after joining, the path must still live inside
    # the storage folder.
    if not file_path.startswith(self.storagepath):
        raise CuckooOperationalError(
            "FileUpload failure, path sanitization failed."
        )

    if os.path.exists(file_path):
        log.warning(
            "Analyzer tried to overwrite an existing file, "
            "closing connection."
        )
        return

    self.fd = open(file_path, "wb")
    chunk = self.handler.read_any()
    while chunk:
        self.fd.write(chunk)

        # Enforce the configured upload size cap; mark the file as cut.
        if self.fd.tell() >= self.upload_max_size:
            log.warning(
                "Uploaded file length larger than upload_max_size, "
                "stopping upload."
            )
            self.fd.write("... (truncated)")
            break

        try:
            chunk = self.handler.read_any()
        # FIX: the bare "except:" also swallowed KeyboardInterrupt and
        # SystemExit; only treat genuine runtime errors as end-of-stream.
        except Exception:
            break

    # FIX: the lock was previously released only on the success path, so
    # an exception while opening or writing the file log left it held
    # forever and deadlocked every later upload. try/finally guarantees
    # the release.
    self.lock.acquire()
    try:
        with open(self.filelog, "a+b") as f:
            f.write("%s\n" % json.dumps({
                "path": dump_path,
                "filepath": filepath,
                "pids": pids,
            }))
    finally:
        self.lock.release()

    log.debug("Uploaded file length: %s", self.fd.tell())
    return
    yield
def __init__(self, dsn=None, schema_check=True, echo=False):
    """Connect to the configured database, create the schema if missing
    and verify the Alembic schema version.

    @param dsn: database connection string.
    @param schema_check: disable or enable the db schema version check.
    @param echo: echo sql queries.
    @raise CuckooDatabaseError: on connection, folder-creation, schema
        creation or schema-version failures.
    """
    self._lock = SuperLock()
    cfg = Config()

    # Precedence: explicit dsn > conf/cuckoo.conf [database] > sqlite
    # fallback under CUCKOO_ROOT/db.
    if dsn:
        self._connect_database(dsn)
    elif hasattr(cfg, "database") and cfg.database.connection:
        self._connect_database(cfg.database.connection)
    else:
        db_file = os.path.join(CUCKOO_ROOT, "db", "cuckoo.db")
        if not os.path.exists(db_file):
            db_dir = os.path.dirname(db_file)
            if not os.path.exists(db_dir):
                try:
                    create_folder(folder=db_dir)
                except CuckooOperationalError as e:
                    raise CuckooDatabaseError(
                        "Unable to create database directory: {0}".format(
                            e))
        self._connect_database("sqlite:///%s" % db_file)

    # Disable SQL logging. Turn it on for debugging.
    self.engine.echo = echo

    # Connection timeout.
    if hasattr(cfg, "database") and cfg.database.timeout:
        self.engine.pool_timeout = cfg.database.timeout
    else:
        self.engine.pool_timeout = 60

    # Let's emit a warning just in case.
    if not hasattr(cfg, "database"):
        log.warning("It appears you don't have a valid `database` "
                    "section in conf/cuckoo.conf, using sqlite3 instead.")

    # Create schema.
    try:
        Base.metadata.create_all(self.engine)
    except SQLAlchemyError as e:
        raise CuckooDatabaseError(
            "Unable to create or connect to database: {0}".format(e))

    # Get db session.
    self.Session = sessionmaker(bind=self.engine)

    # Deal with schema versioning.
    # TODO: it's a little bit dirty, needs refactoring.
    tmp_session = self.Session()
    if not tmp_session.query(AlembicVersion).count():
        # Set database schema version.
        tmp_session.add(AlembicVersion(version_num=SCHEMA_VERSION))
        try:
            tmp_session.commit()
        except SQLAlchemyError as e:
            # FIX: rollback() used to sit *after* the raise, making it
            # unreachable dead code; roll back before surfacing the error.
            tmp_session.rollback()
            raise CuckooDatabaseError(
                "Unable to set schema version: {0}".format(e))
        finally:
            tmp_session.close()
    else:
        # Check if db version is the expected one.
        last = tmp_session.query(AlembicVersion).first()
        tmp_session.close()
        if last.version_num != SCHEMA_VERSION and schema_check:
            raise CuckooDatabaseError(
                "DB schema version mismatch: found {0}, expected {1}. "
                "Try to apply all migrations (cd utils/db_migration/ && "
                "alembic upgrade head).".format(last.version_num,
                                                SCHEMA_VERSION))
def test_create_folder(self, rnd_tmp_folder):
    """create_folder() must create the requested directory under root."""
    utils.create_folder(root="/tmp", folder=rnd_tmp_folder)
    # FIX: "assert x is True" is an identity check against the True
    # singleton and fails for any merely-truthy value; assert the
    # boolean directly.
    assert os.path.isdir("/tmp/" + rnd_tmp_folder)
def test_create_folder_default(self):
    """create_folder() without arguments must raise CuckooOperationalError."""
    pytest.raises(CuckooOperationalError, utils.create_folder)
def __init__(self, dsn=None, schema_check=True):
    """Connect to the configured database, create the schema if missing
    and verify the Alembic schema version.

    @param dsn: database connection string.
    @param schema_check: disable or enable the db schema version check
    @raise CuckooDatabaseError: on connection, folder-creation, schema
        creation or schema-version failures.
    """
    cfg = Config()

    # Precedence: explicit dsn > conf [database] > sqlite fallback under
    # CUCKOO_ROOT/db.
    if dsn:
        self._connect_database(dsn)
    elif cfg.database.connection:
        self._connect_database(cfg.database.connection)
    else:
        db_file = os.path.join(CUCKOO_ROOT, "db", "cuckoo.db")
        if not os.path.exists(db_file):
            db_dir = os.path.dirname(db_file)
            if not os.path.exists(db_dir):
                try:
                    create_folder(folder=db_dir)
                except CuckooOperationalError as e:
                    raise CuckooDatabaseError(
                        "Unable to create database directory: {0}".format(
                            e))
        self._connect_database("sqlite:///%s" % db_file)

    # Disable SQL logging. Turn it on for debugging.
    self.engine.echo = False

    # Connection timeout.
    if cfg.database.timeout:
        self.engine.pool_timeout = cfg.database.timeout
    else:
        self.engine.pool_timeout = 60

    # Create schema.
    try:
        Base.metadata.create_all(self.engine)
    except SQLAlchemyError as e:
        raise CuckooDatabaseError(
            "Unable to create or connect to database: {0}".format(e))

    # Get db session.
    self.Session = sessionmaker(bind=self.engine)

    # Deal with schema versioning.
    # TODO: it's a little bit dirty, needs refactoring.
    tmp_session = self.Session()
    if not tmp_session.query(AlembicVersion).count():
        # Set database schema version.
        tmp_session.add(AlembicVersion(version_num=SCHEMA_VERSION))
        try:
            tmp_session.commit()
        except SQLAlchemyError as e:
            # FIX: rollback() used to sit *after* the raise, making it
            # unreachable dead code; roll back before surfacing the error.
            tmp_session.rollback()
            raise CuckooDatabaseError(
                "Unable to set schema version: {0}".format(e))
        finally:
            tmp_session.close()
    else:
        # Check if db version is the expected one.
        last = tmp_session.query(AlembicVersion).first()
        tmp_session.close()
        if last.version_num != SCHEMA_VERSION and schema_check:
            raise CuckooDatabaseError(
                "DB schema version mismatch: found "
                "{0}, expected {1}. Try to apply all "
                "migrations.".format(last.version_num, SCHEMA_VERSION))