def create_folders(self):
    folders = "shots", "files", "logs", "buffer"

    for folder in folders:
        try:
            create_folder(self.storagepath, folder=folder)
        except DetectorOperationalError:
            log.error("Unable to create folder %s", folder)
            return False

    return True
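# create_folder() is defined elsewhere in the codebase; the sketch below is an
# assumption inferred from its call sites (create_folder(root, folder=...) and
# create_folder(folder=...)), not the actual implementation: it joins the two
# parts and wraps OS failures in DetectorOperationalError.
import os

def create_folder(root=".", folder=""):
    """Hypothetical reconstruction: create <root>/<folder> if missing,
    raising DetectorOperationalError on failure."""
    folder_path = os.path.join(root, folder)
    if not os.path.isdir(folder_path):
        try:
            os.makedirs(folder_path)
        except OSError as e:
            raise DetectorOperationalError(
                "Unable to create folder: %s (%s)" % (folder_path, e))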
def __iter__(self):
    # Read until newline for the destination path, e.g.,
    # shots/0001.jpg or files/9498687557/libcurl-4.dll.bin
    buf = self.handler.read_newline().strip().replace("\\", "/")
    log.debug("File upload request for %s", buf)
    dir_part, filename = os.path.split(buf)

    # Reject empty, absolute, or traversal-prone paths outright. Note that
    # the "./" substring test also catches "../" components.
    if "./" in buf or not dir_part or buf.startswith("/"):
        raise DetectorOperationalError("FileUpload failure, banned path.")

    for restricted in self.RESTRICTED_DIRECTORIES:
        if restricted in dir_part:
            raise DetectorOperationalError(
                "FileUpload failure, banned path.")

    try:
        create_folder(self.storagepath, dir_part)
    except DetectorOperationalError:
        log.error("Unable to create folder %s", dir_part)
        return

    file_path = os.path.join(self.storagepath, buf.strip())
    if not file_path.startswith(self.storagepath):
        raise DetectorOperationalError(
            "FileUpload failure, path sanitization failed.")

    if os.path.exists(file_path):
        log.warning("Analyzer tried to overwrite an existing file, "
                    "closing connection.")
        return

    self.fd = open(file_path, "wb")
    chunk = self.handler.read_any()
    while chunk:
        self.fd.write(chunk)

        if self.fd.tell() >= self.upload_max_size:
            log.warning("Uploaded file length larger than upload_max_size, "
                        "stopping upload.")
            # The file is opened in binary mode, so write bytes.
            self.fd.write(b"... (truncated)")
            break

        try:
            chunk = self.handler.read_any()
        except Exception:
            break

    log.debug("Uploaded file length: %s", self.fd.tell())
    return
    # The unreachable yield makes this method a generator; the upload logic
    # above runs on the first iteration.
    yield
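# Why the startswith() check above is kept even after the "./" blacklist:
# os.path.join() silently discards the root when the second argument is
# absolute. A minimal standalone illustration (hypothetical helper and paths;
# a normpath() is added here for clarity, whereas the handler above instead
# rejects any ".." early via the "./" substring test):
def _is_safe(storagepath, relpath):
    candidate = os.path.normpath(os.path.join(storagepath, relpath))
    return candidate.startswith(storagepath)

# _is_safe("/srv/storage/1", "shots/0001.jpg")   -> True
# _is_safe("/srv/storage/1", "/etc/passwd")      -> False: join() drops the root
# _is_safe("/srv/storage/1", "../../etc/passwd") -> False: normpath() climbs out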
def set_path(self, analysis_path):
    """Set analysis folder path.
    @param analysis_path: analysis folder path.
    """
    self.analysis_path = analysis_path
    self.conf_path = self._get_analysis_path("analysis.conf")
    self.file_path = os.path.realpath(self._get_analysis_path("binary"))
    self.reports_path = self._get_analysis_path("reports")
    self.shots_path = self._get_analysis_path("shots")
    self.pcap_path = self._get_analysis_path("dump.pcap")

    try:
        create_folder(folder=self.reports_path)
    except DetectorOperationalError as e:
        raise DetectorReportError(e)
def init_storage(self):
    """Initialize analysis storage folder."""
    self.storage = os.path.join(DETECTOR_ROOT, "storage",
                                "analyses", str(self.task.id))

    # If the analysis storage folder already exists, we need to abort the
    # analysis or previous results will be overwritten and lost.
    if os.path.exists(self.storage):
        log.error("Analysis results folder already exists at path \"%s\","
                  " analysis aborted", self.storage)
        return False

    # If we're not able to create the analysis storage folder, we have to
    # abort the analysis.
    try:
        create_folder(folder=self.storage)
    except DetectorOperationalError:
        log.error("Unable to create analysis folder %s", self.storage)
        return False

    return True
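# Resulting on-disk layout for task id 42, assuming the create_folders() step
# above runs against the same per-analysis storage folder (illustrative):
#
#   $DETECTOR_ROOT/storage/analyses/42/
#       shots/    files/    logs/    buffer/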
def __init__(self, dsn=None, schema_check=True, echo=False):
    """
    @param dsn: database connection string.
    @param schema_check: disable or enable the db schema version check.
    @param echo: echo sql queries.
    """
    self._lock = SuperLock()
    cfg = Config()

    if dsn:
        self._connect_database(dsn)
    elif hasattr(cfg, "database") and cfg.database.connection:
        self._connect_database(cfg.database.connection)
    else:
        db_file = os.path.join(DETECTOR_ROOT, "db", "detector.db")
        if not os.path.exists(db_file):
            db_dir = os.path.dirname(db_file)
            if not os.path.exists(db_dir):
                try:
                    create_folder(folder=db_dir)
                except DetectorOperationalError as e:
                    raise DetectorDatabaseError(
                        "Unable to create database directory: "
                        "{0}".format(e))

        self._connect_database("sqlite:///%s" % db_file)

    # Disable SQL logging. Turn it on for debugging.
    self.engine.echo = echo

    # Connection timeout.
    if hasattr(cfg, "database") and cfg.database.timeout:
        self.engine.pool_timeout = cfg.database.timeout
    else:
        self.engine.pool_timeout = 60

    # Let's emit a warning just in case.
    if not hasattr(cfg, "database"):
        log.warning("It appears you don't have a valid `database` "
                    "section in conf/detector.conf, using sqlite3 instead.")

    # Create schema.
    try:
        Base.metadata.create_all(self.engine)
    except SQLAlchemyError as e:
        raise DetectorDatabaseError(
            "Unable to create or connect to database: {0}".format(e))

    # Get db session.
    self.Session = sessionmaker(bind=self.engine)

    # Deal with schema versioning.
    # TODO: it's a little bit dirty, needs refactoring.
    tmp_session = self.Session()
    if not tmp_session.query(AlembicVersion).count():
        # Set database schema version.
        tmp_session.add(AlembicVersion(version_num=SCHEMA_VERSION))
        try:
            tmp_session.commit()
        except SQLAlchemyError as e:
            # Roll back the failed transaction before propagating the error.
            tmp_session.rollback()
            raise DetectorDatabaseError(
                "Unable to set schema version: {0}".format(e))
        finally:
            tmp_session.close()
    else:
        # Check if db version is the expected one.
        last = tmp_session.query(AlembicVersion).first()
        tmp_session.close()
        if last.version_num != SCHEMA_VERSION and schema_check:
            raise DetectorDatabaseError(
                "DB schema version mismatch: found {0}, expected {1}. "
                "Try to apply all migrations (cd utils/db_migration/ && "
                "alembic upgrade head).".format(last.version_num,
                                                SCHEMA_VERSION))
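# Brief usage sketch, assuming this __init__ belongs to a class named
# Database (hypothetical DSN, for illustration only): pass a DSN to bypass
# the conf/detector.conf connection string, and disable schema_check e.g.
# while applying migrations by hand.
if __name__ == "__main__":
    db = Database(dsn="sqlite:///detector-test.db", schema_check=False)
    session = db.Session()
    print(session.query(AlembicVersion).first().version_num)
    session.close()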