def main():
    """Cython-compile every .py file under LIBS (plus pefile, if installed).

    After each compile attempt the module's stale ``.pyc`` is removed so the
    compiled extension is picked up, and the temporary ``_pyxbld`` build
    directories are cleaned up at the end.
    """
    filelist = common.parseFileList([LIBS], recursive=True)
    # pefile is an optional extra; compile it too when it is importable
    try:
        import pefile
        # __file__ may point at the .pyc; strip the trailing 'c' for the .py
        filepath = pefile.__file__[:-1]
        filelist.append(filepath)
    except ImportError:
        print('pefile not installed...')
    for filename in filelist:
        if filename.endswith('.py'):
            filename = str(filename)
            try:
                pyx_to_dll(filename, inplace=True)
                print(filename, 'successful!')
            except Exception as e:
                # Include the actual error so the failure is diagnosable
                print('ERROR:', filename, 'failed -', e)
            try:
                # Drop stale bytecode so the compiled module is used instead
                os.remove(filename[:-2] + 'c')
            except OSError:
                # No .pyc present - nothing to clean up
                pass
    # Cleanup build dirs
    for path, _dirs, _files in os.walk(LIBS):
        if os.path.basename(path) == '_pyxbld' and os.path.isdir(path):
            shutil.rmtree(path)
def _main():
    """CLI entry point: scan the requested files and store/print the report.

    All diagnostic prints go to stderr; the parsed report is written to the
    original stdout (when shown) and persisted via the storage handler.
    """
    # Force all prints to go to stderr
    stdout = sys.stdout
    sys.stdout = sys.stderr
    # Import dependencies only needed by _main()
    import zipfile
    # Get args
    args = _parse_args()
    # Set verbose
    if args.verbose:
        global VERBOSE
        VERBOSE = args.verbose
    # Checks if user is trying to initialize
    if args.Files == ['init'] and not os.path.isfile('init'):
        _init(args)
    if not os.path.isfile(args.config):
        config_init(args.config)
    # Make sure report is not a dir
    if args.json:
        if os.path.isdir(args.json):
            print('ERROR:', args.json, 'is a directory, a file is expected')
            return False
    # Parse the file list
    parsedlist = parseFileList(args.Files, recursive=args.recursive)
    # Unzip zip files if asked to
    if args.extractzips:
        # Convert the password to bytes once, up front. Converting inside the
        # loop meant a second archive hit bytes(bytes(...)) -> TypeError.
        if PY3 and isinstance(args.password, str):
            args.password = bytes(args.password, 'utf-8')
        # Use an explicit work queue: the old loop appended to and removed
        # from the very list it was iterating, which skips entries. Extracted
        # files are queued as well, so nested archives are still expanded.
        work = list(parsedlist)
        while work:
            fname = work.pop(0)
            if not zipfile.is_zipfile(fname):
                continue
            unzip_dir = os.path.join('_tmp', os.path.basename(fname))
            z = zipfile.ZipFile(fname)
            try:
                z.extractall(path=unzip_dir, pwd=args.password)
                for uzfile in z.namelist():
                    extracted = os.path.join(unzip_dir, uzfile)
                    parsedlist.append(extracted)
                    work.append(extracted)
            except RuntimeError as e:
                print("ERROR: Failed to extract ", fname, ' - ', e, sep='')
            # The archive itself is replaced in the scan list by its contents
            parsedlist.remove(fname)
    # Resume from report
    if args.resume:
        i = len(parsedlist)
        try:
            reportfile = codecs.open(args.json, 'r', 'utf-8')
        except Exception as e:
            print("ERROR: Could not open report file")
            exit(1)
        for line in reportfile:
            line = json.loads(line)
            for fname in line:
                if fname in parsedlist:
                    parsedlist.remove(fname)
        reportfile.close()
        i = i - len(parsedlist)
        if VERBOSE:
            print("Skipping", i, "files which are in the report already")
    # Do multiple runs if there are too many files
    filelists = []
    if len(parsedlist) > args.numberper:
        while len(parsedlist) > args.numberper:
            filelists.append(parsedlist[:args.numberper])
            parsedlist = parsedlist[args.numberper:]
    if parsedlist:
        filelists.append(parsedlist)
    for filelist in filelists:
        # Record start time for metadata
        starttime = str(datetime.datetime.now())
        # Run the multiscan (recursive=None: the list is already expanded)
        results = multiscan(filelist, recursive=None, configfile=args.config)
        # We need to read in the config for the parseReports call
        Config = configparser.SafeConfigParser()
        Config.optionxform = str
        Config.read(args.config)
        config = _get_main_config(Config)
        # Make sure we have a group-types
        if "group-types" not in config or not config["group-types"]:
            config["group-types"] = []
        # Add in script metadata
        endtime = str(datetime.datetime.now())
        # os.getlogin raises OSError when there is no controlling terminal
        # (e.g. on Windows services); fall back to the USERNAME env var.
        try:
            username = os.getlogin()
        except OSError:
            username = os.getenv('USERNAME')
        results.append(([], {
            "Name": "MultiScanner",
            "Start Time": starttime,
            "End Time": endtime,
            # "Command Line": list2cmdline(sys.argv),
            "Run by": username
        }))
        if args.show or not stdout.isatty():
            # TODO: Make this output something readable
            # Parse Results
            report = parse_reports(results, groups=config["group-types"],
                                   ugly=args.ugly, includeMetadata=args.metadata)
            # Print report
            try:
                print(convert_encoding(report, encoding='ascii', errors='replace'),
                      file=stdout)
                stdout.flush()
            except Exception as e:
                print('ERROR: Can\'t print report -', e)
        report = parse_reports(results, groups=config["group-types"],
                               includeMetadata=args.metadata, python=True)
        update_conf = None
        if args.json:
            update_conf = {'File': {'path': args.json}}
            if args.json.endswith('.gz') or args.json.endswith('.gzip'):
                update_conf['File']['gzip'] = True
        if 'storage-config' not in config:
            config["storage-config"] = None
        storage_handle = storage.StorageHandler(
            configfile=config["storage-config"], config=update_conf)
        storage_handle.store(report)
        storage_handle.close()
    # Cleanup zip extracted files
    if args.extractzips:
        shutil.rmtree('_tmp')
def multiscan(Files, recursive=False, configregen=False, configfile=CONFIG,
              config=None, module_list=None):
    """
    The meat and potatoes. Returns the list of module results

    Files - A list of files and dirs to be scanned
    recursive - If true it will search the dirs in Files recursively
    configregen - If True a new config file will be created overwriting the old
    configfile - What config file to use. Can be None.
    config - A dictionary containing the configuration options to be used.
    module_list - Optional explicit list of module files; defaults to MODULEDIR.
    """
    # Redirect stdout to stderr so modules can't corrupt the report stream
    stdout = sys.stdout
    sys.stdout = sys.stderr
    # TODO: Make sure the cleanup from this works if something breaks
    # Init some vars
    # If recursive is None we don't parse the file list and take it as is.
    if recursive is not None:
        filelist = parseFileList(Files, recursive=recursive)
    else:
        filelist = Files
    # A list of files in the module dir
    if module_list is None:
        module_list = parseDir(MODULEDIR, recursive=True)
    # A dictionary used for the copyfileto parameter
    filedic = {}
    # What will be the config file object
    config_object = None
    # Read in config
    if configfile:
        config_object = configparser.SafeConfigParser()
        config_object.optionxform = str
        # Regen the config if needed or wanted
        # NOTE(review): '_rewite_config' looks like a typo of
        # '_rewrite_config' - confirm against the helper's definition.
        if configregen or not os.path.isfile(configfile):
            _rewite_config(module_list, config_object, filepath=configfile)
        config_object.read(configfile)
        main_config = _get_main_config(config_object, filepath=configfile)
        if config:
            # Overlay the caller-supplied config on top of the file config
            file_conf = parse_config(config_object)
            for key in config:
                if key not in file_conf:
                    file_conf[key] = config[key]
                    file_conf[key]['_load_default'] = True
                else:
                    file_conf[key].update(config[key])
            config = file_conf
        else:
            config = parse_config(config_object)
    else:
        if config is None:
            config = {}
        else:
            config['_load_default'] = True
        if 'main' in config:
            main_config = config['main']
        else:
            main_config = DEFAULTCONF
    # If none of the files existed
    if not filelist:
        sys.stdout = stdout
        raise ValueError("No valid files")
    # Copy files to a share if configured
    if "copyfilesto" not in main_config:
        main_config["copyfilesto"] = False
    if main_config["copyfilesto"]:
        if os.path.isdir(main_config["copyfilesto"]):
            filelist = _copy_to_share(filelist, filedic, main_config["copyfilesto"])
        else:
            sys.stdout = stdout
            raise IOError('The copyfilesto dir" ' + main_config["copyfilesto"] + '" is not a valid dir')
    # Create the global module interface
    global_module_interface = _GlobalModuleInterface()
    # Start a thread for each module
    thread_list = _start_module_threads(filelist, module_list, config,
                                        global_module_interface)
    # Write the default configure settings for missing ones
    if config_object:
        _write_missing_module_configs(module_list, config_object, filepath=configfile)
    # Warn about spaces in file names
    for f in filelist:
        if ' ' in f:
            print('WARNING: You are using file paths with spaces. This may result in modules not reporting correctly.')
            break
    # Wait for all threads to finish; i counts polls since the last finish so
    # we can periodically report which modules we are still waiting on.
    thread_wait_list = thread_list[:]
    i = 0
    while thread_wait_list:
        i += 1
        # Iterate over a copy: we remove finished threads as we go, and
        # removing from the list being iterated skips the next element.
        for thread in thread_wait_list[:]:
            if not thread.is_alive():
                i = 0
                thread_wait_list.remove(thread)
                if VERBOSE:
                    print(thread.name, "took", thread.endtime - thread.starttime)
        if i == 15:
            i = 0
            if VERBOSE:
                p = 'Waiting on'
                for thread in thread_wait_list:
                    p += ' ' + thread.name
                p += '...'
                print(p)
        time.sleep(1)
    # Delete copied files
    if main_config["copyfilesto"]:
        for item in filelist:
            try:
                os.remove(item)
            except OSError:
                # A module (or a parallel run) may already have removed it
                pass
    # Get Result list
    results = []
    for thread in thread_list:
        if thread.ret is not None:
            results.append(thread.ret)
        del thread
    # Translates file names back to the originals
    if filedic:
        # I have no idea if this is the best way to do in-place modifications
        for i in range(0, len(results)):
            (result, metadata) = results[i]
            modded = False
            for j in range(0, len(result)):
                (filename, hit) = result[j]
                base = basename(filename)
                if base in filedic:
                    filename = filedic[base]
                    modded = True
                result[j] = (filename, hit)
            if modded:
                results[i] = (result, metadata)
    # Scan subfiles if needed
    subscan_list = global_module_interface._get_subscan_list()
    if subscan_list:
        # Translate from_filename back to original if needed
        if filedic:
            for i in range(0, len(subscan_list)):
                file_path, from_filename, module_name = subscan_list[i]
                base = basename(from_filename)
                if base in filedic:
                    from_filename = filedic[base]
                subscan_list[i] = (file_path, from_filename, module_name)
        results.extend(_subscan(subscan_list, config, main_config,
                                module_list, global_module_interface))
    global_module_interface._cleanup()
    # Return stdout to previous state
    sys.stdout = stdout
    return results
def multiscan(Files, recursive=False, configregen=False, configfile=CONFIG,
              config=None, module_list=None):
    """
    The meat and potatoes. Returns the list of module results

    Files - A list of files and dirs to be scanned
    recursive - If true it will search the dirs in Files recursively
    configregen - If True a new config file will be created overwriting the old
    configfile - What config file to use. Can be None.
    config - A dictionary containing the configuration options to be used.
    module_list - Optional explicit list of module files; defaults to MODULEDIR.
    """
    # Redirect stdout to stderr so modules can't corrupt the report stream
    stdout = sys.stdout
    sys.stdout = sys.stderr
    # TODO: Make sure the cleanup from this works if something breaks
    # Init some vars
    # If recursive is None we don't parse the file list and take it as is.
    if recursive is not None:
        filelist = parseFileList(Files, recursive=recursive)
    else:
        filelist = Files
    # A list of files in the module dir
    if module_list is None:
        module_list = parseDir(MODULEDIR, recursive=True)
    # A dictionary used for the copyfileto parameter
    filedic = {}
    # What will be the config file object
    config_object = None
    # Read in config
    if configfile:
        config_object = configparser.SafeConfigParser()
        config_object.optionxform = str
        # Regen the config if needed or wanted
        if configregen or not os.path.isfile(configfile):
            _rewrite_config(module_list, config_object, filepath=configfile)
        config_object.read(configfile)
        main_config = _get_main_config(config_object, filepath=configfile)
        if config:
            # Overlay the caller-supplied config on top of the file config
            file_conf = parse_config(config_object)
            for key in config:
                if key not in file_conf:
                    file_conf[key] = config[key]
                    file_conf[key]['_load_default'] = True
                else:
                    file_conf[key].update(config[key])
            config = file_conf
        else:
            config = parse_config(config_object)
    else:
        if config is None:
            config = {}
        else:
            config['_load_default'] = True
        if 'main' in config:
            main_config = config['main']
        else:
            main_config = DEFAULTCONF
    # If none of the files existed
    if not filelist:
        sys.stdout = stdout
        raise ValueError("No valid files")
    # Copy files to a share if configured
    if "copyfilesto" not in main_config:
        main_config["copyfilesto"] = False
    if main_config["copyfilesto"]:
        if os.path.isdir(main_config["copyfilesto"]):
            filelist = _copy_to_share(filelist, filedic, main_config["copyfilesto"])
        else:
            sys.stdout = stdout
            raise IOError('The copyfilesto dir" ' + main_config["copyfilesto"] + '" is not a valid dir')
    # Create the global module interface
    global_module_interface = _GlobalModuleInterface()
    # Start a thread for each module
    thread_list = _start_module_threads(filelist, module_list, config,
                                        global_module_interface)
    # Write the default configure settings for missing ones
    if config_object:
        _write_missing_module_configs(module_list, config_object, filepath=configfile)
    # Warn about spaces in file names
    for f in filelist:
        if ' ' in f:
            print(
                'WARNING: You are using file paths with spaces. This may result in modules not reporting correctly.'
            )
            break
    # Wait for all threads to finish; i counts polls since the last finish so
    # we can periodically report which modules we are still waiting on.
    thread_wait_list = thread_list[:]
    i = 0
    while thread_wait_list:
        i += 1
        # Iterate over a copy: we remove finished threads as we go, and
        # removing from the list being iterated skips the next element.
        for thread in thread_wait_list[:]:
            if not thread.is_alive():
                i = 0
                thread_wait_list.remove(thread)
                if VERBOSE:
                    print(thread.name, "took", thread.endtime - thread.starttime)
        if i == 15:
            i = 0
            if VERBOSE:
                p = 'Waiting on'
                for thread in thread_wait_list:
                    p += ' ' + thread.name
                p += '...'
                print(p)
        time.sleep(1)
    # Delete copied files
    if main_config["copyfilesto"]:
        for item in filelist:
            try:
                os.remove(item)
            except OSError:
                # A module (or a parallel run) may already have removed it
                pass
    # Get Result list
    results = []
    for thread in thread_list:
        if thread.ret is not None:
            results.append(thread.ret)
        del thread
    # Translates file names back to the originals
    if filedic:
        # I have no idea if this is the best way to do in-place modifications
        for i in range(0, len(results)):
            (result, metadata) = results[i]
            modded = False
            for j in range(0, len(result)):
                (filename, hit) = result[j]
                base = basename(filename)
                if base in filedic:
                    filename = filedic[base]
                    modded = True
                result[j] = (filename, hit)
            if modded:
                results[i] = (result, metadata)
    # Scan subfiles if needed
    subscan_list = global_module_interface._get_subscan_list()
    if subscan_list:
        # Translate from_filename back to original if needed
        if filedic:
            for i in range(0, len(subscan_list)):
                file_path, from_filename, module_name = subscan_list[i]
                base = basename(from_filename)
                if base in filedic:
                    from_filename = filedic[base]
                subscan_list[i] = (file_path, from_filename, module_name)
        results.extend(
            _subscan(subscan_list, config, main_config, module_list,
                     global_module_interface))
    global_module_interface._cleanup()
    # Return stdout to previous state
    sys.stdout = stdout
    return results
def _main():
    """CLI entry point: scan the requested files and store/print the report.

    All diagnostic prints go to stderr; the parsed report is written to the
    original stdout (when shown) and persisted via the storage handler.
    """
    # Force all prints to go to stderr
    stdout = sys.stdout
    sys.stdout = sys.stderr
    # Import dependencies only needed by _main()
    import zipfile
    # Get args
    args = _parse_args()
    # Set verbose
    if args.verbose:
        global VERBOSE
        VERBOSE = args.verbose
    # Checks if user is trying to initialize.
    # Compare the list directly rather than its str() representation.
    if args.Files == ['init'] and not os.path.isfile('init'):
        _init(args)
    if not os.path.isfile(args.config):
        config_init(args.config)
    # Make sure report is not a dir
    if args.json:
        if os.path.isdir(args.json):
            print('ERROR:', args.json, 'is a directory, a file is expected')
            return False
    # Parse the file list
    parsedlist = parseFileList(args.Files, recursive=args.recursive)
    # Unzip zip files if asked to
    if args.extractzips:
        # Convert the password to bytes once, up front. Converting inside the
        # loop meant a second archive hit bytes(bytes(...)) -> TypeError.
        if PY3 and isinstance(args.password, str):
            args.password = bytes(args.password, 'utf-8')
        # Use an explicit work queue: the old loop appended to and removed
        # from the very list it was iterating, which skips entries. Extracted
        # files are queued as well, so nested archives are still expanded.
        work = list(parsedlist)
        while work:
            fname = work.pop(0)
            if not zipfile.is_zipfile(fname):
                continue
            unzip_dir = os.path.join('_tmp', os.path.basename(fname))
            z = zipfile.ZipFile(fname)
            try:
                z.extractall(path=unzip_dir, pwd=args.password)
                for uzfile in z.namelist():
                    extracted = os.path.join(unzip_dir, uzfile)
                    parsedlist.append(extracted)
                    work.append(extracted)
            except RuntimeError as e:
                print("ERROR: Failed to extract ", fname, ' - ', e, sep='')
            # The archive itself is replaced in the scan list by its contents
            parsedlist.remove(fname)
    # Resume from report
    if args.resume:
        i = len(parsedlist)
        try:
            reportfile = codecs.open(args.json, 'r', 'utf-8')
        except Exception as e:
            print("ERROR: Could not open report file")
            exit(1)
        for line in reportfile:
            line = json.loads(line)
            for fname in line:
                if fname in parsedlist:
                    parsedlist.remove(fname)
        reportfile.close()
        i = i - len(parsedlist)
        if VERBOSE:
            print("Skipping", i, "files which are in the report already")
    # Do multiple runs if there are too many files
    filelists = []
    if len(parsedlist) > args.numberper:
        while len(parsedlist) > args.numberper:
            filelists.append(parsedlist[:args.numberper])
            parsedlist = parsedlist[args.numberper:]
    if parsedlist:
        filelists.append(parsedlist)
    for filelist in filelists:
        # Record start time for metadata
        starttime = str(datetime.datetime.now())
        # Run the multiscan (recursive=None: the list is already expanded)
        results = multiscan(filelist, recursive=None, configfile=args.config)
        # We need to read in the config for the parseReports call
        Config = configparser.SafeConfigParser()
        Config.optionxform = str
        Config.read(args.config)
        config = _get_main_config(Config)
        # Make sure we have a group-types
        if "group-types" not in config or not config["group-types"]:
            config["group-types"] = []
        # Add in script metadata
        endtime = str(datetime.datetime.now())
        # os.getlogin raises OSError when there is no controlling terminal
        # (e.g. on Windows services); fall back to the USERNAME env var.
        try:
            username = os.getlogin()
        except OSError:
            username = os.getenv('USERNAME')
        results.append(([], {
            "Name": "MultiScanner",
            "Start Time": starttime,
            "End Time": endtime,
            # "Command Line": list2cmdline(sys.argv),
            "Run by": username
        }))
        if args.show or not stdout.isatty():
            # TODO: Make this output something readable
            # Parse Results
            report = parse_reports(results, groups=config["group-types"],
                                   ugly=args.ugly, includeMetadata=args.metadata)
            # Print report
            try:
                print(convert_encoding(report, encoding='ascii', errors='replace'),
                      file=stdout)
                stdout.flush()
            except Exception as e:
                print('ERROR: Can\'t print report -', e)
        report = parse_reports(results, groups=config["group-types"],
                               includeMetadata=args.metadata, python=True)
        update_conf = None
        if args.json:
            update_conf = {'File': {'path': args.json}}
            if args.json.endswith('.gz') or args.json.endswith('.gzip'):
                update_conf['File']['gzip'] = True
        if 'storage-config' not in config:
            config["storage-config"] = None
        storage_handle = storage.StorageHandler(
            configfile=config["storage-config"], config=update_conf)
        storage_handle.store(report)
        storage_handle.close()
    # Cleanup zip extracted files
    if args.extractzips:
        shutil.rmtree('_tmp')
def _main():
    """CLI entry point (file-report revision): scan files, print the report,
    and append the machine-readable report to the JSON file.
    """
    # Force all prints to go to stderr
    stdout = sys.stdout
    sys.stdout = sys.stderr
    # Import dependencies only needed by _main()
    import zipfile
    # Get args
    args = _parse_args()
    # Set verbose
    if args.verbose:
        global VERBOSE
        VERBOSE = args.verbose
    # Checks if user is trying to initialize
    if args.Files == ['init'] and not os.path.isfile('init'):
        _init(args)
    if not os.path.isfile(args.config):
        config_init(args.config)
    # Parse the file list
    parsedlist = parseFileList(args.Files, recursive=args.recursive)
    # Unzip zip files if asked to
    if args.extractzips:
        # TODO: Add password capabilities
        # Convert the password to bytes once, up front. Converting inside the
        # loop meant a second archive hit bytes(bytes(...)) -> TypeError.
        if PY3 and isinstance(args.password, str):
            args.password = bytes(args.password, 'utf-8')
        # Use an explicit work queue: the old loop appended to and removed
        # from the very list it was iterating, which skips entries. Extracted
        # files are queued as well, so nested archives are still expanded.
        work = list(parsedlist)
        while work:
            fname = work.pop(0)
            if not zipfile.is_zipfile(fname):
                continue
            unzip_dir = os.path.join('_tmp', os.path.basename(fname))
            z = zipfile.ZipFile(fname)
            try:
                z.extractall(path=unzip_dir, pwd=args.password)
                for uzfile in z.namelist():
                    extracted = os.path.join(unzip_dir, uzfile)
                    parsedlist.append(extracted)
                    work.append(extracted)
            except RuntimeError as e:
                print("ERROR: Failed to extract ", fname, ' - ', e, sep='')
            # The archive itself is replaced in the scan list by its contents
            parsedlist.remove(fname)
    # Resume from report
    if args.resume:
        i = len(parsedlist)
        try:
            reportfile = codecs.open(args.json, 'r', 'utf-8')
        except Exception as e:
            print("ERROR: Could not open report file")
            exit(1)
        for line in reportfile:
            line = json.loads(line)
            for fname in line:
                if fname in parsedlist:
                    parsedlist.remove(fname)
        reportfile.close()
        i = i - len(parsedlist)
        if VERBOSE:
            print("Skipping", i, "files which are in the report already")
    # Do multiple runs if there are too many files
    filelists = []
    if len(parsedlist) > args.numberper:
        while len(parsedlist) > args.numberper:
            filelists.append(parsedlist[:args.numberper])
            parsedlist = parsedlist[args.numberper:]
    if parsedlist:
        filelists.append(parsedlist)
    for filelist in filelists:
        # Record start time for metadata
        starttime = str(datetime.datetime.now())
        # Run the multiscan
        results = multiscan(filelist, recursive=None, configfile=args.config)
        # We need to read in the config for the parseReports call
        Config = configparser.SafeConfigParser()
        Config.optionxform = str
        Config.read(args.config)
        config = _get_main_config(Config)
        # Make sure we have a group-types. Hoisted before both output
        # branches: the redirected-output branch below also reads it, and
        # previously it was only set in the tty branch -> KeyError risk.
        if "group-types" not in config or not config["group-types"]:
            config["group-types"] = []
        # Add in script metadata
        endtime = str(datetime.datetime.now())
        # os.getlogin raises OSError when there is no controlling terminal
        # (e.g. on Windows services); fall back to the USERNAME env var.
        try:
            username = os.getlogin()
        except OSError:
            username = os.getenv('USERNAME')
        results.append(([], {
            "Name": "MultiScanner",
            "Start Time": starttime,
            "End Time": endtime,
            # "Command Line": list2cmdline(sys.argv),
            "Run by": username
        }))
        report = None
        if not args.quiet and stdout.isatty():
            # TODO: Make this output something readable
            # Parse Results
            report = parse_reports(results, groups=config["group-types"],
                                   ugly=args.ugly, includeMetadata=args.metadata)
            # Print report and write to file
            print(report, file=stdout)
        if not stdout.isatty():
            # Output is redirected: always emit the compact (ugly) report
            report = parse_reports(results, groups=config["group-types"],
                                   ugly=True, includeMetadata=args.metadata)
            print(report, file=stdout)
            stdout.flush()
            # Don't write the default location if we are redirecting output
            if args.json == 'report.json':
                print('Not writing results to report.json, pick a different filename to override')
            else:
                try:
                    reportfile = codecs.open(args.json, 'a', 'utf-8')
                    reportfile.write(report)
                    reportfile.write('\n')
                    reportfile.close()
                except Exception as e:
                    print(e)
                    print("ERROR: Could not write report file, report not saved")
                    exit(2)
        else:
            # Check if we need to run the report again
            if report is not None and args.ugly is True:
                pass
            else:
                report = parse_reports(results, groups=config["group-types"],
                                       ugly=True, includeMetadata=args.metadata)
            # Try to write report
            try:
                reportfile = codecs.open(args.json, 'a', 'utf-8')
                reportfile.write(report)
                reportfile.write('\n')
                reportfile.close()
            except Exception as e:
                print(e)
                print("ERROR: Could not write report file, report not saved")
                exit(2)
    # Cleanup zip extracted files
    if args.extractzips:
        shutil.rmtree('_tmp')
def multiscan(Files, recursive=False, configregen=False, configfile=CONFIG):
    """
    The meat and potatoes. Returns the list of module results

    Files - A list of files and dirs to be scanned
    recursive - If true it will search the dirs in Files recursively
    configregen - If True a new config file will be created overwriting the old
    configfile - What config file to use
    """
    # Redirect stdout to stderr so modules can't corrupt the report stream
    stdout = sys.stdout
    sys.stdout = sys.stderr
    # TODO: Make sure the cleanup from this works if something breaks
    # Init some vars
    # If recursive is None we don't parse the file list and take it as is.
    if recursive is not None:
        filelist = parseFileList(Files, recursive=recursive)
    else:
        filelist = Files
    # A list of files in the module dir
    # TODO: This should just be a list of .py's that is passed
    ModuleList = parseDir(MODULEDIR)
    # A dictionary used for the copyfileto parameter
    filedic = {}
    # Read in config file
    Config = configparser.SafeConfigParser()
    Config.optionxform = str
    # Regen the config if needed or wanted
    # NOTE(review): '_rewite_config' looks like a typo of '_rewrite_config'
    # - confirm against the helper's definition.
    if configregen or not os.path.isfile(configfile):
        _rewite_config(ModuleList, Config, filepath=configfile)
    Config.read(configfile)
    config = _get_main_config(Config, filepath=configfile)
    # If none of the files existed
    if not filelist:
        sys.stdout = stdout
        raise ValueError("No valid files")
    # Copy files to a share if configured
    if "copyfilesto" not in config:
        config["copyfilesto"] = False
    if config["copyfilesto"]:
        if os.path.isdir(config["copyfilesto"]):
            filelist = _copy_to_share(filelist, filedic, config["copyfilesto"])
        else:
            sys.stdout = stdout
            raise IOError('The copyfilesto dir" ' + config["copyfilesto"] + '" is not a valid dir')
    # Start a thread for each module
    ThreadList = _start_module_threads(filelist, ModuleList, Config)
    # Write the default configure settings for missing ones
    _write_missing_module_configs(ModuleList, Config, filepath=configfile)
    # Wait for all threads to finish
    for thread in ThreadList:
        thread.join()
    if VERBOSE:
        for thread in ThreadList:
            print(thread.name, "took", thread.endtime - thread.starttime)
    # Delete copied files
    if config["copyfilesto"]:
        for item in filelist:
            try:
                os.remove(item)
            except OSError:
                # A module (or a parallel run) may already have removed it
                pass
    # Get Result list
    results = []
    for thread in ThreadList:
        if thread.ret is not None:
            results.append(thread.ret)
        del thread
    # Translates file names back to the originals
    if filedic:
        # I have no idea if this is the best way to do in-place modifications
        for i in range(0, len(results)):
            (result, metadata) = results[i]
            modded = False
            for j in range(0, len(result)):
                (filename, hit) = result[j]
                # This is ugly but os.path.basename is os dependent
                base = filename.split("\\")[-1].split("/")[-1]
                if base in filedic:
                    filename = filedic[base]
                    modded = True
                result[j] = (filename, hit)
            if modded:
                results[i] = (result, metadata)
    # Return stdout to previous state
    sys.stdout = stdout
    return results
def _main():
    """CLI entry point (older revision): scan files and append a JSON report."""
    # Get args
    args = _parse_args()
    # Set verbose
    if args.verbose:
        # 'global' is required here: a plain assignment makes VERBOSE local
        # to the whole function, so the later 'if VERBOSE:' would raise
        # UnboundLocalError whenever args.verbose is falsy, and multiscan()
        # would never see the flag.
        global VERBOSE
        VERBOSE = args.verbose
    # Checks if user is trying to initialize
    if args.Files == ['init'] and not os.path.isfile('init'):
        _init(args)
    if not os.path.isfile(args.config):
        config_init(args.config)
    # Parse the file list
    parsedlist = parseFileList(args.Files, recursive=args.recursive)
    # Resume from report
    if args.resume:
        i = len(parsedlist)
        try:
            reportfile = open(args.json, 'r')
        except Exception as e:
            print("ERROR: Could not open report file")
            exit(1)
        for line in reportfile:
            line = json.loads(line)
            for fname in line:
                if fname in parsedlist:
                    parsedlist.remove(fname)
        reportfile.close()
        i = i - len(parsedlist)
        if VERBOSE:
            print("Skipping", i, "files which are in the report already")
    # Do multiple runs if there are too many files
    filelists = []
    if len(parsedlist) > args.numberper:
        while len(parsedlist) > args.numberper:
            filelists.append(parsedlist[:args.numberper])
            parsedlist = parsedlist[args.numberper:]
    if parsedlist:
        filelists.append(parsedlist)
    for filelist in filelists:
        # Record start time for metadata
        starttime = str(datetime.datetime.now())
        # Run the multiscan
        results = multiscan(filelist, recursive=None, configfile=args.config)
        # We need to read in the config for the parseReports call
        Config = ConfigParser.ConfigParser()
        Config.optionxform = str
        Config.read(args.config)
        config = _get_main_config(Config)
        # Add in script metadata
        endtime = str(datetime.datetime.now())
        # os.getlogin raises OSError when there is no controlling terminal
        # (e.g. on Windows services); fall back to the USERNAME env var.
        try:
            username = os.getlogin()
        except OSError:
            username = os.getenv('USERNAME')
        results.append(([], {
            "Name": "MultiScanner",
            "Start Time": starttime,
            "End Time": endtime,
            # "Command Line": list2cmdline(sys.argv),
            "Run by": username
        }))
        if not args.quiet:
            # TODO: Make this output something readable
            # Parse Results
            if "group-types" not in config or not config["group-types"]:
                config["group-types"] = []
            report = parseReports(results, groups=config["group-types"],
                                  ugly=args.ugly, includeMetadata=args.metadata)
            # Print report and write to file
            print(report)
            try:
                reportfile = open(args.json, 'a')
                reportfile.write(parseReports(results, groups=config["group-types"],
                                              ugly=True, includeMetadata=args.metadata))
                reportfile.write('\n')
                reportfile.close()
            except Exception as e:
                print(e)
                print("ERROR: Could not write report file, report not saved")
                exit(2)