Example no. 1
0
def main():
    """Command-line entry point for the Ragpicker malware crawler.

    Parses the CLI options and performs one of: an artwork banner loop,
    deletion of all stored data, printing config info, analysis of a
    single URL, loading samples from a local directory, or a full crawl.
    Collected URLs are then fanned out to a multiprocessing pool that
    runs ``runWorker`` on each of them.
    """
    mapURL = {}

    logo()
    parser = argparse.ArgumentParser(description='Ragpicker Malware Crawler')
    parser.add_argument("-a",
                        "--artwork",
                        help="Show artwork",
                        action="store_true",
                        required=False)
    parser.add_argument("-p",
                        "--processes",
                        type=int,
                        default=3,
                        help="Number of processes (default=3, max=6)")
    parser.add_argument("-u",
                        "--url",
                        help="Download and analysis from a single URL")
    parser.add_argument("-d",
                        "--directory",
                        help="Load files from local directory")
    parser.add_argument("-i",
                        "--info",
                        help="Print Ragpicker config infos",
                        action="store_true",
                        required=False)
    parser.add_argument("-da",
                        "--delete",
                        help="Delete all stored data",
                        action="store_true")
    parser.add_argument('--log-level',
                        default=logging.INFO,
                        help='logging level, default=logging.INFO')
    parser.add_argument('--log-filename', help='logging filename')
    parser.add_argument('--version',
                        action='version',
                        version='Ragpicker version ' + RAGPICKER_VERSION)

    # Parsed options are exposed module-wide; other parts of the
    # program apparently read the global ``args``.
    global args
    args = parser.parse_args()

    if args.artwork:
        # Redraw the banner once a second until the user hits Ctrl-C.
        try:
            while True:
                time.sleep(1)
                logo()
        except KeyboardInterrupt:
            return

    if args.log_level:
        log_conf = dict(
            level=args.log_level,
            format='%(levelname)s %(name)s %(module)s:%(lineno)d %(message)s')

        if args.log_filename:
            log_conf['filename'] = args.log_filename
            log.info("log-filename: " + args.log_filename)

        logging.basicConfig(**log_conf)

    if args.delete:
        # Destructive maintenance mode: wipe stored data and exit.
        worker = Worker()
        worker.runDelete()
        return

    if args.info:
        printRagpickerInfos(True)
        return

    if args.url:
        log.info(color("Download and analysis from %s" % args.url, RED))
        runWorker(args.url)
    elif args.directory:
        printRagpickerInfos()
        log.info(
            color("Load files from local directory %s" % args.directory, RED))
        mapURL = getLocalFiles(args.directory)
    else:
        printRagpickerInfos()
        # Crawl configured sources for malware URLs.
        mapURL = runCrawler()

    # Clamp the pool size to the valid range 1..6. The original code
    # only capped the maximum, so "--processes 0" (or a negative value)
    # would make Pool(processes=...) raise ValueError.
    args.processes = max(1, min(args.processes, 6))

    log.info(color("Processes: " + str(args.processes), RED))
    log.info(color("Process " + str(len(mapURL)) + " URLs", RED))

    # Create Process Pool
    pool = Pool(processes=args.processes)

    # Malware Download, process and reporting
    for url in mapURL.values():
        pool.apply_async(runWorker, args=(url, ))

    pool.close()
    pool.join()
Example no. 2
0
def main():
    """Entry point for the Ragpicker malware crawler CLI.

    Reads the command-line options and dispatches to exactly one mode:
    looping banner art, deleting stored data, printing configuration
    info, analysing a single URL, loading a local directory, or running
    the crawler. The resulting URL map is then processed concurrently
    by a pool of worker processes calling ``runWorker``.
    """
    url_map = {}

    logo()
    parser = argparse.ArgumentParser(description='Ragpicker Malware Crawler')
    parser.add_argument("-a", "--artwork", help="Show artwork", action="store_true", required=False)
    parser.add_argument("-p", "--processes", type=int, default=3, help="Number of processes (default=3, max=6)")
    parser.add_argument("-u", "--url", help="Download and analysis from a single URL")
    parser.add_argument("-d", "--directory", help="Load files from local directory")
    parser.add_argument("-i", "--info", help="Print Ragpicker config infos", action="store_true", required=False)
    parser.add_argument("-da", "--delete", help="Delete all stored data", action="store_true")
    parser.add_argument('--log-level', default=logging.INFO, help='logging level, default=logging.INFO')
    parser.add_argument('--log-filename', help='logging filename')
    parser.add_argument('--version', action='version', version='Ragpicker version ' + RAGPICKER_VERSION)

    # The parsed namespace is published as a module-level global.
    global args
    args = parser.parse_args()

    if args.artwork:
        # Keep redrawing the banner until a KeyboardInterrupt arrives.
        try:
            while True:
                time.sleep(1)
                logo()
        except KeyboardInterrupt:
            return

    if args.log_level:
        log_settings = {
            'level': args.log_level,
            'format': '%(levelname)s %(name)s %(module)s:%(lineno)d %(message)s',
        }
        if args.log_filename:
            log_settings['filename'] = args.log_filename
            log.info("log-filename: " + args.log_filename)
        logging.basicConfig(**log_settings)

    if args.delete:
        # Maintenance mode: remove every stored artifact, then exit.
        cleaner = Worker()
        cleaner.runDelete()
        return

    if args.info:
        printRagpickerInfos(True)
        return

    if args.url:
        log.info(color("Download and analysis from %s" % args.url, RED))
        runWorker(args.url)
    elif args.directory:
        printRagpickerInfos()
        log.info(color("Load files from local directory %s" % args.directory, RED))
        url_map = getLocalFiles(args.directory)
    else:
        printRagpickerInfos()
        # Crawl the configured sources for malware URLs.
        url_map = runCrawler()

    # Cap the worker count at six processes.
    args.processes = min(args.processes, 6)

    log.info(color("Processes: " + str(args.processes), RED))
    log.info(color("Process " + str(len(url_map)) + " URLs", RED))

    # Fan the collected URLs out over a pool of worker processes that
    # download, analyse and report on each sample.
    process_pool = Pool(processes=args.processes)
    for target_url in url_map.values():
        process_pool.apply_async(runWorker, args=(target_url,))

    process_pool.close()
    process_pool.join()