Example #1
import argparse
import logging
import time

# logo, runWorker, runCrawler and the RAGPICKER_* constants are Ragpicker
# project helpers assumed to be in scope; log is the module-level logger.
def main():
	mapURL = {}
	
	logo()
	parser = argparse.ArgumentParser(description='Ragpicker Malware Crawler')
	parser.add_argument("-a", "--artwork", help="Show artwork", action="store_true", required=False)
	parser.add_argument("-t", "--threads", type=int, default=3, help="Threads to process (default=3, max=6)")
	parser.add_argument("-u", "--url", help="Download and analysis from a single URL")
	parser.add_argument('--log-level', default=logging.INFO, help='logging level, default=logging.INFO')
	parser.add_argument('--log-filename', help='logging filename')
	parser.add_argument('--version', action='version', version='Ragpicker version ' + RAGPICKER_VERSION)

	global args
	args = parser.parse_args()
	
	if args.artwork:
		try:
			while True:
				time.sleep(1)
				logo()
		except KeyboardInterrupt:
			return
		
	if args.log_level:
		log_conf = dict(level=args.log_level,
			format='%(levelname)s %(name)s %(module)s:%(lineno)d %(message)s')

		if args.log_filename:
			log_conf['filename'] = args.log_filename
			log.info("log-filename: " + args.log_filename)

		logging.basicConfig(**log_conf)
	
	log.info("RAGPICKER_VERSION: " + RAGPICKER_VERSION)
	log.info("RAGPICKER_BUILD_DATE: " + RAGPICKER_BUILD_DATE)
	log.info("RAGPICKER_ROOT: " + RAGPICKER_ROOT)
	
	if args.url:
		log.info("Download and analysis from %s" % args.url)
		
		mapURL["0"] = args.url
		
		# Malware download, processing, and reporting
		runWorker(mapURL, 1)
	else:
		# Cap at a maximum of 6 threads
		if args.threads > 6:
			args.threads = 6
			
		log.info("Threads: " + str(args.threads))
			
		# Crawl malware URLs
		mapURL = runCrawler()
		log.info("Process "+str(len(mapURL))+" URLs")
		
		# Malware download, processing, and reporting
		runWorker(mapURL, args.threads)
Example #2
import argparse
import logging
import time
from multiprocessing import Pool

# logo, Worker, printRagpickerInfos, runWorker, getLocalFiles, runCrawler,
# color, RED and RAGPICKER_VERSION are Ragpicker project helpers assumed to
# be in scope; log is the module-level logger.
def main():
    mapURL = {}

    logo()
    parser = argparse.ArgumentParser(description='Ragpicker Malware Crawler')
    parser.add_argument("-a",
                        "--artwork",
                        help="Show artwork",
                        action="store_true",
                        required=False)
    parser.add_argument("-p",
                        "--processes",
                        type=int,
                        default=3,
                        help="Number of processes (default=3, max=6)")
    parser.add_argument("-u",
                        "--url",
                        help="Download and analysis from a single URL")
    parser.add_argument("-d",
                        "--directory",
                        help="Load files from local directory")
    parser.add_argument("-i",
                        "--info",
                        help="Print Ragpicker config infos",
                        action="store_true",
                        required=False)
    parser.add_argument("-da",
                        "--delete",
                        help="Delete all stored data",
                        action="store_true")
    parser.add_argument('--log-level',
                        default=logging.INFO,
                        help='logging level, default=logging.INFO')
    parser.add_argument('--log-filename', help='logging filename')
    parser.add_argument('--version',
                        action='version',
                        version='Ragpicker version ' + RAGPICKER_VERSION)

    global args
    args = parser.parse_args()

    if args.artwork:
        try:
            while True:
                time.sleep(1)
                logo()
        except KeyboardInterrupt:
            return

    if args.log_level:
        log_conf = dict(
            level=args.log_level,
            format='%(levelname)s %(name)s %(module)s:%(lineno)d %(message)s')

        if args.log_filename:
            log_conf['filename'] = args.log_filename
            log.info("log-filename: " + args.log_filename)

        logging.basicConfig(**log_conf)

    if args.delete:
        worker = Worker()
        worker.runDelete()
        return

    if args.info:
        printRagpickerInfos(True)
        return

    if args.url:
        log.info(color("Download and analysis from %s" % args.url, RED))
        runWorker(args.url)
    elif args.directory:
        printRagpickerInfos()
        log.info(
            color("Load files from local directory %s" % args.directory, RED))
        mapURL = getLocalFiles(args.directory)
    else:
        printRagpickerInfos()
        # Crawl malware URLs
        mapURL = runCrawler()

    # Cap at a maximum of 6 processes
    if args.processes > 6:
        args.processes = 6

    log.info(color("Processes: " + str(args.processes), RED))
    log.info(color("Process " + str(len(mapURL)) + " URLs", RED))

    # Create Process Pool
    pool = Pool(processes=args.processes)

    # Malware download, processing, and reporting
    for url in mapURL.values():
        pool.apply_async(runWorker, args=(url, ))

    pool.close()
    pool.join()
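
This variant fans the collected URLs out over a multiprocessing.Pool rather
than handing a thread count to runWorker. A minimal, self-contained sketch of
the same fan-out pattern, with a stand-in worker in place of Ragpicker's
runWorker:

from multiprocessing import Pool

def run_worker(url):
    # stand-in for runWorker(): download and analyze a single URL
    print("processing %s" % url)

if __name__ == '__main__':
    map_url = {"0": "http://example.com/a.exe", "1": "http://example.com/b.exe"}

    pool = Pool(processes=min(len(map_url), 6))  # same cap of 6 as above
    for url in map_url.values():
        pool.apply_async(run_worker, args=(url,))
    pool.close()  # no further tasks will be submitted
    pool.join()   # block until every queued task has finished

Note that apply_async swallows worker exceptions unless the returned
AsyncResult is checked; the example above relies on logging inside the
worker instead.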
Example #3
import argparse
import logging

from utils.logo import logo
from core.statistics import Statistics

log = logging.getLogger("Main")

if __name__ == '__main__':

    parser = argparse.ArgumentParser(description='Ragpicker Statistics')
    subparsers = parser.add_subparsers(title='subcommands',
                                       description='valid subcommands',
                                       help='additional help')

    parser_long = subparsers.add_parser('long',
                                        help="Show statistics (long version)")
    parser_long.set_defaults(which='long')
    parser_short = subparsers.add_parser(
        'short', help="Show statistics (short version)")
    parser_short.set_defaults(which='short')
    parser_av = subparsers.add_parser('av',
                                      help="Show statistics (AV version)")
    parser_av.set_defaults(which='av')

    args = vars(parser.parse_args())

    logo()

    if args['which'] == 'long':
        Statistics().runStatisticsLong()
    elif args['which'] == 'short':
        Statistics().runStatisticsShort()
    elif args['which'] == 'av':
        Statistics().runStatisticsAV()
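
The dispatch works because each subparser plants its own name under the
'which' key via set_defaults(). A stripped-down sketch of the pattern, with
illustrative subcommand names:

import argparse

parser = argparse.ArgumentParser(description='set_defaults dispatch demo')
subparsers = parser.add_subparsers(title='subcommands')

for name in ('long', 'short', 'av'):
    sub = subparsers.add_parser(name, help="Show statistics (%s version)" % name)
    sub.set_defaults(which=name)  # parse_args() then carries the chosen name

args = vars(parser.parse_args(['av']))
assert args['which'] == 'av'

On Python 3.7+, add_subparsers(dest='which', required=True) records the name
and rejects a missing subcommand in one step, without set_defaults().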
Example #4
import argparse
import logging
import time
from multiprocessing import Pool

# The same Ragpicker project helpers as in Example #2 are assumed in scope.
def main():
	mapURL = {}
	
	logo()
	parser = argparse.ArgumentParser(description='Ragpicker Malware Crawler')
	parser.add_argument("-a", "--artwork", help="Show artwork", action="store_true", required=False)
	parser.add_argument("-p", "--processes", type=int, default=3, help="Number of processes (default=3, max=6)")
	parser.add_argument("-u", "--url", help="Download and analysis from a single URL")
	parser.add_argument("-d", "--directory", help="Load files from local directory")
	parser.add_argument("-i", "--info", help="Print Ragpicker config infos", action="store_true", required=False)
	parser.add_argument("-da", "--delete", help="Delete all stored data", action="store_true")
	parser.add_argument('--log-level', default=logging.INFO, help='logging level, default=logging.INFO')
	parser.add_argument('--log-filename', help='logging filename')
	parser.add_argument('--version', action='version', version='Ragpicker version ' + RAGPICKER_VERSION)

	global args
	args = parser.parse_args()
	
	if args.artwork:
		try:
			while True:
				time.sleep(1)
				logo()
		except KeyboardInterrupt:
			return
		
	if args.log_level:
		log_conf = dict(level=args.log_level,
			format='%(levelname)s %(name)s %(module)s:%(lineno)d %(message)s')

		if args.log_filename:
			log_conf['filename'] = args.log_filename
			log.info("log-filename: " + args.log_filename)

		logging.basicConfig(**log_conf)
	
	if args.delete:
		worker = Worker()
		worker.runDelete()
		return
	
	if args.info:
		printRagpickerInfos(True)
		return
	
	if args.url:
		log.info(color("Download and analysis from %s" % args.url, RED))
		runWorker(args.url)
	elif args.directory:
		printRagpickerInfos()
		log.info(color("Load files from local directory %s" % args.directory, RED))
		mapURL = getLocalFiles(args.directory)
	else:
		printRagpickerInfos()
		# Crawl malware URLs
		mapURL = runCrawler()
		
	# Cap at a maximum of 6 processes
	if args.processes > 6:
		args.processes = 6
		
	log.info(color("Processes: " + str(args.processes), RED))	
	log.info(color("Process " + str(len(mapURL)) + " URLs", RED))
	
	# Create Process Pool
	pool = Pool(processes=args.processes)
	
	# Malware download, processing, and reporting
	for url in mapURL.values():
		pool.apply_async(runWorker, args=(url,))
		
	pool.close()
	pool.join()
Example #5
import argparse
import logging

from utils.logo import logo 
from core.statistics import Statistics

log = logging.getLogger("Main")

if __name__ == '__main__':

    parser = argparse.ArgumentParser(description='Ragpicker Statistics')
    subparsers = parser.add_subparsers(title='subcommands', description='valid subcommands', help='additional help')
   
    parser_long = subparsers.add_parser('long', help="Show statistics (long version)")
    parser_long.set_defaults(which='long')
    parser_short = subparsers.add_parser('short', help="Show statistics (short version)")
    parser_short.set_defaults(which='short')   
    parser_av = subparsers.add_parser('av', help="Show statistics (AV version)")
    parser_av.set_defaults(which='av')
    
    args = vars(parser.parse_args())
    
    logo()
    
    if args['which'] == 'long':
        Statistics().runStatisticsLong()
    elif args['which'] == 'short':
        Statistics().runStatisticsShort()
    elif args['which'] == 'av':
        Statistics().runStatisticsAV()
Example #6
from glob import glob
from typing import List

import torch
from torchvision import transforms
from torch.utils.data import DataLoader

# IdaoDataset, logo (here a fold generator, not the banner) and
# dummy_validation are project-local helpers assumed to be in scope.
train_path: List[str] = glob(
    "../data/track_1/idao_dataset/new_test/train/*/*.png")
test_path: List[str] = glob(
    "../data/track_1/idao_dataset/new_test/*_test/*.png")

transform = transforms.Compose([
    transforms.CenterCrop(128),
    transforms.ToTensor(),
    transforms.Normalize(
        (-0.3918, -0.2711, -0.0477),  # per-channel mean over the whole dataset
        (0.0822, 0.0840, 0.0836),     # per-channel std over the whole dataset
    ),
])

fold_generator = logo(list_files=train_path)
# fold_generator = dummy_validation(list_files=train_path)
device = torch.device("cuda")

for index, (fold_train_path, fold_val_path) in enumerate(fold_generator):

    train_dataset = IdaoDataset(list_path=fold_train_path, transform=transform)
    val_dataset = IdaoDataset(list_path=fold_val_path, transform=transform)

    train_dataloader = DataLoader(dataset=train_dataset,
                                  batch_size=50,
                                  shuffle=True,
                                  num_workers=8)

    # The listing is cut off here; the remaining arguments are reconstructed
    # to mirror train_dataloader.
    val_dataloader = DataLoader(dataset=val_dataset,
                                batch_size=50,
                                shuffle=False,
                                num_workers=8)
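
Unlike the other examples, logo here is not a banner: it is a fold generator
yielding (train, val) path splits, presumably a leave-one-group-out scheme
given the name. A minimal sketch of such a generator, with an illustrative
grouping rule (the parent directory of each file):

import os
from collections import defaultdict

def leave_one_group_out(list_files):
    # Bucket paths by parent directory, then yield one (train, val)
    # split per bucket, holding that bucket out as the validation set.
    groups = defaultdict(list)
    for path in list_files:
        groups[os.path.basename(os.path.dirname(path))].append(path)
    for held_out in groups:
        train = [p for g, paths in groups.items() if g != held_out for p in paths]
        yield train, groups[held_out]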
Example #7
# Overrides argparse.ArgumentParser.error(): on any parse error, print the
# banner and the full help text, then exit with status 2. The message
# argument is intentionally ignored.
def error(self, message):
    logo()
    self.print_help()
    sys.exit(2)
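
A method like this lives on an ArgumentParser subclass; argparse calls
error() whenever parsing fails. A minimal sketch of the surrounding context
(the class name and banner body are hypothetical):

import argparse
import sys

def logo():
    print("== Ragpicker ==")  # hypothetical stand-in for the real banner

class BannerArgumentParser(argparse.ArgumentParser):  # hypothetical name
    def error(self, message):
        logo()
        self.print_help()
        sys.exit(2)  # conventional exit status for usage errors

parser = BannerArgumentParser(description='demo')
parser.add_argument('--level', type=int)
# parser.parse_args(['--level', 'x']) would print the banner and help, then exit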