def main():
    """Fetch URLs from urlscan.io and feed them through the URL worker queue."""
    # Verify the output directories before doing any work
    commons.check_path(args)

    # Startup banners
    commons.show_summary(args)
    commons.show_networking(args)  # globals: proxies, torsocks

    # Load config.yaml into the shared config global
    commons.read_config(args)  # globals: config

    # Rebuild the compiled exclusion patterns
    commons.recompile_exclusions()  # globals: exclusions

    # Worker queue plus its manager threads
    url_queue = commons.create_queue("url_queue")
    commons.UrlQueueManager(args, url_queue)

    # Pull candidate URLs from urlscan.io and enqueue each one
    for candidate in commons.query_urlscan(args):
        url_queue.put(candidate)

    # Block until every queued URL has been processed
    url_queue.join()
    return
def main():
    """Score candidate domains and hand suspicious ones to the URL workers.

    Wires up a domain queue feeding a URL queue, enqueues every discovered
    domain in sorted order, then blocks until both queues drain.
    """
    # Check if output directories exist
    commons.check_path(args)

    # Print start messages
    commons.show_summary(args)
    commons.show_networking(args)  # globals: proxies, torsocks

    # Read config.yaml
    commons.read_config(args)  # globals: config

    # Recompile exclusions
    commons.recompile_exclusions()  # globals: exclusions

    # Create queues
    domain_queue = commons.create_queue("domain_queue")
    url_queue = commons.create_queue("url_queue")

    # Create threads
    commons.DomainQueueManager(args, domain_queue, url_queue)
    commons.UrlQueueManager(args, url_queue)

    # Get domains
    domains = commons.get_domains(args)

    # Fixed: removed redundant double parentheses around the colored() call
    # and around sorted(domains) — behavior is unchanged.
    print(colored("Scoring and checking the domains...\n", "yellow", attrs=["bold"]))

    for domain in sorted(domains):
        domain_queue.put(domain)

    # Wait for both queues to finish before exiting
    domain_queue.join()
    url_queue.join()
    return
def main():
    """Initialise shared state and stream certificate events from Certstream."""
    global exclusions
    global suspicious
    global url_queue

    # Make sure the output directories are in place
    commons.check_path(args)

    # Banner / networking summary
    commons.show_summary(args)
    commons.show_networking(args, uagent)

    # Load suspicious.yaml and external.yaml
    suspicious = commons.read_externals()

    # Refresh the compiled exclusion patterns
    exclusions = commons.recompile_exclusions()

    # Worker queue and its manager threads
    url_queue = commons.create_queue("url_queue")
    commons.UrlQueueManager(args, url_queue, uagent)

    # Subscribe to the public Certstream feed; callback enqueues matches.
    # listen_for_events blocks for the lifetime of the connection.
    print(colored("Connecting to Certstream...\n", "yellow", attrs=["bold"]))
    certstream.listen_for_events(message_callback=callback,
                                 url="wss://certstream.calidog.io",
                                 on_open=on_open)
def main():
    """Initialise shared state and listen for events from a CT log server."""
    global exclusions
    global config
    global url_queue

    # Verify the output directories exist
    commons.check_path(args)

    # Startup banners
    commons.show_summary(args)
    commons.show_networking(args)  # globals: proxies, torsocks

    # Parse config.yaml
    config = commons.read_config(args)  # globals: config

    # Rebuild the compiled exclusion patterns
    exclusions = commons.recompile_exclusions()  # globals: exclusions

    # Worker queue and its manager threads
    url_queue = commons.create_queue("url_queue")
    commons.UrlQueueManager(args, url_queue)

    # Connect to the CT log server chosen on the command line; the callback
    # enqueues matching events. listen_for_events blocks indefinitely.
    print(colored("Connecting to Certstream...\n", "yellow", attrs=["bold"]))
    certstream.listen_for_events(message_callback=callback,
                                 url=args.ctl_server,
                                 on_open=on_open)
def main():
    """Read URLs from a file and feed them to the URL worker queue.

    URLs lacking an explicit http/https scheme are prefixed with
    "http://" before being enqueued.
    """
    # Check if output directories exist
    commons.check_path(args)

    # Print start messages
    commons.show_summary(args)
    commons.show_networking(args)  # globals: proxies, torsocks

    # Read config.yaml
    commons.read_config(args)  # globals: config

    # Recompile exclusions
    commons.recompile_exclusions()  # globals: exclusions

    # Create queues
    url_queue = commons.create_queue("url_queue")

    # Create threads
    commons.UrlQueueManager(args, url_queue)

    # Read file containing URLs
    urls = commons.read_file(args.input_file)

    # Process URLs
    for url in urls:
        # Fixed: test for a full scheme prefix. The old startswith("http")
        # check wrongly skipped bare hosts that merely begin with "http"
        # (e.g. "httpbin.org"), leaving them without a scheme. This matches
        # the scheme check used by the sibling recursive-crawl entry point.
        if not url.startswith(("http://", "https://")):
            url = "http://{}".format(url)
        url_queue.put(url)
    url_queue.join()
    return
def main():
    """Query urlscan.io for phishing-kit URLs and crawl each one recursively."""
    # Verify the output directories exist
    commons.check_path(args)

    # Startup banners
    commons.show_summary(args)
    commons.show_networking(args, uagent)

    # Load suspicious.yaml and external.yaml
    suspicious = commons.read_externals()

    # Rebuild the compiled exclusion patterns
    commons.recompile_exclusions()

    # Interesting file extensions: archives merged with plain files
    extensions = {**suspicious["archives"], **suspicious["files"]}

    # Ask urlscan.io for candidate URLs matching the configured queries
    urls = commons.query_urlscan(args, suspicious["queries"], uagent, extensions)

    # Queue plus the recursive-crawl worker threads
    recursion_queue = commons.create_queue("recursion_queue")
    commons.RecursiveQueueManager(args, recursion_queue, uagent, extensions)

    # Enqueue every result and wait for the crawlers to finish
    for candidate in urls:
        recursion_queue.put(candidate)

    recursion_queue.join()
    return
def main():
    """Read URLs from a file and crawl each http(s) URL recursively.

    Entries without an explicit http:// or https:// scheme are skipped.
    """
    # Verify the output directories exist
    commons.check_path(args)

    # Startup banners
    commons.show_summary(args)
    commons.show_networking(args, uagent)

    # Load suspicious.yaml and external.yaml
    suspicious = commons.read_externals()

    # Rebuild the compiled exclusion patterns
    commons.recompile_exclusions()

    # Interesting file extensions: archives merged with plain files
    extensions = {**suspicious["archives"], **suspicious["files"]}

    # Candidate URLs supplied by the user
    urls = commons.read_file(args.input_file)

    # Queue plus the recursive-crawl worker threads
    recursion_queue = commons.create_queue("recursion_queue")
    commons.RecursiveQueueManager(args, recursion_queue, uagent, extensions)

    # Enqueue only entries that carry a full http(s) scheme
    for candidate in urls:
        if candidate.startswith(("http://", "https://")):
            recursion_queue.put(candidate)

    recursion_queue.join()
    return
def main():
    """Score candidate domains and pass suspicious ones to the URL workers."""
    # Verify the output directories exist
    commons.check_path(args)

    # Startup banners
    commons.show_summary(args)
    commons.show_networking(args, uagent)

    # Load suspicious.yaml and external.yaml
    commons.read_externals()

    # Rebuild the compiled exclusion patterns
    commons.recompile_exclusions()

    # Domain queue feeds the URL queue via the manager threads
    domain_queue = commons.create_queue("domain_queue")
    url_queue = commons.create_queue("url_queue")
    commons.DomainQueueManager(args, domain_queue, url_queue)
    commons.UrlQueueManager(args, url_queue, uagent)

    # Fetch the candidate domains and enqueue each one
    domains = commons.get_domains(uagent, args)

    print(
        colored("Scoring and checking the domains...\n", "yellow", attrs=["bold"]))

    for candidate in domains:
        domain_queue.put(candidate)

    # Wait for both queues to drain before exiting
    domain_queue.join()
    url_queue.join()
    return