class MasscanWorker(BaseWorker):
    """Run one masscan shard per job and emit discovered open ports.

    Input jobs come from the 'masscan' queue as [seed, shards-string, port],
    e.g. [213850, '4/10', 80]. Each 'Discovered open port' line from
    masscan's stdout is yielded as a (port, protocol, ip) tuple onto
    'masscan_out'.
    """
    qinput = Queue('masscan')
    qoutput = Queue('masscan_out')
    chunk_size = 300

    # Compiled once at class-definition time instead of re-matching the
    # pattern string on every output line; raw string so \d, \w and \. are
    # passed to the regex engine untouched.
    _discovered_re = re.compile(
        r'Discovered open port (\d+)/(\w+) on (\d+\.\d+\.\d+\.\d+)')

    def run(self, job):
        """
        Job is in the form [seed, shards-string, port].

        e.g. [213850, '4/10', 80]
        """
        command = [
            'masscan',
            '--seed', str(job[0]),
            '--shards', str(job[1]),
            '--ports', str(job[2]),
        ]
        proc = subprocess.Popen(command, stdout=subprocess.PIPE)
        for line in proc.stdout:
            match = self._discovered_re.match(line.strip())
            if match:
                # (port, protocol, ip)
                yield match.groups()
        # reap the child once its stdout is exhausted
        proc.wait()
def test_iterq():
    """Iterating a queue yields items in FIFO order and drains it."""
    queue = Queue('iter_queue')
    queue.clear()
    for value in range(10):
        queue.send(value)
    for expected, got in zip(range(10), queue):
        assert expected == got
    assert len(queue) == 0
class Runner(BaseWorker): qinput = Queue('job:raw') qoutput = Queue('result:raw') def run(self, job): outfile, command = job command = map(str, command) print "Running command: %s" % ' '.join(command) proc = subprocess.Popen(command, stdout=subprocess.PIPE) stdout, stderr = proc.communicate() yield [outfile, stdout]
class RFBFingerprinter(BaseWorker):
    """Fingerprint VNC (RFB) servers found by the masscan worker.

    Input: (port, proto, ip) jobs from 'masscan_out'.
    Output: [ip, port, rfb_proto, security_proto(, follow_data)] lists
    pushed onto the pickled 'rfb_print' queue.
    """
    qinput = Queue('masscan_out')
    qoutput = PickleQueue('rfb_print')

    def run(self, job):
        port, proto, ip = job
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(1)
        output = []
        try:
            s.connect((ip, int(port)))
            rfb_proto = s.recv(512)
            output.append(rfb_proto)
            # mirror the specified protocol back to the sender
            s.sendall(rfb_proto)
            security_proto = s.recv(512)
            output.append(security_proto)
            if rfb_proto != 'RFB 003.003\n':
                s.sendall('\x01')  # try no security
                follow_data = s.recv(512)
                output.append(follow_data)
        except Exception:
            # Bad practice to catch all exceptions, but this is demo code...
            pass
        finally:
            # BUGFIX: the original only closed the socket on the success
            # path, leaking one file descriptor per failed/timed-out probe.
            s.close()
        if output:
            # (ip, port, rfb_proto, security_proto, follow_data)
            yield [ip, port] + output
def test_clear():
    """clear() empties the queue and reports how many items it removed."""
    queue = Queue('clear_queue')
    queue.clear()
    assert len(queue) == 0
    queue.send('baz')
    assert len(queue) == 1
    # clear() returns the number of items dropped
    assert queue.clear() == 1
    assert len(queue) == 0
    assert queue.next() is None
class Results(BaseWorker):
    """Write raw command results to timestamped files on disk."""
    qinput = Queue('result:raw')

    def run(self, job):
        # job = [filename template containing {timestamp}, raw result data]
        output_template, result = job
        target = output_template.format(timestamp=time.time())
        with open(target, 'w') as outfile:
            outfile.write(result)
def test_finite():
    """finite() wraps a queue in an iterator that stops when it drains."""
    queue = Queue('finite_test_queue')
    queue.clear()
    for value in range(10):
        queue.send(value)
    # iterating the bare (non-blocking) queue yields None once it runs dry
    for expected, got in zip(range(10) + [None] * 10, queue):
        assert expected == got
    for value in range(10):
        queue.send(value)
    finite_view = queue.finite()
    assert len(queue) == 10
    assert range(10) == [item for item in finite_view]
    assert len(queue) == 0
def test_queue():
    """Basic send/next round trip through a queue."""
    queue = Queue('my_queue')
    # clean up anything that might be lingering
    queue.clear()
    assert len(queue) == 0
    assert queue.next() is None
    queue.send('foo')
    assert len(queue) == 1
    assert queue.next() == 'foo'
    assert len(queue) == 0
class AddWorker(BaseWorker):
    """Tutorial worker: sums the first two numbers of each job.

    Each [a, b, c] job read from 'add_input' is transformed into
    [a + b, c] and pushed onto 'multiply_input', where MultWorker
    picks it up. run() is the only method a worker must define; tasa
    handles queue plumbing, looping and serialization itself.
    """
    # Jobs to process arrive on this queue...
    qinput = Queue('add_input')
    # ...and finished work is forwarded here for the multiply stage.
    qoutput = Queue('multiply_input')

    def run(self, job):
        # run() may return nothing, return a list of results, or yield
        # results one at a time. One input -> one output is the common
        # case, but yielding keeps the door open for one input producing
        # several output jobs.
        a, b, c = job
        yield [a + b, c]
def test_blocking(): iq = Queue('blocking_test_queue') iq.clear() iq.blocking = 1 for x in range(10): iq.send(x) for x, y in zip(range(10) + [None], iq): assert x == y start_time = time.time() assert iq.next() == None elapsed = time.time() - start_time print elapsed assert 1 < elapsed < 4
class MultWorker(BaseWorker): # we take the queue used as output from add for our input qinput = Queue('multiply_input') # and since this is an example, we're going to print the result # rather than pushing it back into an output queue, so we can skip # defining qoutput. def run(self, job): # unpack the job again. Note that we could pass a more complex # data structure here - a dict is commonly useful. added, c = job result = added * c print "Result: %d" % result
def test_queue():
    """send/next round trips, including multi-item sends, stay FIFO."""
    queue = Queue('my_queue')
    # clean up anything that might be lingering
    queue.clear()
    assert len(queue) == 0
    assert queue.next() is None
    queue.send('foo')
    assert len(queue) == 1
    assert queue.next() == 'foo'
    assert len(queue) == 0
    # send() accepts several items at once; order is preserved
    queue.send('zoo', 'bar', 'baz')
    assert len(queue) == 3
    assert queue.next() == 'zoo'
    assert queue.next() == 'bar'
    assert queue.next() == 'baz'
    assert len(queue) == 0
# defining qoutput. def run(self, job): # unpack the job again. Note that we could pass a more complex # data structure here - a dict is commonly useful. added, c = job result = added * c print "Result: %d" % result # and now we don't return anything, which is just fine. In a # real job, we might store our result in an object store here, # and yield a job complete message. # Now we're going to stick some jobs in the queue for the # workers. We'll make new instances to work with: add_input = Queue('add_input') multiply_input = Queue('multiply_input') # There's nothing special about instances of Queue objects - as long # as they have the same name, they point to the same queue. We could # have defined these at the top and used them throughout the file, but # it's often more convenient to be slightly redundant. # Since this is an example, the first thing we're going to do is clear # out the queues in case there's stale data in them. add_input.clear() multiply_input.clear() # Now let's put some jobs in the queue add_input.send([1, 2, 3]) add_input.send([3, 5, 9]) add_input.send([1, 10, 100])
# defining qoutput. def run(self, job): # unpack the job again. Note that we could pass a more complex # data structure here - a dict is commonly useful. added, c = job result = added * c print "Result: %d" % result # and now we don't return anything, which is just fine. In a # real job, we might store our result in an object store here, # and yield a job complete message. # Now we're going to stick some jobs in the queue for the # workers. We'll make new instances to work with: add_input = Queue('add_input') multiply_input = Queue('multiply_input') # There's nothing special about instances of Queue objects - as long # as they have the same name, they point to the same queue. We could # have defined these at the top and used them throughout the file, but # it's often more convenient to be slightly redundant. # Since this is an example, the first thing we're going to do is clear # out the queues in case there's stale data in them. add_input.clear() multiply_input.clear() # Now let's put some jobs in the queue add_input.send([1,2,3]) add_input.send([3,5,9]) add_input.send([1,10,100])
def jobs(self): while True: yield time.ctime() time.sleep(10) def run(self, job): queues = [Queue('job:raw'), Queue('result:raw')] for q in queues: print 'Queue:', q.name, len(q) if __name__ == '__main__': SUBNET_PREFIXLEN = 27 portlist = '80,443' ip = netaddr.IPNetwork(sys.argv[1]) qinput = Queue('job:raw') if SUBNET_PREFIXLEN < ip.prefixlen: subnet_list = [ ip, ] else: subnet_list = ip.subnet(SUBNET_PREFIXLEN) for sub in subnet_list: cmd = [ 'nmap', '-T4', # use aggressive timings '--open', # only return open ports '-sS', # SYN scan '-n', # don't attempt DNS resolution
def jobs(self):
    """Yield a fresh (timestamp) job every 10 seconds, forever."""
    while True:
        yield time.ctime()
        time.sleep(10)

def run(self, job):
    """Report the current length of the raw job and result queues."""
    queues = [Queue('job:raw'), Queue('result:raw')]
    for q in queues:
        print 'Queue:', q.name, len(q)

if __name__ == '__main__':
    # Split the network given on the command line into /27 subnets and
    # queue one nmap scan job per subnet.
    SUBNET_PREFIXLEN = 27
    portlist = '80,443'
    ip = netaddr.IPNetwork(sys.argv[1])
    qinput = Queue('job:raw')
    for sub in ip.subnet(SUBNET_PREFIXLEN):
        cmd = ['nmap',
               '-T4',        # use aggressive timings
               '--open',     # only return open ports
               '-sS',        # SYN scan
               '-n',         # don't attempt DNS resolution
               '-PN',        # Treat all hosts as online (don't ping)
               '-oX', '-',   # XML output to stdout
               '-p', portlist,  # ports to scan
               str(sub)      # Target specification (hostname, IP
                             # addresses, ranges, subnets, etc.)
               ]
        # job = [output filename template, command]; {timestamp} is
        # filled in by the Results worker when the file is written.
        qinput.send(['out/%s_%s_{timestamp}.xml' % (sub.ip, sub.prefixlen), cmd])
def run(self, job): queues = [Queue('job:raw'), Queue('result:raw')] for q in queues: print 'Queue:', q.name, len(q)