def __init__(self, handler, **kw):
    # handler.getOrchestrator().addStream('meterpreter')
    # handler.getOrchestrator().streamIdent.setHardStreamName('meterpreter')
    # handler.getOrchestrator().deleteStream('control')
    args = defaultArgMerging(MeterpreterShell.Defaults, kw)
    BaseShell.__init__(self, handler, **args)
def __init__(self, recv, send, orchestrator, **kw): """ :param str stage_stream: The stream where all stages will be received. """ super(StageableHandler, self).__init__(recv, send, orchestrator, **kw) arguments = defaultArgMerging(self.Defaults, kw) # print arguments self.stage_stream = arguments['stage_stream'] self.addStage(self.stage_stream, stage_obj)
def __init__(self, recv, send, orchestrator, **kw): """ :param str request_data: The data that, when received as message, a stored chunk will be sent. """ super(ResponseOnlyHandler, self).__init__(recv, send, orchestrator, **kw) arguments = defaultArgMerging(self.Defaults, kw) self.request_data = arguments['request_data'] self.preferred_send = self.queueSend
def __init__(self, recv, send, orchestrator, **kw):
    super(DateableHandler, self).__init__(recv, send, orchestrator, **kw)
    arguments = defaultArgMerging(self.Defaults, kw)
    self.dates = {}
    self.dates['workinghours'] = arguments['workinghours']
    self.dates['weekends'] = []
    for day in arguments['weekends']:
        normalized_day = getDay(day)
        self.dates['weekends'].append(normalized_day)  # store days as 0-6 numbers only
    self.dates['holidays'] = arguments['holidays']
    self.dates['easter'] = arguments['easter']
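# Illustrative only: the keyword names below come from the __init__ above, but
# their exact value formats are not shown in this file, so every literal here is
# an assumption rather than the library's documented API.
quiet = DateableHandler(
    recv, send, orch,                  # placeholder transport callables and Orchestrator
    workinghours=["09:00-17:00"],      # assumed format
    weekends=["Saturday", "Sunday"],   # normalized to 0-6 numbers by getDay()
    holidays=["25/12"],                # assumed format
    easter=False,                      # assumed boolean switch
)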
def __init__(self, recv, send, orchestrator, **kw):
    arguments = defaultArgMerging(BaseHandler.Defaults, kw)
    self.receive_function = recv
    self.send_function = send
    self.orchestrator = orchestrator
    self.preferred_send = self.sendAdHoc
    self.to_send_list = []
    self.to_send_raw = []
    # The protocol loop runs in a daemon thread so it dies with the main program.
    self.__protocolThread = Thread(target=self.__protocolThreadFunction)
    self.__protocolThread.daemon = True
    self.on = True
    if arguments['start']:
        self.start()
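# A minimal sketch of wiring a handler to a transport. `SomeHandler` stands for a
# concrete BaseHandler subclass and `orchestrator` for an already-configured
# Orchestrator; both are placeholders, as is the UDP peer address.
import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(("0.0.0.0", 8998))

def recv():
    # Blocking receive; the raw bytes are handed to the Orchestrator for depacketing.
    data, _addr = sock.recvfrom(4096)
    return data

def send(raw):
    sock.sendto(raw, ("127.0.0.1", 8999))

handler = SomeHandler(recv, send, orchestrator)  # spawns the protocol thread if 'start' is set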
def __init__(self, recv, send, orchestrator, **kw): """ :param str request_data: The actual payload that is used in messages thet request data. :param tuple delay_between: A `tuple` containing 2 `floats` or `ints`. The beaconing intervals will be calculated randomly between these 2 numbers. :param str fetch_stream: The stream where all the beaconing will be tagged with. """ super(InterrogatingHandler, self).__init__(recv, send, orchestrator, **kw) self.Defaults['fetch_stream'] = orchestrator.getDefaultStream() arguments = defaultArgMerging(self.Defaults, kw) self.request_data = arguments['request_data'] self.delay_between = arguments['delay_between'] self.fetch_stream = arguments['fetch_stream'] self.fetcher_thread = Thread(target=self.__fetcher_function) self.fetcher_thread.daemon = True self.fetcher_thread.start()
def __init__(self, handler, **kw):
    cmd.Cmd.__init__(self)
    arguments = defaultArgMerging(BaseShell.Defaults, kw)
    self.prompt_templ = arguments['prompt']
    self.ignore_messages = arguments['ignore_messages']
    self.output = arguments['output']
    self.debug = arguments['debug']
    subshells = arguments['subshells']
    self.subshells_dict = {}
    self.handler = handler
    # Each subshell can be given either as a class or as a (class, kwargs) tuple.
    for stream_name, subshell_attrs in subshells.items():
        if isinstance(subshell_attrs, tuple):
            subshell_class, subshell_kwargs = subshell_attrs
        else:
            subshell_class, subshell_kwargs = subshell_attrs, dict()
        self.addSubShell(stream_name, subshell_class, subshell_kwargs)
    # Hook the handler's onChunk callback so incoming chunks reach the matching subshell.
    handler.onChunk = handlerCallbackHook(handler.onChunk, self.subshells_dict)
    self.updatePrompt()
    self.sysinfo = None
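# Sketch of the `subshells` argument format inferred from the loop above: a mapping
# from stream name to either a shell class or a (class, kwargs) tuple. The stream
# names and the example kwargs are illustrative, not library defaults.
shell = BaseShell(
    handler,
    subshells={
        "control": SimpleShell,
        "python": (ExtendableShell, {"debug": True}),
    },
)
shell.cmdloop()  # standard cmd.Cmd REPL entry point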
def __init__(self, handler, log_unrecognised=False, **kw):
    args = defaultArgMerging(ExtendableShell.Defaults, kw)
    BaseShell.__init__(self, handler, **args)
def __init__(self, handler, **kw):
    args = defaultArgMerging(StandardShell.Defaults, kw)
    BaseShell.__init__(self, handler, **args)
    self.sysinfo = None
def __init__(self, handler, **kw):
    args = defaultArgMerging(SimpleShell.Defaults, kw)
    BaseShell.__init__(self, handler, **args)