def __init__(self, recv, send, orchestrator, **kw):
    """Initialize the handler and resolve its `reset_data` word.

    :param function recv: A **blocking** function returning raw data fetched from the channel.
    :param function send: A function that takes raw data and sends it across the channel.
    :param orchestration.SimpleOrchestrator orchestrator: Translates raw data to `(stream, message)` tuples.
    """
    super(ResettableHandler, self).__init__(recv, send, orchestrator, **kw)
    # Merge into a COPY of the class-level Defaults: assigning into
    # self.Defaults directly would mutate the dict shared by every
    # instance of the class and leak state between handlers.
    defaults = dict(self.Defaults)
    defaults['reset_data'] = orchestrator.getDefaultStream()
    arguments = defaultArgMerging(defaults, kw)
    self.reset_data = arguments['reset_data']
def __init__(self, recv, send, orchestrator, **kw):
    """Wire up the channel functions and start the protocol thread.

    :param function recv: A **blocking** function that returns every time a chunk is
        received. The return type must be raw data, directly fetched from the channel.
    :param function send: A function that takes raw data as argument and sends it
        across the channel.
    :param orchestration.SimpleOrchestrator orchestrator: An object that is used to
        translate raw data to `(stream, message)` tuples.
    """
    arguments = defaultArgMerging(BaseHandler.Defaults, kw)
    self.receive_function = recv
    self.send_function = send
    self.orchestrator = orchestrator
    # Ad-hoc (immediate) sending is the default transmission strategy;
    # subclasses may override preferred_send with a queueing variant.
    self.preferred_send = self.sendAdHoc
    self.to_send_list = []
    self.to_send_raw = []
    # `on` must be set before the protocol thread can be started, as the
    # thread's loop condition reads it.
    self.on = True
    self.__protocolThread = Thread(target=self.__protocolThreadFunction)
    self.__protocolThread.daemon = True
    if arguments['start']:
        self.start()
def __init__(self, handler, **kw):
    """Initialize the Meterpreter shell on top of *handler*.

    :param handler: the covert handler this shell drives.
    """
    args = defaultArgMerging(MeterpreterShell.Defaults, kw)
    BaseShell.__init__(self, handler, **args)
def __init__(self, recv, send, orchestrator, **kw):
    """
    :param str stage_stream: The stream where all stages will be received.
    """
    super(StageableHandler, self).__init__(recv, send, orchestrator, **kw)
    merged = defaultArgMerging(self.Defaults, kw)
    self.stage_stream = merged['stage_stream']
    # NOTE(review): `stage_obj` is not defined anywhere in this method's
    # scope — presumably a module-level name; verify, otherwise this line
    # raises NameError at construction time.
    self.addStage(self.stage_stream, stage_obj)
def __init__(self, recv, send, orchestrator, **kw):
    """
    :param str request_data: The data that, when received as message, a stored chunk will be sent.
    """
    super(ResponseOnlyHandler, self).__init__(recv, send, orchestrator, **kw)
    merged = defaultArgMerging(self.Defaults, kw)
    self.request_data = merged['request_data']
    # Outgoing chunks are queued rather than sent ad hoc; they go out
    # only in response to a request message.
    self.preferred_send = self.queueSend
def __init__(self, recv, send, orchestrator, **kw):
    """Initialize the handler and normalize its scheduling configuration."""
    super(DateableHandler, self).__init__(recv, send, orchestrator, **kw)
    merged = defaultArgMerging(self.Defaults, kw)
    self.dates = {
        'workinghours': merged['workinghours'],
        # Store days as 0-6 numbers only
        'weekends': [getDay(day) for day in merged['weekends']],
        'holidays': merged['holidays'],
        'easter': merged['easter'],
    }
def __init__(self, recv, send, orchestrator, **kw):
    """Initialize the handler and start the background beaconing thread.

    :param str request_data: The actual payload that is used in messages that request data.
    :param tuple delay_between: A `tuple` containing 2 `floats` or `ints`. The beaconing
        intervals will be calculated randomly between these 2 numbers.
    :param str fetch_stream: The stream where all the beaconing will be tagged with.
    """
    super(InterrogatingHandler, self).__init__(recv, send, orchestrator, **kw)
    # Merge into a COPY of the class-level Defaults: writing into
    # self.Defaults directly would mutate the dict shared by every
    # instance of the class and leak state between handlers.
    defaults = dict(self.Defaults)
    defaults['fetch_stream'] = orchestrator.getDefaultStream()
    arguments = defaultArgMerging(defaults, kw)
    self.request_data = arguments['request_data']
    self.delay_between = arguments['delay_between']
    self.fetch_stream = arguments['fetch_stream']
    # Daemon thread running the beaconing loop for the handler's lifetime.
    self.fetcher_thread = Thread(target=self.__fetcher_function)
    self.fetcher_thread.daemon = True
    self.fetcher_thread.start()
def __init__(self, handler, log_unrecognised=False, **kw):
    """Initialize the shell around *handler* and register its sub-shells.

    :param handler: the covert handler whose streams this shell drives.
    :param bool log_unrecognised: accepted for interface compatibility.
    """
    cmd.Cmd.__init__(self)
    arguments = defaultArgMerging(BaseShell.Defaults, kw)
    self.prompt_templ = arguments['prompt']
    self.ignore_messages = arguments['ignore_messages']
    subshells = arguments['subshells']
    self.subshells_dict = {}
    self.handler = handler
    for stream_name, subshell_attrs in subshells.items():
        # Each entry is either a (class, kwargs) tuple or a bare class.
        if isinstance(subshell_attrs, tuple):
            subshell_class, subshell_kwargs = subshell_attrs
        else:
            subshell_class, subshell_kwargs = (subshell_attrs, dict())
        self.addSubShell(stream_name, subshell_class, subshell_kwargs)
    # Wrap the handler's onChunk callback so incoming chunks are routed
    # to the matching sub-shell.
    handler.onChunk = handlerCallbackHook(handler.onChunk, self.subshells_dict)
    self.updatePrompt()
def __init__(self, handler, log_unrecognised=False, **kw):
    """Initialize the shell with StandardShell defaults; system info starts unset."""
    merged = defaultArgMerging(StandardShell.Defaults, kw)
    BaseShell.__init__(self, handler, log_unrecognised, **merged)
    self.sysinfo = None
def __init__(self, handler, **kw):
    """Initialize the shell using StandardShell defaults; system info starts unset."""
    merged = defaultArgMerging(StandardShell.Defaults, kw)
    BaseShell.__init__(self, handler, **merged)
    self.sysinfo = None
def __init__(self, handler, log_unrecognised=False, **kw):
    """Initialize an ExtendableShell.

    :param handler: the covert handler this shell drives.
    :param bool log_unrecognised: forwarded to ``BaseShell.__init__``.
    """
    args = defaultArgMerging(ExtendableShell.Defaults, kw)
    # Forward log_unrecognised: it was previously accepted but silently
    # discarded, unlike the sibling shell that passes it through.
    BaseShell.__init__(self, handler, log_unrecognised, **args)
def __init__(self, handler, **kw):
    """Initialize a SimpleShell with its class defaults merged over *kw*."""
    merged = defaultArgMerging(SimpleShell.Defaults, kw)
    BaseShell.__init__(self, handler, **merged)