async def target(self, event, pipeline, context):
    """Yields one event per known command, with its metadata.

    If a command name was read from the event, only that command is
    described; otherwise every registered command alias is listed.
    """
    command_name = await self.command_name.read(event, pipeline, context)
    ebnf = await self.ebnf.read(event, pipeline, context)
    # Select the commands to describe: a single named command, or
    # every registered alias when no name was given.
    if command_name:
        try:
            source = {command_name: m42pl.command(command_name)}
        except Exception:
            # Unknown command name: describe nothing rather than fail.
            source = {}
    else:
        source = m42pl.commands.ALIASES
    # ---
    for alias, command in source.items():
        # Name of the first matching base class, or None when the
        # command matches none of self.types. (The original indexed
        # [0] on the filtered list and raised IndexError in that case.)
        command_type = next(
            (t.__name__ for t in self.types if issubclass(command, t)),
            None
        )
        yield derive(event, data={
            'command': {
                'alias': alias,
                'aliases': command._aliases_,
                'schema': command._schema_,
                'about': command._about_,
                'syntax': command._syntax_,
                'type': command_type,
                'ebnf': getattr(command, '_ebnf_', '') if ebnf is True else ''
            }
        })
# Example #2
 async def target(self, event, pipeline, context):
     """Listens on a network transport and yields one event per message.

     Resolves the requested protocol, binds a transport that feeds a
     shared queue, then forwards each received (data, (host, port))
     item to the pipeline as a new event, forever.
     """
     fields = await self.fields.read(event, pipeline, context)
     # Single-slot queue shared with the transport: the transport puts
     # incoming payloads here and this loop forwards them downstream.
     queue = asyncio.Queue(1)
     # Resolve the transport factory for the requested protocol.
     proto_name = fields.protocol.lower()
     protocol = self.protocols.get(proto_name)
     if protocol is None:
         raise Exception(
             f'Protocol "{fields.protocol}" is unknown, '
             f'please use one of {", ".join(self.protocols.keys())}')
     self.transport = await protocol.get_transport(queue, fields)
     # Consume the queue forever, one event per received message.
     self.logger.info(
         f'start listening on '
         f'{proto_name}/{fields.host}:{fields.port}')
     while True:
         data, hostport = await queue.get()
         host, port = hostport[0], hostport[1]
         yield derive(event, data={
             'msg': {
                 'data': data,
                 'host': host,
                 'port': port
             }
         })
 async def target(self, event, pipeline, context):
     """Decodes the source field's value and yields the result.

     When a destination field is configured, the decoded value is
     written into it; otherwise the decoded value becomes the derived
     event's data.
     """
     raw = await self.src.read(event, pipeline, context)
     decoded = self.encoder.decode(raw)
     if self.dest:
         yield await self.dest.write(event, decoded)
     else:
         yield derive(event, data=decoded)
# Example #4
 async def target(self, event, pipeline, context):
     """Runs an external command and yields one event per stdout line.

     The command name and its arguments are read from the event, then
     executed as a subprocess; each stdout line is stripped, decoded as
     UTF-8 and emitted in a 'line' field.
     """
     cmd = await self.command.read(event, pipeline, context)
     args = [await arg.read(event, pipeline, context) for arg in self.args]
     # ---
     # Context manager closes the stdout pipe and reaps the process
     # even if the consumer stops iterating early or an error occurs
     # (the original leaked the pipe and only terminated on the happy
     # path).
     with subprocess.Popen([cmd] + args, stdout=subprocess.PIPE) as process:
         try:
             for row in iter(process.stdout.readline, b''):
                 yield derive(event, {'line': row.rstrip().decode('UTF-8')})
         finally:
             # Preserve original semantics: terminate when done.
             process.terminate()
# Example #5
 async def target(self, event, pipeline, context):
     """Yields one event per non-empty macro stored in the KVStore."""
     macros = await context.kvstore.read(self.macros_index, default={})
     for name, macro in macros.items():
         # Skip empty/null macro entries.
         if not macro:
             continue
         record = {'name': name, **macro}
         yield derive(event, {self.key_name: record})
 async def target(self, event, pipeline, context):
     """Reads a text file and yields one event per line.

     Each yielded event carries the line's text and its 0-based line
     number; on any failure (e.g. unreadable path) the original event
     is yielded unchanged.
     """
     try:
         path = await self.path.read(event, pipeline, context)
         with open(path, 'r') as fd:
             line_number = 0
             for chunk in fd.readlines():
                 for text in chunk.splitlines():
                     yield await self.field.write(
                         derive(event),
                         {'text': text, 'line': line_number}
                     )
                     line_number += 1
     except Exception:
         # Best-effort: keep the original event flowing on failure.
         yield event
 async def target(self, event, pipeline, context):
     """Polls a set of URLs in batches and yields one event per response.

     Runs only on the first chunk to avoid duplicate requests across
     parallel workers; repeats batches until `fields.count` runs out,
     sleeping `fields.frequency` seconds between batches.
     """
     fields = await self.fields.read(event, pipeline, context)
     # Do not run if we're not in the first chunk, i.e. do not
     # request the same URL in multiple process/tasks/threads/...
     if not self.first_chunk:
         return
     # Setup base request (== request template)
     # Only dict-valued request parts are copied into the template.
     base_request = {}
     for field in ('headers', 'data', 'json'):
         if isinstance(getattr(fields, field), dict):
             base_request[field] = getattr(fields, field)
     # Run
     async with aiohttp.ClientSession() as session:
         while True:
             # Build requests batch: one request per URL, merging the
             # template with the per-URL method/url pair.
             requests = []
             for url in fields.urls:
                 requests.append(
                     self.request_one(
                         session,
                         {
                             **base_request,
                             **{
                                 'method': fields.method,
                                 'url': url
                             }
                         }))
             # ---
             # for request in requests:
             #     async for chunk in request:
             #         yield derive(event, chunk)
             # ---
             # Execute requests and yield them as soon as possible
             for request in asyncio.as_completed(requests):
                 # yield derive(event, await anext(request))
                 yield derive(event, data=await request)
             # ---
             # Wait before next requests batch
             if fields.frequency > 0:
                 await asyncio.sleep(fields.frequency)
             # Decrease request count
             # NOTE(review): count is decremented after the batch, so a
             # count <= 0 still yields one batch — confirm intentional.
             fields.count -= 1
             if fields.count <= 0:
                 break
# Example #8
 async def target(self, event, pipeline, context):
     """Generates `end_count - begin_count` events, optionally throttled.

     When `fields.showinfo` is set, each event carries diagnostic data
     (id, chunk info, count range, pipeline name); otherwise events are
     empty. A positive `fields.frequency` sleeps between batches of
     `fields.freqdelay` events.
     """
     for i in range(self.begin_count, self.end_count):
         # Conditional expression replaces the fragile
         # `cond and value or default` idiom of the original.
         data = {
             'id': i,
             'chunk': {
                 'chunk': self._chunk,
                 'chunks': self._chunks,
             },
             'count': {
                 'begin': self.begin_count,
                 'end': self.end_count
             },
             'pipeline': {
                 'name': pipeline.name
             }
         } if self.fields.showinfo else {}
         yield derive(event, data)
         # Throttle: sleep every `freqdelay` events when a frequency
         # is configured.
         if self.fields.frequency > 0.0:
             if self.fields.freqdelay == 1 or i % self.fields.freqdelay == 0:
                 await sleep(self.fields.frequency)
# Example #9
 async def target(self, event, pipeline, context):
     """Reads a whole file and yields its content in the target field.

     On any error (missing file, permission, decode failure) a bare
     derived event is yielded instead, keeping the pipeline flowing.
     """
     try:
         path = await self.path.read(event, pipeline, context)
         with open(path, 'r') as fd:
             yield await self.field.write(derive(event), fd.read())
     except Exception:
         # Best-effort read: the original bound the exception to an
         # unused `_error` name; the binding is dropped here.
         yield derive(event)
# Example #10
 async def target(self, event, pipeline, context):
     # Expands an iterable-valued field: yields one derived event per
     # item, each carrying a single item back in the same field.
     # NOTE(review): unlike sibling commands, `read` is called with the
     # event only (no pipeline/context) — confirm this is intentional.
     for item in await self.field.read(event):
         yield await self.field.write(derive(event), item)
# Example #11
 async def target(self, event, pipeline, context):
     """Yields one event per KVStore item under `self.key`, writing
     each item's value into a field named after the item's key."""
     async for item_key, item_value in context.kvstore.items(self.key):
         field = LiteralField(item_key)
         yield await field.write(derive(event), item_value)