Example #1
0
    def _readControl(self):
        """Extract and return the raw ``control`` file from the Debian
        package (ar archive) at ``self.filename``.

        A .deb carries its metadata in a compressed control tarball; both
        xz and gzip compression are supported here.

        @return bytes - contents of the ``./control`` member
        @raises ValueError if no control tarball or ``./control`` member exists
        """
        ar = arpy.Archive(self.filename)
        try:
            ar.read_all_headers()

            if b'control.tar.xz' in ar.archived_files:
                # NOTE: passing the archived member as `filename` requires
                # https://github.com/viraptor/arpy/pull/5
                tar = LZMAFile(filename=ar.archived_files[b'control.tar.xz'])
            elif b'control.tar.gz' in ar.archived_files:
                tar = GzipFile(fileobj=ar.archived_files[b'control.tar.gz'])
            else:
                raise ValueError('Unable to find control file')

            # Context managers guarantee the decompressor and tar reader are
            # closed even if extraction fails part-way (the original leaked
            # them on any exception between open and close).
            with tar, TarFile(fileobj=tar) as raw:
                member = raw.extractfile('./control')
                if member is None:
                    # extractfile() returns None for non-regular-file members
                    raise ValueError('Unable to find control file')
                return member.read()
        finally:
            ar.close()
Example #2
0
class LZMAPlugin:
    """
    Compresses received data using `lzma <https://en.wikipedia.org/wiki/Lempel–Ziv–Markov_chain_algorithm>`_.

    Accepted ``feed_options`` parameters:

    - `lzma_format`
    - `lzma_check`
    - `lzma_preset`
    - `lzma_filters`

    .. note::
        ``lzma_filters`` cannot be used in pypy version 7.3.1 and older.

    See :py:class:`lzma.LZMAFile` for more info about parameters.
    """
    def __init__(self, file: BinaryIO, feed_options: Dict[str, Any]) -> None:
        self.file = file
        self.feed_options = feed_options

        # Pull the LZMA tuning knobs from the feed options; each absent key
        # falls back to the corresponding lzma.LZMAFile default.
        options = self.feed_options
        self.lzmafile = LZMAFile(
            filename=self.file,
            mode="wb",
            format=options.get("lzma_format"),
            check=options.get("lzma_check", -1),
            preset=options.get("lzma_preset"),
            filters=options.get("lzma_filters"),
        )

    def write(self, data: bytes) -> int:
        # Compresses on the fly; returns the count of *uncompressed*
        # bytes accepted, matching LZMAFile.write.
        return self.lzmafile.write(data)

    def close(self) -> None:
        # Finalize the compressed stream first, then release the target file.
        self.lzmafile.close()
        self.file.close()
Example #3
0
    def GenerateArchive( self, nodeTemplate ):
        """
            Given an xz compressed archive, read off its contents
            encapsulating each record in a node object for further
            processing upstream.

            @param  Node nodeTemplate - Node object definition, used to encapsulate records
            @return generator yielding one nodeTemplate per record
        """
        # The context manager guarantees the file is closed even when the
        # consumer abandons the generator early (the original only closed
        # it after a full iteration).
        with LZMAFile( self.process ) as fd:
            # Yank (discard) the header line. Use next(fd) — fd.next() is
            # Python-2-only — with a default so an empty archive does not
            # raise StopIteration inside the generator (PEP 479 turns that
            # into a RuntimeError).
            if next( fd, None ) is None:
                return

            # Begin processing: one node per remaining record
            for line in fd:
                yield nodeTemplate( url = line.strip() )
Example #4
0
class Log(glados.Module):
    """Logs every message the bot sees into per-day, xz-compressed chanlog
    files under ``<local_data_dir>/log``, rolling over at local midnight."""

    def __init__(self, server_instance, full_name):
        super(Log, self).__init__(server_instance, full_name)

        self.log_path = os.path.join(self.local_data_dir, 'log')
        if not os.path.exists(self.log_path):
            os.makedirs(self.log_path)

        # Current local date, also embedded in the log file name.
        self.date = datetime.now().strftime('%Y-%m-%d')
        self.log_file = self.__open_log()

    def __open_log(self):
        # Single place that builds the day's log file (was duplicated in
        # __init__ and __open_new_log_if_necessary). Append mode so a
        # restart keeps writing to the same day's file.
        return LZMAFile(
            os.path.join(self.log_path,
                         'chanlog-{}.txt.xz'.format(self.date)), 'a')

    def __open_new_log_if_necessary(self):
        # Roll over to a fresh file when the local date changes.
        date = datetime.now().strftime('%Y-%m-%d')
        if self.date != date:
            self.log_file.close()
            self.date = date
            self.log_file = self.__open_log()

    @glados.Permissions.spamalot
    @glados.Module.rule('^.*$', ignorecommands=False)
    async def on_message(self, message, match):
        # Direct messages have no server; fall back to empty strings.
        server_name = message.server.name if message.server else ''
        server_id = message.server.id if message.server else ''
        self.__open_new_log_if_necessary()
        info = u'[{0}] {1}({2}): #{3}: {4}({5}): {6}\n'.format(
            datetime.now().strftime('%Y-%m-%d %H:%M:%S'), server_name,
            server_id, message.channel.name, message.author.name,
            message.author.id, message.clean_content)

        self.log_file.write(info.encode('utf-8'))
        # NOTE(review): LZMAFile.flush is effectively a no-op — compressed
        # data only hits disk when the stream is closed; confirm durability
        # expectations if crash-safety matters.
        self.log_file.flush()
        return ()