Example #1
File: log.py Project: jre21/inbox
def configure_general_logging():
    """ Configure the general server logger to output to screen if a TTY is
        attached, and server.log always.

        Logs are output to a directory configurable via LOGDIR.
    """
    # import here to avoid import loop from config.py
    from .config import config
    assert 'LOGDIR' in config, "LOGDIR not specified in config file"
    assert 'LOGLEVEL' in config, "LOGLEVEL not specified in config file"
    mkdirp(config['LOGDIR'])

    # configure properties that should cascade
    inbox_root_logger = logging.getLogger('inbox.server')
    inbox_root_logger.setLevel(int(config['LOGLEVEL']))
    # don't pass messages up to the root root logger
    inbox_root_logger.propagate = False

    # log everything to screen
    if sys.stdout.isatty():
        inbox_root_logger.addHandler(get_tty_handler())

    logger = get_logger()

    for handler in logger.handlers:
        logger.removeHandler(handler)

    logfile = os.path.join(config['LOGDIR'], 'server.log')
    file_handler = logging.FileHandler(logfile, encoding='utf-8')
    file_handler.setFormatter(file_formatter)
    logger.addHandler(file_handler)

    return logger
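All of the examples on this page lean on the same mkdirp helper (imported from inbox.util.file in several of the snippets below). For orientation, here is a minimal sketch of what such a helper typically looks like, assuming the usual mkdir -p semantics; the project's actual implementation may differ:

import errno
import os

def mkdirp(path):
    # Create `path` and any missing parent directories, ignoring the error
    # raised when the directory already exists (mkdir -p behaviour).
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise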
Example #2
File: log.py Project: AmyWeiner/inbox
def configure_general_logging():
    """
    Configure the general server logger to output to screen and server.log.

    Logs are output to a directory configurable via LOGDIR.

    """
    logdir = config.get_required('LOGDIR')
    loglevel = config.get_required('LOGLEVEL')

    mkdirp(logdir)

    # configure properties that should cascade
    inbox_root_logger = logging.getLogger('inbox')
    inbox_root_logger.setLevel(int(loglevel))
    # don't pass messages up to the root root logger
    inbox_root_logger.propagate = False

    # log everything to screen (or main logfile if redirecting)
    inbox_root_logger.addHandler(get_tty_handler())

    logger = get_logger()

    for handler in logger.handlers:
        logger.removeHandler(handler)

    logfile = os.path.join(logdir, 'server.log')
    file_handler = logging.FileHandler(logfile, encoding='utf-8')
    file_handler.setFormatter(file_formatter)
    logger.addHandler(file_handler)

    return logger
Example #3
File: base.py Project: htk/sync-engine
def log(request, config):
    """
    Returns the root server logger. For other loggers, use this fixture
    for setup but then call inbox.log.get_logger().

    The test log file is removed at the end of the test run!

    """
    import logging
    from inbox.util.file import mkdirp
    root_logger = logging.getLogger()
    for handler in root_logger.handlers:
        root_logger.removeHandler(handler)

    logdir = config.get_required('LOGDIR')
    mkdirp(logdir)
    logfile = config.get_required('TEST_LOGFILE')
    fileHandler = logging.FileHandler(logfile, encoding='utf-8')
    root_logger.addHandler(fileHandler)
    root_logger.setLevel(logging.DEBUG)

    def remove_logs():
        try:
            os.remove(logfile)
        except OSError:
            pass
    request.addfinalizer(remove_logs)
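A hedged sketch of how a test might consume this fixture; the test body is illustrative and assumes the config fixture from the same suite exposes get_required:

import logging

def test_logfile_capture(log, config):
    # Requesting the `log` fixture attaches a FileHandler to the root logger
    # at DEBUG level, so this record should land in TEST_LOGFILE.
    logging.getLogger().debug('starting test')

    with open(config.get_required('TEST_LOGFILE')) as f:
        assert 'starting test' in f.read()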
Example #4
def log(request, config):
    """
    Returns the root server logger. For other loggers, use this fixture
    for setup but then call inbox.log.get_logger().

    The test log file is removed at the end of the test run!

    """
    import logging
    from inbox.util.file import mkdirp
    root_logger = logging.getLogger()
    for handler in root_logger.handlers:
        root_logger.removeHandler(handler)

    logdir = config.get_required('LOGDIR')
    mkdirp(logdir)
    logfile = config.get_required('TEST_LOGFILE')
    fileHandler = logging.FileHandler(logfile, encoding='utf-8')
    root_logger.addHandler(fileHandler)
    root_logger.setLevel(logging.DEBUG)

    def remove_logs():
        try:
            os.remove(logfile)
        except OSError:
            pass

    request.addfinalizer(remove_logs)
Example #5
File: cache.py Project: 0xcd03/inbox
def set_cache(key, val):
    path = _path_from_key(key)
    dirname = os.path.dirname(path)
    mkdirp(dirname)
    log.info("Saving cache to {0}".format(dirname))
    with open(path, 'w') as f:
        msgpack.pack(val, f)
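The cache value is written with msgpack.pack directly onto the open file object. The matching read side is not shown on this page; the following get_cache is a hypothetical counterpart, sketched only to illustrate how such a file would be read back (reusing the _path_from_key helper above):

import os
import msgpack

def get_cache(key):
    # Hypothetical counterpart to set_cache: return the unpacked value,
    # or None if nothing has been cached under `key` yet.
    path = _path_from_key(key)
    if not os.path.exists(path):
        return None
    with open(path, 'rb') as f:
        return msgpack.unpack(f)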
Example #6
def set_cache(key, val):
    path = _path_from_key(key)
    dirname = os.path.dirname(path)
    mkdirp(dirname)
    log.info("Saving cache to {0}".format(dirname))
    with open(path, 'w') as f:
        msgpack.pack(val, f)
Example #7
def configure_general_logging():
    """ Configure the general server logger to output to screen if a TTY is
        attached, and server.log always.

        Logs are output to a directory configurable via LOGDIR.
    """
    # import here to avoid import loop from config.py
    from .config import config
    assert 'LOGDIR' in config, "LOGDIR not specified in config file"
    assert 'LOGLEVEL' in config, "LOGLEVEL not specified in config file"
    mkdirp(config['LOGDIR'])

    # configure properties that should cascade
    inbox_root_logger = logging.getLogger('inbox.server')
    inbox_root_logger.setLevel(int(config['LOGLEVEL']))
    # don't pass messages up to the root root logger
    inbox_root_logger.propagate = False

    # log everything to screen
    if sys.stdout.isatty():
        inbox_root_logger.addHandler(get_tty_handler())

    logger = get_logger()

    for handler in logger.handlers:
        logger.removeHandler(handler)

    logfile = os.path.join(config['LOGDIR'], 'server.log')
    file_handler = logging.FileHandler(logfile, encoding='utf-8')
    file_handler.setFormatter(file_formatter)
    logger.addHandler(file_handler)

    return logger
Example #8
    def _keyfile(self, create_dir=True):
        assert self.key

        assert KEY_DIR
        if create_dir:
            mkdirp(KEY_DIR)
        key_filename = '{0}'.format(sha256(self.key).hexdigest())
        return os.path.join(KEY_DIR, key_filename)
Example #9
    def _keyfile(self, create_dir=True):
        assert self.key

        assert KEY_DIR
        if create_dir:
            mkdirp(KEY_DIR)
        key_filename = "{0}".format(sha256(self.key).hexdigest())
        return os.path.join(KEY_DIR, key_filename)
Example #10
File: base.py Project: jre21/inbox
    def _keyfile(self, create_dir=True):
        assert self.key

        key_dir = config.get('KEY_DIR', None)
        assert key_dir
        if create_dir:
            mkdirp(key_dir)
        key_filename = '{0}'.format(sha256(self.key).hexdigest())
        return os.path.join(key_dir, key_filename)
Example #11
def _get_errfilename(account_id, folder_name, uid):
    try:
        errdir = os.path.join(config['LOGDIR'], str(account_id), 'errors',
                              folder_name)
        errfile = os.path.join(errdir, str(uid))
        mkdirp(errdir)
    except UnicodeEncodeError:
        # Rather than wrangling character encodings, just base64-encode the
        # folder name to construct a directory.
        b64_folder_name = base64.b64encode(folder_name.encode('utf-8'))
        return _get_errfilename(account_id, b64_folder_name, uid)
    return errfile
Example #12
def _get_errfilename(account_id, folder_name, uid):
    try:
        errdir = os.path.join(config['LOGDIR'], str(account_id), 'errors',
                              folder_name)
        errfile = os.path.join(errdir, str(uid))
        mkdirp(errdir)
    except UnicodeEncodeError:
        # Rather than wrangling character encodings, just base64-encode the
        # folder name to construct a directory.
        b64_folder_name = base64.b64encode(folder_name.encode('utf-8'))
        return _get_errfilename(account_id, b64_folder_name, uid)
    return errfile
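The try/except above is a Python 2 concern: constructing or creating a directory from a non-ASCII folder name can raise UnicodeEncodeError, and the fallback simply retries with an ASCII-safe, base64-encoded folder name. A small, hypothetical illustration (the account id, folder name, and uid are made up):

# -*- coding: utf-8 -*-
# If building the path fails with UnicodeEncodeError, the retry replaces the
# folder component with the base64 encoding of its UTF-8 bytes, so the
# resulting path stays ASCII-only.
errfile = _get_errfilename(42, u'받은편지함', 1001)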
Example #13
File: log.py Project: AmyWeiner/inbox
def configure_logging(account_id, purpose):
    logger = get_logger(account_id, purpose)
    logger.propagate = True

    logdir = os.path.join(config['LOGDIR'], str(account_id))
    mkdirp(logdir)
    logfile = os.path.join(logdir, '{0}.log'.format(purpose))
    handler = logging.FileHandler(logfile, encoding='utf-8')
    handler.setFormatter(file_formatter)
    logger.addHandler(handler)

    return logger
Example #14
def configure_logging(account_id, purpose):
    # avoid import loop from config.py
    from .config import config
    logger = get_logger(account_id, purpose)
    logger.propagate = True

    logdir = os.path.join(config['LOGDIR'], str(account_id))
    mkdirp(logdir)
    logfile = os.path.join(logdir, '{0}.log'.format(purpose))
    handler = logging.FileHandler(logfile, encoding='utf-8')
    handler.setFormatter(file_formatter)
    logger.addHandler(handler)

    return logger
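For orientation, a brief, hypothetical call; the account id and purpose are illustrative:

# Creates os.path.join(config['LOGDIR'], '1') if needed and logs to '1/sync.log'.
account_logger = configure_logging(1, 'sync')
account_logger.info('sync started')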
Example #15
def save_to_blockstore(data_sha256, data):
    assert data is not None
    assert type(data) is not unicode

    if len(data) == 0:
        log.warning('Not saving 0-length data blob')
        return

    if STORE_MSG_ON_S3:
        _save_to_s3(data_sha256, data)
    else:
        directory = _data_file_directory(data_sha256)
        mkdirp(directory)

        with open(_data_file_path(data_sha256), 'wb') as f:
            f.write(data)
Example #16
def save_to_blockstore(data_sha256, data):
    assert data is not None
    assert type(data) is not unicode

    if len(data) == 0:
        log.warning('Not saving 0-length data blob')
        return

    if STORE_MSG_ON_S3:
        _save_to_s3(data_sha256, data)
    else:
        directory = _data_file_directory(data_sha256)
        mkdirp(directory)

        with open(_data_file_path(data_sha256), 'wb') as f:
            f.write(data)
Example #17
def save_to_blockstore(data_sha256, data):
    # type: (str, bytes) -> None
    assert data is not None
    assert isinstance(data, bytes)

    if len(data) == 0:
        log.warning("Not saving 0-length data blob")
        return

    if STORE_MSG_ON_S3:
        _save_to_s3(data_sha256, data)
    else:
        directory = _data_file_directory(data_sha256)
        mkdirp(directory)

        with open(_data_file_path(data_sha256), "wb") as f:
            f.write(data)
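A hedged usage sketch; judging by the parameter name, the key is the hex SHA-256 digest of the data itself, which is presumably what the _data_file_directory and _data_file_path helpers key on:

from hashlib import sha256

# Illustrative payload only; in the project this would be raw message bytes.
data = b'raw MIME message bytes'
save_to_blockstore(sha256(data).hexdigest(), data)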
Example #18
def save_to_blockstore(data_sha256, data):
    # This code saves attachments to disk unnecessarily, so it is disabled.
    return
    assert data is not None
    assert type(data) is not unicode

    if len(data) == 0:
        log.warning('Not saving 0-length data blob')
        return

    if STORE_MSG_ON_S3:
        _save_to_s3(data_sha256, data)
    else:
        directory = _data_file_directory(data_sha256)
        mkdirp(directory)

        with open(_data_file_path(data_sha256), 'wb') as f:
            f.write(data)
Example #19
File: message.py Project: 0xcd03/inbox
def _get_errfilename(account_id, folder_name, uid):
    errdir = os.path.join(config['LOGDIR'], str(account_id), 'errors',
                          folder_name)
    errfile = os.path.join(errdir, str(uid))
    mkdirp(errdir)
    return errfile
Example #20
File: roles.py Project: AmyWeiner/inbox
    def _save_to_disk(self, data):
        mkdirp(self._data_file_directory)
        with open(self._data_file_path, 'wb') as f:
            f.write(data)
Example #21
File: roles.py Project: caitp/inbox
    def _save_to_disk(self, data):
        mkdirp(self._data_file_directory)
        with open(self._data_file_path, 'wb') as f:
            f.write(data)
Example #22
    def _save_to_disk(self, data):
        directory = _data_file_directory(self.data_sha256)
        mkdirp(directory)

        with open(_data_file_path(self.data_sha256), 'wb') as f:
            f.write(data)
Example #23
File: base.py Project: jre21/inbox
    def calculate_sanitized_body(self):
        plain_part, html_part = self.body
        snippet_length = 191
        if html_part:
            assert '\r' not in html_part, "newlines not normalized"

            # Rudimentary stripping of quoted text in the 'gmail_quote' div.
            # Wrap this in a try/except because BeautifulSoup sometimes goes
            # down a dark spiral of recursion death.
            try:
                soup = BeautifulSoup(html_part.strip(), "lxml")
                for div in soup.findAll('div', 'gmail_quote'):
                    div.extract()
                for container in soup.findAll('div', 'gmail_extra'):
                    if container.contents is not None:
                        for tag in reversed(container.contents):
                            if not hasattr(tag, 'name') or tag.name != 'br':
                                break
                            else:
                                tag.extract()
                    if container.contents is None:
                        # we emptied it!
                        container.extract()

                # Paragraphs don't need trailing line-breaks.
                for container in soup.findAll('p'):
                    if container.contents is not None:
                        for tag in reversed(container.contents):
                            if not hasattr(tag, 'name') or tag.name != 'br':
                                break
                            else:
                                tag.extract()

                # Misc other crap.
                dtd = [item for item in soup.contents if isinstance(
                    item, Doctype)]
                comments = soup.findAll(text=lambda text: isinstance(
                    text, Comment))
                for tag in chain(dtd, comments):
                    tag.extract()
                self.sanitized_body = unicode(soup)

                # trim for snippet
                for tag in soup.findAll(['style', 'head', 'title']):
                    tag.extract()
                self.snippet = soup.get_text(' ')[:191]

            except RuntimeError as exc:
                err_prefix = 'maximum recursion depth exceeded'
                # e.message is deprecated in Python 3
                if exc.args[0].startswith(err_prefix):
                    full_traceback = 'Ignoring error: {}\nOuter stack:\n{}{}'\
                        .format(exc, ''.join(traceback.format_stack()[:-2]),
                                traceback.format_exc(exc))

                    # Note that python doesn't support tail call recursion
                    # optimizations
                    # http://neopythonic.blogspot.com/2009/04/tail-recursion-elimination.html
                    full_traceback = 'Error in BeautifulSoup.' + \
                        'System recursion limit: {0}'.format(
                            sys.getrecursionlimit()) + \
                        '\n\n\n' + \
                        full_traceback

                    # TODO have a better logging service for storing these
                    errdir = os.path.join(config['LOGDIR'],
                                          'bs_parsing_errors', )
                    errfile = os.path.join(errdir, str(self.data_sha256))
                    mkdirp(errdir)

                    with open("{0}_traceback".format(errfile), 'w') as fh:
                        fh.write(full_traceback)
                    # Write the file in binary mode, since it might also have
                    # decoding errors.
                    with open("{0}_data".format(errfile), 'wb') as fh:
                        fh.write(html_part.encode("utf-8"))

                    log.error("BeautifulSoup parsing error. Data logged to\
                              {0}_data and {0}_traceback".format(errfile))
                    self.decode_error = True

                    # Not sanitized, but will still work
                    self.sanitized_body = html_part
                    self.snippet = soup.get_text(' ')[:191]

                else:
                    log.error("Unknown BeautifulSoup exception: {0}".format(
                        exc))
                    raise exc

        elif plain_part is None:
            self.sanitized_body = u''
            self.snippet = u''
        else:
            stripped = strip_plaintext_quote(plain_part.strip())
            self.sanitized_body = plaintext2html(stripped)
            self.snippet = stripped[:snippet_length]
Example #24
def _get_errfilename(account_id, folder_name, uid):
    errdir = os.path.join(config['LOGDIR'], str(account_id), 'errors',
                          folder_name)
    errfile = os.path.join(errdir, str(uid))
    mkdirp(errdir)
    return errfile
Example #25
def _save_to_disk(data_sha256, data):
    directory = _data_file_directory(data_sha256)
    mkdirp(directory)

    with open(_data_file_path(data_sha256), 'wb') as f:
        f.write(data)