def test_logger_003(capsys, caplog):
    """Test logger basic usage.

    Test case verifies that very verbose option works for text logs. In
    this case the length of the log message must be truncated and the
    message must be in all lower case characters.
    """

    Logger.remove()
    Logger.configure({
        'debug': False,
        'log_json': False,
        'log_msg_max': Logger.DEFAULT_LOG_MSG_MAX,
        'quiet': False,
        'very_verbose': True
    })
    logger = Logger.get_logger('snippy.' + __name__)

    logger.warning('abcdefghij' * 100)
    logger.warning('VARIABLE %s', ('ABCDEFGHIJ' * 100))
    logger.security('SECURITY %s', ('ABCDEFGHIJ' * 100))
    out, err = capsys.readouterr()
    assert not err
    assert 'abcdefghijabcdefg...' in out
    assert 'abcdefghijabcdefgh...' in out
    assert 'variable abcdefghij' in out
    assert len(caplog.records[0].msg) == Logger.DEFAULT_LOG_MSG_MAX
    assert len(caplog.records[1].msg) == Logger.DEFAULT_LOG_MSG_MAX
    assert len(caplog.records[2].msg) == Logger.DEFAULT_LOG_MSG_MAX
    assert caplog.records[0].msg.islower()
    assert caplog.records[1].msg.islower()
    assert caplog.records[2].msg.islower()
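# The assertions above imply a formatter that lowercases the expanded
# log message, truncates it to the configured maximum length, and marks
# the truncation with trailing dots. A minimal sketch of that behavior;
# the helper name below is hypothetical, not part of the snippy API.
def _truncate_message(message, log_msg_max):
    """Lowercase a log message and truncate it to log_msg_max characters."""

    message = message.lower()
    if len(message) > log_msg_max:
        message = message[:log_msg_max - 3] + '...'

    return message

assert len(_truncate_message('A' * 1000, 80)) == 80
assert _truncate_message('A' * 1000, 80).islower()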
def test_logger_005(capsys, caplog):
    """Test logger basic usage.

    Test case verifies that very verbose option works with json logs.
    """

    Logger.remove()
    Logger.configure({
        'debug': False,
        'log_json': True,
        'log_msg_max': Logger.DEFAULT_LOG_MSG_MAX,
        'quiet': False,
        'very_verbose': True
    })
    logger = Logger.get_logger('snippy.' + __name__)

    logger.warning('abcdefghij' * 100)
    logger.warning('variable %s', ('abcdefghij' * 100))
    out, err = capsys.readouterr()
    assert not err
    assert len(json.loads(out.splitlines()[0])['message']) == Logger.DEFAULT_LOG_MSG_MAX
    assert len(json.loads(out.splitlines()[1])['message']) == Logger.DEFAULT_LOG_MSG_MAX
    assert len(caplog.records[0].msg) == Logger.DEFAULT_LOG_MSG_MAX
    assert len(caplog.records[1].msg) == Logger.DEFAULT_LOG_MSG_MAX
    assert Field.is_iso8601(json.loads(out.splitlines()[0])['asctime'])
    assert Field.is_iso8601(json.loads(out.splitlines()[1])['asctime'])
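# Field.is_iso8601 above validates the JSON log ``asctime`` attribute.
# A hedged sketch of such a validator using only the standard library;
# the real test helper may be implemented differently.
import re

def is_iso8601(timestamp):
    """Return True when the timestamp looks like an ISO 8601 string."""

    return bool(re.match(
        r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?([+-]\d{2}:\d{2}|Z)?$',
        timestamp))

assert is_iso8601('2018-06-22T13:10:33.295299+00:00')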
def __init__(self, category='', timestamp='', list_=None, dict_=None):
    self._logger = Logger.get_logger(__name__)

    self._id = ''
    self._category = category
    self._data = ()
    self._brief = ''
    self._description = ''
    self._name = ''
    self._groups = Const.DEFAULT_GROUPS
    self._tags = ()
    self._links = ()
    self._source = ''
    self._versions = ()
    self._filename = ''
    self._created = timestamp
    self._updated = timestamp
    self._uuid = ''
    self._digest = ''

    if list_ or dict_:
        self.convert(list_, dict_)
    if not self._id:
        self._id = self._get_internal_uuid()
    if not self._uuid:
        self._uuid = self._get_external_uuid()
    self._digest = self._compute_digest()
def test_logger_014(capsys, caplog):
    """Test custom security level.

    Test case verifies that the custom ``security`` level is working.
    """

    Logger.remove()
    Logger.configure({
        'debug': False,
        'log_json': False,
        'log_msg_max': 120,
        'quiet': False,
        'very_verbose': True
    })
    logger = Logger.get_logger('snippy.' + __name__)

    logger.security('SECURITY %s', ('ABCDEFGHIJ' * 100))
    out, err = capsys.readouterr()
    assert not err
    assert 'security abcdefghij' in out
    assert len(caplog.records[0].msg) == 120
    assert caplog.records[0].appname == 'snippy'
    assert caplog.records[0].levelname == 'security'
    assert caplog.records[0].levelno == Logger.SECURITY
    assert hasattr(caplog.records[0], 'oid')
def __init__(self, storage, category, run_cli):
    self._logger = Logger.get_logger(__name__)
    self._category = category
    self._run_cli = run_cli
    self._storage = storage
    self._collection = None
    self._uniques = ()
def __init__(self):
    self._logger = Logger.get_logger(__name__)
    self._db = Const.DB_SQLITE
    self._connection = None
    self._columns = ()
    self._regexp = 'REGEXP'
    self._placeholder = '?'
    self._catch_violating_column = self.RE_CATCH_UNIQUE_SQLITE_COLUMN
def __init__(self, timestamp, text, collection):
    """
    Args:
        timestamp (str): ISO 8601 timestamp used with created resources.
        text (str): Source text that is parsed.
        collection (Collection()): Collection where the content is stored.
    """

    self._logger = Logger.get_logger(__name__)
    self._timestamp = timestamp
    self._text = text
    self._collection = collection
def __init__(self, timestamp, dictionary, collection):
    """
    Args:
        timestamp (str): ISO 8601 timestamp used with created resources.
        dictionary (dict): Dictionary from which the content is read.
        collection (Collection()): Collection where the content is stored.
    """

    self._logger = Logger.get_logger(__name__)
    self._timestamp = timestamp
    self._dictionary = dictionary
    self._collection = collection
def test_logger_011(capsys, caplog):
    """Test logger advanced configuration.

    Test case verifies that the maximum log message length can be
    configured and that the configuration can be changed. The case also
    tests that static logger fields are not changed when the logger is
    reconfigured.
    """

    Logger.remove()
    Logger.configure({
        'debug': False,
        'log_json': False,
        'log_msg_max': 120,
        'quiet': False,
        'very_verbose': True
    })
    logger = Logger.get_logger('snippy.' + __name__)

    logger.warning('abcdefghij' * 100)
    logger.warning('VARIABLE %s', ('ABCDEFGHIJ' * 100))
    out, err = capsys.readouterr()
    assert not err
    assert 'abcdefghijabcdefg...' in out
    assert 'abcdefghijabcdefgh...' in out
    assert 'variable abcdefghij' in out
    assert len(caplog.records[0].msg) == 120
    assert len(caplog.records[1].msg) == 120
    assert caplog.records[0].appname == 'snippy'
    assert caplog.records[1].appname == 'snippy'

    caplog.clear()
    Logger.configure({
        'debug': False,
        'log_json': True,
        'log_msg_max': Logger.DEFAULT_LOG_MSG_MAX,
        'quiet': False,
        'very_verbose': True
    })
    logger.warning('abcdefghij' * 100)
    logger.warning('VARIABLE %s', ('ABCDEFGHIJ' * 100))
    out, err = capsys.readouterr()
    assert not err
    assert 'abcdefghijabcdefg...' in out
    assert 'abcdefghijabcdefgh...' in out
    assert 'variable abcdefghij' in out
    assert len(caplog.records[0].msg) == Logger.DEFAULT_LOG_MSG_MAX
    assert len(caplog.records[1].msg) == Logger.DEFAULT_LOG_MSG_MAX
    assert caplog.records[0].appname == 'snippy'
    assert caplog.records[1].appname == 'snippy'
def __init__(self, filetype, timestamp, source, collection):
    """
    Args:
        filetype (str): Filetype that defines the used parser.
        timestamp (str): ISO 8601 timestamp used with created resources.
        source (str|dict): Source text or dictionary that is parsed.
        collection (Collection): Collection object where content is stored.
    """

    self._logger = Logger.get_logger(__name__)
    self._filetype = filetype
    self._timestamp = timestamp
    self._source = source
    self._collection = collection
    self._parser = self._parser_factory()
def logger_wrapper(request):
    """Create logger."""

    from snippy.logger import Logger

    # A previous test may have configured the logger. Therefore the
    # logger must always be reset before a test.
    Logger.reset()

    logger = Logger.get_logger('snippy.' + __name__)

    def fin():
        """Clear the resources at the end."""

        Logger.remove()
    request.addfinalizer(fin)

    return logger
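# A hedged usage sketch: a test that receives the fixture above gets a
# logger that has been reset before the test and is removed afterwards.
# The test name below is illustrative, not from the snippy test suite.
def test_fixture_usage(logger_wrapper):
    """Verify that the fixture returns a ready to use logger."""

    logger_wrapper.debug('testing with the preconfigured logger')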
def __init__(self, derived):
    self._logger = Logger.get_logger(__name__)
    self._logger.debug('config source: {}'.format(derived))
    self._derived = derived
    self._reset_fields = {}
    self._repr = self._get_repr()
    self.complete = Const.EMPTY
    self.debug = False
    self.defaults = False
    self.digest = None
    self.editor = False
    self.failure = False
    self.failure_message = Const.EMPTY
    self.template_format = Const.CONTENT_FORMAT_MKDN
    self.template_format_used = False
    self.languages = ()
    self.log_json = False
    self.log_msg_max = self.DEFAULT_LOG_MSG_MAX
    self.merge = False
    self.no_ansi = False
    self.no_editor = False
    self.operation = None
    self.operation_file = Const.EMPTY
    self.profiler = False
    self.quiet = False
    self.run_healthcheck = False
    self.server_minify_json = False
    self.server_readonly = False
    self.server_ssl_ca_cert = None
    self.server_ssl_cert = None
    self.server_ssl_key = None
    self.storage_path = Const.EMPTY
    self.storage_type = Const.DB_SQLITE
    self.storage_host = Const.EMPTY
    self.storage_user = Const.EMPTY
    self.storage_password = Const.EMPTY
    self.storage_database = Const.EMPTY
    self.storage_ssl_cert = None
    self.storage_ssl_key = None
    self.storage_ssl_ca_cert = None
    self.template = False
    self.uuid = None
    self.version = __version__
    self.very_verbose = False
def test_logger_016(capsys):
    """Test logs from Gunicorn.

    Test case verifies that log messages from Gunicorn are converted
    correctly to Snippy server logs. The informative logs from Gunicorn
    must be converted to debug level logs. All other log levels must be
    kept the same.
    """

    Logger.remove()
    Logger.configure({
        'debug': True,
        'log_json': True,
        'log_msg_max': Logger.DEFAULT_LOG_MSG_MAX,
        'quiet': False,
        'very_verbose': False
    })
    logger = Logger.get_logger('snippy.server.gunicorn')

    # Test log levels.
    logger.security('testing security level')
    logger.critical('testing critical level')
    logger.error('testing error level')
    logger.warning('testing warning level')
    logger.info('testing info level')
    logger.debug('testing debug level')
    out, err = capsys.readouterr()
    assert not err
    assert json.loads(out.splitlines()[0])['levelno'] == 60
    assert json.loads(out.splitlines()[0])['levelname'] == 'security'
    assert json.loads(out.splitlines()[1])['levelno'] == 50
    assert json.loads(out.splitlines()[1])['levelname'] == 'crit'
    assert json.loads(out.splitlines()[2])['levelno'] == 40
    assert json.loads(out.splitlines()[2])['levelname'] == 'err'
    assert json.loads(out.splitlines()[3])['levelno'] == 30
    assert json.loads(out.splitlines()[3])['levelname'] == 'warning'
    assert json.loads(out.splitlines()[4])['levelno'] == 10
    assert json.loads(out.splitlines()[4])['levelname'] == 'debug'
    assert json.loads(out.splitlines()[5])['levelno'] == 10
    assert json.loads(out.splitlines()[5])['levelname'] == 'debug'
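# The assertions above imply that Gunicorn INFO records are demoted to
# DEBUG before they are emitted, while the other levels pass through
# unchanged. A minimal sketch of such a mapping; the function name is an
# assumption, not the actual snippy implementation.
import logging

def map_gunicorn_level(levelno):
    """Demote Gunicorn INFO logs to DEBUG and keep other levels as-is."""

    return logging.DEBUG if levelno == logging.INFO else levelno

assert map_gunicorn_level(logging.INFO) == logging.DEBUG
assert map_gunicorn_level(logging.ERROR) == logging.ERROR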
def test_logger_009(capsys):
    """Test Logger debugging.

    Test case verifies that the debug method works.
    """

    Logger.remove()
    Logger.configure({
        'debug': True,
        'log_json': True,
        'log_msg_max': Logger.DEFAULT_LOG_MSG_MAX,
        'quiet': False,
        'very_verbose': False
    })
    logger = Logger.get_logger('snippy.' + __name__)

    logger.warning('testing logger debug')
    Logger.debug()
    out, err = capsys.readouterr()
    assert not err
    assert 'snippy.tests.test_ut_logger' in out
def test_logger_010(capsys):
    """Test removing snippy Logger handlers.

    Test case verifies that Logger.remove() does not delete logging
    handlers other than those of the snippy package.
    """

    Logger.remove()
    Logger.configure({
        'debug': True,
        'log_json': True,
        'log_msg_max': Logger.DEFAULT_LOG_MSG_MAX,
        'quiet': False,
        'very_verbose': False
    })

    _ = Logger.get_logger('other.package')
    Logger.remove()  # Part of the test.
    Logger.debug()   # Part of the test.
    out, err = capsys.readouterr()
    assert not err
    assert 'Handler Stream' in out
def test_logger_015(capsys, caplog):
    """Test failure handling.

    Test case verifies that the log message length cannot exceed the
    safety limit that is defined for security reasons. Because the very
    verbose mode is used, the log messages are limited to the default
    length.
    """

    Logger.remove()
    Logger.configure({
        'debug': False,
        'log_json': False,
        'log_msg_max': Logger.SECURITY_LOG_MSG_MAX + Logger.DEFAULT_LOG_MSG_MAX,
        'quiet': False,
        'very_verbose': True
    })
    logger = Logger.get_logger('snippy.' + __name__)

    logger.warning('abcdefghij' * 100)
    logger.warning('VARIABLE %s', ('ABCDEFGHIJ' * 100))
    logger.security('SECURITY %s', ('ABCDEFGHIJ' * 100))
    out, err = capsys.readouterr()
    assert not err
    assert 'abcdefghijabcdefg...' in out
    assert 'abcdefghijabcdefgh...' in out
    assert 'variable abcdefghij' in out
    assert 'log message length: 10080 :cannot exceed security limit: 10000' in caplog.text
    assert len(caplog.records[1].msg) == Logger.DEFAULT_LOG_MSG_MAX
    assert len(caplog.records[2].msg) == Logger.DEFAULT_LOG_MSG_MAX
    assert len(caplog.records[3].msg) == Logger.DEFAULT_LOG_MSG_MAX
    assert caplog.records[0].msg.islower()
    assert caplog.records[1].msg.islower()
    assert caplog.records[2].msg.islower()
    assert caplog.records[3].msg.islower()
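# The caplog assertion above implies a guard that rejects a configured
# ``log_msg_max`` above the security limit and falls back to the default
# length. A hedged sketch of that guard; the function name is
# hypothetical, not the actual snippy implementation.
def validate_log_msg_max(log_msg_max, security_max, default):
    """Clamp an oversized log message length back to the default."""

    if log_msg_max > security_max:
        return default

    return log_msg_max

# With the values implied by the log message in the test: a configured
# length of 10000 (security limit) + 80 (default) = 10080 is rejected.
assert validate_log_msg_max(10080, 10000, 80) == 80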
def test_logger_008(capsys):
    """Test operation ID (OID).

    Test case verifies that operation ID (OID) refresh works.
    """

    Logger.remove()
    Logger.configure({
        'debug': True,
        'log_json': True,
        'log_msg_max': Logger.DEFAULT_LOG_MSG_MAX,
        'quiet': False,
        'very_verbose': False
    })
    logger = Logger.get_logger('snippy.' + __name__)

    logger.warning('first message')
    Logger.refresh_oid()
    logger.warning('second message')
    out, err = capsys.readouterr()
    assert not err
    assert json.loads(out.splitlines()[0])['oid'] != json.loads(out.splitlines()[1])['oid']
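# The test above implies that every log record carries an ``oid`` field
# and that ``Logger.refresh_oid()`` replaces it, so records belonging to
# different operations can be told apart. A hedged sketch of such an OID
# source; the class below is hypothetical.
import uuid

class Oid(object):  # pylint: disable=too-few-public-methods
    """Hold one operation ID at a time."""

    value = uuid.uuid4().hex

    @classmethod
    def refresh(cls):
        """Generate a new operation ID."""

        cls.value = uuid.uuid4().hex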
def __init__(self):
    self._logger = Logger.get_logger(__name__)
    self._data = self._init()
class Check(object):  # pylint: disable=too-few-public-methods
    """Healthcheck service for the Snippy server."""

    _logger = Logger.get_logger(__name__)

    @classmethod
    def run(cls):
        """Run server healthcheck.

        Print logs only from failures. Printing logs from all successful
        tests could be considered valid information to trace that the
        healthcheck was actually run and that it was successful. But too
        many log messages bury the relevant logs in a mass of logs. As of
        now, the extra logs are avoided.

        The httplib is a builtin module in Python. The requests module is
        much better from a usage point of view. This implementation was
        chosen to avoid unnecessary dependencies for a very simple task.

        Returns:
            int: Exit code 0 for success and 1 for failure.
        """

        if Config.server_ssl_cert:
            scheme = 'https://'
            conn = httplib.HTTPSConnection(Config.server_host, timeout=2)
        else:
            scheme = 'http://'
            conn = httplib.HTTPConnection(Config.server_host, timeout=2)
        url = scheme + Config.server_host + Config.server_base_path_rest + 'hello'

        exit_code = 1
        try:
            conn.request(method='GET', url=url)
            resp = conn.getresponse()
            if resp.status == 200:
                exit_code = 0
        except httplib.HTTPException:
            # ConnectionRefusedError cannot be caught because it does not
            # exist in Python 2. Catching it would cause a second exception
            # with any exception captured here. The Python 2 and 3
            # compatible solution falls in that case to the next Exception
            # branch.
            cls._log_exception('server healthcheck failed with exception')
        except Exception:  # pylint: disable=broad-except
            cls._log_exception('server healthcheck failed with unknown exception')
        try:
            conn.close()
        except Exception:  # pylint: disable=broad-except
            cls._log_exception('server healthcheck connection close failed')

        return exit_code

    @classmethod
    def _log_exception(cls, message):
        """Log exception.

        Args:
            message (str): Message for the exception log.
        """

        minimized = ' '.join(str(traceback.format_exc()).split())
        cls._logger.debug('{}: {}'.format(message, minimized))
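# A hedged usage sketch for the healthcheck service above: the returned
# exit code maps directly to a process exit status. The wiring below is
# an assumption, not the actual snippy entry point.
import sys

if __name__ == '__main__':
    sys.exit(Check.run())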
def __init__(self, content=None):
    self._logger = Logger.get_logger(__name__)
    self._category = content.category if content else None
    self._content = content
class Cause(object):
    """Cause code services."""

    ALL_OK = 'OK'

    # HTTP status codes.
    HTTP_200 = '200 OK'
    HTTP_201 = '201 Created'
    HTTP_204 = '204 No Content'
    HTTP_400 = '400 Bad Request'
    HTTP_403 = '403 Forbidden'
    HTTP_405 = '405 Method Not Allowed'
    HTTP_404 = '404 Not Found'
    HTTP_409 = '409 Conflict'
    HTTP_500 = '500 Internal Server Error'
    HTTP_OK = HTTP_200
    HTTP_CREATED = HTTP_201
    HTTP_NO_CONTENT = HTTP_204
    HTTP_BAD_REQUEST = HTTP_400
    HTTP_FORBIDDEN = HTTP_403
    HTTP_NOT_FOUND = HTTP_404
    HTTP_METHOD_NOT_ALLOWED = HTTP_405
    HTTP_CONFLICT = HTTP_409
    HTTP_INTERNAL_SERVER_ERROR = HTTP_500
    OK_STATUS_LIST = (HTTP_OK, HTTP_CREATED, HTTP_NO_CONTENT)
    HTTP_200_OK = 200
    HTTP_201_CREATED = 201
    HTTP_204_NO_CONTENT = 204
    HTTP_404_NOT_FOUND = 404

    _list = {'errors': []}
    _logger = Logger.get_logger(__name__)

    @classmethod
    def reset(cls):
        """Reset cause to initial value."""

        cause = cls.get_message()
        cls._list = {'errors': []}

        return cause

    @classmethod
    def push(cls, status, message):
        """Append cause to the list.

        The message will always contain only the text up to the first
        newline. The reason is that the message may come from an
        exception whose message may contain multiple lines. In this case
        it is always assumed that the first line contains the actual
        exception message. The whole message is always printed into the
        log.

        Args:
            status (str): One of the predefined HTTP status codes.
            message (str): Description of the cause.

        Examples
        --------
        >>> Cause.push(Cause.HTTP_CREATED, 'content created')
        """

        # Optimization: Prevent setting the caller module and line number
        # in case of success causes. Reading the line number requires file
        # access that is expensive and avoided in successful cases.
        caller = 'snippy.cause.cause.optimize:1'
        if status not in Cause.OK_STATUS_LIST:
            caller = cls._caller()
        cls._logger.debug('cause %s with message %s from %s', status, message, caller)
        cls._list['errors'].append({
            'status': int(status.split()[0]),
            'status_string': status,
            'module': caller,
            'title': message.splitlines()[0]
        })

    @classmethod
    def insert(cls, status, message):
        """Insert cause as the first cause.

        Args:
            status (str): One of the predefined HTTP status codes.
            message (str): Description of the cause.

        Examples
        --------
        >>> Cause.insert(Cause.HTTP_CREATED, 'content created')
        """

        cls.push(status, message)
        cls._list['errors'].insert(0, cls._list['errors'].pop())

    @classmethod
    def is_ok(cls):
        """Test if errors were detected.

        The status is considered ok in the following cases:

        1. There are no errors at all.
        2. There are only accepted error codes.
        3. Content has been created and there are only 409 Conflict errors.

        The last case is a special case. It is considered a successful
        case when multiple resources are imported and some of them have
        already been created.

        Returns:
            bool: Define if the cause list can be considered ok.
        """

        is_ok = False
        if not cls._list['errors']:
            is_ok = True
        elif all(error['status_string'] in Cause.OK_STATUS_LIST
                 for error in cls._list['errors']):
            is_ok = True
        elif (any(error['status_string'] == cls.HTTP_CREATED
                  for error in cls._list['errors']) and
              all(error['status_string'] in (cls.HTTP_CREATED, cls.HTTP_CONFLICT)
                  for error in cls._list['errors'])):
            is_ok = True

        return is_ok

    @classmethod
    def http_status(cls):
        """Return the HTTP status."""

        status = Cause.HTTP_OK
        if cls._list['errors']:
            status = cls._list['errors'][0]['status_string']

        return status

    @classmethod
    def json_message(cls):
        """Return errors in a JSON data structure."""

        response = cls._list
        response['meta'] = {
            'version': __version__,
            'homepage': __homepage__,
            'docs': __docs__,
            'openapi': __openapi__
        }

        return response

    @classmethod
    def get_message(cls):
        """Return cause message.

        Cause codes follow the same rules as the logs with the title or
        message. If there are variables within the message, the variables
        are separated with colons. The end user message is beautified so
        that if there is more than one colon, it indicates that a variable
        is in the middle of the message. This is not considered a good
        layout for command line interface messages. However, if there is
        only one colon, it is used to separate the last part, which is
        considered clear for the user. Because of these rules, the colon
        delimiters are removed only if there is more than one.

        Examples:

        1. cannot use empty content uuid for: delete :operation
        2. cannot find content with content uuid: 1234567
        """

        cause = Cause.ALL_OK
        if not cls.is_ok():
            message = cls._list['errors'][0]['title']
            if message.count(':') > 1:
                message = cls._list['errors'][0]['title'].replace(':', '')
            cause = 'NOK: ' + message

        return cause

    @classmethod
    def print_message(cls):
        """Print cause message."""

        Logger.print_status(cls.get_message())

    @classmethod
    def print_failure(cls):
        """Print only a failure message."""

        if not cls.is_ok():
            Logger.print_status(cls.get_message())

    @classmethod
    def debug(cls):
        """Debug Cause."""

        for idx, cause in enumerate(cls._list['errors']):
            print('cause[%d]:' % idx)
            print('  status : %s\n'
                  '  string : %s\n'
                  '  module : %s\n'
                  '  title  : %s\n' % (cause['status'],
                                       cause['status_string'],
                                       cause['module'],
                                       cause['title']))

    @classmethod
    def _is_internal_error(cls):
        """Test if an internal error was detected."""

        if any(error['status_string'] == cls.HTTP_INTERNAL_SERVER_ERROR
               for error in cls._list['errors']):
            return True

        return False

    @staticmethod
    def _caller():
        """Get caller module and code line."""

        # Optimization: inspect.stack reads the source code file, which
        # causes expensive file access. The context loading can be
        # switched off with the stack(0) setting /1/. A bit more efficient
        # way is to use sys._getframe, which according to /2/ is four
        # times faster than stack(0). Testing shows that there is a
        # noticeable difference but not that much.
        #
        # Try to avoid calling this method for performance reasons.
        #
        # /1/ https://stackoverflow.com/a/17407257
        # /2/ https://stackoverflow.com/a/45196608
        frame = sys._getframe(2)  # pylint: disable=protected-access
        info = inspect.getframeinfo(frame)
        module = inspect.getmodule(frame)
        location = module.__name__ + ':' + str(info.lineno)

        return location
class Generate(object):
    """Generate a body for an HTTP REST API response."""

    _logger = Logger.get_logger(__name__)

    @classmethod
    def resource(cls, collection, request, response, identity, field=Const.EMPTY, pagination=False):
        """Create an HTTP response body with a resource.

        The links ``self`` and data ``id`` attributes are always created
        from the resource digest attribute. The digest is considered the
        main ID.

        The ``self`` attribute cannot contain the URI from the request.
        If content is updated and a digest is used in the URI, the
        request URI is no longer correct after the resource update. The
        resource digest attribute changes when the resource changes.

        This should work with HTTP caching. The caching works so that the
        URI and response are cached. If the URI contained a UUID which
        does not change, the cached result could be incorrect. But when
        the link with the digest changes with the content, the cached
        result should always be correct. [1]

        [1] This is something that the author is not too confident about.

        Args:
            collection (Collection()): Collection with resources to be sent in the HTTP response.
            request (object): HTTP request.
            response (object): HTTP response.
            identity (str): Partial or full message digest or UUID.
            field (str): Content field attribute that was used in the HTTP request URL.
            pagination (bool): Define if pagination is used.

        Returns:
            body: JSON body as a string or compressed bytes.
        """

        data = {'data': {}, 'links': {}}
        for resource in collection.resources():
            uri = list(urlparse(request.uri))
            uri[2] = uri[2][:uri[2].index(identity)]  # Remove the resource identity and everything after it.
            uri = urlunparse(uri)
            uri = urljoin(uri, resource.uuid)
            if field:
                uri = urljoin(uri + '/', field)
            data['links'] = {'self': uri}
            data['data'] = {
                'type': resource.category,
                'id': resource.uuid,
                'attributes': resource.dump_dict(Config.remove_fields)
            }
            break

        if pagination:
            data['meta'] = {}
            data['meta']['count'] = 1
            data['meta']['limit'] = Config.search_limit
            data['meta']['offset'] = Config.search_offset
            data['meta']['total'] = collection.total

        if not data['data']:
            data = json.loads('{"links": {"self": "' + request.uri + '"}, "data": null}')

        return cls.compress(request, response, cls.dumps(data))

    @classmethod
    def collection(cls, collection, request, response, pagination=False):  # pylint: disable=too-many-locals,too-many-branches
        """Generate an HTTP body with multiple resources.

        The created body follows the JSON API specification.

        Args:
            collection (Collection()): Collection with resources to be sent in the HTTP response.
            request (object): HTTP request.
            response (object): HTTP response.
            pagination (bool): Define if pagination is used.

        Returns:
            body: JSON body as a string or compressed bytes.
        """

        data = {'data': []}
        for resource in collection.resources():
            data['data'].append({
                'type': resource.category,
                'id': resource.uuid,
                'attributes': resource.dump_dict(Config.remove_fields)
            })
        if pagination:
            data['meta'] = {}
            data['meta']['count'] = len(collection)
            data['meta']['limit'] = Config.search_limit
            data['meta']['offset'] = Config.search_offset
            data['meta']['total'] = collection.total

            # Rules
            #
            # 1. No pagination needed: add only self, first and last, which are all the same.
            # 2. First page with offset zero: do not add the prev link.
            # 3. Last page: do not add the next link.
            # 4. Sort the resulting URI query string in links to get deterministic results for testing.
            # 5. Add links only when the offset parameter is defined. Pagination makes sense only with an offset.
            # 6. In case the search limit is zero, only meta is requested and no links are needed.
            if request.get_param('offset', default=None) and Config.search_limit:
                data['links'] = {}
                self_offset = Config.search_offset

                # Sort the query parameters in the link URL to have a
                # deterministic URL for testing.
                url = re.sub(request.query_string, Const.EMPTY, request.uri)
                for param in sorted(request.params):
                    url = url + param + '=' + quote_plus(request.get_param(param)) + '&'
                url = url[:-1]  # Remove the last ampersand.

                # Set the offsets of the links.
                if Config.search_offset == 0 and Config.search_limit >= collection.total:
                    last_offset = self_offset
                    first_offset = self_offset
                else:
                    if Config.search_offset != 0:
                        # prev: o-l <0           ==> o=0   (less)
                        # prev: o-l>=0           ==> o=o-l (over)
                        # prev: o-l >t           ==> o=t-l (over) (N/P)
                        prev_offset = Config.search_offset - Config.search_limit if Config.search_offset - Config.search_limit > 0 else 0  # noqa pylint: disable=line-too-long
                        prev_link = re.sub(r'offset=\d+', 'offset=' + str(prev_offset), url)
                        data['links']['prev'] = prev_link
                    if Config.search_offset + Config.search_limit < collection.total:
                        # next: o+l<t            ==> o=o+l (less)
                        # next: o+l<t-l && o+l<t ==> o=o+l (last)
                        # next: o+l=t            ==> N/A   (even)
                        # next: o+l>t            ==> N/A   (over)
                        next_offset = Config.search_offset + Config.search_limit
                        next_link = re.sub(r'offset=\d+', 'offset=' + str(next_offset), url)
                        data['links']['next'] = next_link

                    # last: o+l<=t-l          ==> o=ceil(t/l)xl-l (less)
                    # last: o+l<t-l && o+l<t  ==> o=o+l (last)
                    # last: o+l=t             ==> o=o   (even)
                    # last: o+l>t             ==> o=t-l (over)
                    if Config.search_offset + Config.search_limit <= collection.total - Config.search_limit:
                        # Explicit float casting is needed for Python 2.7 to
                        # get a floating point result for ceil.
                        last_offset = int(math.ceil(float(collection.total) / float(Config.search_limit)) * Config.search_limit - Config.search_limit)  # noqa pylint: disable=line-too-long
                    elif collection.total - Config.search_limit < Config.search_offset + Config.search_limit < collection.total:  # noqa pylint: disable=line-too-long
                        last_offset = Config.search_offset + Config.search_limit
                    elif Config.search_offset + Config.search_limit == collection.total:
                        last_offset = self_offset
                    else:
                        last_offset = self_offset
                    first_offset = 0
                self_link = re.sub(r'offset=\d+', 'offset=' + str(self_offset), url)
                first_link = re.sub(r'offset=\d+', 'offset=' + str(first_offset), url)
                last_link = re.sub(r'offset=\d+', 'offset=' + str(last_offset), url)
                data['links']['self'] = self_link
                data['links']['first'] = first_link
                data['links']['last'] = last_link

        return cls.compress(request, response, cls.dumps(data))

    @classmethod
    def fields(cls, attribute, uniques, request, response):
        """Generate an HTTP body for the fields API endpoints.

        The created body follows the JSON API specification.

        Args:
            attribute (str): Resource attribute whose unique values are sent.
            uniques (dict): Unique values for the field.

        Returns:
            body: JSON body as a string or compressed bytes.
        """

        # Follow camelCase in field names because the expected usage is
        # from Javascript, which uses camelCase.
        fields = {}
        for field in uniques:
            fields[field[0]] = field[1]
        fields = OrderedDict(sorted(fields.items(), key=operator.itemgetter(1), reverse=True))
        data = {
            'data': {},
        }
        data['data'] = {'type': attribute, 'attributes': {attribute: fields}}

        return cls.compress(request, response, cls.dumps(data))

    @classmethod
    def error(cls, causes):
        """Generate an HTTP body with an error.

        The created body follows the JSON API specification.

        Args:
            causes (Cause()): Cause that is used to build the error response.

        Returns:
            body: JSON body as a string or compressed bytes.
        """

        # Follow camelCase in field names because the expected usage is
        # from Javascript, which uses camelCase.
        data = {'errors': [], 'meta': {}}
        for cause in causes['errors']:
            data['errors'].append({
                'status': str(cause['status']),
                'statusString': cause['status_string'],
                'title': cause['title'],
                'module': cause['module']
            })
        if not data['errors']:
            data = {
                'errors': [{
                    'status': '500',
                    'statusString': '500 Internal Server Error',
                    'title': 'Internal errors not found when error detected.'
                }]
            }
        data['meta'] = causes['meta']

        return cls.dumps(data)

    @classmethod
    def dumps(cls, body):
        """Create a string representation from a JSON body.

        The JSON body is converted to a string representation of the
        data structure. By default the body is pretty printed to help
        readability. Optionally it can be minified by removing all
        whitespace from the string.

        Args:
            body (dict): HTTP JSON response body in a dictionary.

        Returns:
            string: JSON string representation of the HTTP response body.
        """

        # Python 2 and Python 3 have different defaults for separators and
        # thus they have to be defined here. In case of Python 2, there is
        # whitespace after the comma which is not there with Python 3.
        kwargs = {'indent': 4, 'sort_keys': True, 'separators': (',', ': ')}
        if Config.server_minify_json:
            kwargs = {}

        return json.dumps(body, **kwargs)

    @classmethod
    def compress(cls, request, response, body):
        """Compress the HTTP response body.

        The response headers are updated if the response body is
        compressed.

        Args:
            request (object): Received HTTP request.
            response (object): HTTP response whose headers may be updated.
            body (str): String representation of the HTTP response body.

        Returns:
            string|bytes: Body compressed to bytes or the original JSON string.
        """

        if 'gzip' not in request.get_header('accept-encoding', default='').lower():
            return body

        response.set_header('content-encoding', 'gzip')
        if Const.PYTHON2:
            outfile = StringIO.StringIO()
            gzip_file = gzip.GzipFile(fileobj=outfile, mode="wb")
            gzip_file.write(body.encode('utf-8'))
            gzip_file.close()

            return outfile.getvalue()

        return gzip.compress(body.encode('utf-8'), compresslevel=9)  # Slowest with most compression.
class Editor(object):  # pylint: disable=too-few-public-methods
    """Text editor based content management."""

    _logger = Logger.get_logger(__name__)

    @classmethod
    def read(cls, timestamp, template_format, template, collection):
        """Read content from an editor.

        Args:
            timestamp (str): ISO 8601 timestamp to be used with the created collection.
            template_format (str): Template format.
            template (str): Default template for the editor.
            collection (Collection()): Collection to store the parsed content.
        """

        text = cls._call_editor(template)
        Parser(template_format, timestamp, text, collection).read()
        if not collection:
            Cause.push(Cause.HTTP_BAD_REQUEST, 'edited: {} :content could not be read - please keep template tags in place'.format(template_format))  # noqa pylint: disable=line-too-long

    @classmethod
    def _call_editor(cls, template):
        """Run an editor session."""

        import tempfile
        from subprocess import call

        # External dependencies are isolated in this method to ease
        # testing. This method is mocked to return the edited text.
        message = Const.EMPTY
        template = template.encode('UTF-8')
        editor = cls._get_editor()
        cls._logger.debug('using %s as editor', editor)
        with tempfile.NamedTemporaryFile(prefix='snippy-edit-') as outfile:
            outfile.write(template)
            outfile.flush()
            try:
                call([editor, outfile.name])
                outfile.seek(0)
                message = outfile.read()
                message = message.decode('UTF-8')
            except OSError as error:
                Cause.push(Cause.HTTP_INTERNAL_SERVER_ERROR,
                           'required editor %s not installed %s' % (editor, error))

        return message

    @classmethod
    def _get_editor(cls):
        """Try to resolve the editor in a secure way."""

        # Running a command blindly from an environment variable is not
        # safe because the call would execute any command stored in the
        # variable.
        editor = os.environ.get('EDITOR', 'vi')

        # Avoid editors other than the supported ones as of now for
        # security and functionality reasons. What is the safe way to
        # check the environment variables? What is the generic way to use
        # an editor in Windows and Mac?
        if editor != 'vi':
            cls._logger.debug('enforcing vi as default editor instead of %s', editor)
            editor = 'vi'

        return editor
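# A hedged usage sketch for the editor service above. The template
# string below is a placeholder; real templates come from the snippy
# content templates.
collection = Collection()
Editor.read('2018-06-22T13:10:33.295299+00:00',
            Const.CONTENT_FORMAT_MKDN,
            '# Add brief title below\n',
            collection)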
def __init__(self, content):
    self._logger = Logger.get_logger(__name__)
    self._category = content.category
    self._content = content
def __init__(self):
    self._logger = Logger.get_logger(__name__)
    self._database = Database()
    self._database.init()
def test_logger_017(caplog):
    """Test pretty printing logs.

    In debug mode when JSON logs are not enabled, the logs are pretty
    printed.
    """

    Logger.remove()
    Logger.configure({
        'debug': True,
        'log_json': False,
        'log_msg_max': Logger.DEFAULT_LOG_MSG_MAX,
        'quiet': False,
        'very_verbose': False
    })
    logger = Logger.get_logger('snippy.' + __name__)

    row = [(
        '0d364a0e-6b63-11e9-b176-2c4d54508088',
        'reference',
        'https://writingfordevelopers.substack.com/p/how-to-write-commit-messages\n'
        'https://chris.beams.io/posts/git-commit/',
        'How to write commit messages',
        '',
        '',
        'git',
        'commit,git,howto,message,scm',
        'https://writingfordevelopers.substack.com/p/how-to-write-commit-messages\n'
        'https://chris.beams.io/posts/git-commit/',
        '',
        '',
        '',
        '2018-06-22T13:10:33.295299+00:00',
        '2018-06-27T10:10:16.553052+00:00',
        '33da9768-1257-4419-b6df-881e19f07bbc',
        '6d221115da7b95409c59164632893a57419666135c08151ddbf0be976f3b20a3')
    ]
    output_p3 = (
        "format database row:",
        " [('0d364a0e-6b63-11e9-b176-2c4d54508088',",
        " 'reference',",
        " 'https://writingfordevelopers.substack.com/p/how-to-write-commit-messages\\n'",
        " 'https://chris.beams.io/posts/git-commit/',",
        " 'How to write commit messages',",
        " '',",
        " '',",
        " 'git',",
        " 'commit,git,howto,message,scm',",
        " 'https://writingfordevelopers.substack.com/p/how-to-write-commit-messages\\n'",
        " 'https://chris.beams.io/posts/git-commit/',",
        " '',",
        " '',",
        " '',",
        " '2018-06-22T13:10:33.295299+00:00',",
        " '2018-06-27T10:10:16.553052+00:00',",
        " '33da9768-1257-4419-b6df-881e19f07bbc',",
        " '6d221115da7b95409c59164632893a57419666135c08151ddbf0be976f3b20a3')]"
    )
    output_p2 = (
        "format database row:",
        " [('0d364a0e-6b63-11e9-b176-2c4d54508088',",
        " 'reference',",
        " 'https://writingfordevelopers.substack.com/p/how-to-write-commit-messages\\nhttps://chris.beams.io/posts/git-commit/',",  # noqa pylint: disable=line-too-long
        " 'How to write commit messages',",
        " '',",
        " '',",
        " 'git',",
        " 'commit,git,howto,message,scm',",
        " 'https://writingfordevelopers.substack.com/p/how-to-write-commit-messages\\nhttps://chris.beams.io/posts/git-commit/',",  # noqa pylint: disable=line-too-long
        " '',",
        " '',",
        " '',",
        " '2018-06-22T13:10:33.295299+00:00',",
        " '2018-06-27T10:10:16.553052+00:00',",
        " '33da9768-1257-4419-b6df-881e19f07bbc',",
        " '6d221115da7b95409c59164632893a57419666135c08151ddbf0be976f3b20a3')]"
    )

    # Log is pretty printed.
    logger.debug('format database row:\n%s', row)
    assert '\n'.join(output_p3) in caplog.text or '\n'.join(output_p2) in caplog.text

    caplog.clear()
    Logger.configure({
        'debug': True,
        'log_json': True,
        'log_msg_max': Logger.DEFAULT_LOG_MSG_MAX,
        'quiet': False,
        'very_verbose': False
    })
    output = (
        "format database row:",
        "[('0d364a0e-6b63-11e9-b176-2c4d54508088', 'reference', 'https://writingfordevelopers.substack.com/p/how-to-write-commit-messages\\nhttps://chris.beams.io/posts/git-commit/', 'How to write commit messages', '', '', 'git', 'commit,git,howto,message,scm', 'https://writingfordevelopers.substack.com/p/how-to-write-commit-messages\\nhttps://chris.beams.io/posts/git-commit/', '', '', '', '2018-06-22T13:10:33.295299+00:00', '2018-06-27T10:10:16.553052+00:00', '33da9768-1257-4419-b6df-881e19f07bbc', '6d221115da7b95409c59164632893a57419666135c08151ddbf0be976f3b20a3')]"  # noqa pylint: disable=line-too-long
    )

    # Log is not pretty printed because JSON logs are activated.
    logger.debug('format database row:\n%s', row)
    assert '\n'.join(output) in caplog.text
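# The pretty printed output asserted above is consistent with what the
# standard pprint module produces for a database row. A hedged sketch of
# the formatting step; the actual snippy formatter may differ in details.
import pprint

row = [('0d364a0e-6b63-11e9-b176-2c4d54508088', 'reference', 'How to write commit messages')]
print('format database row:\n %s' % pprint.pformat(row))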
class Migrate(object):
    """Import and export management."""

    _logger = Logger.get_logger(__name__)

    @classmethod
    def dump(cls, collection, filename):
        """Dump collection into a file."""

        if not Config.is_supported_file_format():
            cls._logger.debug('file format not supported for file %s', filename)
            return

        if not collection:
            Cause.push(Cause.HTTP_NOT_FOUND, 'no content found to be exported')
            return

        cls._logger.debug('exporting contents %s', filename)
        with open(filename, 'w') as outfile:
            try:
                dictionary = {
                    'meta': {
                        'updated': Config.utcnow(),
                        'version': __version__,
                        'homepage': __homepage__
                    },
                    'data': collection.dump_dict(Config.remove_fields)
                }
                if Config.is_operation_file_text:
                    outfile.write(collection.dump_text(Config.templates))
                elif Config.is_operation_file_json:
                    json.dump(dictionary, outfile)
                    outfile.write(Const.NEWLINE)
                elif Config.is_operation_file_mkdn:
                    outfile.write(collection.dump_mkdn(Config.templates))
                elif Config.is_operation_file_yaml:
                    yaml.safe_dump(dictionary, outfile, default_flow_style=False)
                else:
                    cls._logger.debug('unknown export file format')
            except (IOError, TypeError, ValueError, yaml.YAMLError) as error:
                cls._logger.exception('fatal failure to generate formatted export file "%s"', error)
                Cause.push(Cause.HTTP_INTERNAL_SERVER_ERROR,
                           'fatal failure while exporting content to file')

    @classmethod
    def dump_template(cls, category):
        """Dump a content template into a file."""

        filename = Config.get_operation_file()
        resource = Collection.get_resource(category, Config.utcnow())
        template = resource.get_template(category, Config.template_format, Config.templates)
        cls._logger.debug('exporting content template %s', filename)
        with open(filename, 'w') as outfile:
            try:
                outfile.write(template)
            except IOError as error:
                cls._logger.exception('fatal failure in creating %s template file "%s"', category, error)
                Cause.push(Cause.HTTP_INTERNAL_SERVER_ERROR,
                           'fatal failure while exporting template {}'.format(filename))

    @classmethod
    def dump_completion(cls, complete):
        """Dump a shell completion script into a file.

        Args:
            complete (str): Name of the shell for completion.
        """

        filename = Config.get_operation_file()
        path, _ = os.path.split(filename)
        cls._logger.debug('exporting: %s :completion: %s', Config.complete, filename)
        if not os.path.exists(path) or not os.access(path, os.W_OK):
            Cause.push(Cause.HTTP_BAD_REQUEST,
                       'cannot export: {} :completion file because path is not writable: {}'.format(complete, filename))  # noqa pylint: disable=line-too-long
            return

        with open(filename, 'w') as outfile:
            try:
                outfile.write(Config.completion[Config.complete])
            except IOError as error:
                cls._logger.exception('fatal failure when creating %s shell completion file: %s', filename, error)
                Cause.push(Cause.HTTP_INTERNAL_SERVER_ERROR,
                           'fatal failure while exporting shell completion {}'.format(filename))

    @classmethod
    def load(cls, filename):
        """Load a collection from a file."""

        collection = Collection()
        if not Config.is_supported_file_format():
            cls._logger.debug('file format not supported for file %s', filename)
            return collection

        cls._logger.debug('importing contents from file %s', filename)
        if os.path.isfile(filename):
            with open(filename, 'r') as infile:
                try:
                    timestamp = Config.utcnow()
                    if Config.is_operation_file_text:
                        collection.load_text(timestamp, infile.read())
                    elif Config.is_operation_file_mkdn:
                        collection.load_mkdn(timestamp, infile.read())
                    elif Config.is_operation_file_json:
                        dictionary = json.load(infile)
                        collection.load_dict(timestamp, dictionary)
                    elif Config.is_operation_file_yaml:
                        dictionary = yaml.safe_load(infile)
                        collection.load_dict(timestamp, dictionary)
                    else:
                        cls._logger.debug('unknown import file format')
                except (TypeError, ValueError, yaml.YAMLError) as error:
                    cls._logger.exception('fatal exception while loading file "%s"', error)
                    Cause.push(Cause.HTTP_INTERNAL_SERVER_ERROR,
                               'fatal failure while importing content from file')
        else:
            Cause.push(Cause.HTTP_NOT_FOUND, 'cannot read file {}'.format(filename))

        return collection
def __init__(self, args):
    Config.init(args)
    self._exit_code = 0
    self._logger = Logger.get_logger(__name__)
    self.storage = Storage()
    self.server = None
def __init__(self):
    self._logger = Logger.get_logger(__name__)
    self.tests = []
def __init__(self, storage):
    self._logger = Logger.get_logger(__name__)
    self.api = None
    self.storage = storage