Code example #1 (score: 0)
File: test_cli_options.py — Project: fossabot/snippy
    def test_debug_print_001(capsys):
        """Test printing the content.

        Development helper test: dump the stored snippets directly with
        the test case helper method and verify the printed layout.
        """

        expect = (
            '1. Remove all docker containers with volumes @docker [54e41e9b52a02b63]',
            '',
            '   $ docker rm --volumes $(docker ps --all --quiet)',
            '',
            '   # cleanup,container,docker,docker-ce,moby',
            '   > https://docs.docker.com/engine/reference/commandline/rm/',
            '',
            '   ! category    : snippet',
            '   ! created     : 2017-10-14T19:56:31.000001+00:00',
            '   ! description : ',
            '   ! digest      : 54e41e9b52a02b631b5c65a6a053fcbabc77ccd42b02c64fdfbc76efdb18e319 (True)',
            '   ! filename    : ',
            '   ! id          : a1cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            '   ! name        : ',
            '   ! source      : ',
            '   ! updated     : 2017-10-14T19:56:31.000001+00:00',
            '   ! uuid        : 11cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            '   ! versions    : ',
            '',
            '2. Remove docker image with force @docker [53908d68425c61dc]',
            '',
            '   $ docker rm --force redis',
            '',
            '   # cleanup,container,docker,docker-ce,moby',
            '   > https://docs.docker.com/engine/reference/commandline/rm/',
            '   > https://www.digitalocean.com/community/tutorials/how-to-remove-docker-images-containers-and-volumes',
            '',
            '   ! category    : snippet',
            '   ! created     : 2017-10-14T19:56:31.000001+00:00',
            '   ! description : ',
            '   ! digest      : 53908d68425c61dc310c9ce49d530bd858c5be197990491ca20dbe888e6deac5 (True)',
            '   ! filename    : ',
            '   ! id          : a2cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            '   ! name        : ',
            '   ! source      : ',
            '   ! updated     : 2017-10-14T19:56:31.000001+00:00',
            '   ! uuid        : 12cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            '   ! versions    : ',
            '',
            '# collection meta',
            '   ! total : 2'
        )
        print(Content.output())  # Part of the test.
        captured = capsys.readouterr()
        stdout = Helper.remove_ansi(captured.out)
        assert Const.NEWLINE.join(expect) in stdout
        assert not captured.err
Code example #2 (score: 0)
File: reference.py — Project: nur-oro-y-miel/snippy
class Reference(object):  # pylint: disable=too-few-public-methods
    """Default references for testing."""

    # Indexes into the _DEFAULTS tuple below.
    _GITLOG = 0
    _REGEXP = 1
    _PYTEST = 2

    # Default time is same for the default content. See 'Test case layouts and
    # data structures' for more information.
    DEFAULT_TIME = '2018-06-22T13:11:13.678729+00:00'

    # Default content must be always set so that it reflects content stored
    # into database. For example the tags must be sorted in correct order.
    # This forces defining erroneous content in each test case. This improves
    # the readability and maintainability of failure testing.
    #
    # NOTE: The literal data must not be edited. The 'digest' values are
    # checksums computed from the other attributes, so any change here would
    # break every test that compares against these defaults.
    _DEFAULTS = ({
        'category': 'reference',
        'data': (),
        'brief': 'How to write commit messages',
        'description': '',
        'name': '',
        'groups': ('git',),
        'tags': ('commit', 'git', 'howto'),
        'links': ('https://chris.beams.io/posts/git-commit/', ),
        'source': '',
        'versions': (),
        'languages': (),
        'filename': '',
        'created': DEFAULT_TIME,
        'updated': DEFAULT_TIME,
        'uuid': '31cd5827-b6ef-4067-b5ac-3ceac07dde9f',
        'digest': '5c2071094dbfaa33787064a6669e1fdfe49a86d07e58f12fffa0780eecdb227f'
    }, {
        'category': 'reference',
        'data': (),
        'brief': 'Python regular expression',
        'description': '',
        'name': '',
        'groups': ('python',),
        'tags': ('howto', 'online', 'python', 'regexp'),
        'links': ('https://www.cheatography.com/davechild/cheat-sheets/regular-expressions/',
                  'https://pythex.org/'),
        'source': '',
        'versions': (),
        'languages': (),
        'filename': '',
        'created': DEFAULT_TIME,
        'updated': DEFAULT_TIME,
        'uuid': '32cd5827-b6ef-4067-b5ac-3ceac07dde9f',
        'digest': 'cb9225a81eab8ced090649f795001509b85161246b46de7d12ab207698373832'
    }, {
        'category': 'reference',
        'data': (),
        'brief': 'Python pytest framework',
        'description': '',
        'name': '',
        'groups': ('python',),
        'tags': ('docs', 'pytest', 'python'),
        'links': ('https://docs.pytest.org/en/latest/skipping.html', ),
        'source': '',
        'versions': (),
        'languages': (),
        'filename': '',
        # The pytest reference deliberately uses a different timestamp than
        # DEFAULT_TIME, so it is excluded from the sanity check below.
        'created': '2016-04-21T12:10:11.678729+00:00',
        'updated': '2016-04-21T12:10:11.678729+00:00',
        'uuid': '33cd5827-b6ef-4067-b5ac-3ceac07dde9f',
        'digest': '1f9d9496005736efe321d44a28c05ca9ed0e53f7170743df361ddcd7b884455e'
    })

    # Convenience aliases so test cases do not need to index _DEFAULTS.
    GITLOG_CREATED = _DEFAULTS[_GITLOG]['created']
    GITLOG_UPDATED = _DEFAULTS[_GITLOG]['updated']
    REGEXP_CREATED = _DEFAULTS[_REGEXP]['created']
    REGEXP_UPDATED = _DEFAULTS[_REGEXP]['updated']
    PYTEST_CREATED = _DEFAULTS[_PYTEST]['created']
    PYTEST_UPDATED = _DEFAULTS[_PYTEST]['updated']

    # Import-time sanity check: fail fast if the default content timestamps
    # drift from DEFAULT_TIME (pytest content is intentionally exempt).
    if not DEFAULT_TIME == GITLOG_CREATED == GITLOG_UPDATED == REGEXP_CREATED == REGEXP_UPDATED:
        raise Exception('default content timestamps must be same - see \'Test case layouts and data structures\'')

    GITLOG_DIGEST = _DEFAULTS[_GITLOG]['digest']
    REGEXP_DIGEST = _DEFAULTS[_REGEXP]['digest']
    PYTEST_DIGEST = _DEFAULTS[_PYTEST]['digest']
    GITLOG_UUID = _DEFAULTS[_GITLOG]['uuid']
    REGEXP_UUID = _DEFAULTS[_REGEXP]['uuid']
    PYTEST_UUID = _DEFAULTS[_PYTEST]['uuid']

    GITLOG = _DEFAULTS[_GITLOG]
    REGEXP = _DEFAULTS[_REGEXP]
    PYTEST = _DEFAULTS[_PYTEST]
    DEFAULT_REFERENCES = (GITLOG, REGEXP)

    # Empty reference template in text format, split to a list of lines.
    TEMPLATE = Helper.read_template('reference.txt').split('\n')
    # Digest of the empty reference template.
    TEMPLATE_DIGEST_EMPTY = 'bb4c2540fab3a12b051b77b6902f426812ec95f8a1fa9e07ca1b7dc3cca0cc0d'
    # Expected reference template in the proprietary text format.
    TEMPLATE_TEXT = (
        '# Commented lines will be ignored.',
        '#',
        '# Add mandatory links below one link per line.',
        '',
        '',
        '# Add optional brief description below.',
        'Add brief title for content',
        '',
        '# Add optional description below.',
        'Add a description that defines the content in one chapter.',
        '',
        '# Add optional name below.',
        'example content handle',
        '',
        '# Add optional comma separated list of groups below.',
        'groups',
        '',
        '# Add optional comma separated list of tags below.',
        'example,tags',
        '',
        '# Add optional source reference below.',
        'https://www.example.com/source.md',
        '',
        '# Add optional comma separated list of key-value versions below.',
        'example=3.9.0,python>=3',
        '',
        '# Add optional comma separated list of languages below.',
        'example-language',
        '',
        '# Add optional filename below.',
        'example-content.md',
        '',
        '# Meta',
        'category : reference',
        'created  : 2018-02-02T02:02:02.000001+00:00',
        'digest   : f5693f66ec42e8f42687e8639df806e79aead1dad1a6c1b5448b6220995518bd',
        'updated  : 2018-02-02T02:02:02.000001+00:00',
        'uuid     : a1cd5827-b6ef-4067-b5ac-3ceac07dde9f',
        ''
    )
    # Expected reference template in Markdown format. The two trailing
    # spaces on the meta lines are significant Markdown line breaks.
    TEMPLATE_MKDN = (
        '# Add brief title for content @groups',
        '',
        '> Add a description that defines the content in one chapter.',
        '',
        '> [1] https://www.example.com/add-links-here.html',
        '',
        '## Meta',
        '',
        '> category  : reference  ',
        'created   : 2018-02-02T02:02:02.000001+00:00  ',
        'digest    : bb4c2540fab3a12b051b77b6902f426812ec95f8a1fa9e07ca1b7dc3cca0cc0d  ',
        'filename  : example-content.md  ',
        'languages : example-language  ',
        'name      : example content handle  ',
        'source    : https://www.example.com/source.md  ',
        'tags      : example,tags  ',
        'updated   : 2018-02-02T02:02:02.000001+00:00  ',
        'uuid      : a1cd5827-b6ef-4067-b5ac-3ceac07dde9f  ',
        'versions  : example=3.9.0,python>=3  ',
        ''
    )
Code example #3 (score: 0)
class Content(object):  # pylint: disable=too-many-public-methods, too-many-lines
    """Helper methods for content testing."""

    # categories
    SNIPPET = Const.SNIPPET

    # contents
    EXPORT_TIME = Helper.EXPORT_TIME
    IMPORT_TIME = Helper.IMPORT_TIME

    # snippets
    REMOVE_TIME = Snippet.REMOVE_CREATED  # Default snippet utc times must be same.
    FORCED_TIME = Snippet.FORCED_CREATED  # Default snippet utc must be same.
    EXITED_TIME = Snippet.EXITED_CREATED
    NETCAT_TIME = Snippet.NETCAT_CREATED
    UMOUNT_TIME = Snippet.UMOUNT_CREATED
    INTERP_TIME = Snippet.INTERP_CREATED

    # solutions
    BEATS_TIME = Solution.BEATS_CREATED  # Default solution utc must be same.
    NGINX_TIME = Solution.NGINX_CREATED  # Default solution utc must be same.
    KAFKA_TIME = Solution.KAFKA_CREATED
    KAFKA_MKDN_TIME = Solution.KAFKA_MKDN_CREATED

    # references
    GITLOG_TIME = Reference.GITLOG_CREATED  # Default reference utc must be same.
    REGEXP_TIME = Reference.REGEXP_CREATED  # Default reference utc must be same.
    PYTEST_TIME = Reference.PYTEST_CREATED

    # content formats
    JSON = Const.CONTENT_FORMAT_JSON
    MKDN = Const.CONTENT_FORMAT_MKDN
    TEXT = Const.CONTENT_FORMAT_TEXT
    YAML = Const.CONTENT_FORMAT_YAML

    # Mocker UUIDs.
    UUID1 = Database.TEST_UUIDS_STR[0]
    UUID2 = Database.TEST_UUIDS_STR[1]
    UUID_EDIT = Database.UUID_EDIT

    # completions
    COMPLETE_BASH = Helper.read_completion('snippy.bash-completion')

    # JSON schema validator shared by the REST API assert helpers.
    _schema = Helper.get_schema_validator()

    @staticmethod
    def store(content):
        """Store content into database.

        Args:
            content (dict): Content in a dictionary.
        """

        Database.store(content)

    @staticmethod
    def delete():
        """Delete all existing content and the database."""

        Database.delete_all_contents()
        Database.delete_storage()

    @staticmethod
    def output():
        """Print all content stored in database."""

        Database.print_contents()

    @staticmethod
    def db_cli_params():
        """Return required CLI parameters for database."""

        return Database.get_cli_params()

    @staticmethod
    def deepcopy(content):
        """Return a deepcopy from given content.

        This allows user to modify content without changing the original
        content.

        Args:
            content (dict): Single content that is copied.

        Returns:
            dict: Deepcopy of the content.
        """

        return copy.deepcopy(content)

    @classmethod
    def dump_text(cls, content):
        """Return text from given content.

        This can be used for example to convert test case content to text
        string to be used as a response from mocked editor.

        In order to be able to insert multiple Markdown contents to database,
        the UUID must be unique. Because of this, the conversion must not use
        the methods that masks the content fields to common values. This is
        applicable only to Markdown content which has the full metadata.

        The text string is returned from resource. The collection adds one
        extra newline in the string.

        Args:
            content (dict): Single content that is converted to text.

        Returns:
            str: Text string created from given content.
        """

        collection = Collection()
        collection.load_dict('2018-10-20T06:16:27.000001+00:00',
                             {'data': [content]})

        # Collection adds one extra newline which must be removed. The rstrip()
        # cannot be used because it would remove all the trailing newlines.
        return collection.dump_text(Config.templates)[:-1]

    @classmethod
    def dump_mkdn(cls, content):
        """Return Markdown from given content.

        See dump_text.

        Args:
            content (dict): Single content that is converted to Markdown.

        Returns:
            str: Text string in Markdown format created from given content.
        """

        collection = Collection()
        collection.load_dict('2018-10-20T06:16:27.000001+00:00',
                             {'data': [content]})

        return collection.dump_mkdn(Config.templates)

    @staticmethod
    def dump_dict(content):
        """Return content in dictionary format.

        Args:
            content (str): Content in text string format.

        Returns:
            dict: Content in dictionary format.
        """

        collection = Collection()
        collection.load_text(Content.IMPORT_TIME, content)

        return collection.dump_dict()[0]

    @staticmethod
    def get_collection(content):
        """Return collection from content.

        Args:
            content (dict): Content in a dictionary format.

        Returns:
            Collection(): Content stored in collection.
        """

        collection = Collection()
        collection.load(Const.CONTENT_FORMAT_DICT, Content.IMPORT_TIME,
                        {'data': [content]})

        return collection

    @classmethod
    def assert_storage(cls, content):
        """Compare content stored in database.

        The assert comparisons use equality implemented for collection data
        class. This guarantees that the count and content of resources are
        the same in database and expected content.

        If the result and expected content are compared only as collections,
        there are cases that are not noticed. A content fields in collection
        are for example sorted and trimmed in some cases. The given content
        dictionary format from test cases must be set correctly in order to
        keep the content definitions in test correct. Because of this, the
        content must be compared also in dictionary format.

        The original content must not be changed because it is in most cases
        default content shared between all tests.

        Args:
            content (dict): Expected content compared against database.
        """

        if not content:
            assert not Database.get_collection()

            return

        result_collection = Database.get_collection()
        result_dictionary = cls._get_db_dictionary(result_collection)
        expect_collection = cls._get_expect_collection(content)
        expect_dictionary = content
        try:
            assert result_collection == expect_collection
            assert result_dictionary == expect_dictionary
        except AssertionError:
            Content._print_assert(result_collection, expect_collection)
            Content._print_assert(result_dictionary, expect_dictionary)
            raise  # Re-raise to keep the original assertion message and traceback.

    @classmethod
    def assert_storage_size(cls, size):
        """Compare content count stored in database."""

        stored = len(Database.get_collection())
        try:
            assert size == stored
        except AssertionError:
            print('database contains {} contents when expected size was {}'.
                  format(stored, size))
            raise

    @classmethod
    def assert_restapi(cls, result, expect):
        """Compare content received from REST API.

        See the description for assert_storage method.

        Args:
            result (dict): Result JSON from REST API.
            expect (dict): Expected JSON in REST API response.
        """

        # Both values being None is a valid comparison result.
        if result is None and result is expect:
            return

        try:
            cls._schema.validate(result)
        except ValidationError as error:
            print('json validation error: {}'.format(error))
            assert 0
        except SchemaError as error:
            print('json schema error: {}'.format(error))
            assert 0

        result_dict = Content._get_result_restapi(result)
        expect_dict = Content._get_expect_restapi(expect)
        try:
            assert result_dict == expect_dict
        except AssertionError:
            print('result:')
            pprintpp.pprint(result_dict)
            print('expect:')
            pprintpp.pprint(expect_dict)
            raise

    @classmethod
    def assert_json(cls, json_mock, json_file, filename, content):
        """Compare JSON against expected content.

        See the description for assert_storage method.

        Args:
            json_mock (obj): Mocked JSON dump method.
            json_file (obj): Mocked file where the JSON content was saved.
            filename (str): Expected filename.
            content (dict): Expected content compared against generated JSON.
        """

        result_collection = cls._get_result_collection(
            Const.CONTENT_FORMAT_JSON, json_mock)
        result_dictionary = cls._get_result_dictionary(
            Const.CONTENT_FORMAT_JSON, json_mock)
        expect_collection = cls._get_expect_collection(content)
        expect_dictionary = content
        try:
            assert result_collection == expect_collection
            assert result_dictionary == expect_dictionary
            json_file.assert_called_once_with(filename, 'w')
        except AssertionError:
            Content._print_assert(result_collection, expect_collection)
            Content._print_assert(result_dictionary, expect_dictionary)
            raise

    @classmethod
    def assert_mkdn(cls, mkdn_mock, filename, content):
        """Compare Markdown against expected content.

        See the description for assert_storage method.

        Args:
            mkdn_mock (obj): Mocked file where the Markdown content was saved.
            filename (str): Expected filename.
            content (dict): Expected content compared against Markdown file.
        """

        result_collection = cls._get_result_collection(
            Const.CONTENT_FORMAT_MKDN, mkdn_mock)
        result_markdown = cls._read_text(Const.CONTENT_FORMAT_MKDN, mkdn_mock)
        expect_collection = cls._get_expect_collection(content)
        expect_markdown = result_collection.dump_mkdn(Config.templates)
        try:
            assert result_collection == expect_collection
            assert result_markdown == expect_markdown
            mkdn_mock.assert_called_once_with(filename, 'w')
        except AssertionError:
            Content._print_assert(result_collection, expect_collection)
            Content._print_assert(result_markdown, expect_markdown)
            raise

    @classmethod
    def assert_text(cls, text, filename, content):
        """Compare proprietary text format against expected content.

        See description for assert_storage method.

        Args:
            text (obj): Mocked file where the text content was saved.
            filename (str): Expected filename.
            content (dict): Expected content compared against the text file.
        """

        if not filename:
            text.assert_not_called()
            text.return_value.__enter__.return_value.write.assert_not_called()

            return

        result_collection = cls._get_result_collection(
            Const.CONTENT_FORMAT_TEXT, text)
        result_text = cls._read_text(Const.CONTENT_FORMAT_TEXT, text)
        expect_collection = cls._get_expect_collection(content)
        expect_text = expect_collection.dump_text(Config.templates)
        try:
            assert result_collection == expect_collection
            assert result_text == expect_text
            text.assert_called_once_with(filename, 'w')
        except AssertionError:
            Content._print_assert(result_collection, expect_collection)
            Content._print_assert(result_text, expect_text)
            raise

    @classmethod
    def assert_yaml(cls, yaml, yaml_file, filenames, content):
        """Compare YAML against expected content.

        See description for assert_storage method.

        Args:
            yaml (obj): Mocked YAML dump method.
            yaml_file (obj): Mocked file where the YAML content was saved.
            filenames (str|list): Expected filename or list of filenames.
            content (dict): Expected content compared against generated YAML.
        """

        result_collection = cls._get_result_collection(
            Const.CONTENT_FORMAT_YAML, yaml)
        result_dictionary = cls._get_result_dictionary(
            Const.CONTENT_FORMAT_YAML, yaml)
        expect_collection = cls._get_expect_collection(content)
        expect_dictionary = content
        try:
            assert result_collection == expect_collection
            assert result_dictionary == expect_dictionary
            if isinstance(filenames, (list, tuple)):
                for filename in filenames:
                    yaml_file.assert_any_call(filename, 'w')
            else:
                yaml_file.assert_called_once_with(filenames, 'w')
        except AssertionError:
            Content._print_assert(result_collection, expect_collection)
            Content._print_assert(result_dictionary, expect_dictionary)
            raise

    @staticmethod
    def get_api_meta():
        """Return default REST API metadata."""

        meta = {
            'version': __version__,
            'homepage': __homepage__,
            'docs': __docs__,
            'openapi': __openapi__
        }

        return meta

    @staticmethod
    def get_cli_meta():
        """Return default metadata for exported data."""

        meta = {
            'updated': Content.EXPORT_TIME,
            'version': __version__,
            'homepage': __homepage__
        }

        return meta

    @staticmethod
    def get_file_content(content_format, contents):
        """Return mocked file.

        The method returns file content for different content formats. Returned
        type changes depending on the content format. For example JSON and YAML
        require simple dictionary for the file content but text files require a
        Mocked open.

        Args:
            content_format (str): Content format.
            contents (dict): Content dictionary.

        Returns:
            Mock or dict: Dictionary or mocked file open depending on content.
        """

        if content_format in (Content.JSON, Content.YAML):
            return {'data': contents['data']}

        mocked_file = Const.EMPTY
        for content in contents['data']:
            if content_format == Content.TEXT:
                mocked_file = mocked_file + Content.dump_text(content)
            elif content_format == Content.MKDN:
                mocked_file = mocked_file + Content.dump_mkdn(content)
                mocked_file = mocked_file + '\n---\n\n'

        if content_format == Content.MKDN:
            # Remove the last six character separator appended after the
            # final Markdown content above.
            mocked_file = mocked_file[:-6]

        return mock.mock_open(read_data=mocked_file)

    @staticmethod
    def _get_expect_restapi(expect):
        """Return comparable dictionary from expected content.

        The expected dictionary is a default content defined for testing. The
        default content is defined to be in storage format which uses tuples
        instead of lists which are used in the REST API. This method converts
        and copies the expected content to REST API format.

        The original expected data must not be changed because it is shared
        between all tests.

        Only the responses with the 'data' key have to be modified. This key
        contains default content stored in the database format which needs to
        be modified to match the response from the JSON REST API.

        With field category types like ``groups`` or ``tags`` there is no need
        to convert the expected dictionary. These resources are never stored
        in a list.

        Args:
            expect (dict): Expected JSON in REST API response.

        Returns:
            dict: Comparable JSON REST API dictionary from expected content.
        """
        def _convert(content):
            """Convert content attributes to list."""

            if 'data' in content:
                content['data'] = list(content['data'])
            if 'groups' in content:
                content['groups'] = list(content['groups'])
            if 'tags' in content:
                content['tags'] = list(content['tags'])
            if 'links' in content:
                content['links'] = list(content['links'])
            if 'versions' in content:
                content['versions'] = list(content['versions'])
            if 'languages' in content:
                content['languages'] = list(content['languages'])

        if 'data' not in expect:
            return expect

        if not isinstance(expect['data'], list) and expect['data']['type'] in (
                'groups', 'tags'):
            return expect

        # Deepcopy so that the shared default content is never mutated.
        expect = copy.deepcopy(expect)
        try:
            if isinstance(expect['data'], list):
                for data in expect['data']:
                    _convert(data['attributes'])
            else:
                _convert(expect['data']['attributes'])
        except KeyError:
            raise Exception(
                'test case failure:\n\n{}\nwith dictionary:\n{}'.format(
                    traceback.format_exc(),
                    json.dumps(expect, sort_keys=True, indent=4)))

        return expect

    @staticmethod
    def _get_result_restapi(result):
        """Return comparable dictionary from test case result.

        Error response from JSON API can contain error string resulted from
        REST API schema validation. This error string can be a long JSON
        structure. For simplicity and test case maintainability reasons,
        the schema validation error is masked away.

        See the description for assert_storage method.

        Args:
            result (dict): Result JSON from REST API.

        Returns:
            dict: Comparable JSON REST API dictionary from test case result.
        """

        try:
            for error in result['errors']:
                if 'json media validation failed' in error['title']:
                    error['title'] = 'json media validation failed'
        except KeyError:
            pass

        return result

    @classmethod
    def _get_db_dictionary(cls, collection):
        """Return comparable dictionary from database.

        See the description for assert_storage method.

        Returns:
            dict: Comparable dictionary from database.
        """

        dictionary = {}
        dictionary['data'] = collection.dump_dict()

        return dictionary

    @staticmethod
    def _get_result_dictionary(content_format, mock_object):
        """Return comparable dictionary from test case result.

        See the description for assert_storage method.

        Args:
            content_format (str): Content format stored in mock.
            mock_object (obj): Mock object where content was stored.

        Returns:
            dict: Comparable dictionary from the test case mock.
        """

        dictionary = {}
        if content_format == Const.CONTENT_FORMAT_JSON:
            dictionary = mock_object.dump.mock_calls[0][1][0]
        elif content_format == Const.CONTENT_FORMAT_YAML:
            for call in mock_object.safe_dump.mock_calls:
                if 'data' not in dictionary:
                    dictionary = call[1][0]
                else:
                    dictionary['data'].append(call[1][0]['data'][0])

        return dictionary

    @staticmethod
    def _get_result_collection(content_format, mock_object):
        """Return comparable collection from test case result.

        See the description for assert_storage method.

        Args:
            content_format (str): Content format stored in mock.
            mock_object (obj): Mock object where content was stored.

        Returns:
            Collection(): Comparable collection from test case result.
        """

        collection = Collection()
        if content_format == Const.CONTENT_FORMAT_JSON:
            for call in mock_object.dump.mock_calls:
                collection.load_dict(Content.IMPORT_TIME, call[1][0])
        elif content_format in (Const.CONTENT_FORMAT_MKDN,
                                Const.CONTENT_FORMAT_TEXT):
            handle = mock_object.return_value.__enter__.return_value
            for call in handle.write.mock_calls:
                collection.load(content_format, Content.IMPORT_TIME,
                                call[1][0])
        elif content_format == Const.CONTENT_FORMAT_YAML:
            for call in mock_object.safe_dump.mock_calls:
                collection.load_dict(Content.IMPORT_TIME, call[1][0])

        return collection

    @staticmethod
    def _get_expect_collection(content):
        """Return comparable collection from expected content.

        See the description for assert_storage method.

        Args:
            content (dict): Reference content.

        Returns:
            Collection(): Comparable collection from expected content.
        """

        references = Collection()
        references.load_dict(Content.IMPORT_TIME, {'data': content['data']})

        return references

    @staticmethod
    def _read_text(content_format, mock_object):
        """Return text saved in mock.

        See description for assert_storage method.

        Args:
            content_format (str): Content format stored in mock.
            mock_object (obj): Mock object where content was stored.

        Returns:
            str: String from the mocked object.
        """

        text = Const.EMPTY
        if content_format in (Const.CONTENT_FORMAT_MKDN,
                              Const.CONTENT_FORMAT_TEXT):
            handle = mock_object.return_value.__enter__.return_value
            for call in handle.write.mock_calls:
                text = text + call[1][0]

        return text

    @staticmethod
    def _print_assert(result, expect):
        """Print differences between results and expected values.

        Args:
            result: Result value from test.
            expect: Expected value defined in test.
        """

        print("=" * 120)
        if type(result) is not type(expect):
            print("Cannot compare different types.")
            print(result)
            print(expect)

            return

        if result == expect:
            print("Comparing result and expected types of {} which are equal.".
                  format(type(expect)))

            return

        if isinstance(result, Collection):
            if expect.keys() != result.keys():
                print("Asserted collections do not have same resources.")
                print("result")
                for digest in result.keys():
                    pprintpp.pprint(result[digest].dump_dict([]))
                print("=" * 120)
                print("expect")
                for digest in expect.keys():
                    pprintpp.pprint(expect[digest].dump_dict([]))

                return

            for digest in expect.keys():
                result_dict = result[digest].dump_dict(
                    []) if digest in result.keys() else {}
                expect_dict = expect[digest].dump_dict([])
                pprintpp.pprint(result_dict)
                pprintpp.pprint(expect_dict)
                fields = [
                    field for field in result_dict
                    if result_dict[field] != expect_dict[field]
                ]
                print("Differences in resource: {:.16}".format(digest))
                print("=" * 120)
                for field in fields:
                    print("result[{:.16}].{}:".format(digest, field))
                    pprintpp.pprint(result_dict[field])
                    print("expect[{:.16}].{}:".format(digest, field))
                    pprintpp.pprint(expect_dict[field])
        elif isinstance(result, dict):
            print(
                "Comparing result and expected types of {} which are different."
                .format(type(expect)))
            pprintpp.pprint(result)
            pprintpp.pprint(expect)
            fields = [
                field for field in expect if expect[field] != result[field]
            ]
            print("=" * 120)
            for field in fields:
                print("result {}:".format(field))
                pprintpp.pprint(result[field])
                print("expect {}:".format(field))
                pprintpp.pprint(expect[field])
        elif isinstance(result, str):
            print(
                "Comparing result and expected types of {} which are different."
                .format(type(expect)))
            print("(%s)" % result)
            print("=" * 120)
            print("(%s)" % expect)
        print("=" * 120)
Code example #4 (score: 0)
File: snippet.py — Project: fossabot/snippy
class Snippet(object):  # pylint: disable=too-few-public-methods
    """Default snippets for testing.

    Static reference content used by the test suite. The dictionaries in
    _DEFAULTS mirror exactly what gets stored into the database, including
    the precomputed digest of each resource, so they must never be edited
    without recomputing the digests.
    """

    # Indexes into the _DEFAULTS tuple below; each name describes the
    # snippet stored at that position.
    _REMOVE = 0
    _FORCED = 1
    _EXITED = 2
    _NETCAT = 3
    _UMOUNT = 4
    _INTERP = 5

    # Default time is same for the default content. See 'Test case layouts and
    # data structures' for more information.
    DEFAULT_TIME = '2017-10-14T19:56:31.000001+00:00'

    # Default content must be always set so that it reflects content stored
    # into database. For example the tags must be sorted in correct order.
    # This forces defining erroneous content in each test case. This improves
    # the readability and maintainability of failure testing.
    _DEFAULTS = (
        # _REMOVE: remove all docker containers with volumes.
        {
            'category':
            'snippet',
            'data': ('docker rm --volumes $(docker ps --all --quiet)', ),
            'brief':
            'Remove all docker containers with volumes',
            'description':
            '',
            'name':
            '',
            'groups': ('docker', ),
            'tags': ('cleanup', 'container', 'docker', 'docker-ce', 'moby'),
            'links':
            ('https://docs.docker.com/engine/reference/commandline/rm/', ),
            'source':
            '',
            'versions': (),
            'filename':
            '',
            'created':
            DEFAULT_TIME,
            'updated':
            DEFAULT_TIME,
            'uuid':
            '11cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            'digest':
            '54e41e9b52a02b631b5c65a6a053fcbabc77ccd42b02c64fdfbc76efdb18e319'
        },
        # _FORCED: force remove a docker image.
        {
            'category':
            'snippet',
            'data': ('docker rm --force redis', ),
            'brief':
            'Remove docker image with force',
            'description':
            '',
            'name':
            '',
            'groups': ('docker', ),
            'tags': ('cleanup', 'container', 'docker', 'docker-ce', 'moby'),
            'links':
            ('https://docs.docker.com/engine/reference/commandline/rm/',
             'https://www.digitalocean.com/community/tutorials/how-to-remove-docker-'
             + 'images-containers-and-volumes'),
            'source':
            '',
            'versions': (),
            'filename':
            '',
            'uuid':
            '12cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            'created':
            DEFAULT_TIME,
            'updated':
            DEFAULT_TIME,
            'digest':
            '53908d68425c61dc310c9ce49d530bd858c5be197990491ca20dbe888e6deac5'
        },
        # _EXITED: clean up exited containers and dangling images. Note the
        # timestamps differ from DEFAULT_TIME because this is not one of the
        # two default snippets.
        {
            'category':
            'snippet',
            'data':
            ('docker rm $(docker ps --all -q -f status=exited)',
             'docker images -q --filter dangling=true | xargs docker rmi'),
            'brief':
            'Remove all exited containers and dangling images',
            'description':
            '',
            'name':
            '',
            'groups': ('docker', ),
            'tags':
            ('cleanup', 'container', 'docker', 'docker-ce', 'image', 'moby'),
            'links':
            ('https://docs.docker.com/engine/reference/commandline/images/',
             'https://docs.docker.com/engine/reference/commandline/rm/',
             'https://docs.docker.com/engine/reference/commandline/rmi/'),
            'source':
            '',
            'versions': (),
            'filename':
            '',
            'created':
            '2017-10-20T07:08:45.000001+00:00',
            'updated':
            '2017-10-20T07:08:45.000001+00:00',
            'uuid':
            '13cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            'digest':
            '49d6916b6711f13d67960905c4698236d8a66b38922b04753b99d42a310bcf73'
        },
        # _NETCAT: test an open network port with netcat/nmap.
        {
            'category':
            'snippet',
            'data': ('nc -v 10.183.19.189 443', 'nmap 10.183.19.189'),
            'brief':
            'Test if specific port is open',
            'description':
            '',
            'name':
            '',
            'groups': ('linux', ),
            'tags': ('linux', 'netcat', 'networking', 'port'),
            'links':
            ('https://www.commandlinux.com/man-page/man1/nc.1.html', ),
            'source':
            '',
            'versions': (),
            'filename':
            '',
            'created':
            '2017-10-20T07:08:45.000001+00:00',
            'updated':
            '2017-10-20T07:08:45.000001+00:00',
            'uuid':
            '14cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            'digest':
            'f3fd167c64b6f97e5dab4a3aebef678ef7361ba8c4a5acbc1d3faff968d4402d'
        },
        # _UMOUNT: unmount a busy device.
        {
            'category':
            'snippet',
            'data': ('lsof | grep \'/tmp/overlayfs/overlay\'', 'kill <pid>',
                     'umount /tmp/overlayfs/overlay'),
            'brief':
            'Umount a busy device',
            'description':
            '',
            'name':
            '',
            'groups': ('linux', ),
            'tags': ('device', 'linux', 'umount'),
            'links': ('https://stackoverflow.com/a/7878763', ),
            'source':
            '',
            'versions': (),
            'filename':
            '',
            'created':
            '2018-05-07T11:11:55.000001+00:00',
            'updated':
            '2018-05-07T11:11:55.000001+00:00',
            'uuid':
            '15cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            'digest':
            '490c913cf941a0bedc14e3d390894958b3db4220dc2b1b856454403c888df17f'
        },
        # _INTERP: recursive git status over subdirectories.
        {
            'category':
            'snippet',
            'data':
            ('find . -type d -name \'.git\' | while read dir ; do sh -c "cd $dir/../ && echo -e \\"\\nGIT STATUS IN ${dir//\\.git/}\\" && git status -s" ; done',
             ),  # pylint: disable=line-too-long
            'brief':
            'Perform recursive git status on subdirectories',
            'description':
            '',
            'name':
            '',
            'groups': ('git', ),
            'tags': ('git', 'status'),
            'links': ('https://gist.github.com/tafkey/664266c00387c98631b3', ),
            'source':
            '',
            'versions': (),
            'filename':
            '',
            'created':
            '2018-01-11T07:59:46.000001+00:00',
            'updated':
            '2018-01-11T07:59:46.000001+00:00',
            'uuid':
            '16cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            'digest':
            '9e1949c2810df2a50137f0a4056b7992529b37632d9db0da7040d17bf16f5bd3'
        })

    # Convenience aliases for the created/updated timestamps of each
    # default snippet so test cases do not index _DEFAULTS directly.
    REMOVE_CREATED = _DEFAULTS[_REMOVE]['created']
    REMOVE_UPDATED = _DEFAULTS[_REMOVE]['updated']
    FORCED_CREATED = _DEFAULTS[_FORCED]['created']
    FORCED_UPDATED = _DEFAULTS[_FORCED]['updated']
    EXITED_CREATED = _DEFAULTS[_EXITED]['created']
    EXITED_UPDATED = _DEFAULTS[_EXITED]['updated']
    NETCAT_CREATED = _DEFAULTS[_NETCAT]['created']
    NETCAT_UPDATED = _DEFAULTS[_NETCAT]['updated']
    UMOUNT_CREATED = _DEFAULTS[_UMOUNT]['created']
    UMOUNT_UPDATED = _DEFAULTS[_UMOUNT]['updated']
    INTERP_CREATED = _DEFAULTS[_INTERP]['created']
    INTERP_UPDATED = _DEFAULTS[_INTERP]['updated']

    # Sanity check at class definition time: only REMOVE and FORCED are the
    # default snippets (see DEFAULT_SNIPPETS below), so only their timestamps
    # must equal DEFAULT_TIME.
    if not DEFAULT_TIME == REMOVE_CREATED == REMOVE_UPDATED == FORCED_CREATED == FORCED_UPDATED:
        raise Exception(
            'default content timestamps must be same - see \'Test case layouts and data structures\''
        )

    # Convenience aliases for the precomputed digests and UUIDs.
    REMOVE_DIGEST = _DEFAULTS[_REMOVE]['digest']
    FORCED_DIGEST = _DEFAULTS[_FORCED]['digest']
    EXITED_DIGEST = _DEFAULTS[_EXITED]['digest']
    NETCAT_DIGEST = _DEFAULTS[_NETCAT]['digest']
    UMOUNT_DIGEST = _DEFAULTS[_UMOUNT]['digest']
    INTERP_DIGEST = _DEFAULTS[_INTERP]['digest']
    REMOVE_UUID = _DEFAULTS[_REMOVE]['uuid']
    FORCED_UUID = _DEFAULTS[_FORCED]['uuid']
    EXITED_UUID = _DEFAULTS[_EXITED]['uuid']
    NETCAT_UUID = _DEFAULTS[_NETCAT]['uuid']
    UMOUNT_UUID = _DEFAULTS[_UMOUNT]['uuid']
    INTERP_UUID = _DEFAULTS[_INTERP]['uuid']

    # Public aliases for the full default dictionaries.
    REMOVE = _DEFAULTS[_REMOVE]
    FORCED = _DEFAULTS[_FORCED]
    EXITED = _DEFAULTS[_EXITED]
    NETCAT = _DEFAULTS[_NETCAT]
    UMOUNT = _DEFAULTS[_UMOUNT]
    INTERP = _DEFAULTS[_INTERP]
    DEFAULT_SNIPPETS = (REMOVE, FORCED)

    # Empty snippet template as a list of lines, read from the test data
    # files (Helper is defined elsewhere in the test helpers).
    TEMPLATE = Helper.read_template('snippet.txt').split('\n')
    # Digest of the template when no user content has been filled in.
    TEMPLATE_DIGEST_EMPTY = 'b4bedc2603e3b9ea95bcf53cb7b8aa6efa31eabb788eed60fccf3d8029a6a6cc'
    # Expected text format rendering of the template, line by line.
    TEMPLATE_TEXT = (
        '# Commented lines will be ignored.', '#',
        '# Add mandatory snippet below.', '', '',
        '# Add optional brief description below.',
        'Add brief title for content', '', '# Add optional description below.',
        'Add a description that defines the content in one chapter.', '',
        '# Add optional name below.', 'example content handle', '',
        '# Add optional comma separated list of groups below.', 'groups', '',
        '# Add optional comma separated list of tags below.', 'example,tags',
        '', '# Add optional links below one link per line.',
        'https://www.example.com/add-links-here.html', '',
        '# Add optional source reference below.',
        'https://www.example.com/source.md', '',
        '# Add optional comma separated list of key-value versions below.',
        'example=3.9.0,python>=3', '', '# Add optional filename below.',
        'example-content.md', '', '# Meta', 'category : snippet',
        'created  : 2018-02-02T02:02:02.000001+00:00',
        'digest   : b3e9bcf808ba86461bee74dca40f62e518ce306ecbac6a18adea78b30a0fe346',
        'updated  : 2018-02-02T02:02:02.000001+00:00',
        'uuid     : a1cd5827-b6ef-4067-b5ac-3ceac07dde9f', '')
    # Expected Markdown format rendering of the template, line by line.
    TEMPLATE_MKDN = (
        '# Add brief title for content @groups', '',
        '> Add a description that defines the content in one chapter.', '',
        '> [1] https://www.example.com/add-links-here.html', '',
        '`$ Markdown commands are defined between backtics and prefixed by a dollar sign`',
        '', '## Meta', '', '> category : snippet  ',
        'created  : 2018-02-02T02:02:02.000001+00:00  ',
        'digest   : 8d5193fea452d0334378a73ded829cfa27debea7ee87714d64b1b492d1a4601a  ',
        'filename : example-content.md  ',
        'name     : example content handle  ',
        'source   : https://www.example.com/source.md  ',
        'tags     : example,tags  ',
        'updated  : 2018-02-02T02:02:02.000001+00:00  ',
        'uuid     : a1cd5827-b6ef-4067-b5ac-3ceac07dde9f  ',
        'versions : example=3.9.0,python>=3  ', '')
コード例 #5
0
    def test_collection_operations_001(capsys):
        """Verify the data class protocol of an empty Collection.

        Exercise truthiness, equality, iteration, key and value access,
        the resource generator, printing and item deletion when the
        collection holds no resources at all.
        """

        primary = Collection()

        # An empty collection is falsy and reports zero length.
        assert len(primary) == 0
        assert not primary

        # Truthiness works in both positive and negative conditions.
        assert not bool(primary)
        assert bool(not primary)

        # Two freshly created empty collections compare equal ...
        secondary = Collection()
        assert primary == secondary

        # ... and are therefore not unequal.
        assert not primary != secondary

        # Iterating an empty collection must never yield a resource.
        for _ in primary:
            raise AssertionError('empty collection must not yield resources')

        # Neither keys (digests) nor values (resources) exist.
        assert not primary.keys()
        assert not primary.values()

        # The resource generator is exhausted immediately.
        generator = primary.resources()
        with pytest.raises(StopIteration):
            next(generator)

        # Printing an empty collection renders only the meta section.
        expected = ('# collection meta', '   ! total : 0', '', '')
        print(primary)  # Part of the test.
        captured_out, captured_err = capsys.readouterr()
        captured_out = Helper.remove_ansi(captured_out)
        assert captured_out == Const.NEWLINE.join(expected)
        assert not captured_err

        # Indexing a non existent resource raises KeyError.
        with pytest.raises(KeyError):
            _ = primary[0]

        # Deleting a non existent resource by digest string raises KeyError.
        with pytest.raises(KeyError):
            del primary['012123']

        # Deleting a non existent resource by number raises KeyError.
        with pytest.raises(KeyError):
            del primary[0]

        # Separately created objects must not share identity.
        assert primary is not secondary

        # An alias must refer to the very same object.
        alias = primary
        assert alias is primary
コード例 #6
0
    def test_collection_operations_002(capsys):  # pylint: disable=too-many-branches
        """Test collection data class operations.

        Verify that collection class implements data class methods correctly.
        In this case there is only one resource in collection. The expected
        digest values below are precomputed from the loaded content and must
        match exactly.
        """

        collection = Collection()
        collection.load_dict(
            Helper.EXPORT_TIME, {
                'data': [{
                    'data': [
                        'tar cvfz mytar.tar.gz --exclude="mytar.tar.gz" ./',
                        'tar xfO mytar.tar.gz manifest.json# Cat file in compressed tar.'
                    ],
                    'brief':
                    'Manipulate compressed tar files',
                    'groups': ['linux'],
                    'tags': ['howto', 'linux', 'tar', 'untar'],
                    'category':
                    Const.SNIPPET
                }]
            })

        # Collection with len().
        assert len(collection) == 1

        # Collection with condition.
        if collection:
            assert 1
        else:
            assert 0

        # Collection with negative condition.
        if not collection:
            assert 0
        else:
            assert 1

        # Equality of two different collections where the UUID differs.
        # NOTE(review): the loaded content is identical; presumably each
        # load_dict assigns a new UUID which makes the collections unequal
        # - confirm against the Collection implementation.
        collection2 = Collection()
        collection2.load_dict(
            Helper.EXPORT_TIME, {
                'data': [{
                    'data': [
                        'tar cvfz mytar.tar.gz --exclude="mytar.tar.gz" ./',
                        'tar xfO mytar.tar.gz manifest.json# Cat file in compressed tar.'
                    ],
                    'brief':
                    'Manipulate compressed tar files',
                    'groups': ['linux'],
                    'tags': ['howto', 'linux', 'tar', 'untar'],
                    'category':
                    Const.SNIPPET
                }]
            })
        if collection == collection2:
            assert 0
        else:
            assert 1

        # Non equality of two different collections.
        if collection != collection2:
            assert 1
        else:
            assert 0

        # Equality of two same collections.
        collection2 = collection
        if collection == collection2:
            assert 1
        else:
            assert 0

        # Non equality of same collections.
        if collection != collection2:
            assert 0
        else:
            assert 1

        # Equality of two collection with different length.
        collection2 = Collection()
        if collection == collection2:
            assert 0
        else:
            assert 1

        # Equality collection and random type.
        if collection == 1:
            assert 0
        else:
            assert 1

        # Iterate resources in collection.
        i = 0
        for resource in collection:
            resource.digest = resource.digest
            i = i + 1
        assert i == 1

        # Get list of keys (digest) from collection.
        assert len(collection.keys()) == 1
        assert collection.keys() == list([
            'e79ae51895908c5a40e570dc60a4dd594febdecf781c77c7b3cad37f9e0b7240'
        ])

        # Get list of values (resources) from collection.
        assert len(collection.values()) == 1
        assert collection.values()[0] == collection[
            'e79ae51895908c5a40e570dc60a4dd594febdecf781c77c7b3cad37f9e0b7240']

        # Test generator.
        resources = collection.resources()
        assert next(resources) == collection[
            'e79ae51895908c5a40e570dc60a4dd594febdecf781c77c7b3cad37f9e0b7240']
        with pytest.raises(StopIteration):
            next(resources)

        # Printing collection. The output must match the terminal rendering
        # of the single stored resource followed by the collection meta.
        output = (
            '1. Manipulate compressed tar files @linux [e79ae51895908c5a]', '',
            '   $ tar cvfz mytar.tar.gz --exclude="mytar.tar.gz" ./',
            '   $ tar xfO mytar.tar.gz manifest.json# Cat file in compressed tar.',
            '', '   # howto,linux,tar,untar', '', '   ! category    : snippet',
            '   ! created     : 2018-02-02T02:02:02.000001+00:00',
            '   ! description : ',
            '   ! digest      : e79ae51895908c5a40e570dc60a4dd594febdecf781c77c7b3cad37f9e0b7240 (True)',
            '   ! filename    : ',
            '   ! id          : a1cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            '   ! languages   : ', '   ! name        : ',
            '   ! source      : ',
            '   ! updated     : 2018-02-02T02:02:02.000001+00:00',
            '   ! uuid        : a1cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            '   ! versions    : ', '', '# collection meta', '   ! total : 1',
            '', '')
        print(collection)  # Part of the test.
        out, err = capsys.readouterr()
        out = Helper.remove_ansi(out)
        assert out == Const.NEWLINE.join(output)
        assert not err

        # Accessing a resource with an integer index raises KeyError.
        with pytest.raises(KeyError):
            resource = collection[0]
コード例 #7
0
class Solution(object):  # pylint: disable=too-few-public-methods
    """Default solutions for testing."""

    _BEATS = 0
    _NGINX = 1
    _KAFKA = 2
    _KAFKA_MKDN = 3

    # Default time is same for the default content. See 'Test case layouts and
    # data structures' for more information.
    DEFAULT_TIME = '2017-10-20T11:11:19.000001+00:00'

    # Default content must be always set so that it reflects content stored
    # into database. For example the tags must be sorted in correct order.
    # This forces defining erroneous content in each test case. This improves
    # the readability and maintainability of failure testing.
    _DEFAULTS = (
        {
            'category':
            'solution',
            'data':
            ('################################################################################',
             '## Description',
             '################################################################################',
             '', '    # Debug Elastic Beats', '',
             '################################################################################',
             '## References',
             '################################################################################',
             '', '    # Enable logs from Filebeat',
             '    > https://www.elastic.co/guide/en/beats/filebeat/master/enable-filebeat-debugging.html',
             '',
             '################################################################################',
             '## Commands',
             '################################################################################',
             '', '    # Run Filebeat with full log level',
             '    $ ./filebeat -e -c config/filebeat.yml -d "*"', '',
             '################################################################################',
             '## Solutions',
             '################################################################################',
             '',
             '################################################################################',
             '## Configurations',
             '################################################################################',
             '',
             '################################################################################',
             '## Whiteboard',
             '################################################################################',
             ''),
            'brief':
            'Debugging Elastic Beats',
            'description':
            'Debug Elastic Beats',
            'name':
            '',
            'groups': ('beats', ),
            'tags': ('Elastic', 'beats', 'debug', 'filebeat', 'howto'),
            'links':
            ('https://www.elastic.co/guide/en/beats/filebeat/master/enable-filebeat-debugging.html',
             ),
            'source':
            '',
            'versions': (),
            'languages': (),
            'filename':
            'howto-debug-elastic-beats.txt',
            'created':
            DEFAULT_TIME,
            'updated':
            DEFAULT_TIME,
            'uuid':
            '21cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            'digest':
            '4346ba4c792474308bc66bd16d747875bef9b431044824987e302b726c1d298e'
        },
        {
            'category':
            'solution',
            'data':
            ('################################################################################',
             '## Description',
             '################################################################################',
             '', '    # Instructions how to debug nginx.', '',
             '################################################################################',
             '## References',
             '################################################################################',
             '', '    # Official nginx debugging',
             '    > https://www.nginx.com/resources/admin-guide/debug/', '',
             '################################################################################',
             '## Commands',
             '################################################################################',
             '', '    # Test if nginx is configured with --with-debug',
             "    $ nginx -V 2>&1 | grep -- '--with-debug'", '',
             '    # Check the logs are forwarded to stdout/stderr and remove links',
             '    $ ls -al /var/log/nginx/',
             '    $ unlink /var/log/nginx/access.log',
             '    $ unlink /var/log/nginx/error.log', '',
             '    # Reloading nginx configuration', '    $ nginx -s reload',
             '',
             '################################################################################',
             '## Solutions',
             '################################################################################',
             '',
             '################################################################################',
             '## Configurations',
             '################################################################################',
             '', '    # Configuring nginx default.conf',
             '    $ vi conf.d/default.conf', '      upstream kibana_servers {',
             '          server kibana:5601;', '      }',
             '      upstream elasticsearch_servers {',
             '          server elasticsearch:9200;', '      }', '',
             '################################################################################',
             '## Whiteboard',
             '################################################################################',
             '', '    # Change nginx configuration',
             "    $ docker exec -i -t $(docker ps | egrep -m 1 'petelk/nginx' | awk '{print $1}') /bin/bash",
             ''),
            'brief':
            'Debugging nginx',
            'description':
            'Instructions how to debug nginx.',
            'name':
            '',
            'groups': ('nginx', ),
            'tags': ('debug', 'howto', 'logging', 'nginx'),
            'links': ('https://www.nginx.com/resources/admin-guide/debug/', ),
            'source':
            '',
            'versions': (),
            'languages': (),
            'filename':
            'howto-debug-nginx.txt',
            'created':
            DEFAULT_TIME,
            'updated':
            DEFAULT_TIME,
            'uuid':
            '22cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            'digest':
            '6cfe47a8880a8f81b66ff6bd71e795069ed1dfdd259c9fd181133f683c7697eb'
        },
        {
            'category':
            'solution',
            'data':
            ('################################################################################',
             '## Description',
             '################################################################################',
             '',
             '    # Investigating docker log driver and especially the Kafka plugin.',
             '',
             '################################################################################',
             '## References',
             '################################################################################',
             '', '    # Kube Kafka log driver',
             '    > https://github.com/MickayG/moby-kafka-logdriver', '',
             '    # Logs2Kafka',
             '    > https://groups.google.com/forum/#!topic/kubernetes-users/iLDsG85exRQ',
             '    > https://github.com/garo/logs2kafka', '',
             '################################################################################',
             '## Commands',
             '################################################################################',
             '', '    # Get logs from pods', '    $ kubectl get pods',
             '    $ kubectl logs kafka-0', '',
             '    # Install docker log driver for Kafka',
             '    $ docker ps --format "{{.Names}}" | grep -E \'kafka|logstash\'',
             '    $ docker inspect k8s_POD_kafka-0...',
             "    $ docker inspect --format '{{ .NetworkSettings.IPAddress }}' k8s_POD_kafka-0...",
             '    $ docker plugin install --disable mickyg/kafka-logdriver:latest',
             '    $ docker plugin set mickyg/kafka-logdriver:latest KAFKA_BROKER_ADDR="10.2.28.10:9092"',
             '    $ docker plugin inspect mickyg/kafka-logdriver',
             '    $ docker plugin enable mickyg/kafka-logdriver:latest',
             '    $ docker run --log-driver mickyg/kafka-logdriver:latest hello-world',
             '    $ docker plugin disable mickyg/kafka-logdriver:latest', '',
             '    # Get current docker log driver',
             "    $ docker info |grep 'Logging Driver' # Default driver",
             '    $ docker ps --format "{{.Names}}" | grep -E \'kafka|logstash\'',
             '    $ docker inspect k8s_POD_kafka-0...',
             "    $ docker inspect --format '{{ .NetworkSettings.IPAddress }}' k8s_POD_logstash...",
             "    $ docker inspect --format '{{ .NetworkSettings.IPAddress }}' k8s_POD_kafka-0...",
             '    $ docker inspect $(docker ps | grep POD | awk \'{print $1}\') | grep -E "Hostname|NetworkID',
             '    $ docker inspect $(docker ps | grep POD | awk \'{print $1}\') | while read line ; do egrep -E '
             +
             '\'"Hostname"|"IPAddress"\' ; done | while read line ; do echo $line ; done',
             '',
             '################################################################################',
             '## Solutions',
             '################################################################################',
             '',
             '################################################################################',
             '## Configurations',
             '################################################################################',
             '', '    # Logstash configuration',
             '    $ vi elk-stack/logstash/build/pipeline/kafka.conf',
             '      input {', '          gelf {}', '      }', '',
             '      output {', '          elasticsearch {',
             '            hosts => ["elasticsearch"]', '          }',
             '          stdout {}', '      }', '', '    # Kafka configuration',
             '    $ vi elk-stack/logstash/build/pipeline/kafka.conf',
             '    kafka {', '        type => "argus.docker"',
             '        topics => ["dockerlogs"]', '        codec => "plain"',
             '        bootstrap_servers => "kafka:9092"',
             '        consumer_threads => 1', '    }', '',
             '################################################################################',
             '## Whiteboard',
             '################################################################################',
             ''),
            'brief':
            'Testing docker log drivers',
            'description':
            'Investigating docker log driver and especially the Kafka plugin.',
            'name':
            '',
            'groups': ('docker', ),
            'tags': ('docker', 'driver', 'kafka', 'kubernetes', 'logging',
                     'logs2kafka', 'moby', 'plugin'),
            'links':
            ('https://github.com/MickayG/moby-kafka-logdriver',
             'https://github.com/garo/logs2kafka',
             'https://groups.google.com/forum/#!topic/kubernetes-users/iLDsG85exRQ'
             ),
            'source':
            '',
            'versions': (),
            'languages': (),
            'filename':
            'kubernetes-docker-log-driver-kafka.txt',
            'created':
            '2017-10-20T06:16:27.000001+00:00',
            'updated':
            '2017-10-20T06:16:27.000001+00:00',
            'uuid':
            '23cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            'digest':
            'ee3f2ab7c63d6965ac2531003807f00caee178f6e1cbb870105c7df86e6d5be2'
        },
        {
            'category':
            'solution',
            'data': (
                '## Description',
                '',
                'Investigate docker log drivers and the logs2kafka log plugin.',
                '',
                '## References',
                '',
                '   ```',
                '   # Kube Kafka log driver',
                '   > https://github.com/MickayG/moby-kafka-logdriver',
                '   ```',
                '',
                '   ```',
                '   # Logs2Kafka',
                '   > https://groups.google.com/forum/#!topic/kubernetes-users/iLDsG85exRQ',
                '   > https://github.com/garo/logs2kafka',
                '   ```',
                '',
                '## Commands',
                '',
                '   ```',
                '   # Get logs from pods',
                '   $ kubectl get pods',
                '   $ kubectl logs kafka-0',
                '   ```',
                '',
                '   ```',
                '   # Install docker log driver for Kafka',
                '   $ docker ps --format "{{.Names}}" | grep -E \'kafka|logstash\'',
                '   $ docker inspect k8s_POD_kafka-0...',
                '   $ docker inspect --format \'{{ .NetworkSettings.IPAddress }}\' k8s_POD_kafka-0...',
                '   $ docker plugin install --disable mickyg/kafka-logdriver:latest',
                '   $ docker plugin set mickyg/kafka-logdriver:latest KAFKA_BROKER_ADDR="10.2.28.10:9092"',
                '   $ docker plugin inspect mickyg/kafka-logdriver',
                '   $ docker plugin enable mickyg/kafka-logdriver:latest',
                '   $ docker run --log-driver mickyg/kafka-logdriver:latest hello-world',
                '   $ docker plugin disable mickyg/kafka-logdriver:latest',
                '   ```',
                '',
                '   ```',
                '   # Get current docker log driver',
                '   $ docker info |grep \'Logging Driver\' # Default driver',
                '   $ docker ps --format "{{.Names}}" | grep -E \'kafka|logstash\'',
                '   $ docker inspect k8s_POD_kafka-0...',
                '   $ docker inspect --format \'{{ .NetworkSettings.IPAddress }}\' k8s_POD_logstash...',
                '   $ docker inspect --format \'{{ .NetworkSettings.IPAddress }}\' k8s_POD_kafka-0...',
                '   $ docker inspect $(docker ps | grep POD | awk \'{print $1}\') | grep -E "Hostname|NetworkID',
                '   $ docker inspect $(docker ps | grep POD | awk \'{print $1}\') | while read line ; do egrep -E \'"Hostname"|"IPAddress"\' ; done | while read line ; do echo $line ; done',  # noqa pylint: disable=line-too-long
                '   ```',
                '',
                '## Configurations',
                '',
                '   ```',
                '   # Logstash configuration',
                '   $ vi elk-stack/logstash/build/pipeline/kafka.conf',
                '     input {',
                '         gelf {}',
                '     }',
                '',
                '     output {',
                '         elasticsearch {',
                '           hosts => ["elasticsearch"]',
                '         }',
                '         stdout {}',
                '     }',
                '   ```',
                '',
                '   ```',
                '   # Kafka configuration',
                '   $ vi elk-stack/logstash/build/pipeline/kafka.conf',
                '   kafka {',
                '       type => "argus.docker"',
                '       topics => ["dockerlogs"]',
                '       codec => "plain"',
                '       bootstrap_servers => "kafka:9092"',
                '       consumer_threads => 1',
                '   }',
                '   ```',
                '',
                '## Solutions',
                '',
                '## Whiteboard',
                ''),
            'brief':
            'Testing docker log drivers',
            'description':
            'Investigate docker log drivers and the logs2kafka log plugin.',
            'name':
            '',
            'groups': ('docker', ),
            'tags': ('docker', 'driver', 'kafka', 'kubernetes', 'logging',
                     'logs2kafka', 'moby', 'plugin'),
            'links':
            ('https://github.com/MickayG/moby-kafka-logdriver',
             'https://github.com/garo/logs2kafka',
             'https://groups.google.com/forum/#!topic/kubernetes-users/iLDsG85exRQ'
             ),
            'source':
            '',
            'versions': (),
            'languages': (),
            'filename':
            'kubernetes-docker-log-driver-kafka.mkdn',
            'created':
            '2019-01-04T10:54:49.265512+00:00',
            'updated':
            '2019-01-05T10:54:49.265512+00:00',
            'uuid':
            '24cd5827-b6ef-4067-b5ac-3ceac07dde9f',
            'digest':
            'c54c8a896b94ea35edf6c798879957419d26268bd835328d74b19a6e9ce2324d'
        })

    # Convenience aliases for the created/updated timestamps of each default
    # solution, pulled straight from the _DEFAULTS entries above so tests do
    # not have to repeat the index lookups.
    BEATS_CREATED = _DEFAULTS[_BEATS]['created']
    BEATS_UPDATED = _DEFAULTS[_BEATS]['updated']
    NGINX_CREATED = _DEFAULTS[_NGINX]['created']
    NGINX_UPDATED = _DEFAULTS[_NGINX]['updated']
    KAFKA_CREATED = _DEFAULTS[_KAFKA]['created']
    KAFKA_UPDATED = _DEFAULTS[_KAFKA]['updated']
    KAFKA_MKDN_CREATED = _DEFAULTS[_KAFKA_MKDN]['created']
    KAFKA_MKDN_UPDATED = _DEFAULTS[_KAFKA_MKDN]['updated']

    # Sanity check run at import time: the Beats and Nginx default solutions
    # must all carry the single shared default timestamp. Fail fast if the
    # fixture data above drifts out of sync with DEFAULT_TIME (defined
    # elsewhere in this file). Raise RuntimeError rather than the overly
    # broad bare Exception (pylint broad-exception-raised).
    if not DEFAULT_TIME == BEATS_CREATED == BEATS_UPDATED == NGINX_CREATED == NGINX_UPDATED:
        raise RuntimeError(
            "default content timestamps must be same - see 'Test case layouts and data structures'"
        )

    # Content digest (sha256 hex string) aliases for each default solution.
    BEATS_DIGEST = _DEFAULTS[_BEATS]['digest']
    NGINX_DIGEST = _DEFAULTS[_NGINX]['digest']
    KAFKA_DIGEST = _DEFAULTS[_KAFKA]['digest']
    KAFKA_MKDN_DIGEST = _DEFAULTS[_KAFKA_MKDN]['digest']
    # Uuid aliases for each default solution.
    BEATS_UUID = _DEFAULTS[_BEATS]['uuid']
    NGINX_UUID = _DEFAULTS[_NGINX]['uuid']
    KAFKA_UUID = _DEFAULTS[_KAFKA]['uuid']
    KAFKA_MKDN_UUID = _DEFAULTS[_KAFKA_MKDN]['uuid']

    # Full default solution dictionaries, and the tuple of solutions that
    # make up the default content set.
    BEATS = _DEFAULTS[_BEATS]
    NGINX = _DEFAULTS[_NGINX]
    KAFKA = _DEFAULTS[_KAFKA]
    KAFKA_MKDN = _DEFAULTS[_KAFKA_MKDN]
    DEFAULT_SOLUTIONS = (BEATS, NGINX)

    # Empty solution template read from file and split into lines.
    # NOTE(review): Helper is defined outside this chunk; presumably it reads
    # the packaged template file - verify against its definition.
    TEMPLATE = Helper.read_template('solution.txt').split('\n')
    # Precomputed digests of the unmodified solution templates in text and
    # Markdown formats. These must match the template content exactly.
    TEMPLATE_DIGEST_TEXT = 'be2ec3ade0e984463c1d3346910a05625897abd8d3feae4b2e54bfd6aecbde2d'
    TEMPLATE_DIGEST_MKDN = '073ea152d867cf06b2ee993fb1aded4c8ccbc618972db5c18158b5b68a5da6e4'

    # Expected empty solution template in text format, one tuple element per
    # line. Do not reformat: tests compare this content verbatim.
    TEMPLATE_TEXT = (
        '################################################################################',
        '## BRIEF  : Add brief title for content', '##', '## GROUPS : groups',
        '## TAGS   : example,tags', '## FILE   : example-content.md',
        '################################################################################',
        '', '',
        '################################################################################',
        '## Description',
        '################################################################################',
        '',
        '################################################################################',
        '## References',
        '################################################################################',
        '',
        '################################################################################',
        '## Commands',
        '################################################################################',
        '',
        '################################################################################',
        '## Configurations',
        '################################################################################',
        '',
        '################################################################################',
        '## Solutions',
        '################################################################################',
        '',
        '################################################################################',
        '## Whiteboard',
        '################################################################################',
        '',
        '################################################################################',
        '## Meta',
        '################################################################################',
        '', 'category  : solution',
        'created   : 2017-10-14T19:56:31.000001+00:00',
        'digest    : 50c37862816a197c63b2ae72c511586c3463814509c0d5c7ebde534ce0209935',
        'languages : example-language', 'name      : example content handle',
        'source    : https://www.example.com/source.md',
        'updated   : 2017-10-14T19:56:31.000001+00:00',
        'uuid      : a1cd5827-b6ef-4067-b5ac-3ceac07dde9f',
        'versions  : example=3.9.0,python>=3', '')
    # Expected empty solution template in Markdown format, one tuple element
    # per line. The trailing double spaces in the Meta lines are Markdown
    # hard line breaks and must be preserved exactly.
    TEMPLATE_MKDN = (
        '# Add brief title for content @groups', '',
        '> Add a description that defines the content in one chapter.', '',
        '> ', '', '## Description', '', '## References', '', '## Commands', '',
        '## Configurations', '', '## Solutions', '', '## Whiteboard', '',
        '## Meta', '', '> category  : solution  ',
        'created   : 2017-10-14T19:56:31.000001+00:00  ',
        'digest    : 5facdc16dc81851c2f65b112a0921eb2f2db206c7756714efb45ba0026471f11  ',
        'filename  : example-content.md  ', 'languages : example-language  ',
        'name      : example content handle  ',
        'source    : https://www.example.com/source.md  ',
        'tags      : example,tags  ',
        'updated   : 2017-10-14T19:56:31.000001+00:00  ',
        'uuid      : a1cd5827-b6ef-4067-b5ac-3ceac07dde9f  ',
        'versions  : example=3.9.0,python>=3  ', '')

    # Expected terminal output fragments for the default solutions, one
    # tuple of lines per solution. Indexed with the same _BEATS index used
    # for _DEFAULTS; BEATS_OUTPUT below aliases the Beats entry.
    _OUTPUTS = [(
        '', '   # Elastic,beats,debug,filebeat,howto',
        '   > https://www.elastic.co/guide/en/beats/filebeat/master/enable-filebeat-debugging.html',
        '',
        '   : ################################################################################',
        '   : ## Description',
        '   : ################################################################################',
        '   : ', '   :     # Debug Elastic Beats', '   : ',
        '   : ################################################################################',
        '   : ## References',
        '   : ################################################################################',
        '   : ', '   :     # Enable logs from Filebeat',
        '   :     > https://www.elastic.co/guide/en/beats/filebeat/master/enable-filebeat-debugging.html',
        '   : ',
        '   : ################################################################################',
        '   : ## Commands',
        '   : ################################################################################',
        '   : ', '   :     # Run Filebeat with full log level',
        '   :     $ ./filebeat -e -c config/filebeat.yml -d "*"', '   : ',
        '   : ################################################################################',
        '   : ## Solutions',
        '   : ################################################################################',
        '   : ',
        '   : ################################################################################',
        '   : ## Configurations',
        '   : ################################################################################',
        '   : ',
        '   : ################################################################################',
        '   : ## Whiteboard',
        '   : ################################################################################'
    )]
    BEATS_OUTPUT = _OUTPUTS[_BEATS]