# Imports assumed by the examples below; the module paths come from the parent
# test suite and may need adjusting for your repository layout.
import re

import fauxfactory
import pytest

from cfme import configure
from utils.path import log_path
from utils.timeutil import parsetime


def test_collect_log_depot(depot_type, depot_machine, depot_credentials,
                           depot_ftp, depot_configured):
    """ Boilerplate test to verify functionality of this concept

    Will be extended and improved.
    """
    # Prepare empty workspace
    with depot_ftp() as ftp:
        ftp.recursively_delete()

    # Start the collection: the server gathers its logs and uploads the
    # archives to the configured depot.
    configure.ServerLogDepot.collect_all()
    # Check it on FTP
    with depot_ftp() as ftp:
        # Files must have been created after start
        zip_files = ftp.filesystem.search(re.compile(r"^.*?[.]zip$"),
                                          directories=False)
        assert zip_files, "No logs found!"

        # And their timestamps must not lie in the future.
        for file in zip_files:
            assert file.local_time < parsetime.now(), \
                "%s has a timestamp in the future." % file.name

        # No file contains 'unknown_unknown' sequence
        # BZ: 1018578
        bad_files = ftp.filesystem.search(
            re.compile(r"^.*?unknown_unknown.*?[.]zip$"), directories=False)
        if bad_files:
            raise Exception("BUG1018578: Files %s present!" %
                            ", ".join(f.name for f in bad_files))

        # And clean it up
        ftp.recursively_delete()

    # Check the time ranges encoded in the file names
    datetimes = []
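    # Archive names encode the collected interval as two timestamps:
    # "<prefix>_YYYYMMDD_hhmmss_YYYYMMDD_hhmmss.zip" (start, then end).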
    regexp = re.compile(
        r"^.*?_(?P<y1>[0-9]{4})(?P<m1>[0-9]{2})(?P<d1>[0-9]{2})_"
        r"(?P<h1>[0-9]{2})(?P<M1>[0-9]{2})(?P<S1>[0-9]{2})"
        r"_(?P<y2>[0-9]{4})(?P<m2>[0-9]{2})(?P<d2>[0-9]{2})_"
        r"(?P<h2>[0-9]{2})(?P<M2>[0-9]{2})(?P<S2>[0-9]{2})[.]zip$")
    for file in zip_files:
        data = regexp.match(file.name)
        assert data, "File name %s does not match the expected pattern" % file.name
        data = {key: int(value) for key, value in data.groupdict().items()}
        date_from = parsetime(data["y1"], data["m1"], data["d1"], data["h1"],
                              data["M1"], data["S1"])
        date_to = parsetime(data["y2"], data["m2"], data["d2"], data["h2"],
                            data["M2"], data["S2"])
        datetimes.append((date_from, date_to))

    # Check that consecutive collection intervals do not overlap
    if len(datetimes) > 1:
        for i in range(len(datetimes) - 1):
            dt = datetimes[i + 1][0] - datetimes[i][1]
            assert dt.total_seconds() >= 0.0, "Negative gap between log files"
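

# A standalone sketch (not part of the original test) of the file-name parsing
# performed above; the sample name in the usage note is hypothetical.
TIME_RANGE_RE = re.compile(
    r"^.*?_(?P<y1>[0-9]{4})(?P<m1>[0-9]{2})(?P<d1>[0-9]{2})_"
    r"(?P<h1>[0-9]{2})(?P<M1>[0-9]{2})(?P<S1>[0-9]{2})"
    r"_(?P<y2>[0-9]{4})(?P<m2>[0-9]{2})(?P<d2>[0-9]{2})_"
    r"(?P<h2>[0-9]{2})(?P<M2>[0-9]{2})(?P<S2>[0-9]{2})[.]zip$")


def extract_time_range(name):
    """Return the (start, end) pair encoded in a log archive name, or None."""
    match = TIME_RANGE_RE.match(name)
    if match is None:
        return None
    data = {key: int(value) for key, value in match.groupdict().items()}
    start = parsetime(data["y1"], data["m1"], data["d1"],
                      data["h1"], data["M1"], data["S1"])
    end = parsetime(data["y2"], data["m2"], data["d2"],
                    data["h2"], data["M2"], data["S2"])
    return start, end

# Usage (hypothetical archive name):
#   extract_time_range("EVM_20140131_093000_20140131_101500.zip")
#   -> (parsetime(2014, 1, 31, 9, 30, 0), parsetime(2014, 1, 31, 10, 15, 0))
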
def test_collect_log_depot(depot_type, depot_machine, depot_credentials,
                           depot_ftp, depot_configured, soft_assert, request):
    """ Boilerplate test to verify functionality of this concept

    Will be extended and improved.
    """
    # Wipe the FTP contents in the end
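    # (request.addfinalizer used as a decorator registers _clear_ftp to run at
    # test teardown, regardless of the test outcome.)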
    @request.addfinalizer
    def _clear_ftp():
        with depot_ftp() as ftp:
            ftp.cwd(ftp.upload_dir)
            ftp.recursively_delete()

    # Prepare empty workspace
    with depot_ftp() as ftp:
        # move to upload folder
        ftp.cwd(ftp.upload_dir)
        # delete all files
        ftp.recursively_delete()

    # Start the collection
    configure.ServerLogDepot.collect_all()
    # Check it on FTP
    with depot_ftp() as ftp:
        # Files must have been created after start
        zip_files = ftp.filesystem.search(re.compile(r"^.*?[.]zip$"),
                                          directories=False)
        assert zip_files, "No logs found!"

        # And their timestamps must not lie in the future.
        for file in zip_files:
            soft_assert(file.local_time < parsetime.now(),
                        "{} has a timestamp in the future.".format(file.name))

        # No file contains 'unknown_unknown' sequence
        # BZ: 1018578
        bad_files = ftp.filesystem.search(
            re.compile(r"^.*?unknown_unknown.*?[.]zip$"), directories=False)
        if bad_files:
            # Download the offending archives so they can be inspected later
            print_list = []
            for file in bad_files:
                random_name = "{}.zip".format(fauxfactory.gen_alphanumeric())
                download_file_name = log_path.join(random_name).strpath
                file.download(download_file_name)
                print_list.append((file.name, random_name))

            pytest.fail("BUG1018578: Files {} present!".format(", ".join(
                "{} as {}".format(f, r) for f, r in print_list)))

    # Check the time ranges encoded in the file names
    datetimes = []
    regexp = re.compile(
        r"^.*?_(?P<y1>[0-9]{4})(?P<m1>[0-9]{2})(?P<d1>[0-9]{2})_"
        r"(?P<h1>[0-9]{2})(?P<M1>[0-9]{2})(?P<S1>[0-9]{2})"
        r"_(?P<y2>[0-9]{4})(?P<m2>[0-9]{2})(?P<d2>[0-9]{2})_"
        r"(?P<h2>[0-9]{2})(?P<M2>[0-9]{2})(?P<S2>[0-9]{2})[.]zip$")
    failed = False
    for file in zip_files:
        data = regexp.match(file.name)
        if not soft_assert(
                data, "File name {} does not match the expected pattern".format(file.name)):
            failed = True
            continue
        data = {key: int(value) for key, value in data.groupdict().items()}
        date_from = parsetime(data["y1"], data["m1"], data["d1"], data["h1"],
                              data["M1"], data["S1"])
        date_to = parsetime(data["y2"], data["m2"], data["d2"], data["h2"],
                            data["M2"], data["S2"])
        datetimes.append((date_from, date_to, file.name))

    if not failed:
        # Check that consecutive collection intervals do not overlap
        if len(datetimes) > 1:
            for i in range(len(datetimes) - 1):
                dt = datetimes[i + 1][0] - datetimes[i][1]
                soft_assert(
                    dt.total_seconds() >= 0.0,
                    "Negative gap between log files ({}, {})".format(
                        datetimes[i][2], datetimes[i + 1][2]))
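

# "soft_assert" above is a fixture that records failed checks without aborting
# the test and reports them all at teardown. A minimal sketch of such a
# fixture, assuming plain pytest (the suite's real implementation differs):
@pytest.fixture
def soft_assert(request):
    failures = []

    def _soft_assert(condition, message=""):
        # Record the failure but keep the test running; return the truth value
        # so callers can branch on it, as the tests here do.
        if not condition:
            failures.append(message)
        return bool(condition)

    def _report():
        if failures:
            pytest.fail("Soft assertion failures:\n" + "\n".join(failures))

    request.addfinalizer(_report)
    return _soft_assert
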
def test_collect_log_depot(depot_type, depot_machine_folder, depot_credentials, depot_ftp,
                           depot_configured, soft_assert, depot_machine_ip,
                           request):
    """ Boilerplate test to verify functionality of this concept

    Will be extended and improved.
    """
    # Wipe the FTP contents in the end
    @request.addfinalizer
    def _clear_ftp():
        with depot_ftp(depot_machine_ip) as ftp:
            ftp.cwd(ftp.upload_dir)
            ftp.recursively_delete()

    # Prepare empty workspace
    with depot_ftp(depot_machine_ip) as ftp:
        # move to upload folder
        ftp.cwd(ftp.upload_dir)
        # delete all files
        ftp.recursively_delete()

    # Start the collection
    configure.ServerLogDepot.collect_all()
    # Check it on FTP
    with depot_ftp(depot_machine_ip) as ftp:
        # Files must have been created after start
        zip_files = ftp.filesystem.search(re.compile(r"^.*?[.]zip$"), directories=False)
        assert zip_files, "No logs found!"

        # And their timestamps must not lie in the future.
        for file in zip_files:
            soft_assert(file.local_time < parsetime.now(),
                        "{} has a timestamp in the future.".format(file.name))

        # No file contains 'unknown_unknown' sequence
        # BZ: 1018578
        bad_files = ftp.filesystem.search(re.compile(r"^.*?unknown_unknown.*?[.]zip$"),
                                          directories=False)
        if bad_files:
            # Download the offending archives so they can be inspected later
            print_list = []
            for file in bad_files:
                random_name = "{}.zip".format(fauxfactory.gen_alphanumeric())
                download_file_name = log_path.join(random_name).strpath
                file.download(download_file_name)
                print_list.append((file.name, random_name))

            pytest.fail(
                "BUG1018578: Files {} present!".format(
                    ", ".join("{} as {}".format(f, r) for f, r in print_list)))

    # Check the time ranges encoded in the file names
    datetimes = []
    regexp = re.compile(
        r"^.*?_(?P<y1>[0-9]{4})(?P<m1>[0-9]{2})(?P<d1>[0-9]{2})_"
        r"(?P<h1>[0-9]{2})(?P<M1>[0-9]{2})(?P<S1>[0-9]{2})"
        r"_(?P<y2>[0-9]{4})(?P<m2>[0-9]{2})(?P<d2>[0-9]{2})_"
        r"(?P<h2>[0-9]{2})(?P<M2>[0-9]{2})(?P<S2>[0-9]{2})[.]zip$"
    )
    failed = False
    for file in zip_files:
        data = regexp.match(file.name)
        if not soft_assert(
                data, "File name {} does not match the expected pattern".format(file.name)):
            failed = True
            continue
        data = {key: int(value) for key, value in data.groupdict().items()}
        date_from = parsetime(
            data["y1"], data["m1"], data["d1"], data["h1"], data["M1"], data["S1"])
        date_to = parsetime(data["y2"], data["m2"], data["d2"], data["h2"], data["M2"], data["S2"])
        datetimes.append((date_from, date_to, file.name))

    if not failed:
        # Check that consecutive collection intervals do not overlap
        if len(datetimes) > 1:
            for i in range(len(datetimes) - 1):
                dt = datetimes[i + 1][0] - datetimes[i][1]
                soft_assert(
                    dt.total_seconds() >= 0.0,
                    "Negative gap between log files ({}, {})".format(
                        datetimes[i][2], datetimes[i + 1][2]))