Example #1
def test_vm_retire_extend(request, testing_vm, soft_assert, retire_extend_button):
    """ Tests extending a retirement using an AE method.

    Prerequisites:
        * A running VM on any provider.

    Steps:
        * It creates a button pointing to the ``Request/vm_retire_extend`` instance. The button
            should live in the VM and Instance button group.
        * Then it sets a retirement date for the VM.
        * Then it waits until the retirement date is set.
        * Then it clicks the created button and waits for the retirement date to be extended.

    Metadata:
        test_flag: retire, provision
    """
    soft_assert(testing_vm.retirement_date is None, "The retirement date is not None!")
    retirement_date = parsetime.now() + timedelta(days=5)
    testing_vm.set_retirement_date(retirement_date)
    wait_for(lambda: testing_vm.retirement_date is not None, message="retirement date to be set")
    soft_assert(testing_vm.retirement_date is not None, "The retirement date is None!")
    # current_retirement_date = testing_vm.retirement_date

    # Now run the extend stuff
    retire_extend_button()
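The ``retire_extend_button`` fixture is not shown in these examples. A minimal sketch of what such a fixture could look like, assuming a hypothetical ``create_custom_button`` helper (the real fixture lives in the cfme test suite and uses its own API):

import pytest


@pytest.fixture
def retire_extend_button(request):
    # All helper names here are illustrative assumptions, not the real API.
    button = create_custom_button(
        group="VM and Instance",         # button group named in the docstring
        text="Extend retirement",
        system="Request",
        request="vm_retire_extend")      # Automate instance the button invokes
    request.addfinalizer(button.delete)  # remove the custom button after the test
    return button.click                  # so the test can call retire_extend_button()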
Example #2
def test_vm_retire_extend(request, testing_vm, soft_assert,
                          retire_extend_button):
    """ Tests extending a retirement using an AE method.

    Prerequisites:
        * A running VM on any provider.

    Steps:
        * It creates a button pointing to the ``Request/vm_retire_extend`` instance. The button
            should live in the VM and Instance button group.
        * Then it sets a retirement date for the VM.
        * Then it waits until the retirement date is set.
        * Then it clicks the created button and waits for the retirement date to be extended.

    Metadata:
        test_flag: retire, provision
    """
    soft_assert(testing_vm.retirement_date is None,
                "The retirement date is not None!")
    retirement_date = parsetime.now() + timedelta(days=5)
    testing_vm.set_retirement_date(retirement_date)
    wait_for(lambda: testing_vm.retirement_date is not None,
             message="retirement date be set")
    soft_assert(testing_vm.retirement_date is not None,
                "The retirement date is None!")
    # current_retirement_date = testing_vm.retirement_date

    # Now run the extend stuff
    retire_extend_button()
Example #3
def test_collect_log_depot(depot_type, depot_machine, depot_credentials,
                           depot_ftp, depot_configured):
    """ Boilerplate test to verify functionality of this concept

    Will be extended and improved.
    """
    # Prepare empty workspace
    with depot_ftp() as ftp:
        ftp.recursively_delete()

    # Start the collection
    configure.ServerLogDepot.collect_all()
    # Check it on FTP
    with depot_ftp() as ftp:
        # Files must have been created after start
        zip_files = ftp.filesystem.search(re.compile(r"^.*?[.]zip$"),
                                          directories=False)
        assert zip_files, "No logs found!"

        # And their timestamps must precede the current time.
        for file in zip_files:
            assert file.local_time < parsetime.now(), \
                "%s is newer than now!" % file.name

        # No file contains 'unknown_unknown' sequence
        # BZ: 1018578
        bad_files = ftp.filesystem.search(
            re.compile(r"^.*?unknown_unknown.*?[.]zip$"), directories=False)
        if bad_files:
            raise Exception("BUG1018578: Files %s present!" %
                            ", ".join(f.name for f in bad_files))

        # And clean it up
        ftp.recursively_delete()

    # Check the times of the files by names
    datetimes = []
    regexp = re.compile(
        r"^.*?_(?P<y1>[0-9]{4})(?P<m1>[0-9]{2})(?P<d1>[0-9]{2})_"
        r"(?P<h1>[0-9]{2})(?P<M1>[0-9]{2})(?P<S1>[0-9]{2})"
        r"_(?P<y2>[0-9]{4})(?P<m2>[0-9]{2})(?P<d2>[0-9]{2})_"
        r"(?P<h2>[0-9]{2})(?P<M2>[0-9]{2})(?P<S2>[0-9]{2})[.]zip$")
    for file in zip_files:
        data = regexp.match(file.name)
        assert data, "File name does not match the expected format"
        data = {key: int(value) for key, value in data.groupdict().items()}
        date_from = parsetime(data["y1"], data["m1"], data["d1"], data["h1"],
                              data["M1"], data["S1"])
        date_to = parsetime(data["y2"], data["m2"], data["d2"], data["h2"],
                            data["M2"], data["S2"])
        datetimes.append((date_from, date_to))

    # Check for the gaps
    if len(datetimes) > 1:
        for i in range(len(datetimes) - 1):
            dt = datetimes[i + 1][0] - datetimes[i][1]
            assert dt.total_seconds() >= 0.0, "Negative gap between log files"
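The regular expression above encodes the archive naming convention ``<prefix>_<from: YYYYMMDD_HHMMSS>_<to: YYYYMMDD_HHMMSS>.zip``. A standalone sanity check of the extraction (the sample file name is made up):

import re
from datetime import datetime

regexp = re.compile(
    r"^.*?_(?P<y1>[0-9]{4})(?P<m1>[0-9]{2})(?P<d1>[0-9]{2})_"
    r"(?P<h1>[0-9]{2})(?P<M1>[0-9]{2})(?P<S1>[0-9]{2})"
    r"_(?P<y2>[0-9]{4})(?P<m2>[0-9]{2})(?P<d2>[0-9]{2})_"
    r"(?P<h2>[0-9]{2})(?P<M2>[0-9]{2})(?P<S2>[0-9]{2})[.]zip$")

sample = "evm_server_logs_20240102_030405_20240102_040506.zip"  # made-up name
data = {key: int(value) for key, value in regexp.match(sample).groupdict().items()}
date_from = datetime(data["y1"], data["m1"], data["d1"], data["h1"], data["M1"], data["S1"])
date_to = datetime(data["y2"], data["m2"], data["d2"], data["h2"], data["M2"], data["S2"])
assert date_from <= date_to  # the "to" stamp should not precede the "from" stamp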
Example #4
def test_collect_log_depot(depot_type,
                           depot_machine,
                           depot_credentials,
                           depot_ftp,
                           depot_configured):
    """ Boilerplate test to verify functionality of this concept

    Will be extended and improved.
    """
    # Prepare empty workspace
    with depot_ftp() as ftp:
        ftp.recursively_delete()

    # Start the collection
    configure.ServerLogDepot.collect_all()
    # Check it on FTP
    with depot_ftp() as ftp:
        # Files must have been created after start
        zip_files = ftp.filesystem.search(re.compile(r"^.*?[.]zip$"), directories=False)
        assert zip_files, "No logs found!"

        # And their timestamps must precede the current time.
        for file in zip_files:
            assert file.local_time < parsetime.now(), "%s is newer than now!" % file.name

        # No file contains 'unknown_unknown' sequence
        # BZ: 1018578
        bad_files = ftp.filesystem.search(re.compile(r"^.*?unknown_unknown.*?[.]zip$"),
                                          directories=False)
        if bad_files:
            raise Exception("BUG1018578: Files %s present!" %
                            ", ".join(f.name for f in bad_files))

        # And clean it up
        ftp.recursively_delete()

    # Check the times of the files by names
    datetimes = []
    regexp = re.compile(
        r"^.*?_(?P<y1>[0-9]{4})(?P<m1>[0-9]{2})(?P<d1>[0-9]{2})_"
        r"(?P<h1>[0-9]{2})(?P<M1>[0-9]{2})(?P<S1>[0-9]{2})"
        r"_(?P<y2>[0-9]{4})(?P<m2>[0-9]{2})(?P<d2>[0-9]{2})_"
        r"(?P<h2>[0-9]{2})(?P<M2>[0-9]{2})(?P<S2>[0-9]{2})[.]zip$"
    )
    for file in zip_files:
        data = regexp.match(file.name)
        assert data, "File name does not match the expected format"
        data = {key: int(value) for key, value in data.groupdict().items()}
        date_from = parsetime(
            data["y1"], data["m1"], data["d1"], data["h1"], data["M1"], data["S1"])
        date_to = parsetime(data["y2"], data["m2"], data["d2"], data["h2"], data["M2"], data["S2"])
        datetimes.append((date_from, date_to))

    # Check for the gaps
    if len(datetimes) > 1:
        for i in range(len(datetimes) - 1):
            dt = datetimes[i + 1][0] - datetimes[i][1]
            assert dt.total_seconds() >= 0.0, "Negative gap between log files"
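For reference, the gap check above rejects overlapping archives: the difference goes negative when the next file's ``from`` stamp precedes the previous file's ``to`` stamp (dates below are made up):

from datetime import datetime

prev_to = datetime(2024, 1, 2, 4, 0, 0)
next_from = datetime(2024, 1, 2, 3, 30, 0)  # starts before the previous ended
dt = next_from - prev_to
assert dt.total_seconds() < 0               # negative => overlapping ranges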
Example #5
def test_vm_retire_extend(request, testing_vm, soft_assert, retire_extend_button):
    """ Tests extending a retirement

    Metadata:
        test_flag: retire, provision
    """
    soft_assert(testing_vm.retirement_date is None, "The retirement date is not None!")
    retirement_date = parsetime.now() + timedelta(days=5)
    testing_vm.set_retirement_date(retirement_date)
    wait_for(lambda: testing_vm.retirement_date is not None, message="retirement date to be set")
    soft_assert(testing_vm.retirement_date is not None, "The retirement date is None!")
    # current_retirement_date = testing_vm.retirement_date

    # Now run the extend stuff
    retire_extend_button()
Example #6
def test_retirement_now(test_vm):
    """Tests on-demand retirement of an instance/vm
    """
    # For 5.7 capture two times to assert the retire time is within a window.
    # Getting it down to minute precision is too finicky, and it isn't really needed here
    retire_times = dict()
    retire_times['start'] = generate_retirement_date_now() + timedelta(minutes=-1)
    test_vm.retire()
    verify_retirement_state(test_vm)
    retire_times['end'] = generate_retirement_date_now() + timedelta(minutes=1)
    if current_version() < '5.7':
        verify_retirement_date(test_vm,
                               expected_date=parsetime.now().to_american_date_only())
    else:
        verify_retirement_date(test_vm, expected_date=retire_times)
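``verify_retirement_date`` is called above with either a plain date string or a ``{'start': ..., 'end': ...}`` window. A minimal sketch of the comparison those call sites imply (an assumption, not the suite's actual helper):

def verify_retirement_date(vm, expected_date):
    # Sketch only: accept a window dict or an exact expected value.
    actual = vm.retirement_date
    if isinstance(expected_date, dict):
        assert expected_date['start'] <= actual <= expected_date['end']
    else:
        assert actual == expected_date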
Example #7
def verify_retirement(vm):
    # Branch on version because the behaviour differs between 5.5 and 5.6
    if current_version() < "5.6":
        wait_for(lambda: vm.exists is False, delay=30, num_sec=360,
                 message="Wait for VM {} removed from provider".format(vm.name))
    else:
        # wait for the info block to show a date as the retired date
        wait_for(lambda: vm.is_retired, delay=30, num_sec=720,
                 message="Wait until VM {} is retired".format(vm.name))

        assert vm.summary.power_management.power_state.text_value in {'off', 'suspended', 'unknown'}

        # make sure retirement date is today
        retirement_date = vm.retirement_date
        today = parsetime.now().to_american_date_only()
        assert retirement_date == today
Example #8
def test_retirement_now(test_vm):
    """Tests on-demand retirement of an instance/vm
    """
    # For 5.7 capture two times to assert the retire time is within a window.
    # Getting it down to minute precision is too finicky, and it isn't really needed here
    retire_times = dict()
    retire_times['start'] = generate_retirement_date_now() + timedelta(
        minutes=-5)
    test_vm.retire()
    verify_retirement_state(test_vm)
    retire_times['end'] = generate_retirement_date_now() + timedelta(minutes=5)
    if current_version() < '5.7':
        verify_retirement_date(
            test_vm, expected_date=parsetime.now().to_american_date_only())
    else:
        verify_retirement_date(test_vm, expected_date=retire_times)
Example #9
def test_collect_log_depot(depot_type, depot_machine, depot_credentials,
                           depot_ftp, depot_configured, soft_assert, request):
    """ Boilerplate test to verify functionality of this concept

    Will be extended and improved.
    """
    # Wipe the FTP contents in the end
    @request.addfinalizer
    def _clear_ftp():
        with depot_ftp() as ftp:
            ftp.cwd(ftp.upload_dir)
            ftp.recursively_delete()

    # Prepare empty workspace
    with depot_ftp() as ftp:
        # move to upload folder
        ftp.cwd(ftp.upload_dir)
        # delete all files
        ftp.recursively_delete()

    # Start the collection
    configure.ServerLogDepot.collect_all()
    # Check it on FTP
    with depot_ftp() as ftp:
        # Files must have been created after start
        zip_files = ftp.filesystem.search(re.compile(r"^.*?[.]zip$"),
                                          directories=False)
        assert zip_files, "No logs found!"

        # And their timestamps must precede the current time.
        for file in zip_files:
            soft_assert(file.local_time < parsetime.now(),
                        "{} is newer than now!".format(file.name))

        # No file contains 'unknown_unknown' sequence
        # BZ: 1018578
        bad_files = ftp.filesystem.search(
            re.compile(r"^.*?unknown_unknown.*?[.]zip$"), directories=False)
        if bad_files:
            print_list = []
            for file in bad_files:
                random_name = "{}.zip".format(fauxfactory.gen_alphanumeric())
                download_file_name = log_path.join(random_name).strpath
                file.download(download_file_name)
                print_list.append((file, random_name))

            pytest.fail("BUG1018578: Files {} present!".format(", ".join(
                "{} as {}".format(f, r) for f, r in print_list)))

    # Check the times of the files by names
    datetimes = []
    regexp = re.compile(
        r"^.*?_(?P<y1>[0-9]{4})(?P<m1>[0-9]{2})(?P<d1>[0-9]{2})_"
        r"(?P<h1>[0-9]{2})(?P<M1>[0-9]{2})(?P<S1>[0-9]{2})"
        r"_(?P<y2>[0-9]{4})(?P<m2>[0-9]{2})(?P<d2>[0-9]{2})_"
        r"(?P<h2>[0-9]{2})(?P<M2>[0-9]{2})(?P<S2>[0-9]{2})[.]zip$")
    failed = False
    for file in zip_files:
        data = regexp.match(file.name)
        if not soft_assert(data, "File name {} does not match "
                           "the expected format".format(file.name)):
            failed = True
            continue
        data = {key: int(value) for key, value in data.groupdict().items()}
        date_from = parsetime(data["y1"], data["m1"], data["d1"], data["h1"],
                              data["M1"], data["S1"])
        date_to = parsetime(data["y2"], data["m2"], data["d2"], data["h2"],
                            data["M2"], data["S2"])
        datetimes.append((date_from, date_to, file.name))

    if not failed:
        # Check for the gaps
        if len(datetimes) > 1:
            for i in range(len(datetimes) - 1):
                dt = datetimes[i + 1][0] - datetimes[i][1]
                soft_assert(
                    dt.total_seconds() >= 0.0,
                    "Negative gap between log files ({}, {})".format(
                        datetimes[i][2], datetimes[i + 1][2]))
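``depot_ftp`` yields a context-managed FTP client; ``recursively_delete``, ``filesystem.search`` and ``upload_dir`` belong to the suite's own wrapper. A much-reduced sketch of the connect/yield/quit shape using stdlib ``ftplib`` (host and credentials are placeholders):

import ftplib
from contextlib import contextmanager


@contextmanager
def depot_ftp(host="depot.example.com", user="anonymous", password=""):
    # Reduced illustration only: the real client adds recursively_delete(),
    # filesystem.search() and upload_dir on top of the raw connection.
    ftp = ftplib.FTP(host)
    ftp.login(user, password)
    try:
        yield ftp
    finally:
        ftp.quit()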
Example #10
def test_collect_log_depot(depot_type, depot_machine_folder, depot_credentials, depot_ftp,
                           depot_configured, soft_assert, depot_machine_ip,
                           request):
    """ Boilerplate test to verify functionality of this concept

    Will be extended and improved.
    """
    # Wipe the FTP contents in the end
    @request.addfinalizer
    def _clear_ftp():
        with depot_ftp(depot_machine_ip) as ftp:
            ftp.cwd(ftp.upload_dir)
            ftp.recursively_delete()

    # Prepare empty workspace
    with depot_ftp(depot_machine_ip) as ftp:
        # move to upload folder
        ftp.cwd(ftp.upload_dir)
        # delete all files
        ftp.recursively_delete()

    # Start the collection
    configure.ServerLogDepot.collect_all()
    # Check it on FTP
    with depot_ftp(depot_machine_ip) as ftp:
        # Files must have been created after start
        zip_files = ftp.filesystem.search(re.compile(r"^.*?[.]zip$"), directories=False)
        assert zip_files, "No logs found!"

        # And their timestamps must precede the current time.
        for file in zip_files:
            soft_assert(file.local_time < parsetime.now(),
                        "{} is newer than now!".format(file.name))

        # No file contains 'unknown_unknown' sequence
        # BZ: 1018578
        bad_files = ftp.filesystem.search(re.compile(r"^.*?unknown_unknown.*?[.]zip$"),
                                          directories=False)
        if bad_files:
            print_list = []
            for file in bad_files:
                random_name = "{}.zip".format(fauxfactory.gen_alphanumeric())
                download_file_name = log_path.join(random_name).strpath
                file.download(download_file_name)
                print_list.append((file, random_name))

            pytest.fail(
                "BUG1018578: Files {} present!".format(
                    ", ".join("{} as {}".format(f, r) for f, r in print_list)))

    # Check the times of the files by names
    datetimes = []
    regexp = re.compile(
        r"^.*?_(?P<y1>[0-9]{4})(?P<m1>[0-9]{2})(?P<d1>[0-9]{2})_"
        r"(?P<h1>[0-9]{2})(?P<M1>[0-9]{2})(?P<S1>[0-9]{2})"
        r"_(?P<y2>[0-9]{4})(?P<m2>[0-9]{2})(?P<d2>[0-9]{2})_"
        r"(?P<h2>[0-9]{2})(?P<M2>[0-9]{2})(?P<S2>[0-9]{2})[.]zip$"
    )
    failed = False
    for file in zip_files:
        data = regexp.match(file.name)
        if not soft_assert(data, "File name {} does not match "
                           "the expected format".format(file.name)):
            failed = True
            continue
        data = {key: int(value) for key, value in data.groupdict().items()}
        date_from = parsetime(
            data["y1"], data["m1"], data["d1"], data["h1"], data["M1"], data["S1"])
        date_to = parsetime(data["y2"], data["m2"], data["d2"], data["h2"], data["M2"], data["S2"])
        datetimes.append((date_from, date_to, file.name))

    if not failed:
        # Check for the gaps
        if len(datetimes) > 1:
            for i in range(len(datetimes) - 1):
                dt = datetimes[i + 1][0] - datetimes[i][1]
                soft_assert(
                    dt.total_seconds() >= 0.0,
                    "Negative gap between log files ({}, {})".format(
                        datetimes[i][2], datetimes[i + 1][2]))
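Several of the examples rely on a ``soft_assert`` fixture that records failures without aborting the test and reports them at teardown. A minimal pytest sketch of that pattern (the real cfme fixture is more elaborate):

import pytest


@pytest.fixture
def soft_assert(request):
    failures = []

    def _soft_assert(condition, message=""):
        # Record the failure but keep the test running; return the truth
        # value so callers can branch on it, as the examples above do.
        if not condition:
            failures.append(message)
        return bool(condition)

    def _report():
        if failures:
            pytest.fail("Soft assertion failures:\n" + "\n".join(failures))

    request.addfinalizer(_report)
    return _soft_assert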