Skip to content

Commit

Permalink
Set specific backup name and check retain volume exists
Browse files Browse the repository at this point in the history
  • Loading branch information
TachunLin committed Jul 21, 2024
1 parent 652563b commit 891d8f2
Showing 1 changed file with 14 additions and 7 deletions.
21 changes: 14 additions & 7 deletions harvester_e2e_tests/integrations/test_4_vm_backup_restore.py
Original file line number Diff line number Diff line change
Expand Up @@ -644,20 +644,20 @@ def test_connection(self, api_client, backup_config, config_backup_target):
@pytest.mark.dependency(depends=["TestBackupRestoreWithSnapshot::test_connection"], param=True)
def tests_backup_vm(self, api_client, wait_timeout, backup_config, base_vm_with_data):
    """Create a backup of the base VM and wait until it becomes ready.

    The backup is given a name distinct from the VM's own name
    (``<vm>-backup-with-snapshot``) so later restore steps can reference
    the backup explicitly instead of relying on the VM name.

    Args:
        api_client: Harvester API client fixture used to issue backup calls.
        wait_timeout: seconds to wait for the backup to reach ready state.
        backup_config: backup-target fixture (dependency of this test chain).
        base_vm_with_data: fixture dict; ``name`` holds the source VM's name.

    Raises:
        AssertionError: if the backup request is rejected or the backup does
            not become ready within ``wait_timeout`` seconds.
    """
    unique_vm_name = base_vm_with_data['name']
    unique_backup_name = f"{unique_vm_name}-backup-with-snapshot"
    # Create backup with an explicit, test-specific backup name
    code, data = api_client.vms.backup(unique_vm_name, unique_backup_name)
    assert 204 == code, (code, data)
    # Poll until the backup reports readyToUse, or time out
    endtime = datetime.now() + timedelta(seconds=wait_timeout)
    while endtime > datetime.now():
        code, backup = api_client.backups.get(unique_backup_name)
        if 200 == code and backup.get('status', {}).get('readyToUse'):
            break
        sleep(3)
    else:
        raise AssertionError(
            f'Timed-out waiting for the backup \'{unique_backup_name}\' to be ready.'
        )

@pytest.mark.dependency(depends=["TestBackupRestoreWithSnapshot::tests_backup_vm"], param=True)
Expand All @@ -669,6 +669,7 @@ def test_with_snapshot_restore_with_new_vm(
pub_key, pri_key = ssh_keypair

vm_snapshot_name = unique_vm_name + '-snapshot'
unique_backup_name = f"{unique_vm_name}-backup-with-snapshot"
# take vm snapshot
code, data = api_client.vm_snapshots.create(unique_vm_name, vm_snapshot_name)
assert 201 == code
Expand Down Expand Up @@ -696,9 +697,9 @@ def test_with_snapshot_restore_with_new_vm(
sh.exec_command('sync')

# Restore VM into new
restored_vm_name = f"{backup_config[0].lower()}-restore-{unique_vm_name}"
restored_vm_name = f"{backup_config[0].lower()}-restore-{unique_vm_name}-with-snapshot"
spec = api_client.backups.RestoreSpec.for_new(restored_vm_name)
code, data = api_client.backups.restore(unique_vm_name, spec)
code, data = api_client.backups.restore(unique_backup_name, spec)
assert 201 == code, (code, data)

# Check VM Started then get IPs (vm and host)
Expand Down Expand Up @@ -764,6 +765,7 @@ def test_with_snapshot_restore_replace_retain_vols(
pub_key, pri_key = ssh_keypair

vm_snapshot_name = unique_vm_name + '-snapshot-retain'
unique_backup_name = f"{unique_vm_name}-backup-with-snapshot"
# take vm snapshot
code, data = api_client.vm_snapshots.create(unique_vm_name, vm_snapshot_name)
assert 201 == code
Expand Down Expand Up @@ -798,7 +800,7 @@ def test_with_snapshot_restore_replace_retain_vols(
)

spec = api_client.backups.RestoreSpec.for_existing(delete_volumes=False)
code, data = api_client.backups.restore(unique_vm_name, spec)
code, data = api_client.backups.restore(unique_backup_name, spec)
assert 201 == code, f'Failed to restore backup with current VM replaced, {data}'

# Check VM Started then get IPs (vm and host)
Expand Down Expand Up @@ -829,6 +831,11 @@ def test_with_snapshot_restore_replace_retain_vols(
f"Executed stderr: {err}"
)

# Check the retain volume still exists
retain_vol_name = f"{unique_vm_name}-disk-0"
code, data = api_client.volumes.get(retain_vol_name)
assert 200 == code, f"Retain volume not exists\n{code}, {data}"


@pytest.mark.skip("https://github.com/harvester/harvester/issues/1473")
@pytest.mark.p0
Expand Down

0 comments on commit 891d8f2

Please sign in to comment.