Bind/Overlay device fix (#87)
* Update assertion to allow for 1 or more results in instances where overlay filesystems are attached to the chosen device

* Updates to support hosts with overlay/bind mounts

* Restore 84-prevent-snapshot-if-newest-installed-kernel-not-in-use.yml

* Remove rule disable
jchristianh authored Jan 10, 2025
1 parent c3df572 commit 2050852
Showing 4 changed files with 25 additions and 16 deletions.
changelogs/fragments/87-check-device-fix.yml (3 additions, 0 deletions)
@@ -0,0 +1,3 @@
+minor_changes:
+  - Updates to support hosts with bind/overlay mounts attached
+    to the device intended to be operated on.
roles/shrink_lv/tasks/check_device.yaml (8 additions, 8 deletions)
@@ -1,20 +1,20 @@
 - name: Get the mount point info
   ansible.builtin.set_fact:
-    shrink_lv_mount_info: "{{ ansible_facts.mounts | selectattr('device', 'equalto', item.device) }}"
+    shrink_lv_mount_info: "{{ ansible_facts['mounts'] | selectattr('device', 'equalto', item['device']) | first }}"

 - name: Assert that the mount point exists
   ansible.builtin.assert:
-    that: (shrink_lv_mount_info | length) == 1
-    fail_msg: "Mount point {{ item.device }} does not exist"
+    that: shrink_lv_mount_info['device'] is defined
+    fail_msg: "Mount point {{ item['device'] }} does not exist"

 - name: Assert that the filesystem is supported
   ansible.builtin.assert:
-    that: shrink_lv_mount_info[0].fstype in ['ext4']
-    fail_msg: "Unsupported filesystem '{{ shrink_lv_mount_info[0].fstype }}' on '{{ item.device }}'"
+    that: shrink_lv_mount_info['fstype'] in ['ext4']
+    fail_msg: "Unsupported filesystem '{{ shrink_lv_mount_info['fstype'] }}' on '{{ item['device'] }}'"

 - name: Assert that the filesystem has enough free space
   ansible.builtin.assert:
-    that: shrink_lv_mount_info[0].block_size * shrink_lv_mount_info[0].block_used < (item.size | ansible.builtin.human_to_bytes)
+    that: shrink_lv_mount_info['block_size'] * shrink_lv_mount_info['block_used'] < (item['size'] | ansible.builtin.human_to_bytes)
     fail_msg: >
-      Requested size {{ item.size }} is smaller than currently used
-      {{ (shrink_lv_mount_info[0].block_size * shrink_lv_mount_info[0].block_used) | ansible.builtin.human_readable }}
+      Requested size {{ item['size'] }} is smaller than currently used
+      {{ (shrink_lv_mount_info['block_size'] * shrink_lv_mount_info['block_used']) | ansible.builtin.human_readable }}
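
For context, when the target device is also bind- or overlay-mounted, ansible_facts['mounts'] can contain more than one entry for the same device, which is why the old (shrink_lv_mount_info | length) == 1 assertion failed even though the mount point existed. A minimal sketch of the new selection logic outside the role (the play, variable names, and sample data below are illustrative, not part of the repository):

- name: Illustrate selecting one mount entry when a device appears more than once
  hosts: localhost
  gather_facts: false
  vars:
    # Hypothetical facts: the same LV shows up twice because of a bind mount.
    example_mounts:
      - { device: /dev/mapper/rhel-root, mount: /, fstype: ext4 }
      - { device: /dev/mapper/rhel-root, mount: /var/srv, fstype: ext4 }
    example_device: /dev/mapper/rhel-root
  tasks:
    - name: Pick the first matching entry, as check_device.yaml now does
      ansible.builtin.set_fact:
        example_mount_info: "{{ example_mounts | selectattr('device', 'equalto', example_device) | first }}"

    - name: Assert on a defined key instead of an exact match count
      ansible.builtin.assert:
        that: example_mount_info['device'] is defined
        fail_msg: "Mount point {{ example_device }} does not exist"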
roles/shrink_lv/tasks/check_if_shrunk.yml (12 additions, 0 deletions)
@@ -0,0 +1,12 @@
+---
+- name: Set device for mount
+  ansible.builtin.set_fact:
+    shrink_lv_set_device: "{{ ansible_facts['mounts'] | selectattr('device', 'equalto', item['device']) | first }}"
+
+- name: Assert that the filesystem has shrunk
+  ansible.builtin.assert:
+    that: (shrink_lv_set_device['size_total'] | int) <= (item['size'] | ansible.builtin.human_to_bytes)
+    fail_msg: >
+      Logical Volume {{ item['device'] }} was NOT shrunk as requested.
+    success_msg: >
+      Logical Volume {{ item['device'] }} has been shrunk as requested.
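
For reference, size_total in ansible_facts['mounts'] is reported in bytes, while item['size'] is a human-readable string, so the comparison depends on ansible.builtin.human_to_bytes. A standalone illustration of the same check with made-up values (5 GiB on both sides), not part of the changed files:

- name: Illustrate the size comparison with sample values
  ansible.builtin.assert:
    that: (5368709120 | int) <= ('5G' | ansible.builtin.human_to_bytes)
    success_msg: "5368709120 bytes is within the requested 5G"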
roles/shrink_lv/tasks/main.yaml (2 additions, 8 deletions)
@@ -43,12 +43,6 @@
       - "!min"
       - mounts

-- name: Assert that the filesystem has shrunk
-  ansible.builtin.assert:
-    # yamllint disable-line rule:line-length
-    that: (ansible_facts.mounts | selectattr('device', 'equalto', item.device) | map(attribute='size_total') | join | int) <= (item.size | ansible.builtin.human_to_bytes)
-    fail_msg: >
-      Logical Volume {{ item.device }} was not shrunk to {{ item.size }} as requested
-    success_msg: >
-      Logical Volume {{ item.device }} has been shrunk to {{ item.size }} as requested.
+- name: Check if device has shrunken successfully
+  ansible.builtin.include_tasks: check_if_shrunk.yml
   loop: "{{ shrink_lv_devices }}"
