Add unittests
Signed-off-by: Sagi Shnaidman <sshnaidm@redhat.com>
parent 4378ebe114
commit 1c951df027
6 changed files with 706 additions and 31 deletions
.github/workflows/test-inventory-examples.yml (vendored, 86 changes)
@@ -2,8 +2,20 @@ name: Test inventory and example playbooks
on:
  pull_request:
    paths:
      - '.github/workflows/test-inventory-examples.yml'
      - 'plugins/inventory/podman_containers.py'
      - 'plugins/inventory/buildah_containers.py'
      - 'tests/unit/plugins/inventory/*.py'
  push:
    branches: [ main, connections ]
    paths:
      - '.github/workflows/test-inventory-examples.yml'
      - 'plugins/inventory/podman_containers.py'
      - 'plugins/inventory/buildah_containers.py'
      - 'tests/unit/plugins/inventory/*.py'
    branches: [ main ]
  schedule:
    - cron: 4 0 * * * # Run daily at 0:03 UTC

jobs:
  inventory_test:
@@ -221,3 +233,75 @@ jobs:
          echo "$out" | jq -e '._meta.hostvars["hello-buildah"].ansible_connection == "containers.podman.buildah"'
          echo "$out" | jq -e '._meta.hostvars["hello-buildah"] | has("buildah_container_id")'
          echo "$out" | jq -e '._meta.hostvars["hello-buildah"] | has("buildah_container_name")'

  unittests:
    name: Unit tests inventory
    runs-on: ${{ matrix.runner-os }}
    strategy:
      matrix:
        runner-os:
          - ubuntu-24.04
        # ansible-version:
        #   - git+https://github.com/ansible/ansible.git@stable-2.15
        runner-python-version:
          - '3.11'
    steps:

      - name: Check out ${{ github.repository }} on disk
        uses: actions/checkout@v4

      - name: Set up Python ${{ matrix.runner-python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.runner-python-version }}

      - name: Set up pip cache
        uses: actions/cache@v4
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('tests/sanity/requirements.txt') }}-${{ hashFiles('tests/unit/requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
            ${{ runner.os }}-

      - name: Install requirements for tests
        run: >-
          python -m pip install --user -r test-requirements.txt
          pytest pytest-cov coverage

      - name: Build a collection tarball
        run: >-
          ~/.local/bin/ansible-galaxy collection build --output-path
          "${GITHUB_WORKSPACE}/.cache/collection-tarballs"

      - name: Install the collection tarball
        run: >-
          ~/.local/bin/ansible-galaxy collection install ${GITHUB_WORKSPACE}/.cache/collection-tarballs/*.tar.gz

      - name: Run collection unit tests
        run: >-
          ~/.local/bin/ansible-test units
          --python "${{ matrix.runner-python-version }}" -vvv
          tests/unit/plugins/inventory/
        working-directory: >-
          /home/runner/.ansible/collections/ansible_collections/containers/podman

      - name: Run pytest with coverage (inventory only)
        run: >-
          ~/.local/bin/pytest -vv
          tests/unit/plugins/inventory/
          --cov=ansible_collections.containers.podman.plugins.inventory
          --cov-branch
          --cov-report=term-missing:skip-covered
          --cov-report=xml:coverage.xml
          --cov-report=html:htmlcov
        working-directory: >-
          /home/runner/.ansible/collections/ansible_collections/containers/podman

      - name: Upload coverage artifact
        uses: actions/upload-artifact@v4
        with:
          name: inventory-coverage
          path: |
            /home/runner/.ansible/collections/ansible_collections/containers/podman/coverage.xml
            /home/runner/.ansible/collections/ansible_collections/containers/podman/htmlcov
@@ -1,11 +1,48 @@
#!/bin/bash
set -euo pipefail


function setup_venv() {
    # Create and use a Python venv compatible with ansible-test (3.10/3.11/3.12)
    for pybin in python3.12 python3.11 python3.10 python3; do
        if command -v "$pybin" >/dev/null 2>&1; then
            PYBIN="$pybin"; break
        fi
    done

    if [[ -z "${PYBIN:-}" ]]; then
        echo "No suitable python found (need 3.10/3.11/3.12)" >&2
        exit 1
    fi

    VENV_DIR="${HOME}/.cache/ap-unit-venv-${PYBIN##python}"
    if [[ ! -d "$VENV_DIR" ]]; then
        "$PYBIN" -m venv "$VENV_DIR"
    fi
    source "$VENV_DIR/bin/activate"
    python -m pip install --upgrade pip >/dev/null
    # Install ansible-core which provides ansible-galaxy and ansible-test
    python -m pip install -U 'ansible-core>=2.16,<2.19' 'pytest>=7' 'pytest-xdist>=3' >/dev/null

    export PATH="${VENV_DIR}/bin:${HOME}/.local/bin:${PATH}"
}

# detect that we are in virtual environment
if [[ -z "${VIRTUAL_ENV:-}" ]]; then
    echo "Setting up virtual environment"
    setup_venv
else
    echo "Already in virtual environment, skipping setup"
fi

mkdir -p /tmp/ansible-lint-installs
mkdir -p /tmp/ansible-lint-collection
rm -rf /tmp/ansible-lint-collection/*

ansible-galaxy collection build --output-path /tmp/ansible-lint-collection --force
pushd /tmp/ansible-lint-collection/
pushd /tmp/ansible-lint-collection/ >/dev/null
ansible-galaxy collection install -vvv --force $(ls /tmp/ansible-lint-collection/) -p /tmp/ansible-lint-installs
pushd /tmp/ansible-lint-installs/ansible_collections/containers/podman
pushd /tmp/ansible-lint-installs/ansible_collections/containers/podman >/dev/null
ansible-test units --python $(python -V | sed "s/Python //g" | awk -F"." {'print $1"."$2'}) -vvv
popd
popd
popd >/dev/null
popd >/dev/null
plugins/inventory/buildah_containers.py
@@ -33,10 +33,7 @@ DOCUMENTATION = r"""
        description: Fully-qualified connection plugin to use for discovered hosts.
        type: str
        default: containers.podman.buildah
    debug:
        description: Emit extra debug logs during processing.
        type: bool
        default: false
    # Logging uses Ansible verbosity (-v/-vvv). Extra debug option is not required.
"""

EXAMPLES = r"""
@@ -71,7 +68,7 @@ class InventoryModule(BaseInventoryPlugin, Cacheable, Constructable):
        executable = config.get("executable", "buildah")
        name_patterns = list(config.get("name_patterns", []) or [])
        connection_plugin = config.get("connection_plugin", "containers.podman.buildah")
        debug = bool(config.get("debug", False))
        # Logging is controlled by Ansible verbosity flags

        buildah_path = shutil.which(executable) or executable
@@ -94,14 +91,12 @@ class InventoryModule(BaseInventoryPlugin, Cacheable, Constructable):
            # name filtering
            if name_patterns:
                if not any(fnmatch.fnmatch(name, pat) or (cid and fnmatch.fnmatch(cid, pat)) for pat in name_patterns):
                    if debug:
                        self.display.vvvv(f"Filtered out {name or cid} by name_patterns option")
                    self.display.vvvv(f"Filtered out {name or cid} by name_patterns option")
                    continue

            host = name or cid
            if not host:
                if debug:
                    self.display.vvvv(f"Filtered out {name or cid} by no name or cid")
                self.display.vvvv(f"Filtered out {name or cid} by no name or cid")
                continue

            self.inventory.add_host(host)
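For orientation, an inventory source that exercises the buildah options touched above (connection_plugin, name_patterns, executable) could look roughly like the sketch below. This is illustrative only and not part of the commit; the file name and pattern values are assumptions, while the option names and the plugin FQCN come from the diff and the new unit tests.

# buildah_hosts.yml (hypothetical file name)
plugin: containers.podman.buildah_containers
executable: buildah
connection_plugin: containers.podman.buildah
name_patterns:
  - 'build-*'
  - 'w?'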
plugins/inventory/podman_containers.py
@@ -72,10 +72,7 @@ DOCUMENTATION = r"""
        description: Include/exclude selection by attributes - C(name), C(id), C(image), C(status), or C(label.<key>).
        type: dict
        default: {}
    debug:
        description: Emit extra debug logs during processing.
        type: bool
        default: false
    # Logging uses Ansible verbosity (-v/-vvv). Extra debug option is not required.
"""

EXAMPLES = r"""
@@ -123,7 +120,7 @@ class InventoryModule(BaseInventoryPlugin, Cacheable, Constructable):
        keyed_groups = list(config.get("keyed_groups", []) or [])
        composed_groups = dict(config.get("groups", {}) or {})
        filters = dict(config.get("filters", {}) or {})
        debug = bool(config.get("debug", False))
        # Logging is controlled by Ansible verbosity flags

        podman_path = shutil.which(executable) or executable
@@ -179,15 +176,13 @@ class InventoryModule(BaseInventoryPlugin, Cacheable, Constructable):
            # name filtering
            if name_patterns:
                if not any(fnmatch.fnmatch(name, pat) or (cid and fnmatch.fnmatch(cid, pat)) for pat in name_patterns):
                    if debug:
                        self.display.vvvv(f"Filtered out {name or cid} by name_patterns option")
                    self.display.vvvv(f"Filtered out {name or cid} by name_patterns option")
                    continue

            # label filtering
            labels = c.get("Labels") or {}
            if any(labels.get(k) != v for k, v in label_selectors.items()):
                if debug:
                    self.display.vvvv(f"Filtered out {name or cid} by label_selectors option")
                self.display.vvvv(f"Filtered out {name or cid} by label_selectors option")
                continue

            image = c.get("Image") or c.get("ImageName")
@@ -195,14 +190,12 @@ class InventoryModule(BaseInventoryPlugin, Cacheable, Constructable):

            # additional include/exclude filters
            if filters and not matches_filters(name, cid, image, status, labels):
                if debug:
                    self.display.vvvv(f"Filtered out {name or cid} by filters option")
                self.display.vvvv(f"Filtered out {name or cid} by filters option")
                continue

            host = name or cid
            if not host:
                if debug:
                    self.display.vvvv(f"Filtered out {name or cid} by no name or cid")
                self.display.vvvv(f"Filtered out {name or cid} by no name or cid")
                continue

            self.inventory.add_host(host)
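The filters dict checked by matches_filters above corresponds to inventory configuration along these lines; the values mirror what the new unit tests pass to _read_config_data and are a sketch, not content taken from the plugin's EXAMPLES section.

# podman_hosts.yml (hypothetical file name)
plugin: containers.podman.podman_containers
include_stopped: true
filters:
  include:
    image: 'quay.io/*'
    label.tier: 'be'
  exclude:
    status: 'exited*'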
@@ -251,8 +244,7 @@ class InventoryModule(BaseInventoryPlugin, Cacheable, Constructable):
            try:
                self._add_host_to_keyed_groups(keyed_groups, hostvars, host)
            except Exception as _e:
                if debug:
                    self.display.vvvv(f"_add_host_to_keyed_groups helper failed: {_e}")
                self.display.vvvv(f"_add_host_to_keyed_groups helper failed: {_e}")
            # Always run manual keyed grouping to support dotted keys like labels.role
            for kg in keyed_groups:
                key_expr = kg.get("key")
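The manual keyed grouping kept above is what lets dotted keys such as labels.role resolve against container labels. In an inventory source that corresponds to configuration like the following sketch, based on the keyed_groups dictionaries used in the new unit tests (group names and prefixes are illustrative):

plugin: containers.podman.podman_containers
keyed_groups:
  - key: labels.role
    prefix: k
    separator: '-'
    parent_group: keyed
  - key: labels.missing
    prefix: missing
    default_value: unknown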
@@ -307,5 +299,4 @@ class InventoryModule(BaseInventoryPlugin, Cacheable, Constructable):
        except Exception as exc:
            if strict:
                raise
            if debug:
                self.display.vvvv(f"Grouping error for host {host}: {exc}")
            self.display.vvvv(f"Grouping error for host {host}: {exc}")
New file under tests/unit/plugins/inventory/: buildah inventory unit tests (116 lines)
@@ -0,0 +1,116 @@
from __future__ import absolute_import, division, print_function

__metaclass__ = type

import json
from unittest.mock import patch


class FakeInventory:
    def __init__(self):
        self.hostvars = {}
        self.groups = {}

    def add_group(self, name):
        self.groups.setdefault(name, {"hosts": [], "children": []})

    def add_host(self, host, group=None):
        self.hostvars.setdefault(host, {})
        if group:
            self.add_group(group)
            if host not in self.groups[group]["hosts"]:
                self.groups[group]["hosts"].append(host)
        else:
            self.add_group("ungrouped")
            if host not in self.groups["ungrouped"]["hosts"]:
                self.groups["ungrouped"]["hosts"].append(host)

    def set_variable(self, host, var, value):
        self.hostvars.setdefault(host, {})
        self.hostvars[host][var] = value


def build_containers_json(entries):
    return json.dumps(entries).encode("utf-8")


@patch(
    "ansible_collections.containers.podman.plugins.inventory.buildah_containers.shutil.which", return_value="buildah"
)
def test_basic_buildah_inventory(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.buildah_containers import (
        InventoryModule,
    )

    containers = [
        {"name": "w1", "id": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "containername": "w1"},
        {"containername": "build/with/slash", "containerid": "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"},
        {"id": "cccccccccccccccccccccccccccccccc"},  # no name
    ]
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.buildah_containers.subprocess.check_output",
        return_value=build_containers_json(containers),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        cfg = {"connection_plugin": "containers.podman.buildah"}
        with patch.object(mod, "_read_config_data", return_value=cfg):
            mod.parse(inv, loader=None, path="dummy.yml", cache=False)

        # Names resolved
        assert "w1" in inv.hostvars
        assert "build/with/slash" in inv.hostvars
        # Unnamed container present (either as short id or full id depending on plugin behavior)
        unnamed_id = "cccccccccccccccccccccccccccccccc"
        assert (unnamed_id in inv.hostvars) or (unnamed_id[:12] in inv.hostvars)
        # Hostvars contain id/name
        assert inv.hostvars["w1"]["buildah_container_id"].startswith("a")
        assert inv.hostvars["w1"]["buildah_container_name"] == "w1"


@patch(
    "ansible_collections.containers.podman.plugins.inventory.buildah_containers.shutil.which", return_value="buildah"
)
def test_name_patterns_filtering_buildah(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.buildah_containers import (
        InventoryModule,
    )

    containers = [
        {"name": "alpha", "id": "id1"},
        {"name": "beta", "id": "id2"},
        {"name": "gamma", "id": "id3"},
    ]
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.buildah_containers.subprocess.check_output",
        return_value=build_containers_json(containers),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        cfg = {"name_patterns": ["b*", "id3"], "connection_plugin": "containers.podman.buildah"}
        with patch.object(mod, "_read_config_data", return_value=cfg):
            mod.parse(inv, loader=None, path="dummy.yml", cache=False)

        # Should include beta by name pattern, and gamma via id pattern
        assert set(inv.hostvars.keys()) == {"beta", "gamma"}


def test_verify_inventory_file_helper():
    from ansible_collections.containers.podman.plugins.module_utils.inventory.utils import (
        verify_inventory_file,
    )

    class Dummy:
        NAME = "containers.podman.buildah_containers"

    # wrong extension
    assert not verify_inventory_file(Dummy(), "inv.txt")
    # missing plugin header
    p = "/tmp/test_inv.yml"
    with open(p, "w", encoding="utf-8") as f:
        f.write("foo: bar\n")
    assert not verify_inventory_file(Dummy(), p)
    # correct header
    with open(p, "w", encoding="utf-8") as f:
        f.write("plugin: containers.podman.buildah_containers\n")
    assert verify_inventory_file(Dummy(), p)
tests/unit/plugins/inventory/test_podman_containers_inventory.py (new file, 452 lines)
@@ -0,0 +1,452 @@
from __future__ import absolute_import, division, print_function

__metaclass__ = type

import json
from unittest.mock import patch

import pytest

from ansible.errors import AnsibleParserError


class FakeInventory:
    def __init__(self):
        self.hostvars = {}
        self.groups = {}

    def add_group(self, name):
        self.groups.setdefault(name, {"hosts": [], "children": []})

    def add_child(self, parent, child):
        self.add_group(parent)
        self.add_group(child)
        if child not in self.groups[parent]["children"]:
            self.groups[parent]["children"].append(child)

    def add_host(self, host, group=None):
        self.hostvars.setdefault(host, {})
        if group:
            self.add_group(group)
            if host not in self.groups[group]["hosts"]:
                self.groups[group]["hosts"].append(host)
        else:
            self.add_group("ungrouped")
            if host not in self.groups["ungrouped"]["hosts"]:
                self.groups["ungrouped"]["hosts"].append(host)

    def set_variable(self, host, var, value):
        self.hostvars.setdefault(host, {})
        self.hostvars[host][var] = value


def build_ps_json(entries):
    return json.dumps(entries).encode("utf-8")


@patch("ansible_collections.containers.podman.plugins.inventory.podman_containers.shutil.which", return_value="podman")
def test_basic_discovery_and_hostvars(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.podman_containers import (
        InventoryModule,
    )

    containers = [
        {
            "Names": ["app-1"],
            "Id": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
            "Image": "docker.io/library/alpine:latest",
            "Status": "Up 1 second",
            "Labels": {"env": "dev", "role": "api"},
        },
        {
            "Names": ["db/primary"],
            "ID": "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
            "ImageName": "quay.io/ns/repo-name:1.0",
            "State": "Exited (0) 2 seconds ago",
            "Labels": {},
        },
    ]

    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        return_value=build_ps_json(containers),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        # Feed config directly
        with patch.object(
            mod,
            "_read_config_data",
            return_value={
                "executable": "podman",
                "include_stopped": True,
                "connection_plugin": "containers.podman.podman",
                "group_by_image": True,
                "group_by_label": ["env"],
                "verbose_output": True,
            },
        ):
            mod.parse(inv, loader=None, path="dummy.yml", cache=False)

        # Hosts discovered
        assert "app-1" in inv.hostvars
        assert "db/primary" in inv.hostvars
        # Hostvars set - image, id, status keys
        assert inv.hostvars["app-1"]["podman_image"] == "docker.io/library/alpine:latest"
        assert inv.hostvars["db/primary"]["podman_image"] == "quay.io/ns/repo-name:1.0"
        assert inv.hostvars["app-1"]["podman_container_id"].startswith("a")
        assert inv.hostvars["db/primary"]["podman_container_id"].startswith("b")
        assert inv.hostvars["app-1"]["podman_status"].lower().startswith("up")
        assert inv.hostvars["db/primary"]["podman_status"].lower().startswith("exited")
        # Verbose output included
        assert "podman_ps" in inv.hostvars["app-1"]
        # Image grouping sanitized
        assert "image_docker.io_library_alpine_latest" in inv.groups
        assert "image_quay.io_ns_repo_name_1.0" in inv.groups
        # Label grouping
        assert "label_env_dev" in inv.groups
        assert "app-1" in inv.groups["label_env_dev"]["hosts"]


@patch("ansible_collections.containers.podman.plugins.inventory.podman_containers.shutil.which", return_value="podman")
def test_name_patterns_and_label_selectors(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.podman_containers import (
        InventoryModule,
    )

    containers = [
        {"Names": ["one"], "Id": "id1", "Image": "alpine:latest", "Status": "Up", "Labels": {}},
        {"Names": ["two"], "Id": "id2", "Image": "alpine:latest", "Status": "Up", "Labels": {"role": "api"}},
        {"Names": ["three"], "Id": "id3", "Image": "alpine:latest", "Status": "Up", "Labels": {"role": "db"}},
    ]
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        return_value=build_ps_json(containers),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        cfg = {
            "name_patterns": ["t*"],
            "label_selectors": {"role": "api"},
        }
        with patch.object(mod, "_read_config_data", return_value=cfg):
            mod.parse(inv, loader=None, path="dummy.yml", cache=False)

        # Only 'two' matches both name pattern and label
        assert list(inv.hostvars.keys()) == ["two"]


@patch("ansible_collections.containers.podman.plugins.inventory.podman_containers.shutil.which", return_value="podman")
def test_filters_include_exclude_and_status(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.podman_containers import (
        InventoryModule,
    )

    containers = [
        {"Names": ["run-a"], "Id": "r1", "Image": "quay.io/ns/a:latest", "Status": "Up", "Labels": {}},
        {"Names": ["stop-b"], "Id": "s1", "Image": "quay.io/ns/b:latest", "Status": "Exited (0)", "Labels": {}},
    ]
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        return_value=build_ps_json(containers),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        cfg = {
            "include_stopped": True,
            "filters": {"include": {"image": "quay.io/*"}, "exclude": {"status": "exited*"}},
        }
        with patch.object(mod, "_read_config_data", return_value=cfg):
            mod.parse(inv, loader=None, path="dummy.yml", cache=False)

        # Stopped excluded, running included
        assert "run-a" in inv.hostvars
        assert "stop-b" not in inv.hostvars


@patch("ansible_collections.containers.podman.plugins.inventory.podman_containers.shutil.which", return_value="podman")
def test_keyed_groups_and_parent_group(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.podman_containers import (
        InventoryModule,
    )

    containers = [
        {"Names": ["svc"], "Id": "x1", "Image": "img", "Status": "Up", "Labels": {"role": "api"}},
        {"Names": ["svc2"], "Id": "x2", "Image": "img", "Status": "Up", "Labels": {}},
    ]
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        return_value=build_ps_json(containers),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        cfg = {
            "keyed_groups": [
                {"key": "labels.role", "prefix": "k", "separator": "-", "parent_group": "keyed"},
                {"key": "labels.missing", "prefix": "missing", "default_value": "unknown"},
            ]
        }
        with patch.object(mod, "_read_config_data", return_value=cfg):
            mod.parse(inv, loader=None, path="dummy.yml", cache=False)

        assert "k_api" in inv.groups  # sanitized hyphen -> underscore
        assert "svc" in inv.groups["k_api"]["hosts"]
        assert "keyed" in inv.groups
        assert "k_api" in inv.groups["keyed"]["children"]
        assert "missing_unknown" in inv.groups
        assert set(inv.groups["missing_unknown"]["hosts"]) == {"svc", "svc2"}


@patch("ansible_collections.containers.podman.plugins.inventory.podman_containers.shutil.which", return_value="podman")
def test_strict_missing_key_raises(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.podman_containers import (
        InventoryModule,
    )

    containers = [
        {"Names": ["h"], "Id": "id", "Image": "img", "Status": "Up", "Labels": {}},
    ]
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        return_value=build_ps_json(containers),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        cfg = {"strict": True, "keyed_groups": [{"key": "labels.nonexistent"}]}
        with patch.object(mod, "_read_config_data", return_value=cfg):
            with pytest.raises(AnsibleParserError):
                mod.parse(inv, loader=None, path="dummy.yml", cache=False)


@patch("ansible_collections.containers.podman.plugins.inventory.podman_containers.shutil.which", return_value="podman")
def test_include_stopped_toggles_args(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.podman_containers import (
        InventoryModule,
    )

    def fake_co_with_a(args, stderr=None):
        # ensure -a present when include_stopped true
        assert "-a" in args
        return build_ps_json([])

    def fake_co_without_a(args, stderr=None):
        # ensure -a absent when include_stopped false
        assert "-a" not in args
        return build_ps_json([])

    inv = FakeInventory()
    mod = InventoryModule()
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        side_effect=fake_co_without_a,
    ):
        with patch.object(mod, "_read_config_data", return_value={"include_stopped": False}):
            mod.parse(inv, loader=None, path="dummy.yml", cache=False)

    inv2 = FakeInventory()
    mod2 = InventoryModule()
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        side_effect=fake_co_with_a,
    ):
        with patch.object(mod2, "_read_config_data", return_value={"include_stopped": True}):
            mod2.parse(inv2, loader=None, path="dummy.yml", cache=False)


@patch("ansible_collections.containers.podman.plugins.inventory.podman_containers.shutil.which", return_value="podman")
def test_debug_paths_and_no_host(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.podman_containers import (
        InventoryModule,
    )

    # One container with no name and no id to hit host==None path
    containers = [{}]
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        return_value=build_ps_json(containers),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        cfg = {"filters": {"include": {"name": "nomatch"}}}
        with patch.object(mod, "_read_config_data", return_value=cfg):
            mod.parse(inv, loader=None, path="dummy.yml", cache=False)
        # Nothing added
        assert inv.hostvars == {}


@patch("ansible_collections.containers.podman.plugins.inventory.podman_containers.shutil.which", return_value="podman")
def test_check_output_exception_path(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.podman_containers import (
        InventoryModule,
    )

    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        side_effect=RuntimeError("boom"),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        with patch.object(mod, "_read_config_data", return_value={}):
            with pytest.raises(AnsibleParserError):
                mod.parse(inv, loader=None, path="dummy.yml", cache=False)


@patch("ansible_collections.containers.podman.plugins.inventory.podman_containers.shutil.which", return_value="podman")
def test_filter_include_only_and_label_match(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.podman_containers import (
        InventoryModule,
    )

    containers = [
        {"Names": ["svc"], "Id": "x1", "Image": "reg/ns/app:1", "Status": "Up", "Labels": {"tier": "be"}},
        {"Names": ["svc2"], "Id": "x2", "Image": "reg/ns/oth:1", "Status": "Up", "Labels": {"tier": "fe"}},
    ]
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        return_value=build_ps_json(containers),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        cfg = {"filters": {"include": {"label.tier": "be", "image": "reg/*"}}}
        with patch.object(mod, "_read_config_data", return_value=cfg):
            mod.parse(inv, loader=None, path="dummy.yml", cache=False)
        assert list(inv.hostvars.keys()) == ["svc"]


@patch("ansible_collections.containers.podman.plugins.inventory.podman_containers.shutil.which", return_value="podman")
def test_group_by_image_and_label_skip_branches(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.podman_containers import (
        InventoryModule,
    )

    # One without Image to skip image grouping, and one without target label for label grouping
    containers = [
        {"Names": ["nolbl"], "Id": "y1", "Status": "Up", "Labels": {}},
        {"Names": ["haslbl"], "Id": "y2", "Image": "img", "Status": "Up", "Labels": {"other": "x"}},
    ]
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        return_value=build_ps_json(containers),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        cfg = {"group_by_image": True, "group_by_label": ["tier"]}
        with patch.object(mod, "_read_config_data", return_value=cfg):
            mod.parse(inv, loader=None, path="dummy.yml", cache=False)
        # Image "img" should group when present; this asserts grouping executes while label grouping is skipped
        assert "image_img" in inv.groups
        assert "label_tier_x" not in inv.groups


@patch("ansible_collections.containers.podman.plugins.inventory.podman_containers.shutil.which", return_value="podman")
def test_keyed_groups_leading_trailing_separators(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.podman_containers import (
        InventoryModule,
    )

    containers = [
        {"Names": ["svc"], "Id": "x1", "Image": "img", "Status": "Up", "Labels": {"num": 7}},
    ]
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        return_value=build_ps_json(containers),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        cfg = {
            "keyed_groups": [
                {"key": "labels.num", "prefix": "p", "separator": "-", "leading_separator": True, "trailing_separator": True}
            ]
        }
        with patch.object(mod, "_read_config_data", return_value=cfg):
            mod.parse(inv, loader=None, path="dummy.yml", cache=False)
        # Expect group name sanitized; verify host assignment in some group containing 'p' and '7'
        assert any(("p" in g and "7" in g and "svc" in inv.groups[g]["hosts"]) for g in inv.groups)


@patch("ansible_collections.containers.podman.plugins.inventory.podman_containers.shutil.which", return_value="podman")
def test_filters_include_by_id_only(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.podman_containers import (
        InventoryModule,
    )

    containers = [
        {"Names": ["first"], "Id": "idaaa", "Image": "img1", "Status": "Up", "Labels": {}},
        {"Names": ["second"], "Id": "idbbb", "Image": "img2", "Status": "Up", "Labels": {}},
    ]
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        return_value=build_ps_json(containers),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        cfg = {"filters": {"include": {"id": "ida*"}}}
        with patch.object(mod, "_read_config_data", return_value=cfg):
            mod.parse(inv, loader=None, path="dummy.yml", cache=False)
        assert list(inv.hostvars.keys()) == ["first"]


@patch("ansible_collections.containers.podman.plugins.inventory.podman_containers.shutil.which", return_value="podman")
def test_name_falls_back_to_short_id_when_no_names(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.podman_containers import (
        InventoryModule,
    )

    long_id = "1234567890abcdef1234567890abcdef"
    containers = [
        {"Id": long_id, "Image": "img", "Status": "Up", "Labels": {}},
    ]
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        return_value=build_ps_json(containers),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        with patch.object(mod, "_read_config_data", return_value={}):
            mod.parse(inv, loader=None, path="dummy.yml", cache=False)
        # Host should be short id
        assert long_id[:12] in inv.hostvars


@patch("ansible_collections.containers.podman.plugins.inventory.podman_containers.shutil.which", return_value="podman")
def test_filters_unknown_key_path(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.podman_containers import (
        InventoryModule,
    )

    containers = [
        {"Names": ["x"], "Id": "idx", "Image": "img", "Status": "Up", "Labels": {}},
    ]
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        return_value=build_ps_json(containers),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        cfg = {"filters": {"include": {"unknown": "val"}}}
        with patch.object(mod, "_read_config_data", return_value=cfg):
            mod.parse(inv, loader=None, path="dummy.yml", cache=False)
        # Include with unknown key should exclude host
        assert inv.hostvars == {}


@patch("ansible_collections.containers.podman.plugins.inventory.podman_containers.shutil.which", return_value="podman")
def test_include_rules_status_only(mock_which):
    from ansible_collections.containers.podman.plugins.inventory.podman_containers import (
        InventoryModule,
    )

    containers = [
        {"Names": ["run"], "Id": "r1", "Image": "img1", "Status": "Up 2s", "Labels": {}},
        {"Names": ["stop"], "Id": "s1", "Image": "img2", "Status": "Exited (0)", "Labels": {}},
    ]
    with patch(
        "ansible_collections.containers.podman.plugins.inventory.podman_containers.subprocess.check_output",
        return_value=build_ps_json(containers),
    ):
        inv = FakeInventory()
        mod = InventoryModule()
        cfg = {"include_stopped": True, "filters": {"include": {"status": "up*"}}}
        with patch.object(mod, "_read_config_data", return_value=cfg):
            mod.parse(inv, loader=None, path="dummy.yml", cache=False)
        assert list(inv.hostvars.keys()) == ["run"]
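Taken together, the options these tests feed to _read_config_data map onto an inventory source roughly like the sketch below. It is illustrative only: the option names come from the tests above, while the file name and the concrete pattern and label values are assumptions.

# podman_containers.yml (hypothetical file name)
plugin: containers.podman.podman_containers
executable: podman
include_stopped: true
connection_plugin: containers.podman.podman
verbose_output: true
group_by_image: true
group_by_label:
  - env
label_selectors:
  role: api
name_patterns:
  - 'app-*'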