
[PR #11095/2b4333a0 backport][stable-12] Use raise from in plugins (#11129)

Use raise from in plugins (#11095)

* Use raise from.

* Add changelog fragment.

(cherry picked from commit 2b4333a033)

Co-authored-by: Felix Fontein <felix@fontein.de>
Author: patchback[bot], 2025-11-12 21:00:39 +01:00 (committed by GitHub)
Parent: cddb570e0e
Commit: cc93dab0fd
46 changed files with 218 additions and 165 deletions
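
Editorial note for readers of the diff below: every hunk applies the same mechanical change, re-raising a caught exception with explicit chaining. A minimal, dependency-free sketch of the before/after shape follows; ``PluginError`` and ``load_option`` are hypothetical stand-ins, not code from the collection.

# Before: the caught KeyError is only attached implicitly, as __context__.
#
#     def load_option(options, key):
#         try:
#             return options[key]
#         except KeyError:
#             raise PluginError(f"missing {key!r} in plugin options")
#
# After: ``raise ... from ...`` records it explicitly as __cause__.

class PluginError(Exception):
    """Stand-in for AnsibleError / AnsibleFilterError / AnsibleLookupError."""


def load_option(options, key):
    try:
        return options[key]
    except KeyError as e:
        raise PluginError(f"missing {key!r} in plugin options") from e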

View file

@ -0,0 +1,45 @@
minor_changes:
- shutdown action plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- redis cache plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- chroot connection plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- jail connection plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- lxc connection plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- lxd connection plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- wsl connection plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- zone connection plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- counter filter plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- from_csv filter plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- from_ini filter plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- hashids filter plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- jc filter plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- json_query filter plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- to_ini filter plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- gitlab_runners inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- icinga2 inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- linode inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- lxd inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- nmap inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- online inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- opennebula inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- scaleway inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- virtualbox inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- binary_file lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- chef_databag lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- collection_version lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- consul_kv lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- credstash lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- cyberarkpassword lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- dependent lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- dig lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- dnstxt lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- dsv lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- etcd3 lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- github_app_access_token lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- lmdb_kv lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- onepassword lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- passwordstore lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- redis lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- revbitspss lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- shelvefile lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- tss lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
- keys_filter plugin_utils plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
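
The practical effect of explicit chaining shows up at runtime: the traceback reads "The above exception was the direct cause of the following exception" instead of "During handling of the above exception, another exception occurred", and the original error stays reachable via ``__cause__``. A small self-contained demonstration, with hypothetical names rather than code from any of the plugins listed above:

class PluginError(Exception):
    pass


def parse_port(raw):
    try:
        return int(raw)
    except ValueError as e:
        # Explicit chaining: the ValueError becomes PluginError.__cause__.
        raise PluginError(f"cannot parse {raw!r} as a port number") from e


try:
    parse_port("not-a-number")
except PluginError as err:
    print(type(err.__cause__).__name__)  # -> ValueError
    print(err.__cause__)                 # -> invalid literal for int() with base 10: 'not-a-number'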

View file

@ -96,7 +96,7 @@ class ActionModule(ActionBase):
display.debug(f"{self._task.action}: distribution: {distribution}")
return distribution
except KeyError as ke:
raise AnsibleError(f'Failed to get distribution information. Missing "{ke.args[0]}" in output.')
raise AnsibleError(f'Failed to get distribution information. Missing "{ke.args[0]}" in output.') from ke
def get_shutdown_command(self, task_vars, distribution):
def find_command(command, find_search_paths):
@ -124,10 +124,10 @@ class ActionModule(ActionBase):
incorrect_type = any(not is_string(x) for x in search_paths)
if not isinstance(search_paths, list) or incorrect_type:
raise TypeError
except TypeError:
except TypeError as e:
# Error if we didn't get a list
err_msg = f"'search_paths' must be a string or flat list of strings, got {search_paths}"
raise AnsibleError(err_msg)
raise AnsibleError(err_msg) from e
full_path = find_command(shutdown_bin, search_paths) # find the path to the shutdown command
if not full_path: # if we could not find the shutdown command

View file

@ -147,8 +147,10 @@ class CacheModule(BaseCacheModule):
"""
try:
from redis.sentinel import Sentinel
except ImportError:
raise AnsibleError("The 'redis' python module (version 2.9.0 or newer) is required to use redis sentinel.")
except ImportError as e:
raise AnsibleError(
"The 'redis' python module (version 2.9.0 or newer) is required to use redis sentinel."
) from e
if ";" not in uri:
raise AnsibleError("_uri does not have sentinel syntax.")
@ -170,7 +172,7 @@ class CacheModule(BaseCacheModule):
try:
return scon.master_for(self._sentinel_service_name, socket_timeout=0.2)
except Exception as exc:
raise AnsibleError(f"Could not connect to redis sentinel: {exc}")
raise AnsibleError(f"Could not connect to redis sentinel: {exc}") from exc
def _make_key(self, key):
return self._prefix + key

View file

@ -129,7 +129,7 @@ class Connection(ConnectionBase):
try:
self.chroot_cmd = get_bin_path(self.get_option("chroot_exe"))
except ValueError as e:
raise AnsibleError(str(e))
raise AnsibleError(str(e)) from e
super()._connect()
if not self._connected:
@ -191,17 +191,17 @@ class Connection(ConnectionBase):
count = ""
try:
p = self._buffered_exec_command(f"dd of={out_path} bs={BUFSIZE}{count}", stdin=in_file)
except OSError:
raise AnsibleError("chroot connection requires dd command in the chroot")
except OSError as e:
raise AnsibleError("chroot connection requires dd command in the chroot") from e
try:
stdout, stderr = p.communicate()
except Exception:
except Exception as e:
traceback.print_exc()
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") from e
if p.returncode != 0:
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")
except IOError:
raise AnsibleError(f"file or module does not exist at: {in_path}")
except IOError as e:
raise AnsibleError(f"file or module does not exist at: {in_path}") from e
def fetch_file(self, in_path, out_path):
"""fetch a file from chroot to local"""
@ -211,8 +211,8 @@ class Connection(ConnectionBase):
in_path = shlex_quote(self._prefix_login_path(in_path))
try:
p = self._buffered_exec_command(f"dd if={in_path} bs={BUFSIZE}")
except OSError:
raise AnsibleError("chroot connection requires dd command in the chroot")
except OSError as e:
raise AnsibleError("chroot connection requires dd command in the chroot") from e
with open(to_bytes(out_path, errors="surrogate_or_strict"), "wb+") as out_file:
try:
@ -220,9 +220,9 @@ class Connection(ConnectionBase):
while chunk:
out_file.write(chunk)
chunk = p.stdout.read(BUFSIZE)
except Exception:
except Exception as e:
traceback.print_exc()
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") from e
stdout, stderr = p.communicate()
if p.returncode != 0:
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")

View file

@ -79,8 +79,8 @@ class Connection(ConnectionBase):
def _search_executable(executable):
try:
return get_bin_path(executable)
except ValueError:
raise AnsibleError(f"{executable} command not found in PATH")
except ValueError as e:
raise AnsibleError(f"{executable} command not found in PATH") from e
def list_jails(self):
p = subprocess.Popen(
@ -161,19 +161,19 @@ class Connection(ConnectionBase):
count = ""
try:
p = self._buffered_exec_command(f"dd of={out_path} bs={BUFSIZE}{count}", stdin=in_file)
except OSError:
raise AnsibleError("jail connection requires dd command in the jail")
except OSError as e:
raise AnsibleError("jail connection requires dd command in the jail") from e
try:
stdout, stderr = p.communicate()
except Exception:
except Exception as e:
traceback.print_exc()
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") from e
if p.returncode != 0:
raise AnsibleError(
f"failed to transfer file {in_path} to {out_path}:\n{to_native(stdout)}\n{to_native(stderr)}"
)
except IOError:
raise AnsibleError(f"file or module does not exist at: {in_path}")
except IOError as e:
raise AnsibleError(f"file or module does not exist at: {in_path}") from e
def fetch_file(self, in_path, out_path):
"""fetch a file from jail to local"""
@ -183,8 +183,8 @@ class Connection(ConnectionBase):
in_path = shlex_quote(self._prefix_login_path(in_path))
try:
p = self._buffered_exec_command(f"dd if={in_path} bs={BUFSIZE}")
except OSError:
raise AnsibleError("jail connection requires dd command in the jail")
except OSError as e:
raise AnsibleError("jail connection requires dd command in the jail") from e
with open(to_bytes(out_path, errors="surrogate_or_strict"), "wb+") as out_file:
try:
@ -192,9 +192,9 @@ class Connection(ConnectionBase):
while chunk:
out_file.write(chunk)
chunk = p.stdout.read(BUFSIZE)
except Exception:
except Exception as e:
traceback.print_exc()
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") from e
stdout, stderr = p.communicate()
if p.returncode != 0:
raise AnsibleError(

View file

@ -173,9 +173,9 @@ class Connection(ConnectionBase):
raise errors.AnsibleFileNotFound(msg)
try:
src_file = open(in_path, "rb")
except IOError:
except IOError as e:
traceback.print_exc()
raise errors.AnsibleError(f"failed to open input file to {in_path}")
raise errors.AnsibleError(f"failed to open input file to {in_path}") from e
try:
def write_file(args):
@ -184,10 +184,10 @@ class Connection(ConnectionBase):
try:
self.container.attach_wait(write_file, None)
except IOError:
except IOError as e:
traceback.print_exc()
msg = f"failed to transfer file to {out_path}"
raise errors.AnsibleError(msg)
raise errors.AnsibleError(msg) from e
finally:
src_file.close()
@ -200,10 +200,10 @@ class Connection(ConnectionBase):
try:
dst_file = open(out_path, "wb")
except IOError:
except IOError as e:
traceback.print_exc()
msg = f"failed to open output file {out_path}"
raise errors.AnsibleError(msg)
raise errors.AnsibleError(msg) from e
try:
def write_file(args):
@ -217,10 +217,10 @@ class Connection(ConnectionBase):
try:
self.container.attach_wait(write_file, None)
except IOError:
except IOError as e:
traceback.print_exc()
msg = f"failed to transfer file from {in_path} to {out_path}"
raise errors.AnsibleError(msg)
raise errors.AnsibleError(msg) from e
finally:
dst_file.close()

View file

@ -93,8 +93,8 @@ class Connection(ConnectionBase):
try:
self._lxc_cmd = get_bin_path("lxc")
except ValueError:
raise AnsibleError("lxc command not found in PATH")
except ValueError as e:
raise AnsibleError("lxc command not found in PATH") from e
def _host(self):
"""translate remote_addr to lxd (short) hostname"""

View file

@ -478,11 +478,11 @@ class Connection(ConnectionBase):
except IOError:
pass # file was not found, but not required to function
except paramiko.hostkeys.InvalidHostKey as e:
raise AnsibleConnectionFailure(f"Invalid host key: {to_text(e.line)}")
raise AnsibleConnectionFailure(f"Invalid host key: {to_text(e.line)}") from e
try:
ssh.load_system_host_keys()
except paramiko.hostkeys.InvalidHostKey as e:
raise AnsibleConnectionFailure(f"Invalid host key: {to_text(e.line)}")
raise AnsibleConnectionFailure(f"Invalid host key: {to_text(e.line)}") from e
ssh_connect_kwargs = self._parse_proxy_command(port)
ssh.set_missing_host_key_policy(MyAddPolicy(self))
@ -518,22 +518,24 @@ class Connection(ConnectionBase):
**ssh_connect_kwargs,
)
except paramiko.ssh_exception.BadHostKeyException as e:
raise AnsibleConnectionFailure(f"host key mismatch for {to_text(e.hostname)}")
raise AnsibleConnectionFailure(f"host key mismatch for {to_text(e.hostname)}") from e
except paramiko.ssh_exception.AuthenticationException as e:
msg = f"Failed to authenticate: {e}"
raise AnsibleAuthenticationFailure(msg)
raise AnsibleAuthenticationFailure(msg) from e
except Exception as e:
msg = to_text(e)
if "PID check failed" in msg:
raise AnsibleError("paramiko version issue, please upgrade paramiko on the machine running ansible")
raise AnsibleError(
"paramiko version issue, please upgrade paramiko on the machine running ansible"
) from e
elif "Private key file is encrypted" in msg:
msg = (
f"ssh {self.get_option('remote_user')}@{self.get_options('remote_addr')}:{port} : "
f"{msg}\nTo connect as a different user, use -u <username>."
)
raise AnsibleConnectionFailure(msg)
raise AnsibleConnectionFailure(msg) from e
else:
raise AnsibleConnectionFailure(msg)
raise AnsibleConnectionFailure(msg) from e
self.ssh = ssh
self._connected = True
return self
@ -609,7 +611,7 @@ class Connection(ConnectionBase):
msg = "Failed to open session"
if text_e:
msg += f": {text_e}"
raise AnsibleConnectionFailure(to_native(msg))
raise AnsibleConnectionFailure(to_native(msg)) from e
display.vvv(f"EXEC {cmd}", host=self.get_option("remote_addr"))
@ -665,8 +667,8 @@ class Connection(ConnectionBase):
elif in_data == b"":
chan.shutdown_write()
except socket.timeout:
raise AnsibleError(f"ssh timed out waiting for privilege escalation.\n{to_text(become_output)}")
except socket.timeout as e:
raise AnsibleError(f"ssh timed out waiting for privilege escalation.\n{to_text(become_output)}") from e
stdout = b"".join(chan.makefile("rb", bufsize))
stderr = b"".join(chan.makefile_stderr("rb", bufsize))
@ -699,7 +701,7 @@ class Connection(ConnectionBase):
)
raise AnsibleError(f"{to_text(stdout)}\n{to_text(stderr)}")
except Exception as e:
raise AnsibleError(f"error occurred while putting file from {in_path} to {out_path}!\n{to_text(e)}")
raise AnsibleError(f"error occurred while putting file from {in_path} to {out_path}!\n{to_text(e)}") from e
def fetch_file(self, in_path: str, out_path: str) -> None:
"""save a remote file to the specified path"""
@ -718,7 +720,7 @@ class Connection(ConnectionBase):
with open(out_path, "wb") as f:
f.write(stdout)
except Exception as e:
raise AnsibleError(f"error occurred while fetching file from {in_path} to {out_path}!\n{to_text(e)}")
raise AnsibleError(f"error occurred while fetching file from {in_path} to {out_path}!\n{to_text(e)}") from e
def reset(self) -> None:
"""reset the connection"""
@ -772,16 +774,16 @@ class Connection(ConnectionBase):
self._save_ssh_host_keys(tmp_keyfile_name)
os.rename(tmp_keyfile_name, self.keyfile)
except LockTimeout:
except LockTimeout as e:
raise AnsibleError(
f"writing lock file for {self.keyfile} ran in to the timeout of {self.get_option('lock_file_timeout')}s"
)
) from e
except paramiko.hostkeys.InvalidHostKey as e:
raise AnsibleConnectionFailure(f"Invalid host key: {e.line}")
raise AnsibleConnectionFailure(f"Invalid host key: {e.line}") from e
except Exception as e:
# unable to save keys, including scenario when key was invalid
# and caught earlier
raise AnsibleError(f"error occurred while writing SSH host keys!\n{to_text(e)}")
raise AnsibleError(f"error occurred while writing SSH host keys!\n{to_text(e)}") from e
finally:
if tmp_keyfile_name is not None:
pathlib.Path(tmp_keyfile_name).unlink(missing_ok=True)

View file

@ -66,8 +66,8 @@ class Connection(ConnectionBase):
def _search_executable(executable):
try:
return get_bin_path(executable)
except ValueError:
raise AnsibleError(f"{executable} command not found in PATH")
except ValueError as e:
raise AnsibleError(f"{executable} command not found in PATH") from e
def list_zones(self):
process = subprocess.Popen(
@ -160,17 +160,17 @@ class Connection(ConnectionBase):
count = ""
try:
p = self._buffered_exec_command(f"dd of={out_path} bs={BUFSIZE}{count}", stdin=in_file)
except OSError:
raise AnsibleError("jail connection requires dd command in the jail")
except OSError as e:
raise AnsibleError("jail connection requires dd command in the jail") from e
try:
stdout, stderr = p.communicate()
except Exception:
except Exception as e:
traceback.print_exc()
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") from e
if p.returncode != 0:
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")
except IOError:
raise AnsibleError(f"file or module does not exist at: {in_path}")
except IOError as e:
raise AnsibleError(f"file or module does not exist at: {in_path}") from e
def fetch_file(self, in_path, out_path):
"""fetch a file from zone to local"""
@ -180,8 +180,8 @@ class Connection(ConnectionBase):
in_path = shlex_quote(self._prefix_login_path(in_path))
try:
p = self._buffered_exec_command(f"dd if={in_path} bs={BUFSIZE}")
except OSError:
raise AnsibleError("zone connection requires dd command in the zone")
except OSError as e:
raise AnsibleError("zone connection requires dd command in the zone") from e
with open(out_path, "wb+") as out_file:
try:
@ -189,9 +189,9 @@ class Connection(ConnectionBase):
while chunk:
out_file.write(chunk)
chunk = p.stdout.read(BUFSIZE)
except Exception:
except Exception as e:
traceback.print_exc()
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") from e
stdout, stderr = p.communicate()
if p.returncode != 0:
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")

View file

@ -51,7 +51,7 @@ def counter(sequence):
except TypeError as e:
raise AnsibleFilterError(
f"community.general.counter needs a sequence with hashable elements (int, float or str) - {e}"
)
) from e
return result

View file

@ -98,7 +98,7 @@ def from_csv(data, dialect="excel", fieldnames=None, delimiter=None, skipinitial
try:
dialect = initialize_dialect(dialect, **dialect_params)
except (CustomDialectFailureError, DialectNotAvailableError) as e:
raise AnsibleFilterError(str(e))
raise AnsibleFilterError(str(e)) from e
reader = read_csv(data, dialect, fieldnames)
@ -108,7 +108,7 @@ def from_csv(data, dialect="excel", fieldnames=None, delimiter=None, skipinitial
for row in reader:
data_list.append(row)
except CSVError as e:
raise AnsibleFilterError(f"Unable to process file: {e}")
raise AnsibleFilterError(f"Unable to process file: {e}") from e
return data_list

View file

@ -79,7 +79,7 @@ def from_ini(obj):
try:
parser.read_file(StringIO(obj))
except Exception as ex:
raise AnsibleFilterError(f"from_ini failed to parse given string: {ex}", orig_exc=ex)
raise AnsibleFilterError(f"from_ini failed to parse given string: {ex}", orig_exc=ex) from ex
return parser.as_dict()

View file

@ -37,7 +37,7 @@ def initialize_hashids(**kwargs):
raise AnsibleFilterError(
"The provided parameters %s are invalid: %s"
% (", ".join(["%s=%s" % (k, v) for k, v in params.items()]), to_native(e))
)
) from e
def hashids_encode(nums, salt=None, alphabet=None, min_length=None):
@ -60,7 +60,7 @@ def hashids_encode(nums, salt=None, alphabet=None, min_length=None):
try:
hashid = hashids.encode(*nums)
except TypeError as e:
raise AnsibleTypeError(f"Data to encode must by a tuple or list of ints: {e}")
raise AnsibleTypeError(f"Data to encode must by a tuple or list of ints: {e}") from e
return hashid

View file

@ -149,7 +149,7 @@ def jc_filter(data, parser, quiet=True, raw=False):
return jc_parser.parse(data, quiet=quiet, raw=raw)
except Exception as e:
raise AnsibleFilterError(f"Error in jc filter plugin: {e}")
raise AnsibleFilterError(f"Error in jc filter plugin: {e}") from e
class FilterModule:

View file

@ -141,10 +141,10 @@ def json_query(data, expr):
try:
return jmespath.search(expr, data)
except jmespath.exceptions.JMESPathError as e:
raise AnsibleFilterError(f"JMESPathError in json_query filter plugin:\n{e}")
raise AnsibleFilterError(f"JMESPathError in json_query filter plugin:\n{e}") from e
except Exception as e:
# For older jmespath, we can get ValueError and TypeError without much info.
raise AnsibleFilterError(f"Error in jmespath.search in json_query filter plugin:\n{e}")
raise AnsibleFilterError(f"Error in jmespath.search in json_query filter plugin:\n{e}") from e
class FilterModule:

View file

@ -72,7 +72,7 @@ def to_ini(obj):
try:
ini_parser.read_dict(obj)
except Exception as ex:
raise AnsibleFilterError(f"to_ini failed to parse given dict:{ex}", orig_exc=ex)
raise AnsibleFilterError(f"to_ini failed to parse given dict:{ex}", orig_exc=ex) from ex
# catching empty dicts
if obj == dict():

View file

@ -124,7 +124,9 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
# Create groups based on variable values and add the corresponding hosts to it
self._add_host_to_keyed_groups(self.get_option("keyed_groups"), host_attrs, host, strict=strict)
except Exception as e:
raise AnsibleParserError(f"Unable to fetch hosts from GitLab API, this was the original exception: {e}")
raise AnsibleParserError(
f"Unable to fetch hosts from GitLab API, this was the original exception: {e}"
) from e
def verify_file(self, path):
"""Return the possibly of a file being consumable by this plugin."""

View file

@ -166,8 +166,10 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
except Exception:
error_body = {"status": None}
if e.code == 404 and error_body.get("status") == "No objects found.":
raise AnsibleParserError("Host filter returned no data. Please confirm your host_filter value is valid")
raise AnsibleParserError(f"Unexpected data returned: {e} -- {error_body}")
raise AnsibleParserError(
"Host filter returned no data. Please confirm your host_filter value is valid"
) from e
raise AnsibleParserError(f"Unexpected data returned: {e} -- {error_body}") from e
response_body = response.read()
json_data = json.loads(response_body.decode("utf-8"))

View file

@ -160,7 +160,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
try:
self.instances = self.client.linode.instances()
except LinodeApiError as exception:
raise AnsibleError(f"Linode client raised: {exception}")
raise AnsibleError(f"Linode client raised: {exception}") from exception
def _add_groups(self):
"""Add Linode instance groups to the dynamic inventory."""

View file

@ -213,7 +213,7 @@ class InventoryModule(BaseInventoryPlugin):
with open(path, "r") as json_file:
return json.load(json_file)
except (IOError, json.decoder.JSONDecodeError) as err:
raise AnsibleParserError(f"Could not load the test data from {to_native(path)}: {err}")
raise AnsibleParserError(f"Could not load the test data from {to_native(path)}: {err}") from err
def save_json_data(self, path, file_name=None):
"""save data as json
@ -243,7 +243,7 @@ class InventoryModule(BaseInventoryPlugin):
with open(os.path.abspath(os.path.join(cwd, *path)), "w") as json_file:
json.dump(self.data, json_file)
except IOError as err:
raise AnsibleParserError(f"Could not save data: {err}")
raise AnsibleParserError(f"Could not save data: {err}") from err
def verify_file(self, path):
"""Check the config
@ -602,7 +602,7 @@ class InventoryModule(BaseInventoryPlugin):
else:
path[instance_name][key] = value
except KeyError as err:
raise AnsibleParserError(f"Unable to store Information: {err}")
raise AnsibleParserError(f"Unable to store Information: {err}") from err
def extract_information_from_instance_configs(self):
"""Process configuration information
@ -853,7 +853,7 @@ class InventoryModule(BaseInventoryPlugin):
except ValueError as err:
raise AnsibleParserError(
f"Error while parsing network range {self.groupby[group_name].get('attribute')}: {err}"
)
) from err
for instance_name in self.inventory.hosts:
if self.data["inventory"][instance_name].get("network_interfaces") is not None:
@ -1203,6 +1203,6 @@ class InventoryModule(BaseInventoryPlugin):
self.trust_password = self.get_option("trust_password")
self.url = self.get_option("url")
except Exception as err:
raise AnsibleParserError(f"All correct options required: {err}")
raise AnsibleParserError(f"All correct options required: {err}") from err
# Call our internal helper to populate the dynamic inventory
self._populate()

View file

@ -180,7 +180,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
try:
self._nmap = get_bin_path("nmap")
except ValueError as e:
raise AnsibleParserError(f"nmap inventory plugin requires the nmap cli tool to work: {e}")
raise AnsibleParserError(f"nmap inventory plugin requires the nmap cli tool to work: {e}") from e
super().parse(inventory, loader, path, cache=cache)
@ -265,7 +265,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
try:
t_stdout = to_text(stdout, errors="surrogate_or_strict")
except UnicodeError as e:
raise AnsibleParserError(f"Invalid (non unicode) input returned: {e}")
raise AnsibleParserError(f"Invalid (non unicode) input returned: {e}") from e
for line in t_stdout.splitlines():
hits = self.find_host.match(line)
@ -310,7 +310,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
results[-1]["ports"] = ports
except Exception as e:
raise AnsibleParserError(f"failed to parse {to_native(path)}: {e} ")
raise AnsibleParserError(f"failed to parse {to_native(path)}: {e} ") from e
if cache_needs_update:
self._cache[cache_key] = results

View file

@ -141,13 +141,13 @@ class InventoryModule(BaseInventoryPlugin):
try:
raw_data = to_text(response.read(), errors="surrogate_or_strict")
except UnicodeError:
raise AnsibleError("Incorrect encoding of fetched payload from Online servers")
except UnicodeError as e:
raise AnsibleError("Incorrect encoding of fetched payload from Online servers") from e
try:
return json.loads(raw_data)
except ValueError:
raise AnsibleError("Incorrect JSON payload")
except ValueError as e:
raise AnsibleError("Incorrect JSON payload") from e
@staticmethod
def extract_rpn_lookup_cache(rpn_list):

View file

@ -122,10 +122,10 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
with open(authfile, "r") as fp:
authstring = fp.read().rstrip()
username, password = authstring.split(":")
except (OSError, IOError):
raise AnsibleError(f"Could not find or read ONE_AUTH file at '{authfile}'")
except Exception:
raise AnsibleError(f"Error occurs when reading ONE_AUTH file at '{authfile}'")
except (OSError, IOError) as e:
raise AnsibleError(f"Could not find or read ONE_AUTH file at '{authfile}'") from e
except Exception as e:
raise AnsibleError(f"Error occurs when reading ONE_AUTH file at '{authfile}'") from e
auth_params = namedtuple("auth", ("url", "username", "password"))
@ -167,7 +167,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
try:
vm_pool = one_client.vmpool.infoextended(-2, -1, -1, 3)
except Exception as e:
raise AnsibleError(f"Something happened during XML-RPC call: {e}")
raise AnsibleError(f"Something happened during XML-RPC call: {e}") from e
return vm_pool

View file

@ -143,16 +143,16 @@ def _fetch_information(token, url):
try:
response = open_url(paginated_url, headers={"X-Auth-Token": token, "Content-type": "application/json"})
except Exception as e:
raise AnsibleError(f"Error while fetching {url}: {e}")
raise AnsibleError(f"Error while fetching {url}: {e}") from e
try:
raw_json = json.loads(to_text(response.read()))
except ValueError:
raise AnsibleError("Incorrect JSON payload")
except ValueError as e:
raise AnsibleError("Incorrect JSON payload") from e
try:
results.extend(raw_json["servers"])
except KeyError:
raise AnsibleError("Incorrect format from the Scaleway API response")
except KeyError as e:
raise AnsibleError("Incorrect format from the Scaleway API response") from e
link = response.headers["Link"]
if not link:

View file

@ -312,7 +312,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
try:
self._vbox_path = get_bin_path(self.VBOX)
except ValueError as e:
raise AnsibleParserError(e)
raise AnsibleParserError(e) from e
super().parse(inventory, loader, path)
@ -354,7 +354,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
try:
p = Popen(cmd, stdout=PIPE)
except Exception as e:
raise AnsibleParserError(str(e))
raise AnsibleParserError(str(e)) from e
source_data = p.stdout.read().splitlines()

View file

@ -107,6 +107,6 @@ class LookupModule(LookupBase):
with open(path, "rb") as f:
result.append(base64.b64encode(f.read()).decode("utf-8"))
except Exception as exc:
raise AnsibleLookupError(f"Error while reading {path}: {exc}")
raise AnsibleLookupError(f"Error while reading {path}: {exc}") from exc
return result

View file

@ -77,8 +77,8 @@ class LookupModule(LookupBase):
continue
parsed = str(arg_raw)
setattr(self, arg, parsed)
except ValueError:
raise AnsibleError(f"can't parse arg {arg}={arg_raw} as string")
except ValueError as e:
raise AnsibleError(f"can't parse arg {arg}={arg_raw} as string") from e
if args:
raise AnsibleError(f"unrecognized arguments to with_sequence: {list(args.keys())!r}")

View file

@ -122,7 +122,7 @@ class LookupModule(LookupBase):
try:
data = load_collection_meta(collection_pkg, no_version=no_version)
except Exception as exc:
raise AnsibleLookupError(f"Error while loading metadata for {term}: {exc}")
raise AnsibleLookupError(f"Error while loading metadata for {term}: {exc}") from exc
result.append(data.get("version", no_version))

View file

@ -170,7 +170,7 @@ class LookupModule(LookupBase):
else:
values.append(to_text(results[1]["Value"]))
except Exception as e:
raise AnsibleError(f"Error locating '{term}' in kv store. Error was {e}")
raise AnsibleError(f"Error locating '{term}' in kv store. Error was {e}") from e
return values
@ -194,6 +194,6 @@ class LookupModule(LookupBase):
raise AnsibleAssertionError(f"{name} not a valid consul lookup parameter")
paramvals[name] = value
except (ValueError, AssertionError) as e:
raise AnsibleError(e)
raise AnsibleError(e) from e
return paramvals

View file

@ -145,9 +145,9 @@ class LookupModule(LookupBase):
for term in terms:
try:
ret.append(credstash.getSecret(term, version, region, table, context=context, **kwargs_pass))
except credstash.ItemNotFound:
raise AnsibleError(f"Key {term} not found")
except credstash.ItemNotFound as e:
raise AnsibleError(f"Key {term} not found") from e
except Exception as e:
raise AnsibleError(f"Encountered exception while fetching {term}: {e}")
raise AnsibleError(f"Encountered exception while fetching {term}: {e}") from e
return ret

View file

@ -164,11 +164,11 @@ class CyberarkPassword:
result_dict[output_names[i]] = to_native(output_values[i])
except subprocess.CalledProcessError as e:
raise AnsibleError(e.output)
raise AnsibleError(e.output) from e
except OSError as e:
raise AnsibleError(
f"ERROR - AIM not installed or clipasswordsdk not in standard location. ERROR=({e.errno}) => {e.strerror} "
)
) from e
return [result_dict]

View file

@ -174,7 +174,7 @@ class LookupModule(LookupBase):
try:
values = self.__evaluate(expression, templar, variables=vars)
except Exception as e:
raise AnsibleLookupError(f'Caught "{e}" while evaluating {key!r} with item == {current!r}')
raise AnsibleLookupError(f'Caught "{e}" while evaluating {key!r} with item == {current!r}') from e
if isinstance(values, Mapping):
for idx, val in sorted(values.items()):

View file

@ -387,7 +387,7 @@ class LookupModule(LookupBase):
try:
rdclass = dns.rdataclass.from_text(self.get_option("class"))
except Exception as e:
raise AnsibleError(f"dns lookup illegal CLASS: {e}")
raise AnsibleError(f"dns lookup illegal CLASS: {e}") from e
myres.retry_servfail = self.get_option("retry_servfail")
for t in terms:
@ -405,7 +405,7 @@ class LookupModule(LookupBase):
nsaddr = dns.resolver.query(ns)[0].address
nameservers.append(nsaddr)
except Exception as e:
raise AnsibleError(f"dns lookup NS: {e}")
raise AnsibleError(f"dns lookup NS: {e}") from e
continue
if "=" in t:
try:
@ -421,7 +421,7 @@ class LookupModule(LookupBase):
try:
rdclass = dns.rdataclass.from_text(arg)
except Exception as e:
raise AnsibleError(f"dns lookup illegal CLASS: {e}")
raise AnsibleError(f"dns lookup illegal CLASS: {e}") from e
elif opt == "retry_servfail":
myres.retry_servfail = boolean(arg)
elif opt == "fail_on_error":
@ -458,7 +458,7 @@ class LookupModule(LookupBase):
except dns.exception.SyntaxError:
pass
except Exception as e:
raise AnsibleError(f"dns.reversename unhandled exception {e}")
raise AnsibleError(f"dns.reversename unhandled exception {e}") from e
domains = reversed_domains
if len(domains) > 1:
@ -487,20 +487,20 @@ class LookupModule(LookupBase):
ret.append(rd)
except Exception as err:
if fail_on_error:
raise AnsibleError(f"Lookup failed: {err}")
raise AnsibleError(f"Lookup failed: {err}") from err
ret.append(str(err))
except dns.resolver.NXDOMAIN as err:
if fail_on_error:
raise AnsibleError(f"Lookup failed: {err}")
raise AnsibleError(f"Lookup failed: {err}") from err
if not real_empty:
ret.append("NXDOMAIN")
except (dns.resolver.NoAnswer, dns.resolver.Timeout, dns.resolver.NoNameservers) as err:
if fail_on_error:
raise AnsibleError(f"Lookup failed: {err}")
raise AnsibleError(f"Lookup failed: {err}") from err
if not real_empty:
ret.append("")
except dns.exception.DNSException as err:
raise AnsibleError(f"dns.resolver unhandled exception {err}")
raise AnsibleError(f"dns.resolver unhandled exception {err}") from err
return ret

View file

@ -105,7 +105,7 @@ class LookupModule(LookupBase):
continue
string = ""
except DNSException as e:
raise AnsibleError(f"dns.resolver unhandled exception {e}")
raise AnsibleError(f"dns.resolver unhandled exception {e}") from e
ret.append("".join(string))

View file

@ -109,8 +109,8 @@ class LookupModule(LookupBase):
try:
vault = SecretsVault(**vault_parameters)
return vault
except TypeError:
raise AnsibleError("python-dsv-sdk==0.0.1 must be installed to use this plugin")
except TypeError as e:
raise AnsibleError("python-dsv-sdk==0.0.1 must be installed to use this plugin") from e
def run(self, terms, variables, **kwargs):
if sdk_is_missing:
@ -140,5 +140,5 @@ class LookupModule(LookupBase):
display.vvv(f"DevOps Secrets Vault GET /secrets/{path}")
result.append(vault.get_secret_json(path))
except SecretsVaultError as error:
raise AnsibleError(f"DevOps Secrets Vault lookup failure: {error.message}")
raise AnsibleError(f"DevOps Secrets Vault lookup failure: {error.message}") from error
return result

View file

@ -167,7 +167,7 @@ def etcd3_client(client_params):
etcd = etcd3.client(**client_params)
etcd.status()
except Exception as exp:
raise AnsibleLookupError(f"Cannot connect to etcd cluster: {exp}")
raise AnsibleLookupError(f"Cannot connect to etcd cluster: {exp}") from exp
return etcd

View file

@ -116,7 +116,7 @@ class PythonJWT:
with open(path, "rb") as pem_file:
return jwk_from_pem(pem_file.read())
except Exception as e:
raise AnsibleError(f"Error while parsing key file: {e}")
raise AnsibleError(f"Error while parsing key file: {e}") from e
@staticmethod
def encode_jwt(app_id, jwk, exp=600):
@ -129,7 +129,7 @@ class PythonJWT:
try:
return jwt_instance.encode(payload, jwk, alg="RS256")
except Exception as e:
raise AnsibleError(f"Error while encoding jwt: {e}")
raise AnsibleError(f"Error while encoding jwt: {e}") from e
def read_key(path, private_key=None):
@ -143,7 +143,7 @@ def read_key(path, private_key=None):
key_bytes = pem_file.read()
return serialization.load_pem_private_key(key_bytes, password=None)
except Exception as e:
raise AnsibleError(f"Error while parsing key file: {e}")
raise AnsibleError(f"Error while parsing key file: {e}") from e
def encode_jwt(app_id, private_key_obj, exp=600):
@ -158,7 +158,7 @@ def encode_jwt(app_id, private_key_obj, exp=600):
try:
return jwt.encode(payload, private_key_obj, algorithm="RS256")
except Exception as e:
raise AnsibleError(f"Error while encoding jwt: {e}")
raise AnsibleError(f"Error while encoding jwt: {e}") from e
def post_request(generated_jwt, installation_id, api_base):
@ -178,15 +178,15 @@ def post_request(generated_jwt, installation_id, api_base):
except Exception:
error_body = {}
if e.code == 404:
raise AnsibleError("Github return error. Please confirm your installation_id value is valid")
raise AnsibleError("Github return error. Please confirm your installation_id value is valid") from e
elif e.code == 401:
raise AnsibleError("Github return error. Please confirm your private key is valid")
raise AnsibleError(f"Unexpected data returned: {e} -- {error_body}")
raise AnsibleError("Github return error. Please confirm your private key is valid") from e
raise AnsibleError(f"Unexpected data returned: {e} -- {error_body}") from e
response_body = response.read()
try:
json_data = json.loads(response_body.decode("utf-8"))
except json.decoder.JSONDecodeError as e:
raise AnsibleError(f"Error while dencoding JSON respone from github: {e}")
raise AnsibleError(f"Error while dencoding JSON respone from github: {e}") from e
return json_data.get("token")

View file

@ -93,7 +93,7 @@ class LookupModule(LookupBase):
try:
env = lmdb.open(str(db), readonly=True)
except Exception as e:
raise AnsibleError(f"LMDB cannot open database {db}: {e}")
raise AnsibleError(f"LMDB cannot open database {db}: {e}") from e
ret = []
if len(terms) == 0:

View file

@ -198,13 +198,13 @@ class OnePassCLIBase(metaclass=abc.ABCMeta):
based on the current version."""
try:
bin_path = get_bin_path(cls.bin)
except ValueError:
raise AnsibleLookupError(f"Unable to locate '{cls.bin}' command line tool")
except ValueError as e:
raise AnsibleLookupError(f"Unable to locate '{cls.bin}' command line tool") from e
try:
b_out = subprocess.check_output([bin_path, "--version"], stderr=subprocess.PIPE)
except subprocess.CalledProcessError as cpe:
raise AnsibleLookupError(f"Unable to get the op version: {cpe}")
raise AnsibleLookupError(f"Unable to get the op version: {cpe}") from cpe
return to_text(b_out).strip()
@ -653,7 +653,7 @@ class OnePass:
self.connect_token,
)
except TypeError as e:
raise AnsibleLookupError(e)
raise AnsibleLookupError(e) from e
raise AnsibleLookupError(f"op version {version} is unsupported")

View file

@ -309,7 +309,7 @@ class LookupModule(LookupBase):
)
self.realpass = "pass: the standard unix password manager" in passoutput
except subprocess.CalledProcessError as e:
raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}")
raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}") from e
return self.realpass
@ -329,14 +329,14 @@ class LookupModule(LookupBase):
raise AnsibleAssertionError(f"{name} not in paramvals")
self.paramvals[name] = value
except (ValueError, AssertionError) as e:
raise AnsibleError(e)
raise AnsibleError(e) from e
# check and convert values
try:
for key in ["create", "returnall", "overwrite", "backup", "nosymbols"]:
if not isinstance(self.paramvals[key], bool):
self.paramvals[key] = boolean(self.paramvals[key])
except (ValueError, AssertionError) as e:
raise AnsibleError(e)
raise AnsibleError(e) from e
if self.paramvals["missing"] not in ["error", "warn", "create", "empty"]:
raise AnsibleError(f"{self.paramvals['missing']} is not a valid option for missing")
if not isinstance(self.paramvals["length"], int):
@ -395,7 +395,7 @@ class LookupModule(LookupBase):
except subprocess.CalledProcessError as e:
# 'not in password store' is the expected error if a password wasn't found
if "not in the password store" not in e.output:
raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}")
raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}") from e
if self.paramvals["missing"] == "error":
raise AnsibleError(f"passwordstore: passname {self.passname} not found and missing=error is set")
@ -459,7 +459,7 @@ class LookupModule(LookupBase):
try:
check_output2([self.pass_cmd, "insert", "-f", "-m", self.passname], input=msg, env=self.env)
except subprocess.CalledProcessError as e:
raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}")
raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}") from e
return newpass
def generate_password(self):
@ -480,7 +480,7 @@ class LookupModule(LookupBase):
try:
check_output2([self.pass_cmd, "insert", "-f", "-m", self.passname], input=msg, env=self.env)
except subprocess.CalledProcessError as e:
raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}")
raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}") from e
return newpass

View file

@ -112,5 +112,5 @@ class LookupModule(LookupBase):
ret.append(to_text(res))
except Exception as e:
# connection failed or key not found
raise AnsibleError(f"Encountered exception while fetching {term}: {e}")
raise AnsibleError(f"Encountered exception while fetching {term}: {e}") from e
return ret

View file

@ -98,5 +98,5 @@ class LookupModule(LookupBase):
display.vvv(f"Secret Server lookup of Secret with ID {term}")
result.append({term: secret_server.get_pam_secret(term)})
except Exception as error:
raise AnsibleError(f"Secret Server lookup failure: {error.message}")
raise AnsibleError(f"Secret Server lookup failure: {error.message}") from error
return result

View file

@ -74,7 +74,7 @@ class LookupModule(LookupBase):
except (ValueError, AssertionError) as e:
# In case "file" or "key" are not present
raise AnsibleError(e)
raise AnsibleError(e) from e
key = paramvals["key"]

View file

@ -377,8 +377,8 @@ class TSSClient(metaclass=abc.ABCMeta): # noqa: B024
file_content = i["itemValue"].content
with open(os.path.join(file_download_path, f"{obj['id']}_{i['slug']}"), "wb") as f:
f.write(file_content)
except ValueError:
raise AnsibleOptionsError(f"Failed to download {i['slug']}")
except ValueError as e:
raise AnsibleOptionsError(f"Failed to download {i['slug']}") from e
except AttributeError:
display.warning(f"Could not read file content for {i['slug']}")
finally:
@ -403,15 +403,15 @@ class TSSClient(metaclass=abc.ABCMeta): # noqa: B024
def _term_to_secret_id(term):
try:
return int(term)
except ValueError:
raise AnsibleOptionsError("Secret ID must be an integer")
except ValueError as e:
raise AnsibleOptionsError("Secret ID must be an integer") from e
@staticmethod
def _term_to_folder_id(term):
try:
return int(term)
except ValueError:
raise AnsibleOptionsError("Folder ID must be an integer")
except ValueError as e:
raise AnsibleOptionsError("Folder ID must be an integer") from e
class TSSClientV0(TSSClient):
@ -493,4 +493,4 @@ class LookupModule(LookupBase):
for term in terms
]
except SecretServerError as error:
raise AnsibleError(f"Secret Server lookup failure: {error.message}")
raise AnsibleError(f"Secret Server lookup failure: {error.message}") from error

View file

@ -78,9 +78,9 @@ def _keys_filter_target_str(target, matching_parameter):
r = target[0]
try:
tt = re.compile(r)
except re.error:
except re.error as e:
msg = "The target must be a valid regex if matching_parameter=regex. target is %s"
raise AnsibleFilterError(msg % r)
raise AnsibleFilterError(msg % r) from e
elif isinstance(target, str):
tt = (target,)
else:
@ -129,12 +129,12 @@ def _keys_filter_target_dict(target, matching_parameter):
try:
tr = map(re.compile, before)
tz = list(zip(tr, after))
except re.error:
except re.error as e:
msg = (
"The attributes before must be valid regex if matching_parameter=regex."
" Not all items are valid regex in: %s"
)
raise AnsibleFilterError(msg % before)
raise AnsibleFilterError(msg % before) from e
else:
tz = list(zip(before, after))

View file

@ -151,8 +151,8 @@ class ModuleTestCase:
for mock_name, mock_spec in self.mock_specs.items():
try:
mock_class = mocks_map[mock_name]
except KeyError:
raise Exception(f"Cannot find TestCaseMock class for: {mock_name}")
except KeyError as e:
raise Exception(f"Cannot find TestCaseMock class for: {mock_name}") from e
self.mocks[mock_name] = mock_class.build_mock(mock_spec)
self._fixtures.update(self.mocks[mock_name].fixtures())