Use raise from in plugins (#11095)
* Use raise from.
* Add changelog fragment.

parent 1a82e93c6d
commit 2b4333a033

46 changed files with 218 additions and 165 deletions
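The whole change applies a single pattern: wherever a plugin catches an exception and re-raises it as an Ansible error, the original exception is now chained explicitly with ``raise ... from ...``. A minimal, runnable sketch of that pattern (the ``AnsibleError`` class below is a simplified stand-in for ``ansible.errors.AnsibleError``, and ``get_distribution`` is loosely modelled on the shutdown action plugin hunk further down, not copied from it):

class AnsibleError(Exception):
    """Simplified stand-in for ansible.errors.AnsibleError."""


def get_distribution(facts):
    try:
        return facts["distribution"]
    except KeyError as ke:
        # "from ke" records the KeyError as __cause__ on the new exception,
        # so the root cause is preserved instead of being silently replaced.
        raise AnsibleError(f'Failed to get distribution information. Missing "{ke.args[0]}" in output.') from ke


try:
    get_distribution({})
except AnsibleError as err:
    print(repr(err.__cause__))  # KeyError('distribution')

With the explicit chain, the original exception stays reachable as ``__cause__`` and the printed traceback says the earlier exception was the direct cause of the new one, rather than relying on implicit chaining.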
changelogs/fragments/11095-raise-from.yml (new file, 45 lines)

@@ -0,0 +1,45 @@
+minor_changes:
+  - shutdown action plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - redis cache plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - chroot connection plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - jail connection plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - lxc connection plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - lxd connection plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - wsl connection plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - zone connection plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - counter filter plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - from_csv filter plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - from_ini filter plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - hashids filter plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - jc filter plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - json_query filter plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - to_ini filter plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - gitlab_runners inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - icinga2 inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - linode inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - lxd inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - nmap inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - online inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - opennebula inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - scaleway inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - virtualbox inventory plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - binary_file lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - chef_databag lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - collection_version lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - consul_kv lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - credstash lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - cyberarkpassword lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - dependent lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - dig lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - dnstxt lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - dsv lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - etcd3 lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - github_app_access_token lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - lmdb_kv lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - onepassword lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - passwordstore lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - redis lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - revbitspss lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - shelvefile lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - tss lookup plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
+  - keys_filter plugin_utils plugin - use ``raise ... from ...`` when passing on exceptions (https://github.com/ansible-collections/community.general/pull/11095).
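All of the hunks that follow make the same mechanical change: bind the caught exception to a name (``except SomeError:`` becomes ``except SomeError as e:``) and append ``from e`` to the re-raise. A runnable sketch of the before/after shape and of how the printed traceback differs (the module and class names here are illustrative, not taken from the diff):

import traceback


class AnsibleError(Exception):
    """Simplified stand-in for ansible.errors.AnsibleError."""


def load_backend_before():
    try:
        import backend_that_does_not_exist  # deliberately missing to trigger ImportError
    except ImportError:
        # Implicit chaining only: the ImportError ends up as __context__.
        raise AnsibleError("the backend library is required")


def load_backend_after():
    try:
        import backend_that_does_not_exist  # deliberately missing to trigger ImportError
    except ImportError as e:
        # Explicit chaining: the ImportError becomes __cause__.
        raise AnsibleError("the backend library is required") from e


for func in (load_backend_before, load_backend_after):
    try:
        func()
    except AnsibleError:
        # The "after" traceback reads "The above exception was the direct cause
        # of the following exception"; the "before" one reads "During handling
        # of the above exception, another exception occurred".
        traceback.print_exc()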
@@ -96,7 +96,7 @@ class ActionModule(ActionBase):
            display.debug(f"{self._task.action}: distribution: {distribution}")
            return distribution
        except KeyError as ke:
-            raise AnsibleError(f'Failed to get distribution information. Missing "{ke.args[0]}" in output.')
+            raise AnsibleError(f'Failed to get distribution information. Missing "{ke.args[0]}" in output.') from ke

    def get_shutdown_command(self, task_vars, distribution):
        def find_command(command, find_search_paths):

@@ -124,10 +124,10 @@ class ActionModule(ActionBase):
            incorrect_type = any(not is_string(x) for x in search_paths)
            if not isinstance(search_paths, list) or incorrect_type:
                raise TypeError
-        except TypeError:
+        except TypeError as e:
            # Error if we didn't get a list
            err_msg = f"'search_paths' must be a string or flat list of strings, got {search_paths}"
-            raise AnsibleError(err_msg)
+            raise AnsibleError(err_msg) from e

        full_path = find_command(shutdown_bin, search_paths)  # find the path to the shutdown command
        if not full_path:  # if we could not find the shutdown command
plugins/cache/redis.py (vendored, 8 changes)

@@ -147,8 +147,10 @@ class CacheModule(BaseCacheModule):
        """
        try:
            from redis.sentinel import Sentinel
-        except ImportError:
-            raise AnsibleError("The 'redis' python module (version 2.9.0 or newer) is required to use redis sentinel.")
+        except ImportError as e:
+            raise AnsibleError(
+                "The 'redis' python module (version 2.9.0 or newer) is required to use redis sentinel."
+            ) from e

        if ";" not in uri:
            raise AnsibleError("_uri does not have sentinel syntax.")

@@ -170,7 +172,7 @@ class CacheModule(BaseCacheModule):
        try:
            return scon.master_for(self._sentinel_service_name, socket_timeout=0.2)
        except Exception as exc:
-            raise AnsibleError(f"Could not connect to redis sentinel: {exc}")
+            raise AnsibleError(f"Could not connect to redis sentinel: {exc}") from exc

    def _make_key(self, key):
        return self._prefix + key
@@ -129,7 +129,7 @@ class Connection(ConnectionBase):
        try:
            self.chroot_cmd = get_bin_path(self.get_option("chroot_exe"))
        except ValueError as e:
-            raise AnsibleError(str(e))
+            raise AnsibleError(str(e)) from e

        super()._connect()
        if not self._connected:

@@ -191,17 +191,17 @@ class Connection(ConnectionBase):
                count = ""
                try:
                    p = self._buffered_exec_command(f"dd of={out_path} bs={BUFSIZE}{count}", stdin=in_file)
-                except OSError:
-                    raise AnsibleError("chroot connection requires dd command in the chroot")
+                except OSError as e:
+                    raise AnsibleError("chroot connection requires dd command in the chroot") from e
                try:
                    stdout, stderr = p.communicate()
-                except Exception:
+                except Exception as e:
                    traceback.print_exc()
-                    raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
+                    raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") from e
                if p.returncode != 0:
                    raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")
-        except IOError:
-            raise AnsibleError(f"file or module does not exist at: {in_path}")
+        except IOError as e:
+            raise AnsibleError(f"file or module does not exist at: {in_path}") from e

    def fetch_file(self, in_path, out_path):
        """fetch a file from chroot to local"""

@@ -211,8 +211,8 @@ class Connection(ConnectionBase):
        in_path = shlex_quote(self._prefix_login_path(in_path))
        try:
            p = self._buffered_exec_command(f"dd if={in_path} bs={BUFSIZE}")
-        except OSError:
-            raise AnsibleError("chroot connection requires dd command in the chroot")
+        except OSError as e:
+            raise AnsibleError("chroot connection requires dd command in the chroot") from e

        with open(to_bytes(out_path, errors="surrogate_or_strict"), "wb+") as out_file:
            try:

@@ -220,9 +220,9 @@ class Connection(ConnectionBase):
                while chunk:
                    out_file.write(chunk)
                    chunk = p.stdout.read(BUFSIZE)
-            except Exception:
+            except Exception as e:
                traceback.print_exc()
-                raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
+                raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") from e
            stdout, stderr = p.communicate()
            if p.returncode != 0:
                raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")
@@ -79,8 +79,8 @@ class Connection(ConnectionBase):
    def _search_executable(executable):
        try:
            return get_bin_path(executable)
-        except ValueError:
-            raise AnsibleError(f"{executable} command not found in PATH")
+        except ValueError as e:
+            raise AnsibleError(f"{executable} command not found in PATH") from e

    def list_jails(self):
        p = subprocess.Popen(

@@ -161,19 +161,19 @@ class Connection(ConnectionBase):
                count = ""
                try:
                    p = self._buffered_exec_command(f"dd of={out_path} bs={BUFSIZE}{count}", stdin=in_file)
-                except OSError:
-                    raise AnsibleError("jail connection requires dd command in the jail")
+                except OSError as e:
+                    raise AnsibleError("jail connection requires dd command in the jail") from e
                try:
                    stdout, stderr = p.communicate()
-                except Exception:
+                except Exception as e:
                    traceback.print_exc()
-                    raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
+                    raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") from e
                if p.returncode != 0:
                    raise AnsibleError(
                        f"failed to transfer file {in_path} to {out_path}:\n{to_native(stdout)}\n{to_native(stderr)}"
                    )
-        except IOError:
-            raise AnsibleError(f"file or module does not exist at: {in_path}")
+        except IOError as e:
+            raise AnsibleError(f"file or module does not exist at: {in_path}") from e

    def fetch_file(self, in_path, out_path):
        """fetch a file from jail to local"""

@@ -183,8 +183,8 @@ class Connection(ConnectionBase):
        in_path = shlex_quote(self._prefix_login_path(in_path))
        try:
            p = self._buffered_exec_command(f"dd if={in_path} bs={BUFSIZE}")
-        except OSError:
-            raise AnsibleError("jail connection requires dd command in the jail")
+        except OSError as e:
+            raise AnsibleError("jail connection requires dd command in the jail") from e

        with open(to_bytes(out_path, errors="surrogate_or_strict"), "wb+") as out_file:
            try:

@@ -192,9 +192,9 @@ class Connection(ConnectionBase):
                while chunk:
                    out_file.write(chunk)
                    chunk = p.stdout.read(BUFSIZE)
-            except Exception:
+            except Exception as e:
                traceback.print_exc()
-                raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
+                raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") from e
            stdout, stderr = p.communicate()
            if p.returncode != 0:
                raise AnsibleError(
@@ -173,9 +173,9 @@ class Connection(ConnectionBase):
            raise errors.AnsibleFileNotFound(msg)
        try:
            src_file = open(in_path, "rb")
-        except IOError:
+        except IOError as e:
            traceback.print_exc()
-            raise errors.AnsibleError(f"failed to open input file to {in_path}")
+            raise errors.AnsibleError(f"failed to open input file to {in_path}") from e
        try:

            def write_file(args):

@@ -184,10 +184,10 @@ class Connection(ConnectionBase):

            try:
                self.container.attach_wait(write_file, None)
-            except IOError:
+            except IOError as e:
                traceback.print_exc()
                msg = f"failed to transfer file to {out_path}"
-                raise errors.AnsibleError(msg)
+                raise errors.AnsibleError(msg) from e
        finally:
            src_file.close()

@@ -200,10 +200,10 @@ class Connection(ConnectionBase):

        try:
            dst_file = open(out_path, "wb")
-        except IOError:
+        except IOError as e:
            traceback.print_exc()
            msg = f"failed to open output file {out_path}"
-            raise errors.AnsibleError(msg)
+            raise errors.AnsibleError(msg) from e
        try:

            def write_file(args):

@@ -217,10 +217,10 @@ class Connection(ConnectionBase):

            try:
                self.container.attach_wait(write_file, None)
-            except IOError:
+            except IOError as e:
                traceback.print_exc()
                msg = f"failed to transfer file from {in_path} to {out_path}"
-                raise errors.AnsibleError(msg)
+                raise errors.AnsibleError(msg) from e
        finally:
            dst_file.close()
@@ -93,8 +93,8 @@ class Connection(ConnectionBase):

        try:
            self._lxc_cmd = get_bin_path("lxc")
-        except ValueError:
-            raise AnsibleError("lxc command not found in PATH")
+        except ValueError as e:
+            raise AnsibleError("lxc command not found in PATH") from e

    def _host(self):
        """translate remote_addr to lxd (short) hostname"""
@@ -478,11 +478,11 @@ class Connection(ConnectionBase):
        except IOError:
            pass  # file was not found, but not required to function
        except paramiko.hostkeys.InvalidHostKey as e:
-            raise AnsibleConnectionFailure(f"Invalid host key: {to_text(e.line)}")
+            raise AnsibleConnectionFailure(f"Invalid host key: {to_text(e.line)}") from e
        try:
            ssh.load_system_host_keys()
        except paramiko.hostkeys.InvalidHostKey as e:
-            raise AnsibleConnectionFailure(f"Invalid host key: {to_text(e.line)}")
+            raise AnsibleConnectionFailure(f"Invalid host key: {to_text(e.line)}") from e

        ssh_connect_kwargs = self._parse_proxy_command(port)
        ssh.set_missing_host_key_policy(MyAddPolicy(self))

@@ -518,22 +518,24 @@ class Connection(ConnectionBase):
                **ssh_connect_kwargs,
            )
        except paramiko.ssh_exception.BadHostKeyException as e:
-            raise AnsibleConnectionFailure(f"host key mismatch for {to_text(e.hostname)}")
+            raise AnsibleConnectionFailure(f"host key mismatch for {to_text(e.hostname)}") from e
        except paramiko.ssh_exception.AuthenticationException as e:
            msg = f"Failed to authenticate: {e}"
-            raise AnsibleAuthenticationFailure(msg)
+            raise AnsibleAuthenticationFailure(msg) from e
        except Exception as e:
            msg = to_text(e)
            if "PID check failed" in msg:
-                raise AnsibleError("paramiko version issue, please upgrade paramiko on the machine running ansible")
+                raise AnsibleError(
+                    "paramiko version issue, please upgrade paramiko on the machine running ansible"
+                ) from e
            elif "Private key file is encrypted" in msg:
                msg = (
                    f"ssh {self.get_option('remote_user')}@{self.get_options('remote_addr')}:{port} : "
                    f"{msg}\nTo connect as a different user, use -u <username>."
                )
-                raise AnsibleConnectionFailure(msg)
+                raise AnsibleConnectionFailure(msg) from e
            else:
-                raise AnsibleConnectionFailure(msg)
+                raise AnsibleConnectionFailure(msg) from e
        self.ssh = ssh
        self._connected = True
        return self

@@ -609,7 +611,7 @@ class Connection(ConnectionBase):
            msg = "Failed to open session"
            if text_e:
                msg += f": {text_e}"
-            raise AnsibleConnectionFailure(to_native(msg))
+            raise AnsibleConnectionFailure(to_native(msg)) from e

        display.vvv(f"EXEC {cmd}", host=self.get_option("remote_addr"))

@@ -665,8 +667,8 @@ class Connection(ConnectionBase):
                elif in_data == b"":
                    chan.shutdown_write()

-        except socket.timeout:
-            raise AnsibleError(f"ssh timed out waiting for privilege escalation.\n{to_text(become_output)}")
+        except socket.timeout as e:
+            raise AnsibleError(f"ssh timed out waiting for privilege escalation.\n{to_text(become_output)}") from e

        stdout = b"".join(chan.makefile("rb", bufsize))
        stderr = b"".join(chan.makefile_stderr("rb", bufsize))

@@ -699,7 +701,7 @@ class Connection(ConnectionBase):
                )
                raise AnsibleError(f"{to_text(stdout)}\n{to_text(stderr)}")
        except Exception as e:
-            raise AnsibleError(f"error occurred while putting file from {in_path} to {out_path}!\n{to_text(e)}")
+            raise AnsibleError(f"error occurred while putting file from {in_path} to {out_path}!\n{to_text(e)}") from e

    def fetch_file(self, in_path: str, out_path: str) -> None:
        """save a remote file to the specified path"""

@@ -718,7 +720,7 @@ class Connection(ConnectionBase):
            with open(out_path, "wb") as f:
                f.write(stdout)
        except Exception as e:
-            raise AnsibleError(f"error occurred while fetching file from {in_path} to {out_path}!\n{to_text(e)}")
+            raise AnsibleError(f"error occurred while fetching file from {in_path} to {out_path}!\n{to_text(e)}") from e

    def reset(self) -> None:
        """reset the connection"""

@@ -772,16 +774,16 @@ class Connection(ConnectionBase):
                self._save_ssh_host_keys(tmp_keyfile_name)

                os.rename(tmp_keyfile_name, self.keyfile)
-            except LockTimeout:
+            except LockTimeout as e:
                raise AnsibleError(
                    f"writing lock file for {self.keyfile} ran in to the timeout of {self.get_option('lock_file_timeout')}s"
-                )
+                ) from e
            except paramiko.hostkeys.InvalidHostKey as e:
-                raise AnsibleConnectionFailure(f"Invalid host key: {e.line}")
+                raise AnsibleConnectionFailure(f"Invalid host key: {e.line}") from e
            except Exception as e:
                # unable to save keys, including scenario when key was invalid
                # and caught earlier
-                raise AnsibleError(f"error occurred while writing SSH host keys!\n{to_text(e)}")
+                raise AnsibleError(f"error occurred while writing SSH host keys!\n{to_text(e)}") from e
            finally:
                if tmp_keyfile_name is not None:
                    pathlib.Path(tmp_keyfile_name).unlink(missing_ok=True)
@@ -66,8 +66,8 @@ class Connection(ConnectionBase):
    def _search_executable(executable):
        try:
            return get_bin_path(executable)
-        except ValueError:
-            raise AnsibleError(f"{executable} command not found in PATH")
+        except ValueError as e:
+            raise AnsibleError(f"{executable} command not found in PATH") from e

    def list_zones(self):
        process = subprocess.Popen(

@@ -160,17 +160,17 @@ class Connection(ConnectionBase):
                count = ""
                try:
                    p = self._buffered_exec_command(f"dd of={out_path} bs={BUFSIZE}{count}", stdin=in_file)
-                except OSError:
-                    raise AnsibleError("jail connection requires dd command in the jail")
+                except OSError as e:
+                    raise AnsibleError("jail connection requires dd command in the jail") from e
                try:
                    stdout, stderr = p.communicate()
-                except Exception:
+                except Exception as e:
                    traceback.print_exc()
-                    raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
+                    raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") from e
                if p.returncode != 0:
                    raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")
-        except IOError:
-            raise AnsibleError(f"file or module does not exist at: {in_path}")
+        except IOError as e:
+            raise AnsibleError(f"file or module does not exist at: {in_path}") from e

    def fetch_file(self, in_path, out_path):
        """fetch a file from zone to local"""

@@ -180,8 +180,8 @@ class Connection(ConnectionBase):
        in_path = shlex_quote(self._prefix_login_path(in_path))
        try:
            p = self._buffered_exec_command(f"dd if={in_path} bs={BUFSIZE}")
-        except OSError:
-            raise AnsibleError("zone connection requires dd command in the zone")
+        except OSError as e:
+            raise AnsibleError("zone connection requires dd command in the zone") from e

        with open(out_path, "wb+") as out_file:
            try:

@@ -189,9 +189,9 @@ class Connection(ConnectionBase):
                while chunk:
                    out_file.write(chunk)
                    chunk = p.stdout.read(BUFSIZE)
-            except Exception:
+            except Exception as e:
                traceback.print_exc()
-                raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
+                raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") from e
            stdout, stderr = p.communicate()
            if p.returncode != 0:
                raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")
@@ -51,7 +51,7 @@ def counter(sequence):
    except TypeError as e:
        raise AnsibleFilterError(
            f"community.general.counter needs a sequence with hashable elements (int, float or str) - {e}"
-        )
+        ) from e
    return result
@@ -98,7 +98,7 @@ def from_csv(data, dialect="excel", fieldnames=None, delimiter=None, skipinitial
    try:
        dialect = initialize_dialect(dialect, **dialect_params)
    except (CustomDialectFailureError, DialectNotAvailableError) as e:
-        raise AnsibleFilterError(str(e))
+        raise AnsibleFilterError(str(e)) from e

    reader = read_csv(data, dialect, fieldnames)

@@ -108,7 +108,7 @@ def from_csv(data, dialect="excel", fieldnames=None, delimiter=None, skipinitial
        for row in reader:
            data_list.append(row)
    except CSVError as e:
-        raise AnsibleFilterError(f"Unable to process file: {e}")
+        raise AnsibleFilterError(f"Unable to process file: {e}") from e

    return data_list
@@ -79,7 +79,7 @@ def from_ini(obj):
    try:
        parser.read_file(StringIO(obj))
    except Exception as ex:
-        raise AnsibleFilterError(f"from_ini failed to parse given string: {ex}", orig_exc=ex)
+        raise AnsibleFilterError(f"from_ini failed to parse given string: {ex}", orig_exc=ex) from ex

    return parser.as_dict()
@@ -37,7 +37,7 @@ def initialize_hashids(**kwargs):
        raise AnsibleFilterError(
            "The provided parameters %s are invalid: %s"
            % (", ".join(["%s=%s" % (k, v) for k, v in params.items()]), to_native(e))
-        )
+        ) from e


def hashids_encode(nums, salt=None, alphabet=None, min_length=None):

@@ -60,7 +60,7 @@ def hashids_encode(nums, salt=None, alphabet=None, min_length=None):
    try:
        hashid = hashids.encode(*nums)
    except TypeError as e:
-        raise AnsibleTypeError(f"Data to encode must by a tuple or list of ints: {e}")
+        raise AnsibleTypeError(f"Data to encode must by a tuple or list of ints: {e}") from e

    return hashid
@@ -149,7 +149,7 @@ def jc_filter(data, parser, quiet=True, raw=False):
        return jc_parser.parse(data, quiet=quiet, raw=raw)

    except Exception as e:
-        raise AnsibleFilterError(f"Error in jc filter plugin: {e}")
+        raise AnsibleFilterError(f"Error in jc filter plugin: {e}") from e


class FilterModule:
@@ -141,10 +141,10 @@ def json_query(data, expr):
    try:
        return jmespath.search(expr, data)
    except jmespath.exceptions.JMESPathError as e:
-        raise AnsibleFilterError(f"JMESPathError in json_query filter plugin:\n{e}")
+        raise AnsibleFilterError(f"JMESPathError in json_query filter plugin:\n{e}") from e
    except Exception as e:
        # For older jmespath, we can get ValueError and TypeError without much info.
-        raise AnsibleFilterError(f"Error in jmespath.search in json_query filter plugin:\n{e}")
+        raise AnsibleFilterError(f"Error in jmespath.search in json_query filter plugin:\n{e}") from e


class FilterModule:
@@ -72,7 +72,7 @@ def to_ini(obj):
    try:
        ini_parser.read_dict(obj)
    except Exception as ex:
-        raise AnsibleFilterError(f"to_ini failed to parse given dict:{ex}", orig_exc=ex)
+        raise AnsibleFilterError(f"to_ini failed to parse given dict:{ex}", orig_exc=ex) from ex

    # catching empty dicts
    if obj == dict():
@@ -124,7 +124,9 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
                # Create groups based on variable values and add the corresponding hosts to it
                self._add_host_to_keyed_groups(self.get_option("keyed_groups"), host_attrs, host, strict=strict)
        except Exception as e:
-            raise AnsibleParserError(f"Unable to fetch hosts from GitLab API, this was the original exception: {e}")
+            raise AnsibleParserError(
+                f"Unable to fetch hosts from GitLab API, this was the original exception: {e}"
+            ) from e

    def verify_file(self, path):
        """Return the possibly of a file being consumable by this plugin."""
@@ -166,8 +166,10 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
            except Exception:
                error_body = {"status": None}
            if e.code == 404 and error_body.get("status") == "No objects found.":
-                raise AnsibleParserError("Host filter returned no data. Please confirm your host_filter value is valid")
-            raise AnsibleParserError(f"Unexpected data returned: {e} -- {error_body}")
+                raise AnsibleParserError(
+                    "Host filter returned no data. Please confirm your host_filter value is valid"
+                ) from e
+            raise AnsibleParserError(f"Unexpected data returned: {e} -- {error_body}") from e

        response_body = response.read()
        json_data = json.loads(response_body.decode("utf-8"))
@@ -160,7 +160,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
        try:
            self.instances = self.client.linode.instances()
        except LinodeApiError as exception:
-            raise AnsibleError(f"Linode client raised: {exception}")
+            raise AnsibleError(f"Linode client raised: {exception}") from exception

    def _add_groups(self):
        """Add Linode instance groups to the dynamic inventory."""
@@ -213,7 +213,7 @@ class InventoryModule(BaseInventoryPlugin):
            with open(path, "r") as json_file:
                return json.load(json_file)
        except (IOError, json.decoder.JSONDecodeError) as err:
-            raise AnsibleParserError(f"Could not load the test data from {to_native(path)}: {err}")
+            raise AnsibleParserError(f"Could not load the test data from {to_native(path)}: {err}") from err

    def save_json_data(self, path, file_name=None):
        """save data as json

@@ -243,7 +243,7 @@ class InventoryModule(BaseInventoryPlugin):
            with open(os.path.abspath(os.path.join(cwd, *path)), "w") as json_file:
                json.dump(self.data, json_file)
        except IOError as err:
-            raise AnsibleParserError(f"Could not save data: {err}")
+            raise AnsibleParserError(f"Could not save data: {err}") from err

    def verify_file(self, path):
        """Check the config

@@ -602,7 +602,7 @@ class InventoryModule(BaseInventoryPlugin):
                else:
                    path[instance_name][key] = value
        except KeyError as err:
-            raise AnsibleParserError(f"Unable to store Information: {err}")
+            raise AnsibleParserError(f"Unable to store Information: {err}") from err

    def extract_information_from_instance_configs(self):
        """Process configuration information

@@ -853,7 +853,7 @@ class InventoryModule(BaseInventoryPlugin):
            except ValueError as err:
                raise AnsibleParserError(
                    f"Error while parsing network range {self.groupby[group_name].get('attribute')}: {err}"
-                )
+                ) from err

            for instance_name in self.inventory.hosts:
                if self.data["inventory"][instance_name].get("network_interfaces") is not None:

@@ -1203,6 +1203,6 @@ class InventoryModule(BaseInventoryPlugin):
            self.trust_password = self.get_option("trust_password")
            self.url = self.get_option("url")
        except Exception as err:
-            raise AnsibleParserError(f"All correct options required: {err}")
+            raise AnsibleParserError(f"All correct options required: {err}") from err
        # Call our internal helper to populate the dynamic inventory
        self._populate()
@@ -180,7 +180,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
        try:
            self._nmap = get_bin_path("nmap")
        except ValueError as e:
-            raise AnsibleParserError(f"nmap inventory plugin requires the nmap cli tool to work: {e}")
+            raise AnsibleParserError(f"nmap inventory plugin requires the nmap cli tool to work: {e}") from e

        super().parse(inventory, loader, path, cache=cache)

@@ -265,7 +265,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
        try:
            t_stdout = to_text(stdout, errors="surrogate_or_strict")
        except UnicodeError as e:
-            raise AnsibleParserError(f"Invalid (non unicode) input returned: {e}")
+            raise AnsibleParserError(f"Invalid (non unicode) input returned: {e}") from e

        for line in t_stdout.splitlines():
            hits = self.find_host.match(line)

@@ -310,7 +310,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
                results[-1]["ports"] = ports

        except Exception as e:
-            raise AnsibleParserError(f"failed to parse {to_native(path)}: {e} ")
+            raise AnsibleParserError(f"failed to parse {to_native(path)}: {e} ") from e

        if cache_needs_update:
            self._cache[cache_key] = results
@@ -141,13 +141,13 @@ class InventoryModule(BaseInventoryPlugin):

        try:
            raw_data = to_text(response.read(), errors="surrogate_or_strict")
-        except UnicodeError:
-            raise AnsibleError("Incorrect encoding of fetched payload from Online servers")
+        except UnicodeError as e:
+            raise AnsibleError("Incorrect encoding of fetched payload from Online servers") from e

        try:
            return json.loads(raw_data)
-        except ValueError:
-            raise AnsibleError("Incorrect JSON payload")
+        except ValueError as e:
+            raise AnsibleError("Incorrect JSON payload") from e

    @staticmethod
    def extract_rpn_lookup_cache(rpn_list):
@@ -122,10 +122,10 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
            with open(authfile, "r") as fp:
                authstring = fp.read().rstrip()
            username, password = authstring.split(":")
-        except (OSError, IOError):
-            raise AnsibleError(f"Could not find or read ONE_AUTH file at '{authfile}'")
-        except Exception:
-            raise AnsibleError(f"Error occurs when reading ONE_AUTH file at '{authfile}'")
+        except (OSError, IOError) as e:
+            raise AnsibleError(f"Could not find or read ONE_AUTH file at '{authfile}'") from e
+        except Exception as e:
+            raise AnsibleError(f"Error occurs when reading ONE_AUTH file at '{authfile}'") from e

        auth_params = namedtuple("auth", ("url", "username", "password"))

@@ -167,7 +167,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
        try:
            vm_pool = one_client.vmpool.infoextended(-2, -1, -1, 3)
        except Exception as e:
-            raise AnsibleError(f"Something happened during XML-RPC call: {e}")
+            raise AnsibleError(f"Something happened during XML-RPC call: {e}") from e

        return vm_pool
@@ -143,16 +143,16 @@ def _fetch_information(token, url):
        try:
            response = open_url(paginated_url, headers={"X-Auth-Token": token, "Content-type": "application/json"})
        except Exception as e:
-            raise AnsibleError(f"Error while fetching {url}: {e}")
+            raise AnsibleError(f"Error while fetching {url}: {e}") from e
        try:
            raw_json = json.loads(to_text(response.read()))
-        except ValueError:
-            raise AnsibleError("Incorrect JSON payload")
+        except ValueError as e:
+            raise AnsibleError("Incorrect JSON payload") from e

        try:
            results.extend(raw_json["servers"])
-        except KeyError:
-            raise AnsibleError("Incorrect format from the Scaleway API response")
+        except KeyError as e:
+            raise AnsibleError("Incorrect format from the Scaleway API response") from e

        link = response.headers["Link"]
        if not link:
@@ -312,7 +312,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
        try:
            self._vbox_path = get_bin_path(self.VBOX)
        except ValueError as e:
-            raise AnsibleParserError(e)
+            raise AnsibleParserError(e) from e

        super().parse(inventory, loader, path)

@@ -354,7 +354,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
        try:
            p = Popen(cmd, stdout=PIPE)
        except Exception as e:
-            raise AnsibleParserError(str(e))
+            raise AnsibleParserError(str(e)) from e

        source_data = p.stdout.read().splitlines()
@@ -107,6 +107,6 @@ class LookupModule(LookupBase):
                with open(path, "rb") as f:
                    result.append(base64.b64encode(f.read()).decode("utf-8"))
            except Exception as exc:
-                raise AnsibleLookupError(f"Error while reading {path}: {exc}")
+                raise AnsibleLookupError(f"Error while reading {path}: {exc}") from exc

        return result
@@ -77,8 +77,8 @@ class LookupModule(LookupBase):
                    continue
                parsed = str(arg_raw)
                setattr(self, arg, parsed)
-            except ValueError:
-                raise AnsibleError(f"can't parse arg {arg}={arg_raw} as string")
+            except ValueError as e:
+                raise AnsibleError(f"can't parse arg {arg}={arg_raw} as string") from e
        if args:
            raise AnsibleError(f"unrecognized arguments to with_sequence: {list(args.keys())!r}")
@@ -122,7 +122,7 @@ class LookupModule(LookupBase):
            try:
                data = load_collection_meta(collection_pkg, no_version=no_version)
            except Exception as exc:
-                raise AnsibleLookupError(f"Error while loading metadata for {term}: {exc}")
+                raise AnsibleLookupError(f"Error while loading metadata for {term}: {exc}") from exc

            result.append(data.get("version", no_version))
@@ -170,7 +170,7 @@ class LookupModule(LookupBase):
                else:
                    values.append(to_text(results[1]["Value"]))
        except Exception as e:
-            raise AnsibleError(f"Error locating '{term}' in kv store. Error was {e}")
+            raise AnsibleError(f"Error locating '{term}' in kv store. Error was {e}") from e

        return values

@@ -194,6 +194,6 @@ class LookupModule(LookupBase):
                    raise AnsibleAssertionError(f"{name} not a valid consul lookup parameter")
                paramvals[name] = value
        except (ValueError, AssertionError) as e:
-            raise AnsibleError(e)
+            raise AnsibleError(e) from e

        return paramvals
@@ -145,9 +145,9 @@ class LookupModule(LookupBase):
        for term in terms:
            try:
                ret.append(credstash.getSecret(term, version, region, table, context=context, **kwargs_pass))
-            except credstash.ItemNotFound:
-                raise AnsibleError(f"Key {term} not found")
+            except credstash.ItemNotFound as e:
+                raise AnsibleError(f"Key {term} not found") from e
            except Exception as e:
-                raise AnsibleError(f"Encountered exception while fetching {term}: {e}")
+                raise AnsibleError(f"Encountered exception while fetching {term}: {e}") from e

        return ret
@@ -164,11 +164,11 @@ class CyberarkPassword:
                result_dict[output_names[i]] = to_native(output_values[i])

        except subprocess.CalledProcessError as e:
-            raise AnsibleError(e.output)
+            raise AnsibleError(e.output) from e
        except OSError as e:
            raise AnsibleError(
                f"ERROR - AIM not installed or clipasswordsdk not in standard location. ERROR=({e.errno}) => {e.strerror} "
-            )
+            ) from e

        return [result_dict]
@@ -174,7 +174,7 @@ class LookupModule(LookupBase):
        try:
            values = self.__evaluate(expression, templar, variables=vars)
        except Exception as e:
-            raise AnsibleLookupError(f'Caught "{e}" while evaluating {key!r} with item == {current!r}')
+            raise AnsibleLookupError(f'Caught "{e}" while evaluating {key!r} with item == {current!r}') from e

        if isinstance(values, Mapping):
            for idx, val in sorted(values.items()):
@@ -387,7 +387,7 @@ class LookupModule(LookupBase):
            try:
                rdclass = dns.rdataclass.from_text(self.get_option("class"))
            except Exception as e:
-                raise AnsibleError(f"dns lookup illegal CLASS: {e}")
+                raise AnsibleError(f"dns lookup illegal CLASS: {e}") from e
        myres.retry_servfail = self.get_option("retry_servfail")

        for t in terms:

@@ -405,7 +405,7 @@ class LookupModule(LookupBase):
                    nsaddr = dns.resolver.query(ns)[0].address
                    nameservers.append(nsaddr)
                except Exception as e:
-                    raise AnsibleError(f"dns lookup NS: {e}")
+                    raise AnsibleError(f"dns lookup NS: {e}") from e
                continue
            if "=" in t:
                try:

@@ -421,7 +421,7 @@ class LookupModule(LookupBase):
                    try:
                        rdclass = dns.rdataclass.from_text(arg)
                    except Exception as e:
-                        raise AnsibleError(f"dns lookup illegal CLASS: {e}")
+                        raise AnsibleError(f"dns lookup illegal CLASS: {e}") from e
                elif opt == "retry_servfail":
                    myres.retry_servfail = boolean(arg)
                elif opt == "fail_on_error":

@@ -458,7 +458,7 @@ class LookupModule(LookupBase):
            except dns.exception.SyntaxError:
                pass
            except Exception as e:
-                raise AnsibleError(f"dns.reversename unhandled exception {e}")
+                raise AnsibleError(f"dns.reversename unhandled exception {e}") from e
            domains = reversed_domains

        if len(domains) > 1:

@@ -487,20 +487,20 @@ class LookupModule(LookupBase):
                        ret.append(rd)
                    except Exception as err:
                        if fail_on_error:
-                            raise AnsibleError(f"Lookup failed: {err}")
+                            raise AnsibleError(f"Lookup failed: {err}") from err
                        ret.append(str(err))

            except dns.resolver.NXDOMAIN as err:
                if fail_on_error:
-                    raise AnsibleError(f"Lookup failed: {err}")
+                    raise AnsibleError(f"Lookup failed: {err}") from err
                if not real_empty:
                    ret.append("NXDOMAIN")
            except (dns.resolver.NoAnswer, dns.resolver.Timeout, dns.resolver.NoNameservers) as err:
                if fail_on_error:
-                    raise AnsibleError(f"Lookup failed: {err}")
+                    raise AnsibleError(f"Lookup failed: {err}") from err
                if not real_empty:
                    ret.append("")
            except dns.exception.DNSException as err:
-                raise AnsibleError(f"dns.resolver unhandled exception {err}")
+                raise AnsibleError(f"dns.resolver unhandled exception {err}") from err

        return ret
@@ -105,7 +105,7 @@ class LookupModule(LookupBase):
                        continue
                    string = ""
            except DNSException as e:
-                raise AnsibleError(f"dns.resolver unhandled exception {e}")
+                raise AnsibleError(f"dns.resolver unhandled exception {e}") from e

            ret.append("".join(string))

@@ -109,8 +109,8 @@ class LookupModule(LookupBase):
        try:
            vault = SecretsVault(**vault_parameters)
            return vault
-        except TypeError:
-            raise AnsibleError("python-dsv-sdk==0.0.1 must be installed to use this plugin")
+        except TypeError as e:
+            raise AnsibleError("python-dsv-sdk==0.0.1 must be installed to use this plugin") from e

    def run(self, terms, variables, **kwargs):
        if sdk_is_missing:
@@ -140,5 +140,5 @@ class LookupModule(LookupBase):
                display.vvv(f"DevOps Secrets Vault GET /secrets/{path}")
                result.append(vault.get_secret_json(path))
            except SecretsVaultError as error:
-                raise AnsibleError(f"DevOps Secrets Vault lookup failure: {error.message}")
+                raise AnsibleError(f"DevOps Secrets Vault lookup failure: {error.message}") from error
        return result
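Where the original handler did not bind the exception (``except TypeError:``), the change also adds ``as e``, since explicit chaining needs a name for the caught exception. A small sketch under the same stand-in assumptions (the factory below is hypothetical and does not reflect the real python-dsv-sdk API):

    class AnsibleError(Exception):  # stand-in for ansible.errors.AnsibleError
        pass

    def make_vault(**vault_parameters):
        def secrets_vault(base_url):  # hypothetical factory standing in for SecretsVault
            return {"base_url": base_url}
        try:
            return secrets_vault(**vault_parameters)  # unexpected keyword -> TypeError
        except TypeError as e:                        # binding the exception enables "from e"
            raise AnsibleError("python-dsv-sdk must be installed to use this plugin") from e

    try:
        make_vault(url="https://vault.example.invalid")
    except AnsibleError as err:
        print(type(err.__cause__).__name__)  # -> TypeError
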
@@ -167,7 +167,7 @@ def etcd3_client(client_params):
        etcd = etcd3.client(**client_params)
        etcd.status()
    except Exception as exp:
-        raise AnsibleLookupError(f"Cannot connect to etcd cluster: {exp}")
+        raise AnsibleLookupError(f"Cannot connect to etcd cluster: {exp}") from exp
    return etcd


@@ -116,7 +116,7 @@ class PythonJWT:
            with open(path, "rb") as pem_file:
                return jwk_from_pem(pem_file.read())
        except Exception as e:
-            raise AnsibleError(f"Error while parsing key file: {e}")
+            raise AnsibleError(f"Error while parsing key file: {e}") from e

    @staticmethod
    def encode_jwt(app_id, jwk, exp=600):
@@ -129,7 +129,7 @@ class PythonJWT:
        try:
            return jwt_instance.encode(payload, jwk, alg="RS256")
        except Exception as e:
-            raise AnsibleError(f"Error while encoding jwt: {e}")
+            raise AnsibleError(f"Error while encoding jwt: {e}") from e


def read_key(path, private_key=None):
@@ -143,7 +143,7 @@ def read_key(path, private_key=None):
            key_bytes = pem_file.read()
            return serialization.load_pem_private_key(key_bytes, password=None)
    except Exception as e:
-        raise AnsibleError(f"Error while parsing key file: {e}")
+        raise AnsibleError(f"Error while parsing key file: {e}") from e


def encode_jwt(app_id, private_key_obj, exp=600):
@@ -158,7 +158,7 @@ def encode_jwt(app_id, private_key_obj, exp=600):
    try:
        return jwt.encode(payload, private_key_obj, algorithm="RS256")
    except Exception as e:
-        raise AnsibleError(f"Error while encoding jwt: {e}")
+        raise AnsibleError(f"Error while encoding jwt: {e}") from e


def post_request(generated_jwt, installation_id, api_base):
@@ -178,15 +178,15 @@ def post_request(generated_jwt, installation_id, api_base):
        except Exception:
            error_body = {}
        if e.code == 404:
-            raise AnsibleError("Github return error. Please confirm your installation_id value is valid")
+            raise AnsibleError("Github return error. Please confirm your installation_id value is valid") from e
        elif e.code == 401:
-            raise AnsibleError("Github return error. Please confirm your private key is valid")
+            raise AnsibleError("Github return error. Please confirm your private key is valid") from e
-        raise AnsibleError(f"Unexpected data returned: {e} -- {error_body}")
+        raise AnsibleError(f"Unexpected data returned: {e} -- {error_body}") from e
    response_body = response.read()
    try:
        json_data = json.loads(response_body.decode("utf-8"))
    except json.decoder.JSONDecodeError as e:
-        raise AnsibleError(f"Error while dencoding JSON respone from github: {e}")
+        raise AnsibleError(f"Error while dencoding JSON respone from github: {e}") from e
    return json_data.get("token")


@@ -93,7 +93,7 @@ class LookupModule(LookupBase):
        try:
            env = lmdb.open(str(db), readonly=True)
        except Exception as e:
-            raise AnsibleError(f"LMDB cannot open database {db}: {e}")
+            raise AnsibleError(f"LMDB cannot open database {db}: {e}") from e

        ret = []
        if len(terms) == 0:
@@ -198,13 +198,13 @@ class OnePassCLIBase(metaclass=abc.ABCMeta):
        based on the current version."""
        try:
            bin_path = get_bin_path(cls.bin)
-        except ValueError:
-            raise AnsibleLookupError(f"Unable to locate '{cls.bin}' command line tool")
+        except ValueError as e:
+            raise AnsibleLookupError(f"Unable to locate '{cls.bin}' command line tool") from e

        try:
            b_out = subprocess.check_output([bin_path, "--version"], stderr=subprocess.PIPE)
        except subprocess.CalledProcessError as cpe:
-            raise AnsibleLookupError(f"Unable to get the op version: {cpe}")
+            raise AnsibleLookupError(f"Unable to get the op version: {cpe}") from cpe

        return to_text(b_out).strip()

@@ -653,7 +653,7 @@ class OnePass:
                    self.connect_token,
                )
            except TypeError as e:
-                raise AnsibleLookupError(e)
+                raise AnsibleLookupError(e) from e

        raise AnsibleLookupError(f"op version {version} is unsupported")

@@ -309,7 +309,7 @@ class LookupModule(LookupBase):
            )
            self.realpass = "pass: the standard unix password manager" in passoutput
        except subprocess.CalledProcessError as e:
-            raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}")
+            raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}") from e

        return self.realpass

@@ -329,14 +329,14 @@ class LookupModule(LookupBase):
                    raise AnsibleAssertionError(f"{name} not in paramvals")
                self.paramvals[name] = value
        except (ValueError, AssertionError) as e:
-            raise AnsibleError(e)
+            raise AnsibleError(e) from e
        # check and convert values
        try:
            for key in ["create", "returnall", "overwrite", "backup", "nosymbols"]:
                if not isinstance(self.paramvals[key], bool):
                    self.paramvals[key] = boolean(self.paramvals[key])
        except (ValueError, AssertionError) as e:
-            raise AnsibleError(e)
+            raise AnsibleError(e) from e
        if self.paramvals["missing"] not in ["error", "warn", "create", "empty"]:
            raise AnsibleError(f"{self.paramvals['missing']} is not a valid option for missing")
        if not isinstance(self.paramvals["length"], int):
@@ -395,7 +395,7 @@ class LookupModule(LookupBase):
        except subprocess.CalledProcessError as e:
            # 'not in password store' is the expected error if a password wasn't found
            if "not in the password store" not in e.output:
-                raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}")
+                raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}") from e

            if self.paramvals["missing"] == "error":
                raise AnsibleError(f"passwordstore: passname {self.passname} not found and missing=error is set")
@@ -459,7 +459,7 @@ class LookupModule(LookupBase):
        try:
            check_output2([self.pass_cmd, "insert", "-f", "-m", self.passname], input=msg, env=self.env)
        except subprocess.CalledProcessError as e:
-            raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}")
+            raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}") from e
        return newpass

    def generate_password(self):
@@ -480,7 +480,7 @@ class LookupModule(LookupBase):
        try:
            check_output2([self.pass_cmd, "insert", "-f", "-m", self.passname], input=msg, env=self.env)
        except subprocess.CalledProcessError as e:
-            raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}")
+            raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}") from e

        return newpass
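For the subprocess-based handlers above, chaining also keeps the original ``subprocess.CalledProcessError`` (with its ``returncode``, ``cmd`` and ``output``) reachable from the wrapping error. A minimal sketch, assuming a hypothetical ``run_pass`` helper rather than the plugin's actual code:

    import subprocess

    class AnsibleError(Exception):  # stand-in for ansible.errors.AnsibleError
        pass

    def run_pass(cmd):
        try:
            return subprocess.check_output(cmd, stderr=subprocess.STDOUT, text=True)
        except subprocess.CalledProcessError as e:
            raise AnsibleError(f"exit code {e.returncode} while running {e.cmd}. Error output: {e.output}") from e

    try:
        run_pass(["false"])  # any command that exits non-zero
    except AnsibleError as err:
        cpe = err.__cause__  # the original CalledProcessError
        print(cpe.returncode, cpe.cmd, cpe.output)
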
@@ -112,5 +112,5 @@ class LookupModule(LookupBase):
                ret.append(to_text(res))
            except Exception as e:
                # connection failed or key not found
-                raise AnsibleError(f"Encountered exception while fetching {term}: {e}")
+                raise AnsibleError(f"Encountered exception while fetching {term}: {e}") from e
        return ret
@@ -98,5 +98,5 @@ class LookupModule(LookupBase):
                display.vvv(f"Secret Server lookup of Secret with ID {term}")
                result.append({term: secret_server.get_pam_secret(term)})
            except Exception as error:
-                raise AnsibleError(f"Secret Server lookup failure: {error.message}")
+                raise AnsibleError(f"Secret Server lookup failure: {error.message}") from error
        return result
@@ -74,7 +74,7 @@ class LookupModule(LookupBase):

            except (ValueError, AssertionError) as e:
                # In case "file" or "key" are not present
-                raise AnsibleError(e)
+                raise AnsibleError(e) from e

            key = paramvals["key"]

@@ -377,8 +377,8 @@ class TSSClient(metaclass=abc.ABCMeta):  # noqa: B024
                    file_content = i["itemValue"].content
                    with open(os.path.join(file_download_path, f"{obj['id']}_{i['slug']}"), "wb") as f:
                        f.write(file_content)
-                except ValueError:
-                    raise AnsibleOptionsError(f"Failed to download {i['slug']}")
+                except ValueError as e:
+                    raise AnsibleOptionsError(f"Failed to download {i['slug']}") from e
                except AttributeError:
                    display.warning(f"Could not read file content for {i['slug']}")
                finally:
@@ -403,15 +403,15 @@ class TSSClient(metaclass=abc.ABCMeta):  # noqa: B024
    def _term_to_secret_id(term):
        try:
            return int(term)
-        except ValueError:
-            raise AnsibleOptionsError("Secret ID must be an integer")
+        except ValueError as e:
+            raise AnsibleOptionsError("Secret ID must be an integer") from e

    @staticmethod
    def _term_to_folder_id(term):
        try:
            return int(term)
-        except ValueError:
-            raise AnsibleOptionsError("Folder ID must be an integer")
+        except ValueError as e:
+            raise AnsibleOptionsError("Folder ID must be an integer") from e


class TSSClientV0(TSSClient):
@@ -493,4 +493,4 @@ class LookupModule(LookupBase):
                for term in terms
            ]
        except SecretServerError as error:
-            raise AnsibleError(f"Secret Server lookup failure: {error.message}")
+            raise AnsibleError(f"Secret Server lookup failure: {error.message}") from error
@@ -78,9 +78,9 @@ def _keys_filter_target_str(target, matching_parameter):
            r = target[0]
            try:
                tt = re.compile(r)
-            except re.error:
+            except re.error as e:
                msg = "The target must be a valid regex if matching_parameter=regex. target is %s"
-                raise AnsibleFilterError(msg % r)
+                raise AnsibleFilterError(msg % r) from e
    elif isinstance(target, str):
        tt = (target,)
    else:
@@ -129,12 +129,12 @@ def _keys_filter_target_dict(target, matching_parameter):
        try:
            tr = map(re.compile, before)
            tz = list(zip(tr, after))
-        except re.error:
+        except re.error as e:
            msg = (
                "The attributes before must be valid regex if matching_parameter=regex."
                " Not all items are valid regex in: %s"
            )
-            raise AnsibleFilterError(msg % before)
+            raise AnsibleFilterError(msg % before) from e
        else:
            tz = list(zip(before, after))

@@ -151,8 +151,8 @@ class ModuleTestCase:
        for mock_name, mock_spec in self.mock_specs.items():
            try:
                mock_class = mocks_map[mock_name]
-            except KeyError:
-                raise Exception(f"Cannot find TestCaseMock class for: {mock_name}")
+            except KeyError as e:
+                raise Exception(f"Cannot find TestCaseMock class for: {mock_name}") from e
            self.mocks[mock_name] = mock_class.build_mock(mock_spec)

            self._fixtures.update(self.mocks[mock_name].fixtures())