batch 3 - update Python idiom to 3.7 using pyupgrade (#11343)
* batch 3 - update Python idiom to 3.7 using pyupgrade
* add changelog frag
* bring back sanity
* adjust test
* Apply suggestions from code review
parent 543329cecb
commit e8f2b135ba

26 changed files with 74 additions and 51 deletions
changelogs/fragments/11343-pyupgrade-3.yml (new file, +25)
@@ -0,0 +1,25 @@
+minor_changes:
+  - android_sdk - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - archive - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - bitbucket_pipeline_known_host - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - copr - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - cronvar - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - crypttab - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - elasticsearch_plugin - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - gitlab_group - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - gitlab_issue - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - gitlab_merge_request - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - gitlab_project - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - gunicorn - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - htpasswd - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - imc_rest - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - ini_file - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - interfaces_file - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - iptables_state - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - jenkins_credential - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - jenkins_plugin - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - kdeconfig - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - layman - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - listen_ports_facts - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - locale_gen - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
+  - lvm_pv - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11343).
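Every hunk below is one of three mechanical pyupgrade rewrites, none of which changes behavior on Python 3.7+: set comprehensions or set literals instead of set() around a generator or list, plain OSError instead of its pre-3.3 aliases, and open() without the redundant "r" mode. A minimal before/after sketch (the names here are illustrative, not taken from the diff):

    # before: Python 2-era spellings (pkgs and checksum are made-up names)
    names = set(p.name for p in pkgs)      # set() wrapped around a generator
    pair = set([(b"", checksum)])          # set() wrapped around a list literal
    with open(path, "r") as f:             # explicit default mode
        data = f.read()

    # after: what pyupgrade emits in its --py37-plus mode
    names = {p.name for p in pkgs}         # set comprehension
    pair = {(b"", checksum)}               # set literal
    with open(path) as f:                  # "r" is the default
        data = f.read()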

plugins/modules/android_sdk.py
@@ -158,7 +158,7 @@ class AndroidSdk(StateModuleHelper):
         arg_pkgs = set(self.vars.package)
         if len(arg_pkgs) < len(self.vars.package):
             self.do_raise("Packages may not repeat")
-        return set(Package(p) for p in arg_pkgs)
+        return {Package(p) for p in arg_pkgs}
 
     def state_present(self):
         packages = self._parse_packages()

plugins/modules/archive.py
@@ -321,7 +321,7 @@ class Archive(metaclass=abc.ABCMeta):
                 f_out.close()
                 self.successes.append(path)
                 self.destination_state = STATE_COMPRESSED
-            except (IOError, OSError) as e:
+            except OSError as e:
                 self.module.fail_json(
                     path=_to_native(path),
                     dest=_to_native(self.destination),
@@ -506,7 +506,7 @@ class ZipArchive(Archive):
     def _get_checksums(self, path):
         try:
             archive = zipfile.ZipFile(_to_native_ascii(path), "r")
-            checksums = set((info.filename, info.CRC) for info in archive.infolist())
+            checksums = {(info.filename, info.CRC) for info in archive.infolist()}
             archive.close()
         except BadZipFile:
             checksums = set()
@@ -558,11 +558,11 @@ class TarArchive(Archive):
             if self.format == "xz":
                 with lzma.open(_to_native_ascii(path), "r") as f:
                     archive = tarfile.open(fileobj=f)
-                    checksums = set((info.name, info.chksum) for info in archive.getmembers())
+                    checksums = {(info.name, info.chksum) for info in archive.getmembers()}
                     archive.close()
             else:
                 archive = tarfile.open(_to_native_ascii(path), f"r|{self.format}")
-                checksums = set((info.name, info.chksum) for info in archive.getmembers())
+                checksums = {(info.name, info.chksum) for info in archive.getmembers()}
                 archive.close()
         except (LZMAError, tarfile.ReadError, tarfile.CompressionError):
             try:
@@ -575,7 +575,7 @@ class TarArchive(Archive):
                     if not chunk:
                         break
                     checksum = crc32(chunk, checksum)
-                checksums = set([(b"", checksum)])
+                checksums = {(b"", checksum)}
                 f.close()
             except Exception:
                 checksums = set()

plugins/modules/bitbucket_pipeline_known_host.py
@@ -181,7 +181,7 @@ def get_host_key(module, hostname):
     try:
         sock = socket.socket()
         sock.connect((hostname, 22))
-    except socket.error:
+    except OSError:
         module.fail_json(msg=f"Error opening socket to {hostname}")
 
     try:
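The exception renames in this and the following hunks (socket.error, IOError, EnvironmentError, and the (IOError, OSError) tuple all becoming plain OSError) are safe because PEP 3151 merged these types into OSError in Python 3.3; the old names survive only as aliases. A quick check, runnable on any Python 3:

    import socket

    # All three legacy names are literally the same class object as OSError,
    # so "except OSError" catches exactly what the old spellings caught.
    assert IOError is OSError
    assert EnvironmentError is OSError
    assert socket.error is OSError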

plugins/modules/copr.py
@@ -348,7 +348,7 @@ class CoprModule:
         repo = self._get_copr_repo()
         for repo_id in repo.cfg.sections():
             repo_content_api = self._download_repo_info()
-            with open(repo_filename_path, "r") as file:
+            with open(repo_filename_path) as file:
                 repo_content_file = file.read()
             if repo_content_file != repo_content_api:
                 if not self.resolve_differences(repo_content_file, repo_content_api, repo_filename_path):
@@ -449,7 +449,7 @@ class CoprModule:
         """
         if not os.path.isfile(repo_filename_path):
             return False
-        with open(repo_filename_path, "r") as file:
+        with open(repo_filename_path) as file:
             repo_content_file = file.read()
         return repo_content_file == repo_content_api
 
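Dropping the "r" argument in these open() calls is likewise a no-op: text-mode read is open()'s documented default. A tiny self-contained demonstration:

    # open(path) and open(path, "r") are the same call; "r" is the default mode.
    with open("demo.txt", "w") as f:
        f.write("hello\n")

    with open("demo.txt") as f:            # equivalent to open("demo.txt", "r")
        assert f.read() == "hello\n"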

plugins/modules/cronvar.py
@@ -155,9 +155,9 @@ class CronVar:
         if self.cron_file:
             # read the cronfile
             try:
-                with open(self.cron_file, "r") as f:
+                with open(self.cron_file) as f:
                     self.lines = f.read().splitlines()
-            except IOError:
+            except OSError:
                 # cron file does not exist
                 return
             except Exception as e:

plugins/modules/crypttab.py
@@ -171,7 +171,7 @@ class Crypttab:
                 os.makedirs(os.path.dirname(path))
             open(path, "a").close()
 
-        with open(path, "r") as f:
+        with open(path) as f:
             for line in f.readlines():
                 self._lines.append(Line(line))
 

plugins/modules/elasticsearch_plugin.py
@@ -232,8 +232,8 @@ def get_plugin_bin(module, plugin_bin=None):
 
     # Get separate lists of dirs and binary names from the full paths to the
     # plugin binaries.
-    plugin_dirs = list(set(os.path.dirname(x) for x in bin_paths))
-    plugin_bins = list(set(os.path.basename(x) for x in bin_paths))
+    plugin_dirs = list({os.path.dirname(x) for x in bin_paths})
+    plugin_bins = list({os.path.basename(x) for x in bin_paths})
 
     # Check for the binary names in the default system paths as well as the path
     # specified in the module arguments.

plugins/modules/gitlab_group.py
@@ -317,7 +317,7 @@ class GitLabGroup:
         if options["avatar_path"]:
             try:
                 group.avatar = open(options["avatar_path"], "rb")
-            except IOError as e:
+            except OSError as e:
                 self._module.fail_json(msg=f"Cannot open {options['avatar_path']}: {e}")
             changed = True
         else:

plugins/modules/gitlab_issue.py
@@ -358,7 +358,7 @@ def main():
         try:
             with open(description_path, "rb") as f:
                 description = to_text(f.read(), errors="surrogate_or_strict")
-        except IOError as e:
+        except OSError as e:
             module.fail_json(msg=f"Cannot open {description_path}: {e}")
 
     # sorting necessary in order to properly detect changes, as we don't want to get false positive

plugins/modules/gitlab_merge_request.py
@@ -372,7 +372,7 @@ def main():
         try:
             with open(description_path, "rb") as f:
                 description = to_text(f.read(), errors="surrogate_or_strict")
-        except IOError as e:
+        except OSError as e:
             module.fail_json(msg=f"Cannot open {description_path}: {e}")
 
     # sorting necessary in order to properly detect changes, as we don't want to get false positive

plugins/modules/gitlab_project.py
@@ -500,7 +500,7 @@ class GitLabProject:
         if options["avatar_path"]:
             try:
                 project.avatar = open(options["avatar_path"], "rb")
-            except IOError as e:
+            except OSError as e:
                 self._module.fail_json(msg=f"Cannot open {options['avatar_path']}: {e}")
 
             changed = True

plugins/modules/gunicorn.py
@@ -109,7 +109,7 @@ from ansible.module_utils.basic import AnsibleModule
 def search_existing_config(config, option):
     """search in config file for specified option"""
     if config and os.path.isfile(config):
-        with open(config, "r") as f:
+        with open(config) as f:
             for line in f:
                 if option in line:
                     return line
@@ -196,7 +196,7 @@ def main():
         # wait for gunicorn to dump to log
         time.sleep(0.5)
         if os.path.isfile(pid):
-            with open(pid, "r") as f:
+            with open(pid) as f:
                 result = f.readline().strip()
 
         if not params["pid"]:
@@ -209,7 +209,7 @@ def main():
             error = f"Please check your {error_log.strip()}"
         else:
             if os.path.isfile(tmp_error_log):
-                with open(tmp_error_log, "r") as f:
+                with open(tmp_error_log) as f:
                     error = f.read()
                 # delete tmp log
                 os.remove(tmp_error_log)

plugins/modules/htpasswd.py
@@ -204,7 +204,7 @@ def main():
     # TODO double check if this hack below is still needed.
     # Check file for blank lines in effort to avoid "need more than 1 value to unpack" error.
     try:
-        with open(path, "r") as f:
+        with open(path) as f:
             lines = f.readlines()
 
         # If the file gets edited, it returns true, so only edit the file if it has blank lines
@@ -222,7 +222,7 @@ def main():
             with open(path, "w") as f:
                 f.writelines(line for line in lines if line.strip())
 
-    except IOError:
+    except OSError:
         # No preexisting file to remove blank lines from
         pass
 

plugins/modules/imc_rest.py
@@ -398,7 +398,7 @@ def main():
     if content:
         rawdata = content.replace("\n", "")
     elif file_exists:
-        with open(path, "r") as config_object:
+        with open(path) as config_object:
             rawdata = config_object.read().replace("\n", "")
 
     # Wrap the XML documents in a <root> element

plugins/modules/ini_file.py
@@ -349,7 +349,7 @@ def do_ini(
             os.makedirs(destpath)
         ini_lines = []
     else:
-        with open(target_filename, "r", encoding="utf-8-sig") as ini_file:
+        with open(target_filename, encoding="utf-8-sig") as ini_file:
             ini_lines = [to_text(line) for line in ini_file.readlines()]
 
     if module._diff:
@@ -579,12 +579,12 @@ def do_ini(
         f = os.fdopen(tmpfd, "wb")
         f.writelines(encoded_ini_lines)
         f.close()
-    except IOError:
+    except OSError:
         module.fail_json(msg="Unable to create temporary file %s", traceback=traceback.format_exc())
 
     try:
         module.atomic_move(tmpfile, os.path.abspath(target_filename))
-    except IOError:
+    except OSError:
         module.ansible.fail_json(
             msg=f"Unable to move temporary file {tmpfile} to {target_filename}, IOError",
             traceback=traceback.format_exc(),
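Note that in the ini_file hunk above only the positional "r" disappears; keyword arguments such as encoding="utf-8-sig" are kept, so BOM handling is unchanged. A sketch of the equivalence:

    # The mode still defaults to "r" when only encoding is passed.
    with open("demo.ini", "w", encoding="utf-8-sig") as f:
        f.write("[section]\nkey = value\n")

    with open("demo.ini", encoding="utf-8-sig") as f:  # same as ("demo.ini", "r", ...)
        assert f.read().startswith("[section]")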

plugins/modules/interfaces_file.py
@@ -179,7 +179,7 @@ def getValueFromLine(s):
 
 
 def read_interfaces_file(module, filename):
-    with open(filename, "r") as f:
+    with open(filename) as f:
         return read_interfaces_lines(module, f)
 
 

plugins/modules/iptables_state.py
@@ -252,7 +252,7 @@ def read_state(b_path):
     """
     Read a file and store its content in a variable as a list.
     """
-    with open(b_path, "r") as f:
+    with open(b_path) as f:
         text = f.read()
     return [t for t in text.splitlines() if t != ""]
 

plugins/modules/jenkins_credential.py
@@ -465,7 +465,7 @@ def delete_target(module, headers):
 # Function to read the private key for types texts and ssh_key
 def read_privateKey(module):
     try:
-        with open(module.params["private_key_path"], "r") as f:
+        with open(module.params["private_key_path"]) as f:
             private_key = f.read().strip()
             return private_key
     except Exception as e:
@@ -738,9 +738,9 @@ def run_module():
 
     elif ext.lower() in [".pem", ".crt"]:  # PEM mode
         try:
-            with open(filePath, "r") as f:
+            with open(filePath) as f:
                 cert_chain = f.read()
-            with open(private_key_path, "r") as f:
+            with open(private_key_path) as f:
                 private_key = f.read()
         except Exception as e:
             module.fail_json(msg=f"Failed to read PEM files: {e}")

plugins/modules/jenkins_plugin.py
@@ -688,7 +688,7 @@ class JenkinsPlugin:
                 with open(cache_path, "w") as f:
                     json.dump(plugin_data, f)
 
-                with open(cache_path, "r") as f:
+                with open(cache_path) as f:
                     plugin_data = json.load(f)
 
             except Exception as e:
@@ -759,7 +759,7 @@ class JenkinsPlugin:
 
             try:
                 os.close(tmp_update_fd)
-            except IOError as e:
+            except OSError as e:
                 self.module.fail_json(msg=f"Cannot close the tmp updates file {tmp_updates_file}.", details=f"{e}")
         else:
             tmp_updates_file = updates_file
@@ -771,7 +771,7 @@ class JenkinsPlugin:
                 # Read only the second line
                 dummy = f.readline()
                 data = json.loads(f.readline())
-        except IOError as e:
+        except OSError as e:
             self.module.fail_json(
                 msg=f"Cannot open{' temporary' if tmp_updates_file != updates_file else ''} updates file.",
                 details=f"{e}",
@@ -808,7 +808,7 @@ class JenkinsPlugin:
 
         try:
             os.close(tmp_f_fd)
-        except IOError as e:
+        except OSError as e:
             self.module.fail_json(msg=f"Cannot close the temporal plugin file {tmp_f}.", details=f"{e}")
 
         # Move the file onto the right place

plugins/modules/kdeconfig.py
@@ -160,7 +160,7 @@ def run_module(module, tmpdir, kwriteconfig):
         try:
             with open(b_path, "rb") as src:
                 b_data = src.read()
-        except IOError:
+        except OSError:
             result["changed"] = True
         else:
             dst.write(b_data)
@@ -168,7 +168,7 @@ def run_module(module, tmpdir, kwriteconfig):
                     diff["before"] = to_text(b_data)
                 except UnicodeError:
                     diff["before"] = repr(b_data)
-    except IOError:
+    except OSError:
         module.fail_json(msg="Unable to create temporary file", traceback=traceback.format_exc())
 
     for row in module.params["values"]:
@@ -211,7 +211,7 @@ def run_module(module, tmpdir, kwriteconfig):
         result["backup_file"] = module.backup_local(result["path"])
     try:
         module.atomic_move(b_tmpfile, os.path.abspath(b_path))
-    except IOError:
+    except OSError:
         module.ansible.fail_json(
             msg=f"Unable to move temporary file {tmpfile} to {result['path']}, IOError",
             traceback=traceback.format_exc(),

plugins/modules/layman.py
@@ -135,7 +135,7 @@ def download_url(module, url, dest):
     try:
         with open(dest, "w") as f:
             shutil.copyfileobj(response, f)
-    except IOError as e:
+    except OSError as e:
         raise ModuleError(f"Failed to write: {e}") from e
 
 

plugins/modules/listen_ports_facts.py
@@ -282,7 +282,7 @@ def ss_parse(raw):
 
     if len(lines) == 0 or not lines[0].startswith("Netid "):
         # unexpected stdout from ss
-        raise EnvironmentError(f"Unknown stdout format of `ss`: {raw}")
+        raise OSError(f"Unknown stdout format of `ss`: {raw}")
 
     # skip headers (-H arg is not present on e.g. Ubuntu 16)
     lines = lines[1:]
@@ -298,7 +298,7 @@ def ss_parse(raw):
             protocol, state, recv_q, send_q, local_addr_port, peer_addr_port, process = cells
         except ValueError as e:
             # unexpected stdout from ss
-            raise EnvironmentError(
+            raise OSError(
                 'Expected `ss` table layout "Netid, State, Recv-Q, Send-Q, Local Address:Port, Peer Address:Port" and'
                 f'optionally "Process", but got something else: {line}'
             ) from e
@@ -394,9 +394,7 @@ def main():
             break
 
     if bin_path is None:
-        raise EnvironmentError(
-            f"Unable to find any of the supported commands in PATH: {', '.join(sorted(commands_map))}"
-        )
+        raise OSError(f"Unable to find any of the supported commands in PATH: {', '.join(sorted(commands_map))}")
 
     # which ports are listening for connections?
     args = commands_map[command]["args"]
@@ -416,7 +414,7 @@ def main():
                 result["ansible_facts"]["tcp_listen"].append(connection)
             elif connection["protocol"].startswith("udp"):
                 result["ansible_facts"]["udp_listen"].append(connection)
-    except (KeyError, EnvironmentError) as e:
+    except (KeyError, OSError) as e:
         module.fail_json(msg=f"{e}")
 
     module.exit_json(**result)

plugins/modules/locale_gen.py
@@ -164,7 +164,7 @@ class LocaleGen(StateModuleHelper):
         available_locale_entry_re_matches = []
         for locale_path in self.mechanisms[self.vars.mechanism]["available"]:
             if os.path.exists(locale_path):
-                with open(locale_path, "r") as fd:
+                with open(locale_path) as fd:
                     self.vars.available_lines.extend(fd.readlines())
 
         re_locale_entry = re.compile(r"^\s*#?\s*(?P<locale>\S+[\._\S]+) (?P<charset>\S+)\s*$")
@@ -210,7 +210,7 @@ class LocaleGen(StateModuleHelper):
 
     def set_locale_glibc(self, names, enabled=True):
         """Sets the state of the locale. Defaults to enabled."""
-        with open(ETC_LOCALE_GEN, "r") as fr:
+        with open(ETC_LOCALE_GEN) as fr:
             lines = fr.readlines()
 
         locale_regexes = []
@@ -265,7 +265,7 @@ class LocaleGen(StateModuleHelper):
             ctx.run()
         else:
             # Delete locale involves discarding the locale from /var/lib/locales/supported.d/local and regenerating all locales.
-            with open(VAR_LIB_LOCALES_LOCAL, "r") as fr:
+            with open(VAR_LIB_LOCALES_LOCAL) as fr:
                 content = fr.readlines()
             with open(VAR_LIB_LOCALES_LOCAL, "w") as fw:
                 for line in content:

plugins/modules/lvm_pv.py
@@ -109,7 +109,7 @@ def rescan_device(module, device):
             with open(rescan_path, "w") as f:
                 f.write("1")
             return True
-        except IOError as e:
+        except OSError as e:
             module.warn(f"Failed to rescan device {device}: {e!s}")
     else:
         module.warn(f"Rescan path does not exist for device {device}")

tests/unit/plugins/modules/test_jenkins_credential.py
@@ -248,7 +248,7 @@ def test_read_privateKey_returns_trimmed_contents():
     expected = "-----BEGIN PRIVATE KEY-----\nKEYDATA\n-----END PRIVATE KEY-----"
 
     assert result == expected
-    mocked_file.assert_called_once_with("/fake/path/key.pem", "r")
+    mocked_file.assert_called_once_with("/fake/path/key.pem")
 
 
 def test_read_privateKey_handles_file_read_error():
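The test change above tracks the call-site change in jenkins_credential: assert_called_once_with must match the new one-argument open() call exactly, because mock compares argument lists literally. A minimal sketch of the pattern (mocked_file and the path are illustrative):

    from unittest.mock import mock_open, patch

    with patch("builtins.open", mock_open(read_data=" KEY ")) as mocked_file:
        with open("/fake/path/key.pem") as f:
            assert f.read().strip() == "KEY"
        # Passes now that the code opens the file without an explicit "r".
        mocked_file.assert_called_once_with("/fake/path/key.pem")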