Mirror of https://github.com/ansible-collections/community.general.git (synced 2026-02-04 07:51:50 +00:00)
[PR #11341/5b5f7e9e backport][stable-12] batch 1 - update Python idiom to 3.7 using pyupgrade (#11349)
batch 1 - update Python idiom to 3.7 using pyupgrade (#11341)
* batch 1 - update Python idiom to 3.7 using pyupgrade
* add changelog frag
* add changelog frag
(cherry picked from commit 5b5f7e9e64)
Co-authored-by: Alexei Znamensky <103110+russoz@users.noreply.github.com>
Parent: 41f815be57
Commit: 2d07481e64
19 changed files with 48 additions and 29 deletions
changelogs/fragments/11341-pyupgrade-1.yml (new file, 19 lines)
@@ -0,0 +1,19 @@
+minor_changes:
+  - yaml cache plugin - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - logentries callback plugin - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - timestamp callback plugin - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - chroot connection plugin - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - jail connection plugin - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - lxc connection plugin - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - wsl connection plugin - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - zone connection plugin - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - cobbler inventory plugin - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - linode inventory plugin - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - lxd inventory plugin - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - opennebula inventory plugin - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - alicloud_ecs module utils - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - known_hosts module utils - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - lxd module utils - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - manageiq module utils - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - oci_utils module utils - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
+  - univention_umc module utils - update to Python 3.7 idioms (https://github.com/ansible-collections/community.general/pull/11341).
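The eighteen entries above all describe one mechanical change, applied by pyupgrade with a Python 3.7 baseline: legacy exception names are replaced by OSError, the redundant "r" mode argument to open() is dropped, and set(...) calls over generators or lists become set comprehensions or set literals. A minimal before/after sketch of those idioms (illustrative only; load_names and its file format are hypothetical, not code from this collection):

# Illustrative sketch, not repository code.
def load_names(path):
    # Before: open(path, "r") and except (IOError, socket.error); after: "r" is the
    # default mode, and both legacy names are aliases of OSError on Python 3.
    try:
        with open(path) as f:
            return {line.strip() for line in f}  # set comprehension instead of set(...)
    except OSError:
        return set()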
plugins/cache/yaml.py (vendored, 2 lines changed)
@@ -58,7 +58,7 @@ class CacheModule(BaseFileCacheModule):
     """
 
     def _load(self, filepath):
-        with open(os.path.abspath(filepath), "r", encoding="utf-8") as f:
+        with open(os.path.abspath(filepath), encoding="utf-8") as f:
             return AnsibleLoader(f).get_single_data()
 
     def _dump(self, value, filepath):
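The only change in this hunk is dropping the explicit "r" argument: "r" is already the default mode of the built-in open(), so the call behaves exactly as before (the explicit encoding is kept). A small self-contained check, using a throwaway temporary file rather than the plugin's cache files:

import tempfile

# Write a small file, then read it back without passing a mode.
with tempfile.NamedTemporaryFile("w", suffix=".yml", delete=False) as tmp:
    tmp.write("key: value\n")

with open(tmp.name, encoding="utf-8") as f:
    assert f.mode == "r"  # "r" is the default mode
    assert f.read() == "key: value\n"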
logentries callback plugin
@@ -178,7 +178,7 @@ class PlainTextSocketAppender:
         while True:
             try:
                 self._conn.send(to_bytes(multiline, errors="surrogate_or_strict"))
-            except socket.error:
+            except OSError:
                 self.reopen_connection()
                 continue
             break
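This rewrite, like the similar except changes in the hunks below, is behavior-preserving: since Python 3.3, socket.error, IOError, EnvironmentError, and select.error are all aliases of OSError, so the new except clauses catch exactly the same exceptions. A quick confirmation:

import select
import socket

# All of the legacy names refer to the same class on Python 3.3+.
assert socket.error is OSError
assert IOError is OSError
assert EnvironmentError is OSError
assert select.error is OSError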
timestamp callback plugin
@@ -89,7 +89,7 @@ def banner(self, msg, color=None, cows=True):
     msg = msg.strip()
     try:
         star_len = self.columns - get_text_width(msg) - timestamp_len
-    except EnvironmentError:
+    except OSError:
         star_len = self.columns - len(msg) - timestamp_len
     if star_len <= 3:
         star_len = 3
chroot connection plugin
@@ -200,7 +200,7 @@ class Connection(ConnectionBase):
                     raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") from e
                 if p.returncode != 0:
                     raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")
-        except IOError as e:
+        except OSError as e:
             raise AnsibleError(f"file or module does not exist at: {in_path}") from e
 
     def fetch_file(self, in_path, out_path):
jail connection plugin
@@ -172,7 +172,7 @@ class Connection(ConnectionBase):
                     raise AnsibleError(
                         f"failed to transfer file {in_path} to {out_path}:\n{to_native(stdout)}\n{to_native(stderr)}"
                     )
-        except IOError as e:
+        except OSError as e:
             raise AnsibleError(f"file or module does not exist at: {in_path}") from e
 
     def fetch_file(self, in_path, out_path):
lxc connection plugin
@@ -94,7 +94,7 @@ class Connection(ConnectionBase):
         while len(read_fds) > 0 or len(write_fds) > 0:
             try:
                 ready_reads, ready_writes, dummy = select.select(read_fds, write_fds, [])
-            except select.error as e:
+            except OSError as e:
                 if e.args[0] == errno.EINTR:
                     continue
                 raise
@@ -173,7 +173,7 @@ class Connection(ConnectionBase):
             raise errors.AnsibleFileNotFound(msg)
         try:
             src_file = open(in_path, "rb")
-        except IOError as e:
+        except OSError as e:
             traceback.print_exc()
             raise errors.AnsibleError(f"failed to open input file to {in_path}") from e
         try:
@@ -184,7 +184,7 @@ class Connection(ConnectionBase):
 
         try:
             self.container.attach_wait(write_file, None)
-        except IOError as e:
+        except OSError as e:
             traceback.print_exc()
             msg = f"failed to transfer file to {out_path}"
             raise errors.AnsibleError(msg) from e
@@ -200,7 +200,7 @@ class Connection(ConnectionBase):
 
         try:
             dst_file = open(out_path, "wb")
-        except IOError as e:
+        except OSError as e:
             traceback.print_exc()
             msg = f"failed to open output file {out_path}"
             raise errors.AnsibleError(msg) from e
@@ -217,7 +217,7 @@ class Connection(ConnectionBase):
 
         try:
             self.container.attach_wait(write_file, None)
-        except IOError as e:
+        except OSError as e:
             traceback.print_exc()
             msg = f"failed to transfer file from {in_path} to {out_path}"
             raise errors.AnsibleError(msg) from e
wsl connection plugin
@@ -475,7 +475,7 @@ class Connection(ConnectionBase):
                 try:
                     ssh.load_system_host_keys(ssh_known_hosts)
                     break
-                except IOError:
+                except OSError:
                     pass  # file was not found, but not required to function
                 except paramiko.hostkeys.InvalidHostKey as e:
                     raise AnsibleConnectionFailure(f"Invalid host key: {to_text(e.line)}") from e
zone connection plugin
@@ -169,7 +169,7 @@ class Connection(ConnectionBase):
                     raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") from e
                 if p.returncode != 0:
                     raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")
-        except IOError as e:
+        except OSError as e:
             raise AnsibleError(f"file or module does not exist at: {in_path}") from e
 
     def fetch_file(self, in_path, out_path):
cobbler inventory plugin
@@ -197,7 +197,7 @@ class InventoryModule(BaseInventoryPlugin, Cacheable):
                     data = self.cobbler.get_profiles(self.token)
                 else:
                     data = self.cobbler.get_profiles()
-            except (socket.gaierror, socket.error, xmlrpc_client.ProtocolError):
+            except (socket.gaierror, OSError, xmlrpc_client.ProtocolError):
                 self._reload_cache()
             else:
                 self._init_cache()
@@ -221,7 +221,7 @@ class InventoryModule(BaseInventoryPlugin, Cacheable):
                     data[i] = self.cobbler.get_system_as_rendered(host["name"], self.token)
                 else:
                     data[i] = self.cobbler.get_system_as_rendered(host["name"])
-            except (socket.gaierror, socket.error, xmlrpc_client.ProtocolError):
+            except (socket.gaierror, OSError, xmlrpc_client.ProtocolError):
                 self._reload_cache()
             else:
                 self._init_cache()
linode inventory plugin
@@ -203,7 +203,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
                 ips += [instance.ips.ipv6.slaac, instance.ips.ipv6.link_local]
                 ips += instance.ips.ipv6.pools
 
-            for ip_type in set(ip.type for ip in ips):
+            for ip_type in {ip.type for ip in ips}:
                 self.inventory.set_variable(
                     hostname, ip_type, make_unsafe(self._ip_data([ip for ip in ips if ip.type == ip_type]))
                 )
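The loop header here swaps set(generator) for a set comprehension; both build the same set of IP types, and the comprehension is simply the idiomatic spelling. A small illustration with made-up data (the IP namedtuple stands in for the Linode API objects and is not part of the plugin):

from collections import namedtuple

IP = namedtuple("IP", ["type", "address"])
ips = [IP("ipv4", "192.0.2.10"), IP("ipv6", "2001:db8::1"), IP("ipv4", "192.0.2.11")]

# The old and new spellings produce identical sets.
assert set(ip.type for ip in ips) == {ip.type for ip in ips} == {"ipv4", "ipv6"}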
lxd inventory plugin
@@ -210,9 +210,9 @@ class InventoryModule(BaseInventoryPlugin):
         Returns:
             dict(json_data): json data"""
         try:
-            with open(path, "r") as json_file:
+            with open(path) as json_file:
                 return json.load(json_file)
-        except (IOError, json.decoder.JSONDecodeError) as err:
+        except (OSError, json.decoder.JSONDecodeError) as err:
             raise AnsibleParserError(f"Could not load the test data from {to_native(path)}: {err}") from err
 
     def save_json_data(self, path, file_name=None):
@@ -242,7 +242,7 @@ class InventoryModule(BaseInventoryPlugin):
             cwd = os.path.abspath(os.path.dirname(__file__))
             with open(os.path.abspath(os.path.join(cwd, *path)), "w") as json_file:
                 json.dump(self.data, json_file)
-        except IOError as err:
+        except OSError as err:
             raise AnsibleParserError(f"Could not save data: {err}") from err
 
     def verify_file(self, path):
opennebula inventory plugin
@@ -119,10 +119,10 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
         if authfile is None:
             authfile = os.path.join(os.environ.get("HOME"), ".one", "one_auth")
         try:
-            with open(authfile, "r") as fp:
+            with open(authfile) as fp:
                 authstring = fp.read().rstrip()
                 username, password = authstring.split(":")
-        except (OSError, IOError) as e:
+        except OSError as e:
             raise AnsibleError(f"Could not find or read ONE_AUTH file at '{authfile}'") from e
         except Exception as e:
             raise AnsibleError(f"Error occurs when reading ONE_AUTH file at '{authfile}'") from e
alicloud_ecs module utils
@@ -163,7 +163,7 @@ def get_profile(params):
         else f"{os.getenv('HOME')}/.aliyun/config.json"
     )
     auth = {}
-    with open(path, "r") as f:
+    with open(path) as f:
         for pro in json.load(f)["profiles"]:
             if params["profile"] == pro["name"]:
                 auth = pro
known_hosts module utils
@@ -97,7 +97,7 @@ def not_in_host_file(self, host):
         try:
             with open(hf) as host_fh:
                 data = host_fh.read()
-        except IOError:
+        except OSError:
             hfiles_not_found += 1
             continue
 
lxd module utils
@@ -115,7 +115,7 @@ class LXDClient:
                     return resp_json
                 self._raise_err_from_json(resp_json)
             return resp_json
-        except socket.error as e:
+        except OSError as e:
             raise LXDClientException("cannot connect to the LXD server", err=e) from e
 
     def _raise_err_from_json(self, resp_json):
manageiq module utils
@@ -292,7 +292,7 @@ class ManageIQPolicies:
 
         # make a list of assigned full profile names strings
        # e.g. ['openscap profile', ...]
-        assigned_profiles_set = set(profile["profile_name"] for profile in assigned_profiles)
+        assigned_profiles_set = {profile["profile_name"] for profile in assigned_profiles}
 
         for profile in profiles:
             assigned = profile.get("name") in assigned_profiles_set
@@ -398,7 +398,7 @@ class ManageIQTags:
 
         # make a list of assigned full tag names strings
         # e.g. ['/managed/environment/prod', ...]
-        assigned_tags_set = set(tag["full_name"] for tag in assigned_tags)
+        assigned_tags_set = {tag["full_name"] for tag in assigned_tags}
 
         for tag in tags:
             assigned = self.full_tag_name(tag) in assigned_tags_set
oci_utils module utils
@@ -1352,7 +1352,7 @@ def delete_and_wait(
     :return: A dictionary containing the resource & the "changed" status. e.g. {"vcn":{x:y}, "changed":True}
     """
 
-    states_set = set(["DETACHING", "DETACHED", "DELETING", "DELETED", "TERMINATING", "TERMINATED"])
+    states_set = {"DETACHING", "DETACHED", "DELETING", "DELETED", "TERMINATING", "TERMINATED"}
     result: dict[str, t.Any] = dict(changed=False)
     result[resource_type] = dict()
     try:
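The same idea applies to the literal form: a set display avoids the throwaway list that set([...]) builds first, and the resulting set compares equal. For example:

# The set literal and the set([...]) call are interchangeable here.
assert {"DELETING", "DELETED"} == set(["DELETING", "DELETED"])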
univention_umc module utils
@@ -88,10 +88,10 @@ def uldap():
 
     def construct():
         try:
-            secret_file = open("/etc/ldap.secret", "r")
+            secret_file = open("/etc/ldap.secret")
             bind_dn = f"cn=admin,{base_dn()}"
-        except IOError:  # pragma: no cover
-            secret_file = open("/etc/machine.secret", "r")
+        except OSError:  # pragma: no cover
+            secret_file = open("/etc/machine.secret")
             bind_dn = config_registry()["ldap/hostdn"]
         pwd_line = secret_file.readline()
         pwd = re.sub("\n", "", pwd_line)