Mirror of https://github.com/ansible-collections/community.general.git (synced 2026-02-04 07:51:50 +00:00)
fix ruff case UP031 (#11223)
* fix ruff case UP031
* refactor backslash out of f-string for the sake of old Pythons
* add changelog frag
* Update plugins/modules/imc_rest.py (Co-authored-by: Felix Fontein <felix@fontein.de>)
* scaleway_user_data: fix bug and make it an f-string
* reformat

Co-authored-by: Felix Fontein <felix@fontein.de>
This commit is contained in:
parent 1ab9be152f
commit d550baacfa

22 changed files with 72 additions and 63 deletions
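
Ruff's UP031 rule ("Use format specifiers instead of percent format") is what drives every hunk below. As a rough sketch of the two replacement styles the commit uses, with illustrative values rather than anything taken from the collection's modules:

backend = "ostree"
container = "my-app"

# Before: printf-style templating, flagged by ruff UP031
args_old = ["atomic", "install", "--storage=%s" % backend, "--name=%s" % container]

# After, style 1: f-strings
args_new = ["atomic", "install", f"--storage={backend}", f"--name={container}"]

# After, style 2: str.format(), used where an f-string would be awkward
template = "--storage={} --name={}".format(backend, container)

assert args_old == args_new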
changelogs/fragments/11223-ruff-cases-9.yml (new file, 19 lines)

@@ -0,0 +1,19 @@
+minor_changes:
+  - hashids filter - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - keycloak module_utils - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - atomic_container modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - atomic_image modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - bitbucket_access_key modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - bitbucket_pipeline_key_pair modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - bitbucket_pipeline_known_host modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - bitbucket_pipeline_variable modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - dimensiondata_network modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - imc_rest modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - jenkins_plugin modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - nmcli modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - oneandone_server modules - mark ``%`` templating as ``noqa`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - packet_device modules - mark ``%`` templating as ``noqa`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - pushbullet modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - scaleway_user_data modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - sensu_silence modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - xenserver_guest modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
hashids filter

@@ -9,7 +9,6 @@ from ansible.errors import (
     AnsibleFilterError,
 )
 
-from ansible.module_utils.common.text.converters import to_native
 from ansible.module_utils.common.collections import is_sequence
 
 try:
@@ -34,10 +33,8 @@ def initialize_hashids(**kwargs):
     try:
         return Hashids(**params)
     except TypeError as e:
-        raise AnsibleFilterError(
-            "The provided parameters %s are invalid: %s"
-            % (", ".join(["%s=%s" % (k, v) for k, v in params.items()]), to_native(e))
-        ) from e
+        str_params = ", ".join([f"{k}={v}" for k, v in params.items()])
+        raise AnsibleFilterError(f"The provided parameters {str_params} are invalid: {e}") from e
 
 
 def hashids_encode(nums, salt=None, alphabet=None, min_length=None):
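Hoisting the join into ``str_params`` before the ``raise`` keeps the f-string itself trivial; it also keeps complex expressions (and any awkward quoting or backslashes, which pre-3.12 Pythons do not accept inside f-string expressions) out of the replacement field. A small sketch of the two shapes, with illustrative values:

params = {"salt": "secret", "min_length": 4}
err = TypeError("unexpected keyword argument")

# Inlining the whole join into the f-string works, but is hard to read:
msg_inline = f"The provided parameters {', '.join([f'{k}={v}' for k, v in params.items()])} are invalid: {err}"

# Hoisting it into a helper variable keeps the f-string short:
str_params = ", ".join([f"{k}={v}" for k, v in params.items()])
msg = f"The provided parameters {str_params} are invalid: {err}"

assert msg_inline == msg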
keycloak module_utils

@@ -3010,7 +3010,7 @@ class KeycloakAPI:
     def get_authz_permission_by_name(self, name, client_id, realm):
         """Get authorization permission by name"""
         url = URL_AUTHZ_POLICIES.format(url=self.baseurl, client_id=client_id, realm=realm)
-        search_url = "%s/search?name=%s" % (url, name.replace(" ", "%20"))
+        search_url = f"{url}/search?name={name.replace(' ', '%20')}"
 
         try:
             return self._request_and_deserialize(search_url, method="GET")
@@ -3056,7 +3056,7 @@ class KeycloakAPI:
     def get_authz_resource_by_name(self, name, client_id, realm):
         """Get authorization resource by name"""
         url = URL_AUTHZ_RESOURCES.format(url=self.baseurl, client_id=client_id, realm=realm)
-        search_url = "%s/search?name=%s" % (url, name.replace(" ", "%20"))
+        search_url = f"{url}/search?name={name.replace(' ', '%20')}"
 
         try:
             return self._request_and_deserialize(search_url, method="GET")
@@ -3066,7 +3066,7 @@ class KeycloakAPI:
     def get_authz_policy_by_name(self, name, client_id, realm):
         """Get authorization policy by name"""
         url = URL_AUTHZ_POLICIES.format(url=self.baseurl, client_id=client_id, realm=realm)
-        search_url = "%s/search?name=%s&permission=false" % (url, name.replace(" ", "%20"))
+        search_url = f"{url}/search?name={name.replace(' ', '%20')}&permission=false"
 
         try:
             return self._request_and_deserialize(search_url, method="GET")
atomic_container

@@ -108,10 +108,10 @@ from ansible.module_utils.basic import AnsibleModule
 def do_install(module, mode, rootfs, container, image, values_list, backend):
     system_list = ["--system"] if mode == "system" else []
     user_list = ["--user"] if mode == "user" else []
-    rootfs_list = ["--rootfs=%s" % rootfs] if rootfs else []
+    rootfs_list = [f"--rootfs={rootfs}"] if rootfs else []
     atomic_bin = module.get_bin_path("atomic")
     args = (
-        [atomic_bin, "install", "--storage=%s" % backend, "--name=%s" % container]
+        [atomic_bin, "install", f"--storage={backend}", f"--name={container}"]
         + system_list
         + user_list
         + rootfs_list
@@ -128,7 +128,7 @@ def do_install(module, mode, rootfs, container, image, values_list, backend):
 
 def do_update(module, container, image, values_list):
     atomic_bin = module.get_bin_path("atomic")
-    args = [atomic_bin, "containers", "update", "--rebase=%s" % image] + values_list + [container]
+    args = [atomic_bin, "containers", "update", f"--rebase={image}"] + values_list + [container]
     rc, out, err = module.run_command(args, check_rc=False)
     if rc != 0:
         module.fail_json(rc=rc, msg=err)
@@ -139,7 +139,7 @@ def do_update(module, container, image, values_list):
 
 def do_uninstall(module, name, backend):
     atomic_bin = module.get_bin_path("atomic")
-    args = [atomic_bin, "uninstall", "--storage=%s" % backend, name]
+    args = [atomic_bin, "uninstall", f"--storage={backend}", name]
     rc, out, err = module.run_command(args, check_rc=False)
     if rc != 0:
         module.fail_json(rc=rc, msg=err)
@@ -169,7 +169,7 @@ def core(module):
     atomic_bin = module.get_bin_path("atomic")
     module.run_command_environ_update = dict(LANG="C", LC_ALL="C", LC_MESSAGES="C")
 
-    values_list = ["--set=%s" % x for x in values] if values else []
+    values_list = [f"--set={x}" for x in values] if values else []
 
     args = [
         atomic_bin,
@@ -179,9 +179,9 @@ def core(module):
         "-n",
         "--all",
         "-f",
-        "backend=%s" % backend,
+        f"backend={backend}",
         "-f",
-        "container=%s" % name,
+        f"container={name}",
     ]
     rc, out, err = module.run_command(args, check_rc=False)
     if rc != 0:
atomic_image

@@ -107,14 +107,14 @@ def core(module):
 
     if backend:
         if state == "present" or state == "latest":
-            args = [atomic_bin, "pull", "--storage=%s" % backend, image]
+            args = [atomic_bin, "pull", f"--storage={backend}", image]
             rc, out, err = module.run_command(args, check_rc=False)
             if rc < 0:
                 module.fail_json(rc=rc, msg=err)
             else:
                 out_run = ""
                 if started:
-                    args = [atomic_bin, "run", "--storage=%s" % backend, image]
+                    args = [atomic_bin, "run", f"--storage={backend}", image]
                     rc, out_run, err = module.run_command(args, check_rc=False)
                     if rc < 0:
                         module.fail_json(rc=rc, msg=err)
@@ -122,7 +122,7 @@ def core(module):
             changed = "Extracting" in out or "Copying blob" in out
             module.exit_json(msg=(out + out_run), changed=changed)
         elif state == "absent":
-            args = [atomic_bin, "images", "delete", "--storage=%s" % backend, image]
+            args = [atomic_bin, "images", "delete", f"--storage={backend}", image]
             rc, out, err = module.run_command(args, check_rc=False)
             if rc < 0:
                 module.fail_json(rc=rc, msg=err)
bitbucket_access_key

@@ -83,9 +83,8 @@ error_messages = {
 }
 
 BITBUCKET_API_ENDPOINTS = {
-    "deploy-key-list": "%s/2.0/repositories/{workspace}/{repo_slug}/deploy-keys/" % BitbucketHelper.BITBUCKET_API_URL,
-    "deploy-key-detail": "%s/2.0/repositories/{workspace}/{repo_slug}/deploy-keys/{key_id}"
-    % BitbucketHelper.BITBUCKET_API_URL,
+    "deploy-key-list": f"{BitbucketHelper.BITBUCKET_API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/deploy-keys/",
+    "deploy-key-detail": f"{BitbucketHelper.BITBUCKET_API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/deploy-keys/{{key_id}}",
 }
 
 
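In these endpoint tables the braces are doubled on purpose: inside an f-string, ``{{`` and ``}}`` come out as literal braces, so ``{workspace}`` and ``{repo_slug}`` survive as placeholders that can presumably still be filled in later with ``str.format()``. A minimal sketch with an illustrative base URL:

API_URL = "https://api.bitbucket.org"

# Doubled braces survive the f-string as literal { } characters...
endpoint = f"{API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/deploy-keys/"
assert endpoint == "https://api.bitbucket.org/2.0/repositories/{workspace}/{repo_slug}/deploy-keys/"

# ...so the template can still be expanded per request later on:
url = endpoint.format(workspace="acme", repo_slug="widgets")
print(url)  # https://api.bitbucket.org/2.0/repositories/acme/widgets/deploy-keys/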
bitbucket_pipeline_key_pair

@@ -78,8 +78,7 @@ error_messages = {
 }
 
 BITBUCKET_API_ENDPOINTS = {
-    "ssh-key-pair": "%s/2.0/repositories/{workspace}/{repo_slug}/pipelines_config/ssh/key_pair"
-    % BitbucketHelper.BITBUCKET_API_URL,
+    "ssh-key-pair": f"{BitbucketHelper.BITBUCKET_API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/pipelines_config/ssh/key_pair",
 }
 
 
bitbucket_pipeline_known_host

@@ -102,10 +102,10 @@ error_messages = {
 }
 
 BITBUCKET_API_ENDPOINTS = {
-    "known-host-list": "%s/2.0/repositories/{workspace}/{repo_slug}/pipelines_config/ssh/known_hosts/"
-    % BitbucketHelper.BITBUCKET_API_URL,
-    "known-host-detail": "%s/2.0/repositories/{workspace}/{repo_slug}/pipelines_config/ssh/known_hosts/{known_host_uuid}"
-    % BitbucketHelper.BITBUCKET_API_URL,
+    "known-host-list": f"{BitbucketHelper.BITBUCKET_API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/pipelines_config/ssh/known_hosts/",
+    "known-host-detail": (
+        f"{BitbucketHelper.BITBUCKET_API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/pipelines_config/ssh/known_hosts/{{known_host_uuid}}"
+    ),
 }
 
 
bitbucket_pipeline_variable

@@ -89,10 +89,10 @@ error_messages = {
 }
 
 BITBUCKET_API_ENDPOINTS = {
-    "pipeline-variable-list": "%s/2.0/repositories/{workspace}/{repo_slug}/pipelines_config/variables/"
-    % BitbucketHelper.BITBUCKET_API_URL,
-    "pipeline-variable-detail": "%s/2.0/repositories/{workspace}/{repo_slug}/pipelines_config/variables/{variable_uuid}"
-    % BitbucketHelper.BITBUCKET_API_URL,
+    "pipeline-variable-list": f"{BitbucketHelper.BITBUCKET_API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/pipelines_config/variables/",
+    "pipeline-variable-detail": (
+        f"{BitbucketHelper.BITBUCKET_API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/pipelines_config/variables/{{variable_uuid}}"
+    ),
 }
 
 
dimensiondata_network

@@ -163,7 +163,7 @@ class DimensionDataNetworkModule(DimensionDataModule):
 
         self.module.exit_json(
             changed=True,
-            msg='Created network "%s" in datacenter "%s".' % (self.name, self.location),
+            msg=f'Created network "{self.name}" in datacenter "{self.location}".',
             network=self._network_to_dict(network),
         )
 
@@ -172,7 +172,7 @@ class DimensionDataNetworkModule(DimensionDataModule):
 
         if not network:
             self.module.exit_json(
-                changed=False, msg='Network "%s" does not exist' % self.name, network=self._network_to_dict(network)
+                changed=False, msg=f'Network "{self.name}" does not exist', network=self._network_to_dict(network)
             )
 
         self._delete_network(network)
@@ -237,9 +237,9 @@ class DimensionDataNetworkModule(DimensionDataModule):
             deleted = self.driver.ex_delete_network_domain(network)
 
             if deleted:
-                self.module.exit_json(changed=True, msg="Deleted network with id %s" % network.id)
+                self.module.exit_json(changed=True, msg=f"Deleted network with id {network.id}")
 
-            self.module.fail_json("Unexpected failure deleting network with id %s" % network.id)
+            self.module.fail_json(f"Unexpected failure deleting network with id {network.id}")
 
         except DimensionDataAPIException as e:
             self.module.fail_json(msg=f"Failed to delete network: {e}", exception=traceback.format_exc())
plugins/modules/imc_rest.py

@@ -396,13 +396,13 @@ def main():
     try:
         # Prepare request data
         if content:
-            rawdata = content
+            rawdata = content.replace("\n", "")
         elif file_exists:
             with open(path, "r") as config_object:
-                rawdata = config_object.read()
+                rawdata = config_object.read().replace("\n", "")
 
         # Wrap the XML documents in a <root> element
-        xmldata = lxml.etree.fromstring("<root>%s</root>" % rawdata.replace("\n", ""))
+        xmldata = lxml.etree.fromstring(f"<root>{rawdata}</root>")
 
         # Handle each XML document separately in the same session
         for xmldoc in list(xmldata):
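This is presumably the "refactor backslash out of f-string for the sake of old Pythons" item from the commit message: before Python 3.12 the expression part of an f-string may not contain a backslash, so ``.replace("\n", "")`` cannot sit inside the braces and is moved onto the ``rawdata`` assignments instead. A minimal sketch:

rawdata = "<a/>\n<b/>\n"

# Not valid before Python 3.12 - backslash inside the f-string expression:
#   xmldata = f"<root>{rawdata.replace('\n', '')}</root>"

# Strip the newlines first, then interpolate the already-cleaned value:
rawdata = rawdata.replace("\n", "")
xmldata = f"<root>{rawdata}</root>"
assert xmldata == "<root><a/><b/></root>"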
jenkins_plugin

@@ -543,8 +543,8 @@ class JenkinsPlugin:
 
         if self.params["with_dependencies"]:
             install_script = (
-                'Jenkins.instance.updateCenter.getPlugin("%s")'
-                ".getNeededDependencies().each{it.deploy()}; %s" % (self.params["name"], install_script)
+                f'Jenkins.instance.updateCenter.getPlugin("{self.params["name"]}")'
+                f".getNeededDependencies().each{{it.deploy()}}; {install_script}"
             )
 
         script_data = {"script": install_script}
nmcli

@@ -2562,7 +2562,8 @@ class Nmcli:
             for property in unsupported_properties:
                 msg_options.append(f"{setting_key}.{property}")
 
-        msg = 'Invalid or unsupported option(s): "%s"' % '", "'.join(msg_options)
+        str_msg_options = '", "'.join(msg_options)
+        msg = f'Invalid or unsupported option(s): "{str_msg_options}"'
         if self.ignore_unsupported_suboptions:
             self.module.warn(msg)
         else:
oneandone_server

@@ -553,7 +553,7 @@ def _auto_increment_hostname(count, hostname):
     name-02, name-03, and so forth.
     """
     if "%" not in hostname:
-        hostname = "%s-%%01d" % hostname
+        hostname = "%s-%%01d" % hostname  # noqa
 
     return [hostname % i for i in range(1, count + 1)]
 
packet_device

@@ -423,7 +423,7 @@ def get_hostname_list(module):
     if re.search(r"%\d{0,2}d", hostname_spec):
         hostnames = [hostname_spec % i for i in count_range]
     elif count > 1:
-        hostname_spec = "%s%%02d" % hostname_spec
+        hostname_spec = "%s%%02d" % hostname_spec  # noqa
         hostnames = [hostname_spec % i for i in count_range]
 
     for hn in hostnames:
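The two ``# noqa`` hunks above keep ``%`` on purpose: the right-hand side builds a printf-style template (note the escaped ``%%01d`` / ``%%02d``) that is only expanded later with ``hostname % i``, so it cannot simply become an f-string. A small sketch of that two-stage expansion with illustrative values:

hostname = "web"

# Stage 1: embed the name, escaping %% so a %01d placeholder survives.
spec = "%s-%%01d" % hostname        # -> "web-%01d"

# Stage 2: expand the remaining placeholder once per instance.
names = [spec % i for i in range(1, 4)]
assert names == ["web-1", "web-2", "web-3"]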
pushbullet

@@ -159,9 +159,8 @@ def main():
         if device in devices_by_nickname:
             target = devices_by_nickname[device]
         else:
-            module.fail_json(
-                msg="Device '%s' not found. Available devices: '%s'" % (device, "', '".join(devices_by_nickname.keys()))
-            )
+            str_devices_by_nickname = "', '".join(devices_by_nickname)
+            module.fail_json(msg=f"Device '{device}' not found. Available devices: '{str_devices_by_nickname}'")
 
     # Search for given channel
     if channel is not None:
@@ -172,9 +171,8 @@ def main():
         if channel in channels_by_tag:
             target = channels_by_tag[channel]
         else:
-            module.fail_json(
-                msg="Channel '%s' not found. Available channels: '%s'" % (channel, "', '".join(channels_by_tag.keys()))
-            )
+            str_channels_by_tag = "', '".join(channels_by_tag)
+            module.fail_json(msg=f"Channel '{channel}' not found. Available channels: '{str_channels_by_tag}'")
 
     # If in check mode, exit saying that we succeeded
     if module.check_mode:
scaleway_user_data

@@ -104,7 +104,7 @@ def delete_user_data(compute_api, server_id, key):
     response = compute_api.delete(path=f"servers/{server_id}/user_data/{key}")
 
     if not response.ok:
-        msg = "Error during user_data deleting: (%s) %s" % response.status_code, response.body
+        msg = f"Error during user_data deleting: ({response.status_code}) {response.body}"
         compute_api.module.fail_json(msg=msg)
 
     return response
@@ -133,7 +133,7 @@ def core(module):
 
     user_data_list = compute_api.get(path=f"servers/{server_id}/user_data")
     if not user_data_list.ok:
-        msg = "Error during user_data fetching: %s %s" % user_data_list.status_code, user_data_list.body
+        msg = f"Error during user_data fetching: {user_data_list.status_code} {user_data_list.body}"
         compute_api.module.fail_json(msg=msg)
 
     present_user_data_keys = user_data_list.json["user_data"]
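These two hunks are presumably the "scaleway_user_data: fix bug and make it an f-string" item from the commit message: in the old lines ``%`` binds tighter than the comma, so the right-hand side parses as a tuple and the format string, which expects two arguments, only receives ``status_code`` and raises instead of producing a message. A minimal reproduction with illustrative values:

status_code, body = 404, "user data key not found"

# Old shape: this is really ("... (%s) %s" % status_code, body), and the
# formatting raises TypeError ("not enough arguments for format string").
try:
    msg = "Error during user_data deleting: (%s) %s" % status_code, body
except TypeError as exc:
    print(exc)

# New shape: one f-string, no precedence trap.
msg = f"Error during user_data deleting: ({status_code}) {body}"
assert msg == "Error during user_data deleting: (404) user data key not found"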
sensu_silence

@@ -125,7 +125,7 @@ def query(module, url, check, subscription):
     response, info = fetch_url(module, url, method="GET", headers=headers, data=json.dumps(request_data))
 
     if info["status"] == 500:
-        module.fail_json(msg="Failed to query silence %s. Reason: %s" % (subscription, info))
+        module.fail_json(msg=f"Failed to query silence {subscription}. Reason: {info}")
 
     try:
         json_out = json.loads(response.read())
@@ -172,7 +172,7 @@ def clear(module, url, check, subscription):
     response, info = fetch_url(module, url, method="POST", headers=headers, data=json.dumps(request_data))
 
     if info["status"] != 204:
-        module.fail_json(msg="Failed to silence %s. Reason: %s" % (subscription, info))
+        module.fail_json(msg=f"Failed to silence {subscription}. Reason: {info}")
 
     try:
         json_out = json.loads(response.read())
@@ -221,7 +221,7 @@ def create(module, url, check, creator, expire, expire_on_resolve, reason, subsc
     response, info = fetch_url(module, url, method="POST", headers=headers, data=json.dumps(request_data))
 
     if info["status"] != 201:
-        module.fail_json(msg="Failed to silence %s. Reason: %s" % (subscription, info["msg"]))
+        module.fail_json(msg=f"Failed to silence {subscription}. Reason: {info['msg']}")
 
     try:
         json_out = json.loads(response.read())
xenserver_guest

@@ -2091,9 +2091,9 @@ class XenServerVM(XenServerObject):
             if unit in disk_units:
                 return int(size * (1024 ** disk_units[unit]))
             else:
-                self.module.fail_json(
-                    msg="%s'%s' is not a supported unit for disk size! Supported units are ['%s']."
-                    % (msg_prefix, unit, "', '".join(sorted(disk_units.keys(), key=lambda key: disk_units[key])))
-                )
+                str_supported_units = "', '".join(sorted(disk_units.keys(), key=lambda key: disk_units[key]))
+                self.module.fail_json(
+                    msg=f"{msg_prefix}'{unit}' is not a supported unit for disk size! Supported units are ['{str_supported_units}']."
+                )
         else:
             return None
ruff configuration (ignore list)

@@ -22,7 +22,6 @@ ignore = [
     "UP045", # Use `X | None` for type annotations - needs Python 3.10+
     # To fix:
     "UP024", # Replace aliased errors with `OSError`
-    "UP031", # Use format specifiers instead of percent format
     "UP041", # Replace aliased errors with `TimeoutError`
     "B026", # star-arg-unpacking-after-keyword-arg
     "SIM102", # collapsible-if
dzdo become plugin tests

@@ -65,8 +65,7 @@ def test_dzdo(mocker, parser, reset_cli_args):
     print(cmd)
     assert (
         re.match(
-            """%s %s -p %s -u %s %s -c 'echo %s; %s'"""
-            % (
+            """{} {} -p {} -u {} {} -c 'echo {}; {}'""".format(
                 dzdo_exe,
                 dzdo_flags,
                 r"\"\[dzdo via ansible, key=.+?\] password:\"",
@@ -115,8 +114,7 @@ def test_dzdo_varoptions(mocker, parser, reset_cli_args):
     print(cmd)
     assert (
         re.match(
-            """%s %s -p %s -u %s %s -c 'echo %s; %s'"""
-            % (
+            """{} {} -p {} -u {} {} -c 'echo {}; {}'""".format(
                 dzdo_exe,
                 dzdo_flags,
                 r"\"\[dzdo via ansible, key=.+?\] password:\"",
sudosu become plugin tests

@@ -52,8 +52,7 @@ def test_sudosu(mocker, parser, reset_cli_args):
     print(cmd)
     assert (
         re.match(
-            """%s %s -p "%s" su -l %s %s -c 'echo %s; %s'"""
-            % (
+            """{} {} -p "{}" su -l {} {} -c 'echo {}; {}'""".format(
                 sudo_exe,
                 sudo_flags.replace("-n", ""),
                 r"\[sudo via ansible, key=.+?\] password:",
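In these test assertions the triple-quoted ``%`` templates become ``str.format()`` calls rather than f-strings, which leaves the multi-line argument tuples (executable paths, flags, backslash-heavy prompt regexes) exactly as they were. A small sketch of the same rewrite with illustrative values:

dzdo_exe = "dzdo"
dzdo_flags = "-H -S"
prompt = r"\"\[dzdo via ansible, key=.+?\] password:\""

# Before: printf-style template applied to a tuple of arguments
pattern_old = """%s %s -p %s""" % (dzdo_exe, dzdo_flags, prompt)

# After: the same template with {} placeholders and str.format()
pattern_new = """{} {} -p {}""".format(dzdo_exe, dzdo_flags, prompt)

assert pattern_old == pattern_new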