Mirror of https://github.com/ansible-collections/community.general.git (synced 2026-02-03 23:41:51 +00:00)
[PR #11223/d550baac backport][stable-12] fix ruff case UP031 (#11226)
fix ruff case UP031 (#11223)
* fix ruff case UP031
* refactor backslash out of f-string for the sake of old Pythons
* add changelog frag
* Update plugins/modules/imc_rest.py
* scaleway_user_data: fix bug and make it an f-string
* reformat
---------
(cherry picked from commit d550baacfa)
Co-authored-by: Alexei Znamensky <103110+russoz@users.noreply.github.com>
Co-authored-by: Felix Fontein <felix@fontein.de>
This commit is contained in:
parent 17d2a089a0
commit 8ae47d3a8d

22 changed files with 72 additions and 63 deletions
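The "refactor backslash out of f-string" bullet in the commit message refers to a limit of interpreters older than Python 3.12 (PEP 701): the expression part of an f-string may not contain a backslash, and it may not reuse the f-string's own quote character. A minimal sketch of the workaround this PR applies in spots such as imc_rest and nmcli, with illustrative values:

lines = ["<a/>", "<b/>"]

# xml = f"<root>{'\n'.join(lines)}</root>"   # SyntaxError before Python 3.12:
#                                            # backslash inside the {...} expression

# Workaround: do the backslash- or quote-sensitive work first,
# then interpolate a plain variable.
joined = "\n".join(lines)
xml = f"<root>{joined}</root>"
print(xml)  # -> '<root><a/>\n<b/></root>'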
changelogs/fragments/11223-ruff-cases-9.yml (new file, 19 additions)

@@ -0,0 +1,19 @@
+minor_changes:
+  - hashids filter - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - keycloak module_utils - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - atomic_container modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - atomic_image modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - bitbucket_access_key modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - bitbucket_pipeline_key_pair modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - bitbucket_pipeline_known_host modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - bitbucket_pipeline_variable modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - dimensiondata_network modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - imc_rest modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - jenkins_plugin modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - nmcli modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - oneandone_server modules - mark ``%`` templating as ``noqa`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - packet_device modules - mark ``%`` templating as ``noqa`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - pushbullet modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - scaleway_user_data modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - sensu_silence modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
+  - xenserver_guest modules - replace ``%`` templating with f-strings or ``format()`` (https://github.com/ansible-collections/community.general/pull/11223).
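For context, ruff rule UP031 flags printf-style % interpolation that can be written as an f-string or str.format() call. A minimal sketch of the mechanical rewrite these entries describe, using made-up values:

backend = "ostree"
container = "etcd"

old = ["--storage=%s" % backend, "--name=%s" % container]   # percent formatting, flagged by UP031
new = [f"--storage={backend}", f"--name={container}"]       # equivalent f-strings

assert old == new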
@@ -9,7 +9,6 @@ from ansible.errors import (
     AnsibleFilterError,
 )

-from ansible.module_utils.common.text.converters import to_native
 from ansible.module_utils.common.collections import is_sequence

 try:
@@ -34,10 +33,8 @@ def initialize_hashids(**kwargs):
     try:
         return Hashids(**params)
     except TypeError as e:
-        raise AnsibleFilterError(
-            "The provided parameters %s are invalid: %s"
-            % (", ".join(["%s=%s" % (k, v) for k, v in params.items()]), to_native(e))
-        ) from e
+        str_params = ", ".join([f"{k}={v}" for k, v in params.items()])
+        raise AnsibleFilterError(f"The provided parameters {str_params} are invalid: {e}") from e


 def hashids_encode(nums, salt=None, alphabet=None, min_length=None):

@@ -3010,7 +3010,7 @@ class KeycloakAPI:
     def get_authz_permission_by_name(self, name, client_id, realm):
         """Get authorization permission by name"""
         url = URL_AUTHZ_POLICIES.format(url=self.baseurl, client_id=client_id, realm=realm)
-        search_url = "%s/search?name=%s" % (url, name.replace(" ", "%20"))
+        search_url = f"{url}/search?name={name.replace(' ', '%20')}"

         try:
             return self._request_and_deserialize(search_url, method="GET")
@@ -3056,7 +3056,7 @@ class KeycloakAPI:
     def get_authz_resource_by_name(self, name, client_id, realm):
         """Get authorization resource by name"""
         url = URL_AUTHZ_RESOURCES.format(url=self.baseurl, client_id=client_id, realm=realm)
-        search_url = "%s/search?name=%s" % (url, name.replace(" ", "%20"))
+        search_url = f"{url}/search?name={name.replace(' ', '%20')}"

         try:
             return self._request_and_deserialize(search_url, method="GET")
@@ -3066,7 +3066,7 @@ class KeycloakAPI:
     def get_authz_policy_by_name(self, name, client_id, realm):
         """Get authorization policy by name"""
         url = URL_AUTHZ_POLICIES.format(url=self.baseurl, client_id=client_id, realm=realm)
-        search_url = "%s/search?name=%s&permission=false" % (url, name.replace(" ", "%20"))
+        search_url = f"{url}/search?name={name.replace(' ', '%20')}"

         try:
             return self._request_and_deserialize(search_url, method="GET")

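A small note on the Keycloak hunks above: % has no special meaning inside an f-string, so the literal %20 URL escape survives the rewrite unchanged. A sketch with illustrative values:

url = "https://keycloak.example.com/authz/policies"  # stand-in base URL
name = "my policy"
search_url = f"{url}/search?name={name.replace(' ', '%20')}"
print(search_url)  # https://keycloak.example.com/authz/policies/search?name=my%20policy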
@@ -108,10 +108,10 @@ from ansible.module_utils.basic import AnsibleModule
 def do_install(module, mode, rootfs, container, image, values_list, backend):
     system_list = ["--system"] if mode == "system" else []
     user_list = ["--user"] if mode == "user" else []
-    rootfs_list = ["--rootfs=%s" % rootfs] if rootfs else []
+    rootfs_list = [f"--rootfs={rootfs}"] if rootfs else []
     atomic_bin = module.get_bin_path("atomic")
     args = (
-        [atomic_bin, "install", "--storage=%s" % backend, "--name=%s" % container]
+        [atomic_bin, "install", f"--storage={backend}", f"--name={container}"]
         + system_list
         + user_list
         + rootfs_list
@@ -128,7 +128,7 @@ def do_install(module, mode, rootfs, container, image, values_list, backend):

 def do_update(module, container, image, values_list):
     atomic_bin = module.get_bin_path("atomic")
-    args = [atomic_bin, "containers", "update", "--rebase=%s" % image] + values_list + [container]
+    args = [atomic_bin, "containers", "update", f"--rebase={image}"] + values_list + [container]
     rc, out, err = module.run_command(args, check_rc=False)
     if rc != 0:
         module.fail_json(rc=rc, msg=err)
@@ -139,7 +139,7 @@ def do_update(module, container, image, values_list):

 def do_uninstall(module, name, backend):
     atomic_bin = module.get_bin_path("atomic")
-    args = [atomic_bin, "uninstall", "--storage=%s" % backend, name]
+    args = [atomic_bin, "uninstall", f"--storage={backend}", name]
     rc, out, err = module.run_command(args, check_rc=False)
     if rc != 0:
         module.fail_json(rc=rc, msg=err)
@@ -169,7 +169,7 @@ def core(module):
     atomic_bin = module.get_bin_path("atomic")
     module.run_command_environ_update = dict(LANG="C", LC_ALL="C", LC_MESSAGES="C")

-    values_list = ["--set=%s" % x for x in values] if values else []
+    values_list = [f"--set={x}" for x in values] if values else []

     args = [
         atomic_bin,
@@ -179,9 +179,9 @@ def core(module):
         "-n",
         "--all",
         "-f",
-        "backend=%s" % backend,
+        f"backend={backend}",
         "-f",
-        "container=%s" % name,
+        f"container={name}",
     ]
     rc, out, err = module.run_command(args, check_rc=False)
     if rc != 0:

@@ -107,14 +107,14 @@ def core(module):

     if backend:
         if state == "present" or state == "latest":
-            args = [atomic_bin, "pull", "--storage=%s" % backend, image]
+            args = [atomic_bin, "pull", f"--storage={backend}", image]
             rc, out, err = module.run_command(args, check_rc=False)
             if rc < 0:
                 module.fail_json(rc=rc, msg=err)
             else:
                 out_run = ""
                 if started:
-                    args = [atomic_bin, "run", "--storage=%s" % backend, image]
+                    args = [atomic_bin, "run", f"--storage={backend}", image]
                     rc, out_run, err = module.run_command(args, check_rc=False)
                     if rc < 0:
                         module.fail_json(rc=rc, msg=err)
@@ -122,7 +122,7 @@ def core(module):
                 changed = "Extracting" in out or "Copying blob" in out
                 module.exit_json(msg=(out + out_run), changed=changed)
         elif state == "absent":
-            args = [atomic_bin, "images", "delete", "--storage=%s" % backend, image]
+            args = [atomic_bin, "images", "delete", f"--storage={backend}", image]
             rc, out, err = module.run_command(args, check_rc=False)
             if rc < 0:
                 module.fail_json(rc=rc, msg=err)

@@ -83,9 +83,8 @@ error_messages = {
 }

 BITBUCKET_API_ENDPOINTS = {
-    "deploy-key-list": "%s/2.0/repositories/{workspace}/{repo_slug}/deploy-keys/" % BitbucketHelper.BITBUCKET_API_URL,
-    "deploy-key-detail": "%s/2.0/repositories/{workspace}/{repo_slug}/deploy-keys/{key_id}"
-    % BitbucketHelper.BITBUCKET_API_URL,
+    "deploy-key-list": f"{BitbucketHelper.BITBUCKET_API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/deploy-keys/",
+    "deploy-key-detail": f"{BitbucketHelper.BITBUCKET_API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/deploy-keys/{{key_id}}",
 }

@@ -78,8 +78,7 @@ error_messages = {
 }

 BITBUCKET_API_ENDPOINTS = {
-    "ssh-key-pair": "%s/2.0/repositories/{workspace}/{repo_slug}/pipelines_config/ssh/key_pair"
-    % BitbucketHelper.BITBUCKET_API_URL,
+    "ssh-key-pair": f"{BitbucketHelper.BITBUCKET_API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/pipelines_config/ssh/key_pair",
 }

@@ -102,10 +102,10 @@ error_messages = {
 }

 BITBUCKET_API_ENDPOINTS = {
-    "known-host-list": "%s/2.0/repositories/{workspace}/{repo_slug}/pipelines_config/ssh/known_hosts/"
-    % BitbucketHelper.BITBUCKET_API_URL,
-    "known-host-detail": "%s/2.0/repositories/{workspace}/{repo_slug}/pipelines_config/ssh/known_hosts/{known_host_uuid}"
-    % BitbucketHelper.BITBUCKET_API_URL,
+    "known-host-list": f"{BitbucketHelper.BITBUCKET_API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/pipelines_config/ssh/known_hosts/",
+    "known-host-detail": (
+        f"{BitbucketHelper.BITBUCKET_API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/pipelines_config/ssh/known_hosts/{{known_host_uuid}}"
+    ),
 }

@@ -89,10 +89,10 @@ error_messages = {
 }

 BITBUCKET_API_ENDPOINTS = {
-    "pipeline-variable-list": "%s/2.0/repositories/{workspace}/{repo_slug}/pipelines_config/variables/"
-    % BitbucketHelper.BITBUCKET_API_URL,
-    "pipeline-variable-detail": "%s/2.0/repositories/{workspace}/{repo_slug}/pipelines_config/variables/{variable_uuid}"
-    % BitbucketHelper.BITBUCKET_API_URL,
+    "pipeline-variable-list": f"{BitbucketHelper.BITBUCKET_API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/pipelines_config/variables/",
+    "pipeline-variable-detail": (
+        f"{BitbucketHelper.BITBUCKET_API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/pipelines_config/variables/{{variable_uuid}}"
+    ),
 }

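The four Bitbucket endpoint tables above lean on brace escaping: doubling the braces in an f-string leaves {workspace} and {repo_slug} as literal placeholders that the modules fill in later with .format(). A sketch of the two-stage pattern with illustrative values:

BITBUCKET_API_URL = "https://api.bitbucket.org"  # stand-in for BitbucketHelper.BITBUCKET_API_URL

# Stage 1: the f-string interpolates the base URL now; {{...}} stays literal.
template = f"{BITBUCKET_API_URL}/2.0/repositories/{{workspace}}/{{repo_slug}}/deploy-keys/"

# Stage 2: each request fills the remaining placeholders.
url = template.format(workspace="acme", repo_slug="infra")
print(url)  # https://api.bitbucket.org/2.0/repositories/acme/infra/deploy-keys/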
@@ -163,7 +163,7 @@ class DimensionDataNetworkModule(DimensionDataModule):

         self.module.exit_json(
             changed=True,
-            msg='Created network "%s" in datacenter "%s".' % (self.name, self.location),
+            msg=f'Created network "{self.name}" in datacenter "{self.location}".',
             network=self._network_to_dict(network),
         )

@@ -172,7 +172,7 @@ class DimensionDataNetworkModule(DimensionDataModule):

         if not network:
             self.module.exit_json(
-                changed=False, msg='Network "%s" does not exist' % self.name, network=self._network_to_dict(network)
+                changed=False, msg=f'Network "{self.name}" does not exist', network=self._network_to_dict(network)
             )

         self._delete_network(network)
@@ -237,9 +237,9 @@ class DimensionDataNetworkModule(DimensionDataModule):
             deleted = self.driver.ex_delete_network_domain(network)

             if deleted:
-                self.module.exit_json(changed=True, msg="Deleted network with id %s" % network.id)
+                self.module.exit_json(changed=True, msg=f"Deleted network with id {network.id}")

-            self.module.fail_json("Unexpected failure deleting network with id %s" % network.id)
+            self.module.fail_json(f"Unexpected failure deleting network with id {network.id}")

         except DimensionDataAPIException as e:
             self.module.fail_json(msg=f"Failed to delete network: {e}", exception=traceback.format_exc())

@@ -396,13 +396,13 @@ def main():
     try:
         # Prepare request data
         if content:
-            rawdata = content
+            rawdata = content.replace("\n", "")
         elif file_exists:
             with open(path, "r") as config_object:
-                rawdata = config_object.read()
+                rawdata = config_object.read().replace("\n", "")

         # Wrap the XML documents in a <root> element
-        xmldata = lxml.etree.fromstring("<root>%s</root>" % rawdata.replace("\n", ""))
+        xmldata = lxml.etree.fromstring(f"<root>{rawdata}</root>")

         # Handle each XML document separately in the same session
         for xmldoc in list(xmldata):

@@ -543,8 +543,8 @@ class JenkinsPlugin:

         if self.params["with_dependencies"]:
             install_script = (
-                'Jenkins.instance.updateCenter.getPlugin("%s")'
-                ".getNeededDependencies().each{it.deploy()}; %s" % (self.params["name"], install_script)
+                f'Jenkins.instance.updateCenter.getPlugin("{self.params["name"]}")'
+                f".getNeededDependencies().each{{it.deploy()}}; {install_script}"
             )

         script_data = {"script": install_script}

@@ -2562,7 +2562,8 @@ class Nmcli:
             for property in unsupported_properties:
                 msg_options.append(f"{setting_key}.{property}")

-        msg = 'Invalid or unsupported option(s): "%s"' % '", "'.join(msg_options)
+        str_msg_options = '", "'.join(msg_options)
+        msg = f'Invalid or unsupported option(s): "{str_msg_options}"'
         if self.ignore_unsupported_suboptions:
             self.module.warn(msg)
         else:

@@ -553,7 +553,7 @@ def _auto_increment_hostname(count, hostname):
     name-02, name-03, and so forth.
     """
     if "%" not in hostname:
-        hostname = "%s-%%01d" % hostname
+        hostname = "%s-%%01d" % hostname  # noqa

     return [hostname % i for i in range(1, count + 1)]

@@ -423,7 +423,7 @@ def get_hostname_list(module):
     if re.search(r"%\d{0,2}d", hostname_spec):
         hostnames = [hostname_spec % i for i in count_range]
     elif count > 1:
-        hostname_spec = "%s%%02d" % hostname_spec
+        hostname_spec = "%s%%02d" % hostname_spec  # noqa
         hostnames = [hostname_spec % i for i in count_range]

     for hn in hostnames:

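The oneandone_server and packet_device hunks keep the % templating and add # noqa instead of converting it: the pattern is built first and applied to a counter afterwards, so it is a reusable format template rather than a one-shot interpolation, and an f-string would not be equivalent. A sketch of the behaviour being preserved, with an illustrative hostname:

hostname_spec = "web"
if "%" not in hostname_spec:
    hostname_spec = "%s%%02d" % hostname_spec  # -> "web%02d"; kept as %-templating on purpose
hostnames = [hostname_spec % i for i in range(1, 4)]
print(hostnames)  # ['web01', 'web02', 'web03']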
@@ -159,9 +159,8 @@ def main():
         if device in devices_by_nickname:
             target = devices_by_nickname[device]
         else:
-            module.fail_json(
-                msg="Device '%s' not found. Available devices: '%s'" % (device, "', '".join(devices_by_nickname.keys()))
-            )
+            str_devices_by_nickname = "', '".join(devices_by_nickname)
+            module.fail_json(msg=f"Device '{device}' not found. Available devices: '{str_devices_by_nickname}'")

     # Search for given channel
     if channel is not None:
@@ -172,9 +171,8 @@ def main():
         if channel in channels_by_tag:
             target = channels_by_tag[channel]
         else:
-            module.fail_json(
-                msg="Channel '%s' not found. Available channels: '%s'" % (channel, "', '".join(channels_by_tag.keys()))
-            )
+            str_channels_by_tag = "', '".join(channels_by_tag)
+            module.fail_json(msg=f"Channel '{channel}' not found. Available channels: '{str_channels_by_tag}'")

     # If in check mode, exit saying that we succeeded
     if module.check_mode:

@@ -104,7 +104,7 @@ def delete_user_data(compute_api, server_id, key):
     response = compute_api.delete(path=f"servers/{server_id}/user_data/{key}")

     if not response.ok:
-        msg = "Error during user_data deleting: (%s) %s" % response.status_code, response.body
+        msg = f"Error during user_data deleting: ({response.status_code}) {response.body}"
         compute_api.module.fail_json(msg=msg)

     return response
@@ -133,7 +133,7 @@ def core(module):

     user_data_list = compute_api.get(path=f"servers/{server_id}/user_data")
     if not user_data_list.ok:
-        msg = "Error during user_data fetching: %s %s" % user_data_list.status_code, user_data_list.body
+        msg = f"Error during user_data fetching: {user_data_list.status_code} {user_data_list.body}"
         compute_api.module.fail_json(msg=msg)

     present_user_data_keys = user_data_list.json["user_data"]

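The scaleway_user_data hunks are the "fix bug" part of the commit message: in the removed lines, % binds more tightly than the comma, so Python first evaluates the format string against response.status_code alone and raises TypeError ("not enough arguments for format string") on the error path, masking the real failure. The f-string interpolates both values as intended. A sketch with a stand-in response object:

class FakeResponse:  # stand-in for the Scaleway API response wrapper
    ok = False
    status_code = 500
    body = "internal error"

response = FakeResponse()

# Old form (raises TypeError before fail_json can report anything):
# msg = "Error during user_data deleting: (%s) %s" % response.status_code, response.body

# Fixed form:
msg = f"Error during user_data deleting: ({response.status_code}) {response.body}"
print(msg)  # Error during user_data deleting: (500) internal error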
@@ -125,7 +125,7 @@ def query(module, url, check, subscription):
     response, info = fetch_url(module, url, method="GET", headers=headers, data=json.dumps(request_data))

     if info["status"] == 500:
-        module.fail_json(msg="Failed to query silence %s. Reason: %s" % (subscription, info))
+        module.fail_json(msg=f"Failed to query silence {subscription}. Reason: {info}")

     try:
         json_out = json.loads(response.read())
@@ -172,7 +172,7 @@ def clear(module, url, check, subscription):
     response, info = fetch_url(module, url, method="POST", headers=headers, data=json.dumps(request_data))

     if info["status"] != 204:
-        module.fail_json(msg="Failed to silence %s. Reason: %s" % (subscription, info))
+        module.fail_json(msg=f"Failed to silence {subscription}. Reason: {info}")

     try:
         json_out = json.loads(response.read())
@@ -221,7 +221,7 @@ def create(module, url, check, creator, expire, expire_on_resolve, reason, subscription):
     response, info = fetch_url(module, url, method="POST", headers=headers, data=json.dumps(request_data))

     if info["status"] != 201:
-        module.fail_json(msg="Failed to silence %s. Reason: %s" % (subscription, info["msg"]))
+        module.fail_json(msg=f"Failed to silence {subscription}. Reason: {info['msg']}")

     try:
         json_out = json.loads(response.read())

@@ -2091,9 +2091,9 @@ class XenServerVM(XenServerObject):
             if unit in disk_units:
                 return int(size * (1024 ** disk_units[unit]))
             else:
+                str_supported_units = "', '".join(sorted(disk_units.keys(), key=lambda key: disk_units[key]))
                 self.module.fail_json(
-                    msg="%s'%s' is not a supported unit for disk size! Supported units are ['%s']."
-                    % (msg_prefix, unit, "', '".join(sorted(disk_units.keys(), key=lambda key: disk_units[key])))
+                    msg=f"{msg_prefix}'{unit}' is not a supported unit for disk size! Supported units are ['{str_supported_units}']."
                 )
         else:
             return None

@@ -22,7 +22,6 @@ ignore = [
     "UP045", # Use `X | None` for type annotations - needs Python 3.10+
     # To fix:
     "UP024", # Replace aliased errors with `OSError`
-    "UP031", # Use format specifiers instead of percent format
     "UP041", # Replace aliased errors with `TimeoutError`
     "B026", # star-arg-unpacking-after-keyword-arg
     "SIM102", # collapsible-if

@@ -65,8 +65,7 @@ def test_dzdo(mocker, parser, reset_cli_args):
     print(cmd)
     assert (
         re.match(
-            """%s %s -p %s -u %s %s -c 'echo %s; %s'"""
-            % (
+            """{} {} -p {} -u {} {} -c 'echo {}; {}'""".format(
                 dzdo_exe,
                 dzdo_flags,
                 r"\"\[dzdo via ansible, key=.+?\] password:\"",
@@ -115,8 +114,7 @@ def test_dzdo_varoptions(mocker, parser, reset_cli_args):
     print(cmd)
     assert (
         re.match(
-            """%s %s -p %s -u %s %s -c 'echo %s; %s'"""
-            % (
+            """{} {} -p {} -u {} {} -c 'echo {}; {}'""".format(
                 dzdo_exe,
                 dzdo_flags,
                 r"\"\[dzdo via ansible, key=.+?\] password:\"",

@@ -52,8 +52,7 @@ def test_sudosu(mocker, parser, reset_cli_args):
     print(cmd)
     assert (
         re.match(
-            """%s %s -p "%s" su -l %s %s -c 'echo %s; %s'"""
-            % (
+            """{} {} -p "{}" su -l {} {} -c 'echo {}; {}'""".format(
                 sudo_exe,
                 sudo_flags.replace("-n", ""),
                 r"\[sudo via ansible, key=.+?\] password:",

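The two become-plugin tests above switch from % to .format() rather than to f-strings, which keeps the long expected-command template on one line and its many arguments in a readable column; the positional {} placeholders map one-to-one onto the old %s slots. A reduced sketch with illustrative values:

import re

exe = "dzdo"
flags = "-H -S"
prompt = r"\[dzdo via ansible, key=.+?\] password:"

# was: "%s %s -p %s" % (exe, flags, prompt)
pattern = "{} {} -p {}".format(exe, flags, prompt)
assert re.match(pattern, "dzdo -H -S -p [dzdo via ansible, key=abc] password:")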