mirror of https://github.com/ansible-collections/community.general.git synced 2026-02-04 07:51:50 +00:00

batch 2 - update Python idiom to 3.7 using pyupgrade (#11342)

* batch 2 - update Python idiom to 3.7 using pyupgrade

* Apply suggestions from code review
Alexei Znamensky 2025-12-30 22:50:16 +13:00 committed by GitHub
parent 9e363c9f94
commit 266d9d3fb0
GPG key ID: B5690EEEBB952194
19 changed files with 64 additions and 70 deletions
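
All of the hunks below are mechanical pyupgrade rewrites for a Python 3.7+ baseline (zero-argument super(), f-strings, set literals and comprehensions, bytes literals, dropping the default "r" mode from open()). A minimal, self-contained before/after sketch of those idioms — the names are illustrative, not code from the changed files:

# Illustrative sketch of the idioms pyupgrade rewrites with --py37-plus;
# the names below are made up, not taken from the files in this commit.

class Base:
    def run(self):
        return "base"


class Child(Base):
    def run(self):
        # before: return super(Child, self).run()
        return super().run()  # zero-argument super()


host, port = "127.0.0.1", 8080
# before: url = "http://%s:%s" % (host, port)  or  "{0}:{1}".format(host, port)
url = f"http://{host}:{port}"  # f-string

# before: allowed = set(["a", "b"])
allowed = {"a", "b"}  # set literal

# before: payload = "fr".encode()
payload = b"fr"  # bytes literal

# before: with open(__file__, "r") as f:
with open(__file__) as f:  # "r" is the default mode
    first_line = f.readline()

print(Child().run(), url, allowed, payload, first_line.strip())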

View file

@@ -31,7 +31,7 @@ class ActionModule(ActionBase):
         if task_vars is None:
             task_vars = dict()
-        result = super(ActionModule, self).run(tmp, task_vars)
+        result = super().run(tmp, task_vars)
         del tmp  # tmp no longer has any effect
         if "that" not in self._task.args:

View file

@@ -128,4 +128,4 @@ STATIC_URL = "/static/"
 STATIC_ROOT = "/tmp/django-static"
 if "DJANGO_ANSIBLE_RAISE" in os.environ:
-    raise ValueError("DJANGO_ANSIBLE_RAISE={0}".format(os.environ["DJANGO_ANSIBLE_RAISE"]))
+    raise ValueError("DJANGO_ANSIBLE_RAISE={}".format(os.environ["DJANGO_ANSIBLE_RAISE"]))

View file

@@ -128,4 +128,4 @@ STATIC_URL = "/static/"
 STATIC_ROOT = "/tmp/django-static"
 if "DJANGO_ANSIBLE_RAISE" in os.environ:
-    raise ValueError("DJANGO_ANSIBLE_RAISE={0}".format(os.environ["DJANGO_ANSIBLE_RAISE"]))
+    raise ValueError("DJANGO_ANSIBLE_RAISE={}".format(os.environ["DJANGO_ANSIBLE_RAISE"]))

View file

@@ -20,7 +20,7 @@ except ImportError:
 # Argument parsing
 if len(sys.argv) != 4:
-    print("Syntax: {0} <bind> <port> <path>".format(sys.argv[0]))
+    print(f"Syntax: {sys.argv[0]} <bind> <port> <path>")
     sys.exit(-1)
 HOST, PORT, PATH = sys.argv[1:4]

View file

@@ -8,8 +8,8 @@ import lmdb
 map_size = 1024 * 100
 env = lmdb.open("./jp.mdb", map_size=map_size)
 with env.begin(write=True) as txn:
-    txn.put("fr".encode(), "France".encode())
-    txn.put("nl".encode(), "Netherlands".encode())
-    txn.put("es".encode(), "Spain".encode())
-    txn.put("be".encode(), "Belgium".encode())
-    txn.put("lu".encode(), "Luxembourg".encode())
+    txn.put(b"fr", b"France")
+    txn.put(b"nl", b"Netherlands")
+    txn.put(b"es", b"Spain")
+    txn.put(b"be", b"Belgium")
+    txn.put(b"lu", b"Luxembourg")

View file

@@ -46,7 +46,7 @@ if len(sys.argv) > 3:
     ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
 if HAS_TLS and ssl_ctx is not None:
-    print("Using %s and %s" % (certfile, keyfile))
+    print(f"Using {certfile} and {keyfile}")
     ssl_ctx.load_cert_chain(certfile=certfile, keyfile=keyfile)
 print("Start SMTP server on port", port1)

View file

@@ -32,7 +32,7 @@ class EchoServer(BaseHTTPRequestHandler):
 def run_webserver():
     webServer = HTTPServer((hostname, server_port), EchoServer)
-    print("Server started http://%s:%s" % (hostname, server_port))
+    print(f"Server started http://{hostname}:{server_port}")
     try:
         webServer.serve_forever()

View file

@@ -17,7 +17,7 @@ username = sys.argv[3]
 password = sys.argv[4]
 if username:
-    url = "http://%s:%s@127.0.0.1:9001/RPC2" % (quote(username, safe=""), quote(password, safe=""))
+    url = "http://{}:{}@127.0.0.1:9001/RPC2".format(quote(username, safe=""), quote(password, safe=""))
 else:
     url = "http://127.0.0.1:9001/RPC2"

View file

@@ -17,13 +17,13 @@ def main():
     with open(".azure-pipelines/azure-pipelines.yml", "rb") as f:
         azp = yaml.safe_load(f)
-    allowed_targets = set(["azp/generic/1"])
+    allowed_targets = {"azp/generic/1"}
     for stage in azp["stages"]:
         if stage["stage"].startswith(("Sanity", "Unit", "Generic", "Summary")):
             continue
         for job in stage["jobs"]:
             for group in job["parameters"]["groups"]:
-                allowed_targets.add("azp/posix/{0}".format(group))
+                allowed_targets.add(f"azp/posix/{group}")
     paths = glob.glob("tests/integration/targets/*/aliases")
@@ -31,7 +31,7 @@ def main():
     for path in paths:
         targets = []
         skip = False
-        with open(path, "r") as f:
+        with open(path) as f:
             for line in f:
                 if "#" in line:
                     line = line[: line.find("#")]
@@ -56,11 +56,11 @@ def main():
         if not targets:
             if "targets/setup_" in path:
                 continue
-            print("%s: %s" % (path, "found no targets"))
+            print(f"{path}: found no targets")
            has_errors = True
         for target in targets:
             if target not in allowed_targets:
-                print("%s: %s" % (path, 'found invalid target "{0}"'.format(target)))
+                print(f'{path}: found invalid target "{target}"')
                 has_errors = True
     return 1 if has_errors else 0

View file

@@ -70,7 +70,7 @@ class BotmetaCheck:
         try:
             documentation = []
             in_docs = False
-            with open(filename, "r", encoding="utf-8") as f:
+            with open(filename, encoding="utf-8") as f:
                 for line in f:
                     if line.startswith("DOCUMENTATION ="):
                         in_docs = True

View file

@@ -34,13 +34,13 @@ def inventory():
 def load_txt_data(path):
-    with open(path, "r") as f:
+    with open(path) as f:
         s = f.read()
     return s
 def load_yml_data(path):
-    with open(path, "r") as f:
+    with open(path) as f:
         d = yaml.safe_load(f)
     return d

View file

@@ -80,7 +80,7 @@ def test_verify_file_bad_config(inventory):
 def get_vm_pool_json():
-    with open("tests/unit/plugins/inventory/fixtures/opennebula_inventory.json", "r") as json_file:
+    with open("tests/unit/plugins/inventory/fixtures/opennebula_inventory.json") as json_file:
         jsondata = json.load(json_file)
     data = type("pyone.bindings.VM_POOLSub", (object,), {"VM": []})()
@@ -349,15 +349,13 @@ keyed_groups:
     # note the vm_pool (and json data file) has four hosts,
     # but the options above asks ansible to filter one out
     assert len(get_vm_pool_json().VM) == 4
-    assert set(vm.NAME for vm in get_vm_pool_json().VM) == set(
-        [
+    assert {vm.NAME for vm in get_vm_pool_json().VM} == {
         "terraform_demo_00",
         "terraform_demo_01",
         "terraform_demo_srv_00",
         "bs-windows",
-        ]
-    )
-    assert set(im._inventory.hosts) == set(["terraform_demo_00", "terraform_demo_01", "terraform_demo_srv_00"])
+    }
+    assert set(im._inventory.hosts) == {"terraform_demo_00", "terraform_demo_01", "terraform_demo_srv_00"}
     host_demo00 = im._inventory.get_host("terraform_demo_00")
     host_demo01 = im._inventory.get_host("terraform_demo_01")

View file

@@ -14,7 +14,7 @@ from ansible_collections.community.general.plugins.lookup.onepassword import (
 def load_file(file):
-    with open((os.path.join(os.path.dirname(__file__), "onepassword_fixtures", file)), "r") as f:
+    with open(os.path.join(os.path.dirname(__file__), "onepassword_fixtures", file)) as f:
         return json.loads(f.read())

View file

@@ -205,7 +205,7 @@ class TestLookupModule(unittest.TestCase):
     def test_bitwarden_plugin_duplicates(self):
         # There are two records with name dupe_name; we need to be order-insensitive with
         # checking what was retrieved.
-        self.assertEqual(set(["b", "d"]), set(self.lookup.run(["dupe_name"], field="password")[0]))
+        self.assertEqual({"b", "d"}, set(self.lookup.run(["dupe_name"], field="password")[0]))
     @patch("ansible_collections.community.general.plugins.lookup.bitwarden._bitwarden", new=MockBitwarden())
     def test_bitwarden_plugin_full_item(self):

View file

@@ -82,7 +82,7 @@ class TestInterfacesFileModule(unittest.TestCase):
             with open(testfilepath, "wb") as f:
                 f.write(string.encode())
         else:
-            with open(testfilepath, "r") as goldenfile:
+            with open(testfilepath) as goldenfile:
                 goldenData = json.load(goldenfile)
                 self.assertEqual(goldenData, ifaces)
@@ -96,7 +96,7 @@ class TestInterfacesFileModule(unittest.TestCase):
                 f.write(string.encode())
                 f.close()
         else:
-            with open(testfilepath, "r") as goldenfile:
+            with open(testfilepath) as goldenfile:
                 goldenstring = goldenfile.read()
                 goldenfile.close()
         self.assertEqual(goldenstring, string)

View file

@@ -245,9 +245,7 @@ class TestPermanentParams(ModuleTestCase):
         with patch("ansible_collections.community.general.plugins.modules.modprobe.Modprobe.modprobe_files"):
             modprobe.modprobe_files = ["/etc/modprobe.d/dummy1.conf", "/etc/modprobe.d/dummy2.conf"]
-            assert modprobe.permanent_params == set(
-                ["numdummies=4", "dummy_parameter1=6", "dummy_parameter2=5"]
-            )
+            assert modprobe.permanent_params == {"numdummies=4", "dummy_parameter1=6", "dummy_parameter2=5"}
     def test_module_permanent_params_empty(self):
         files_content = ["", ""]

View file

@@ -44,7 +44,7 @@ valid_inventory = {
         "sed": "4.8-1",
         "sqlite": "3.36.0-1",
     },
-    "installed_groups": {"base-devel": set(["gawk", "grep", "file", "findutils", "pacman", "sed", "gzip", "gettext"])},
+    "installed_groups": {"base-devel": {"gawk", "grep", "file", "findutils", "pacman", "sed", "gzip", "gettext"}},
     "available_pkgs": {
         "acl": "2.3.1-1",
         "amd-ucode": "20211027.1d00989-1",
@@ -61,8 +61,7 @@
         "sudo": "1.9.8.p2-3",
     },
     "available_groups": {
-        "base-devel": set(
-            [
+        "base-devel": {
             "libtool",
             "gawk",
             "which",
@@ -87,9 +86,8 @@ valid_inventory = {
             "automake",
             "sudo",
             "binutils",
-            ]
-        ),
-        "some-group": set(["libtool", "sudo", "binutils"]),
+        },
+        "some-group": {"libtool", "sudo", "binutils"},
     },
     "upgradable_pkgs": {
         "sqlite": VersionTuple(current="3.36.0-1", latest="3.37.0-1"),

View file

@@ -34,7 +34,7 @@ class UTHelper:
         for ext in extensions:
             test_spec_filename = test_module.__file__.replace(".py", ext)
             if os.path.exists(test_spec_filename):
-                with open(test_spec_filename, "r") as test_spec_filehandle:
+                with open(test_spec_filename) as test_spec_filehandle:
                     return UTHelper.from_file(ansible_module, test_module, test_spec_filehandle, mocks=mocks)
         raise Exception(

View file

@@ -139,7 +139,7 @@ def test_make_unsafe_dict_key():
 def test_make_unsafe_set():
-    value = set([_make_trusted("test")])
+    value = {_make_trusted("test")}
     if not SUPPORTS_DATA_TAGGING:
         value.add(_make_trusted(b"test"))
     unsafe_value = make_unsafe(value)
@@ -147,7 +147,7 @@ def test_make_unsafe_set():
     for obj in unsafe_value:
         assert _is_trusted(obj)
-    value = set([_make_trusted("{{test}}")])
+    value = {_make_trusted("{{test}}")}
     if not SUPPORTS_DATA_TAGGING:
         value.add(_make_trusted(b"{{test}}"))
     unsafe_value = make_unsafe(value)