From 48781cee7f1ef41a8a30519c9217e1b0e2e79f09 Mon Sep 17 00:00:00 2001 From: Brett Holman Date: Mon, 22 Apr 2024 10:32:06 -0600 Subject: [PATCH 01/11] fix(gce): Warn when key publishing fails --- cloudinit/config/cc_ssh.py | 3 ++- cloudinit/sources/DataSourceGCE.py | 14 +++++++++++--- cloudinit/sources/__init__.py | 4 +++- 3 files changed, 16 insertions(+), 5 deletions(-) diff --git a/cloudinit/config/cc_ssh.py b/cloudinit/config/cc_ssh.py index 4c4f0c33e18..7a09a5fe60e 100644 --- a/cloudinit/config/cc_ssh.py +++ b/cloudinit/config/cc_ssh.py @@ -225,7 +225,8 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: if publish_hostkeys: hostkeys = get_public_host_keys(blacklist=host_key_blacklist) try: - cloud.datasource.publish_host_keys(hostkeys) + if not cloud.datasource.publish_host_keys(hostkeys): + LOG.warning("Publishing host keys failed!") except Exception: util.logexc(LOG, "Publishing host keys failed!") diff --git a/cloudinit/sources/DataSourceGCE.py b/cloudinit/sources/DataSourceGCE.py index 949580d8bce..aee31498e4a 100644 --- a/cloudinit/sources/DataSourceGCE.py +++ b/cloudinit/sources/DataSourceGCE.py @@ -151,8 +151,12 @@ def get_public_ssh_keys(self): return _parse_public_keys(public_keys_data, self.default_user) def publish_host_keys(self, hostkeys): + any_failed = False for key in hostkeys: - _write_host_key_to_guest_attributes(*key) + success = _write_host_key_to_guest_attributes(*key) + if not success: + any_failed = True + return not any_failed def get_hostname(self, fqdn=False, resolve_ip=False, metadata_only=False): # GCE has long FDQN's and has asked for short hostnames. 
@@ -173,7 +177,7 @@ class DataSourceGCELocal(DataSourceGCE): perform_dhcp_setup = True -def _write_host_key_to_guest_attributes(key_type, key_value): +def _write_host_key_to_guest_attributes(key_type, key_value) -> bool: url = "%s/%s/%s" % (GUEST_ATTRIBUTES_URL, HOSTKEY_NAMESPACE, key_type) key_value = key_value.encode("utf-8") resp = url_helper.readurl( @@ -185,8 +189,12 @@ def _write_host_key_to_guest_attributes(key_type, key_value): ) if resp.ok(): LOG.debug("Wrote %s host key to guest attributes.", key_type) + return True else: - LOG.debug("Unable to write %s host key to guest attributes.", key_type) + LOG.info( + "Unable to write %s host key to guest attributes.", key_type + ) + return False def _has_expired(public_key): diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 973d140ff36..33944ff18c9 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -757,14 +757,16 @@ def get_config_obj(self): def get_public_ssh_keys(self): return normalize_pubkey_data(self.metadata.get("public-keys")) - def publish_host_keys(self, hostkeys): + def publish_host_keys(self, hostkeys) -> bool: """Publish the public SSH host keys (found in /etc/ssh/*.pub). @param hostkeys: List of host key tuples (key_type, key_value), where key_type is the first field in the public key file (e.g. 'ssh-rsa') and key_value is the key itself (e.g. 'AAAAB3NzaC1y...'). 
+ @return: True if publishing host keys succeeded, otherwise False """ + return True def _remap_device(self, short_name): # LP: #611137 From f5622b72629f89f5edf06fa787f05dab22b4fc2a Mon Sep 17 00:00:00 2001 From: Brett Holman Date: Mon, 22 Apr 2024 11:51:33 -0600 Subject: [PATCH 02/11] chore(bootcmd): Eliminate redundant exception handling --- cloudinit/config/cc_bootcmd.py | 10 +++------- tests/unittests/config/test_cc_bootcmd.py | 22 ++++++++++------------ 2 files changed, 13 insertions(+), 19 deletions(-) diff --git a/cloudinit/config/cc_bootcmd.py b/cloudinit/config/cc_bootcmd.py index 054e5c4c2d2..9c00181f1f2 100644 --- a/cloudinit/config/cc_bootcmd.py +++ b/cloudinit/config/cc_bootcmd.py @@ -47,10 +47,6 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: util.logexc(LOG, "Failed to shellify bootcmd: %s", str(e)) raise - try: - iid = cloud.get_instance_id() - env = {"INSTANCE_ID": str(iid)} if iid else {} - subp.subp(["/bin/sh", tmpf.name], update_env=env, capture=False) - except Exception: - util.logexc(LOG, "Failed to run bootcmd module %s", name) - raise + iid = cloud.get_instance_id() + env = {"INSTANCE_ID": str(iid)} if iid else {} + subp.subp(["/bin/sh", tmpf.name], update_env=env, capture=False) diff --git a/tests/unittests/config/test_cc_bootcmd.py b/tests/unittests/config/test_cc_bootcmd.py index 29292405e54..43528f5bbec 100644 --- a/tests/unittests/config/test_cc_bootcmd.py +++ b/tests/unittests/config/test_cc_bootcmd.py @@ -84,22 +84,20 @@ def test_handler_invalid_command_set(self): str(context_manager.exception), ) - def test_handler_runs_bootcmd_script_with_error(self): - """When a valid script generates an error, that error is raised.""" + def test_handler_creates_and_runs_bootcmd_script_with_instance_id(self): + """Valid schema runs a bootcmd script with INSTANCE_ID in the env.""" cc = get_cloud() - valid_config = {"bootcmd": ["exit 1"]} # Script with error + out_file = self.tmp_path("bootcmd.out", self.new_root) + my_id = 
"b6ea0f59-e27d-49c6-9f87-79f19765a425" + valid_config = { + "bootcmd": ["echo {0} $INSTANCE_ID > {1}".format(my_id, out_file)] + } with mock.patch(self._etmpfile_path, FakeExtendedTempFile): with self.allow_subp(["/bin/sh"]): - with self.assertRaises(subp.ProcessExecutionError) as ctxt: - handle("does-not-matter", valid_config, cc, []) - self.assertIn( - "Unexpected error while running command.\nCommand: ['/bin/sh',", - str(ctxt.exception), - ) - self.assertIn( - "Failed to run bootcmd module does-not-matter", - self.logs.getvalue(), + handle("cc_bootcmd", valid_config, cc, []) + self.assertEqual( + my_id + " iid-datasource-none\n", util.load_text_file(out_file) ) From 9f866e856c7bcb2424059a939cf88c0509659068 Mon Sep 17 00:00:00 2001 From: Brett Holman Date: Mon, 22 Apr 2024 15:00:18 -0600 Subject: [PATCH 03/11] refactor(helpers): Remove unused conditional codepath --- cloudinit/cloud.py | 4 ++-- cloudinit/helpers.py | 15 +++++---------- 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/cloudinit/cloud.py b/cloudinit/cloud.py index ae079d4857b..d7af14c11ed 100644 --- a/cloudinit/cloud.py +++ b/cloudinit/cloud.py @@ -56,7 +56,7 @@ def cfg(self): # Ensure that cfg is not indirectly modified return copy.deepcopy(self._cfg) - def run(self, name, functor, args, freq=None, clear_on_fail=False): + def run(self, name, functor, args, freq=None): """Run a function gated by a named semaphore for a desired frequency. The typical case for this method would be to limit running of the @@ -68,7 +68,7 @@ def run(self, name, functor, args, freq=None, clear_on_fail=False): even if they happen to be run in different boot stages or across reboots. 
""" - return self._runners.run(name, functor, args, freq, clear_on_fail) + return self._runners.run(name, functor, args, freq) def get_template_filename(self, name): fn = self.paths.template_tpl % (name) diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py index 470a5b2013f..85d0fe950cf 100644 --- a/cloudinit/helpers.py +++ b/cloudinit/helpers.py @@ -34,7 +34,7 @@ def __init__(self): pass @contextlib.contextmanager - def lock(self, _name, _freq, _clear_on_fail=False): + def lock(self, _name, _freq): yield DummyLock() def has_run(self, _name, _freq): @@ -61,14 +61,9 @@ def __init__(self, sem_path): self.sem_path = sem_path @contextlib.contextmanager - def lock(self, name, freq, clear_on_fail=False): + def lock(self, name, freq): name = canon_sem_name(name) - try: - yield self._acquire(name, freq) - except Exception: - if clear_on_fail: - self.clear(name, freq) - raise + yield self._acquire(name, freq) def clear(self, name, freq): name = canon_sem_name(name) @@ -138,7 +133,7 @@ def _get_sem(self, freq): self.sems[sem_path] = FileSemaphores(sem_path) return self.sems[sem_path] - def run(self, name, functor, args, freq=None, clear_on_fail=False): + def run(self, name, functor, args, freq=None): sem = self._get_sem(freq) if not sem: sem = DummySemaphores() @@ -147,7 +142,7 @@ def run(self, name, functor, args, freq=None, clear_on_fail=False): if sem.has_run(name, freq): LOG.debug("%s already ran (freq=%s)", name, freq) return (False, None) - with sem.lock(name, freq, clear_on_fail) as lk: + with sem.lock(name, freq) as lk: if not lk: raise LockFailure("Failed to acquire lock for %s" % name) else: From abe431e1299c0c411c91bbb719c70fe82f5123be Mon Sep 17 00:00:00 2001 From: Brett Holman Date: Mon, 22 Apr 2024 15:12:11 -0600 Subject: [PATCH 04/11] chore(debain): Remove obsolete helper function From this helper's docstring: Ubuntu cloud images previously included a 'eth0.cfg' that had hard coded content. 
That file would interfere with the rendered configuration if it was present. This code is no longer relevant due to the following: 1) Ubuntu uses netplan 2) This appears like an Ubuntu-specific workaround 3) Debian cloud images use netplan --- cloudinit/distros/debian.py | 36 ------------------------------------ 1 file changed, 36 deletions(-) diff --git a/cloudinit/distros/debian.py b/cloudinit/distros/debian.py index cef6e1fb8c0..d68bd5c44e3 100644 --- a/cloudinit/distros/debian.py +++ b/cloudinit/distros/debian.py @@ -123,10 +123,6 @@ def apply_locale(self, locale, out_fn=None, keyname="LANG"): # once we've updated the system config, invalidate cache self.system_locale = None - def _write_network_state(self, *args, **kwargs): - _maybe_remove_legacy_eth0() - return super()._write_network_state(*args, **kwargs) - def _write_hostname(self, hostname, filename): conf = None try: @@ -221,38 +217,6 @@ def set_keymap(self, layout: str, model: str, variant: str, options: str): self.manage_service("restart", "console-setup") -def _maybe_remove_legacy_eth0(path="/etc/network/interfaces.d/eth0.cfg"): - """Ubuntu cloud images previously included a 'eth0.cfg' that had - hard coded content. That file would interfere with the rendered - configuration if it was present. - - if the file does not exist do nothing. - If the file exists: - - with known content, remove it and warn - - with unknown content, leave it and warn - """ - - if not os.path.exists(path): - return - - bmsg = "Dynamic networking config may not apply." - try: - contents = util.load_text_file(path) - known_contents = ["auto eth0", "iface eth0 inet dhcp"] - lines = [ - f.strip() for f in contents.splitlines() if not f.startswith("#") - ] - if lines == known_contents: - util.del_file(path) - msg = "removed %s with known contents" % path - else: - msg = bmsg + " '%s' exists with user configured content." % path - except Exception: - msg = bmsg + " %s exists, but could not be read." 
% path - - LOG.warning(msg) - - def read_system_locale(sys_path=LOCALE_CONF_FN, keyname="LANG"): """Read system default locale setting, if present""" sys_val = "" From 324926c684c0563540f8383b1680ded09b6f315b Mon Sep 17 00:00:00 2001 From: Brett Holman Date: Mon, 22 Apr 2024 16:07:44 -0600 Subject: [PATCH 05/11] refactor(net): Simplify read_sys_net exception handling Remove unnecessary arguments. --- cloudinit/net/__init__.py | 60 ++++++++------------------------ tests/unittests/net/test_init.py | 56 +++-------------------------- tests/unittests/test_net.py | 4 --- 3 files changed, 19 insertions(+), 101 deletions(-) diff --git a/cloudinit/net/__init__.py b/cloudinit/net/__init__.py index 78b15a47b89..b06dd1f70a6 100644 --- a/cloudinit/net/__init__.py +++ b/cloudinit/net/__init__.py @@ -67,52 +67,21 @@ def sys_dev_path(devname, path=""): def read_sys_net( devname, path, - translate=None, - on_enoent=None, - on_keyerror=None, - on_einval=None, ): dev_path = sys_dev_path(devname, path) - try: - contents = util.load_text_file(dev_path) - except (OSError, IOError) as e: - e_errno = getattr(e, "errno", None) - if e_errno in (errno.ENOENT, errno.ENOTDIR): - if on_enoent is not None: - return on_enoent(e) - if e_errno in (errno.EINVAL,): - if on_einval is not None: - return on_einval(e) - raise - contents = contents.strip() - if translate is None: - return contents - try: - return translate[contents] - except KeyError as e: - if on_keyerror is not None: - return on_keyerror(e) - else: - LOG.debug( - "Found unexpected (not translatable) value '%s' in '%s", - contents, - dev_path, - ) - raise + return util.load_text_file(dev_path).strip() -def read_sys_net_safe(iface, field, translate=None): - def on_excp_false(e): - return False - - return read_sys_net( - iface, - field, - on_keyerror=on_excp_false, - on_enoent=on_excp_false, - on_einval=on_excp_false, - translate=translate, - ) +def read_sys_net_safe(iface, field): + try: + return read_sys_net( + iface, + field, + ) + 
except OSError as e: + if e.errno in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL): + return False + raise def read_sys_net_int(iface, field): @@ -125,12 +94,13 @@ def read_sys_net_int(iface, field): return None -def is_up(devname): +def is_up(devname) -> bool: # The linux kernel says to consider devices in 'unknown' # operstate as up for the purposes of network configuration. See # Documentation/networking/operstates.txt in the kernel source. translate = {"up": True, "unknown": True, "down": False} - return read_sys_net_safe(devname, "operstate", translate=translate) + contents = read_sys_net_safe(devname, "operstate") + return translate.get(contents, False) def is_bridge(devname): @@ -237,7 +207,7 @@ def get_dev_features(devname): features = "" try: features = read_sys_net(devname, "device/features") - except Exception: + except (OSError, UnicodeError): pass return features diff --git a/tests/unittests/net/test_init.py b/tests/unittests/net/test_init.py index 140161a1977..d4be808aec7 100644 --- a/tests/unittests/net/test_init.py +++ b/tests/unittests/net/test_init.py @@ -66,57 +66,9 @@ def test_read_sys_net_reraises_oserror(self): with pytest.raises(Exception, match="No such file or directory"): net.read_sys_net("dev", "attr") - def test_read_sys_net_handles_error_with_on_enoent(self): - """read_sys_net handles OSError/IOError with on_enoent if provided.""" - handled_errors = [] - - def on_enoent(e): - handled_errors.append(e) - - net.read_sys_net("dev", "attr", on_enoent=on_enoent) - error = handled_errors[0] - assert isinstance(error, Exception) - assert "No such file or directory" in str(error) - - def test_read_sys_net_translates_content(self): - """read_sys_net translates content when translate dict is provided.""" - content = "you're welcome\n" - write_file(os.path.join(self.sysdir, "dev", "attr"), content) - translate = {"you're welcome": "de nada"} - assert "de nada" == net.read_sys_net( - "dev", "attr", translate=translate - ) - - def 
test_read_sys_net_errors_on_translation_failures(self, caplog): - """read_sys_net raises a KeyError and logs details on failure.""" - content = "you're welcome\n" - write_file(os.path.join(self.sysdir, "dev", "attr"), content) - with pytest.raises(KeyError, match='"you\'re welcome"'): - net.read_sys_net("dev", "attr", translate={}) - assert ( - "Found unexpected (not translatable) value 'you're welcome' in " - "'{0}dev/attr".format(self.sysdir) in caplog.text - ) - - def test_read_sys_net_handles_handles_with_onkeyerror(self): - """read_sys_net handles translation errors calling on_keyerror.""" - content = "you're welcome\n" - write_file(os.path.join(self.sysdir, "dev", "attr"), content) - handled_errors = [] - - def on_keyerror(e): - handled_errors.append(e) - - net.read_sys_net("dev", "attr", translate={}, on_keyerror=on_keyerror) - error = handled_errors[0] - assert isinstance(error, KeyError) - assert '"you\'re welcome"' == str(error) - - def test_read_sys_net_safe_false_on_translate_failure(self): - """read_sys_net_safe returns False on translation failures.""" - content = "you're welcome\n" - write_file(os.path.join(self.sysdir, "dev", "attr"), content) - assert not net.read_sys_net_safe("dev", "attr", translate={}) + def test_read_sys_net_safe_handles_error(self): + """read_sys_net_safe handles OSError/IOError""" + assert not net.read_sys_net_safe("dev", "attr") def test_read_sys_net_safe_returns_false_on_noent_failure(self): """read_sys_net_safe returns False on file not found failures.""" @@ -1399,7 +1351,7 @@ def test_get_dev_features(self): def test_get_dev_features_none_returns_empty_string(self): devname = random_string() - self.read_sys_net.side_effect = Exception("error") + self.read_sys_net.side_effect = FileNotFoundError("error") assert "" == net.get_dev_features(devname) assert 1 == self.read_sys_net.call_count self.read_sys_net.assert_called_once_with(devname, "device/features") diff --git a/tests/unittests/test_net.py 
b/tests/unittests/test_net.py index 68e44fa8021..53b0daf6762 100644 --- a/tests/unittests/test_net.py +++ b/tests/unittests/test_net.py @@ -1299,10 +1299,6 @@ def _setup_test( def fake_read( devname, path, - translate=None, - on_enoent=None, - on_keyerror=None, - on_einval=None, ): return dev_attrs[devname][path] From 3b65a2761591553bc8d9a3295d1e4f027a83b2a7 Mon Sep 17 00:00:00 2001 From: Brett Holman Date: Mon, 22 Apr 2024 20:17:09 -0600 Subject: [PATCH 06/11] chore: Remove redundant end-of-module exception handling --- cloudinit/config/cc_keys_to_console.py | 12 +++--------- cloudinit/config/cc_ssh_import_id.py | 6 +----- 2 files changed, 4 insertions(+), 14 deletions(-) diff --git a/cloudinit/config/cc_keys_to_console.py b/cloudinit/config/cc_keys_to_console.py index de29a81a8c2..56aafe1aafc 100644 --- a/cloudinit/config/cc_keys_to_console.py +++ b/cloudinit/config/cc_keys_to_console.py @@ -62,12 +62,6 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: cfg, "ssh_key_console_blacklist", [] ) - try: - cmd = [helper_path, ",".join(fp_blacklist), ",".join(key_blacklist)] - (stdout, _stderr) = subp.subp(cmd) - log_util.multi_log( - "%s\n" % (stdout.strip()), stderr=False, console=True - ) - except Exception: - LOG.warning("Writing keys to the system console failed!") - raise + cmd = [helper_path, ",".join(fp_blacklist), ",".join(key_blacklist)] + stdout, _stderr = subp.subp(cmd) + log_util.multi_log("%s\n" % (stdout.strip()), stderr=False, console=True) diff --git a/cloudinit/config/cc_ssh_import_id.py b/cloudinit/config/cc_ssh_import_id.py index 7c1422dee3d..14147f6ddb9 100644 --- a/cloudinit/config/cc_ssh_import_id.py +++ b/cloudinit/config/cc_ssh_import_id.py @@ -150,11 +150,7 @@ def import_ssh_ids(ids, user): return LOG.debug("Importing SSH ids for user %s.", user) - try: - subp.subp(cmd, capture=False) - except subp.ProcessExecutionError as exc: - util.logexc(LOG, "Failed to run command to import %s SSH ids", user) - raise exc + 
subp.subp(cmd, capture=False) def is_key_in_nested_dict(config: dict, search_key: str) -> bool: From eec8f50694c4602fe6a89d2cde83620bb503b800 Mon Sep 17 00:00:00 2001 From: Brett Holman Date: Tue, 30 Apr 2024 15:08:19 -0600 Subject: [PATCH 07/11] chore(json): Simplify json loading Since Python 3.6, json.loads() is capable of receiving arguments of type str, bytes, and bytearray. Cloud-init's code historically manually handled type conversion. This boilerplate is no longer necessary. Add docstring and typing to util.load_json(). --- cloudinit/sources/DataSourceBigstep.py | 2 +- cloudinit/sources/DataSourceRbxCloud.py | 2 +- cloudinit/sources/DataSourceScaleway.py | 2 +- cloudinit/sources/helpers/digitalocean.py | 2 +- cloudinit/sources/helpers/upcloud.py | 2 +- cloudinit/sources/helpers/vultr.py | 6 +++--- cloudinit/util.py | 20 +++++++++++++++++--- tests/unittests/sources/test_vultr.py | 2 +- 8 files changed, 26 insertions(+), 12 deletions(-) diff --git a/cloudinit/sources/DataSourceBigstep.py b/cloudinit/sources/DataSourceBigstep.py index cef1489b78c..01272780abd 100644 --- a/cloudinit/sources/DataSourceBigstep.py +++ b/cloudinit/sources/DataSourceBigstep.py @@ -26,7 +26,7 @@ def _get_data(self, apply_filter=False) -> bool: if url is None: return False response = url_helper.readurl(url) - decoded = json.loads(response.contents.decode()) + decoded = json.loads(response.contents) self.metadata = decoded["metadata"] self.vendordata_raw = decoded["vendordata_raw"] self.userdata_raw = decoded["userdata_raw"] diff --git a/cloudinit/sources/DataSourceRbxCloud.py b/cloudinit/sources/DataSourceRbxCloud.py index 2fba1149d86..d3679102d23 100644 --- a/cloudinit/sources/DataSourceRbxCloud.py +++ b/cloudinit/sources/DataSourceRbxCloud.py @@ -153,7 +153,7 @@ def read_user_data_callback(mount_dir): @returns: A dict containing userdata, metadata and cfg based on metadata. 
""" meta_data = util.load_json( - text=util.load_binary_file(fname=os.path.join(mount_dir, "cloud.json")) + util.load_binary_file(fname=os.path.join(mount_dir, "cloud.json")) ) user_data = util.load_text_file( fname=os.path.join(mount_dir, "user.data"), quiet=True diff --git a/cloudinit/sources/DataSourceScaleway.py b/cloudinit/sources/DataSourceScaleway.py index 4952e314edd..766847e5a90 100644 --- a/cloudinit/sources/DataSourceScaleway.py +++ b/cloudinit/sources/DataSourceScaleway.py @@ -225,7 +225,7 @@ def _crawl_metadata(self): resp = url_helper.readurl( self.metadata_url, timeout=self.timeout, retries=self.retries ) - self.metadata = json.loads(util.decode_binary(resp.contents)) + self.metadata = json.loads(resp.contents) self.userdata_raw = query_data_api( "user-data", self.userdata_url, self.retries, self.timeout diff --git a/cloudinit/sources/helpers/digitalocean.py b/cloudinit/sources/helpers/digitalocean.py index 4d0dd363bbe..d2d19919295 100644 --- a/cloudinit/sources/helpers/digitalocean.py +++ b/cloudinit/sources/helpers/digitalocean.py @@ -201,7 +201,7 @@ def read_metadata(url, timeout=2, sec_between=2, retries=30): ) if not response.ok(): raise RuntimeError("unable to read metadata at %s" % url) - return json.loads(response.contents.decode()) + return json.loads(response.contents) def read_sysinfo(): diff --git a/cloudinit/sources/helpers/upcloud.py b/cloudinit/sources/helpers/upcloud.py index f882fa67948..2c4d5338cac 100644 --- a/cloudinit/sources/helpers/upcloud.py +++ b/cloudinit/sources/helpers/upcloud.py @@ -199,7 +199,7 @@ def read_metadata(url, timeout=2, sec_between=2, retries=30): ) if not response.ok(): raise RuntimeError("unable to read metadata at %s" % url) - return json.loads(response.contents.decode()) + return json.loads(response.contents) def read_sysinfo(): diff --git a/cloudinit/sources/helpers/vultr.py b/cloudinit/sources/helpers/vultr.py index 0c3aa6079d3..c00ee43809c 100644 --- a/cloudinit/sources/helpers/vultr.py +++ 
b/cloudinit/sources/helpers/vultr.py @@ -32,7 +32,7 @@ def get_metadata( connectivity_url_data={"url": url}, ): # Fetch the metadata - v1 = read_metadata(url, timeout, retries, sec_between, agent) + v1 = _read_metadata(url, timeout, retries, sec_between, agent) metadata = json.loads(v1) refactor_metadata(metadata) @@ -101,7 +101,7 @@ def is_vultr(): # Read Metadata endpoint -def read_metadata(url, timeout, retries, sec_between, agent): +def _read_metadata(url, timeout, retries, sec_between, agent) -> bytes: url = "%s/v1.json" % url # Announce os details so we can handle non Vultr origin @@ -121,7 +121,7 @@ def read_metadata(url, timeout, retries, sec_between, agent): "Failed to connect to %s: Code: %s" % url, response.code ) - return response.contents.decode() + return response.contents # Wrapped for caching diff --git a/cloudinit/util.py b/cloudinit/util.py index 8025f4d51c4..60c6c67e0af 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -52,6 +52,7 @@ Mapping, Optional, Sequence, + Tuple, Union, cast, ) @@ -1851,8 +1852,21 @@ def ensure_dirs(dirlist, mode=0o755): @performance.timed("Loading json") -def load_json(text, root_types=(dict,)): - decoded = json.loads(decode_binary(text)) +def load_json( + content: Union[str, bytes, bytearray], root_types: Tuple[type] = (dict,) +): + """load json and verify that the returned object is a certain type + + Args: + content (str, bytes, bytearray): An object to be deserialized. + root_types (tuple of types, optional): Valid types to be returned. + Raises: + UnicodeDecodeError: If content cannot be decoded to utf-8. + JSONDecodeError: If content does not contain valid json. + TypeError: If json returned from json.loads() from `content` does not + match `root_types`. 
+ """ + decoded = json.loads(content) if not isinstance(decoded, tuple(root_types)): expected_types = ", ".join([str(t) for t in root_types]) raise TypeError( @@ -3069,7 +3083,7 @@ def read_hotplug_enabled_file(paths: "Paths") -> dict: ) except FileNotFoundError: LOG.debug("File not found: %s", paths.get_cpath("hotplug.enabled")) - except json.JSONDecodeError as e: + except (json.JSONDecodeError, UnicodeDecodeError) as e: LOG.warning( "Ignoring contents of %s because it is not decodable. Error: %s", settings.HOTPLUG_ENABLED_FILE, diff --git a/tests/unittests/sources/test_vultr.py b/tests/unittests/sources/test_vultr.py index e5f1c39ecae..820a331dec4 100644 --- a/tests/unittests/sources/test_vultr.py +++ b/tests/unittests/sources/test_vultr.py @@ -371,7 +371,7 @@ def override_exit(self, excp_type, excp_value, excp_traceback): "cloudinit.net.ephemeral.EphemeralDHCPv4.__exit__", override_exit ) @mock.patch("cloudinit.sources.helpers.vultr.is_vultr") - @mock.patch("cloudinit.sources.helpers.vultr.read_metadata") + @mock.patch("cloudinit.sources.helpers.vultr._read_metadata") @mock.patch("cloudinit.sources.helpers.vultr.get_interface_list") def test_interface_seek( self, From 56ad063d185d1313a0acefdff306ee81cb8d2ad4 Mon Sep 17 00:00:00 2001 From: Brett Holman Date: Mon, 22 Apr 2024 20:23:12 -0600 Subject: [PATCH 08/11] chore: Make exception handling more explicit Cloud-init's codebase makes extensive use of exception handlers which catch Exception and then do something based on the assumed exception type. This is bad practice and makes it possible for exceptions to be unexpectedly ignored. To remedy this, this commit updates cloud-init to make `Exception` always be an unknown exception which therefore should always produce a log of level `WARN` or higher. 
Require a minimum of one of the following in each Exception handler: - LOG.warning("Unhandled exception: %s", e) - util.logexc() - with a log level of WARN or higher - re-raise the exception without setting exception type when re-raising[1] Make cloud-init's code adhere to the above set of rules with the following changes: - Limit the scope where Exception is handled to only unexpected error paths. - Add new handlers for more specific exception types. - Add warning logs or increase log level to WARN in some cases. - Add typing where exceptions are returned. - Replace various Exception handlers with ProcessExecutionError. - Remove handling that does nothing. - Remove unused code which handles Exception. [1] This would likely reduce the scope of the exception and make it possible to mask an Exception. --- cloudinit/analyze/show.py | 19 ++++++--- cloudinit/cmd/clean.py | 2 + cloudinit/cmd/devel/hotplug_hook.py | 6 +++ cloudinit/cmd/main.py | 8 +++- cloudinit/config/cc_disk_setup.py | 30 +++++++++----- cloudinit/config/cc_growpart.py | 27 ++++++++++++ cloudinit/config/cc_grub_dpkg.py | 2 +- cloudinit/config/cc_mounts.py | 9 +++- cloudinit/config/cc_power_state_change.py | 4 ++ cloudinit/config/cc_runcmd.py | 3 +- cloudinit/config/cc_scripts_per_boot.py | 2 +- cloudinit/config/cc_scripts_per_instance.py | 2 +- cloudinit/config/cc_scripts_per_once.py | 2 +- cloudinit/config/cc_scripts_user.py | 2 +- cloudinit/config/cc_scripts_vendor.py | 2 +- cloudinit/config/cc_ssh_import_id.py | 14 +++---- cloudinit/config/cc_wireguard.py | 2 +- cloudinit/config/modules.py | 24 ++++++++--- cloudinit/distros/__init__.py | 9 +++- cloudinit/handlers/jinja_template.py | 18 ++++---- cloudinit/net/__init__.py | 10 ++++- cloudinit/net/netplan.py | 10 +++++ cloudinit/netinfo.py | 17 ++++++++ cloudinit/reporting/handlers.py | 10 ++++- cloudinit/sources/DataSourceAzure.py | 41 +++++++++++++++---- cloudinit/sources/DataSourceCloudSigma.py | 8 +++- cloudinit/sources/DataSourceEc2.py | 1 + 
cloudinit/sources/DataSourceGCE.py | 5 +-- cloudinit/sources/DataSourceNoCloud.py | 3 +- cloudinit/sources/DataSourceOVF.py | 26 ++++++------ cloudinit/sources/DataSourceVMware.py | 4 +- cloudinit/sources/__init__.py | 4 ++ cloudinit/sources/azure/errors.py | 7 +++- cloudinit/sources/azure/kvp.py | 7 +++- cloudinit/sources/helpers/azure.py | 16 +++++++- cloudinit/sources/helpers/openstack.py | 31 ++++++++++++-- .../helpers/vmware/imc/guestcust_util.py | 2 +- cloudinit/stages.py | 19 ++++++--- cloudinit/templater.py | 2 - cloudinit/url_helper.py | 5 +++ cloudinit/user_data.py | 6 ++- cloudinit/util.py | 33 +++++++++++++-- tests/unittests/config/test_cc_runcmd.py | 14 ++++--- tests/unittests/config/test_modules.py | 6 +-- tests/unittests/sources/test_vmware.py | 1 + tests/unittests/test_util.py | 2 +- 46 files changed, 363 insertions(+), 114 deletions(-) diff --git a/cloudinit/analyze/show.py b/cloudinit/analyze/show.py index b3814c646fb..d6e9d91912d 100644 --- a/cloudinit/analyze/show.py +++ b/cloudinit/analyze/show.py @@ -6,10 +6,11 @@ import datetime import json +import logging import sys import time -from cloudinit import subp, util +from cloudinit import lifecycle, subp, util from cloudinit.distros import uses_systemd # Example events: @@ -49,6 +50,7 @@ FAIL_CODE = "failure" CONTAINER_CODE = "container" TIMESTAMP_UNKNOWN = (FAIL_CODE, -1, -1, -1) +LOG = logging.getLogger(__name__) def format_record(msg, event): @@ -142,7 +144,7 @@ def subp(self): return err self.epoch = value return None - except Exception as systemctl_fail: + except subp.ProcessExecutionError as systemctl_fail: return systemctl_fail def parse_epoch_as_float(self): @@ -213,9 +215,16 @@ def gather_timestamps_using_dmesg(): # systemd wont start cloud-init in this case, # so we cannot get that timestamp return SUCCESS_CODE, kernel_start, kernel_end, kernel_end - - except Exception: + except (ValueError, subp.ProcessExecutionError): pass + except Exception as e: + 
lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) return TIMESTAMP_UNKNOWN @@ -244,10 +253,10 @@ def gather_timestamps_using_systemd(): cloudinit_sysd = base_time + delta_ci_s except Exception as e: + LOG.warning("Unhandled exception: %s", e) # Except ALL exceptions as Systemctl reader can throw many different # errors, but any failure in systemctl means that timestamps cannot be # obtained - print(e) return TIMESTAMP_UNKNOWN return status, kernel_start, kernel_end, cloudinit_sysd diff --git a/cloudinit/cmd/clean.py b/cloudinit/cmd/clean.py index 1d61c11d2b8..5f7287e45f6 100755 --- a/cloudinit/cmd/clean.py +++ b/cloudinit/cmd/clean.py @@ -8,6 +8,7 @@ import argparse import glob +import logging import os import sys @@ -37,6 +38,7 @@ GEN_SSH_CONFIG_FILES = [ "/etc/ssh/sshd_config.d/50-cloud-init.conf", ] +LOG = logging.getLogger(__name__) def get_parser(parser=None): diff --git a/cloudinit/cmd/devel/hotplug_hook.py b/cloudinit/cmd/devel/hotplug_hook.py index 8e839cb1617..a3295fed726 100755 --- a/cloudinit/cmd/devel/hotplug_hook.py +++ b/cloudinit/cmd/devel/hotplug_hook.py @@ -237,6 +237,12 @@ def handle_hotplug(hotplug_init: Init, devpath, subsystem, udevaction): event_handler.success() break except Exception as e: + # The number of possible exceptions handled here is large and + # difficult to audit. If retries fail this exception is always + # re-raised. Therefore, this callsite is allowed to be an exception + # to the rule that handle Exception must log a warning or reraise + # or call util.logexc() - since it does technically do this on + # timeout. LOG.debug("Exception while processing hotplug event. 
%s", e) time.sleep(wait) last_exception = e diff --git a/cloudinit/cmd/main.py b/cloudinit/cmd/main.py index 0a75d7cc502..e710e60e087 100644 --- a/cloudinit/cmd/main.py +++ b/cloudinit/cmd/main.py @@ -801,8 +801,12 @@ def status_wrapper(name, args): else: try: status = json.loads(util.load_text_file(status_path)) - except Exception: - pass + except OSError: + LOG.warning("File %s cannot be accessed %s.", status_path, mode) + except (UnicodeDecodeError, json.JSONDecodeError) as e: + LOG.warning("File %s not valid json %s: %s", status_path, mode, e) + except Exception as e: + LOG.warning("File %s not valid json %s: %s", status_path, mode, e) if mode not in status["v1"]: # this should never happen, but leave it just to be safe diff --git a/cloudinit/config/cc_disk_setup.py b/cloudinit/config/cc_disk_setup.py index cff5ee6654f..1459044f96d 100644 --- a/cloudinit/config/cc_disk_setup.py +++ b/cloudinit/config/cc_disk_setup.py @@ -180,7 +180,7 @@ def enumerate_disk(device, nodeps=False): info = None try: info, _err = subp.subp(lsblk_cmd) - except Exception as e: + except subp.ProcessExecutionError as e: raise RuntimeError( "Failed during disk check for %s\n%s" % (device, e) ) from e @@ -219,9 +219,12 @@ def is_device_valid(name, partition=False): d_type = "" try: d_type = device_type(name) - except Exception: + except RuntimeError: LOG.warning("Query against device %s failed", name) return False + except Exception as e: + LOG.warning("Unhandled exception while querying device %s %s", name, e) + return False if partition and d_type == "part": return True @@ -244,7 +247,7 @@ def check_fs(device): blkid_cmd = ["blkid", "-c", "/dev/null", device] try: out, _err = subp.subp(blkid_cmd, rcs=[0, 2]) - except Exception as e: + except subp.ProcessExecutionError as e: raise RuntimeError( "Failed during disk check for %s\n%s" % (device, e) ) from e @@ -350,7 +353,7 @@ def get_hdd_size(device): try: size_in_bytes, _ = subp.subp(["blockdev", "--getsize64", device]) sector_size, _ = 
subp.subp(["blockdev", "--getss", device]) - except Exception as e: + except subp.ProcessExecutionError as e: raise RuntimeError("Failed to get %s size\n%s" % (device, e)) from e return int(size_in_bytes) / int(sector_size) @@ -369,7 +372,7 @@ def check_partition_mbr_layout(device, layout): prt_cmd = ["sfdisk", "-l", device] try: out, _err = subp.subp(prt_cmd, data="%s\n" % layout) - except Exception as e: + except subp.ProcessExecutionError as e: raise RuntimeError( "Error running partition command on %s\n%s" % (device, e) ) from e @@ -400,7 +403,7 @@ def check_partition_gpt_layout(device, layout): prt_cmd = ["sgdisk", "-p", device] try: out, _err = subp.subp(prt_cmd, update_env=LANG_C_ENV) - except Exception as e: + except subp.ProcessExecutionError as e: raise RuntimeError( "Error running partition command on %s\n%s" % (device, e) ) from e @@ -591,7 +594,7 @@ def purge_disk(device): try: LOG.info("Purging filesystem on /dev/%s", d["name"]) subp.subp(wipefs_cmd) - except Exception as e: + except subp.ProcessExecutionError as e: raise RuntimeError( "Failed FS purge of /dev/%s" % d["name"] ) from e @@ -628,7 +631,7 @@ def read_parttbl(device): util.udevadm_settle() try: subp.subp(probe_cmd) - except Exception as e: + except subp.ProcessExecutionError as e: util.logexc(LOG, "Failed reading the partition table %s" % e) util.udevadm_settle() @@ -643,7 +646,7 @@ def exec_mkpart_mbr(device, layout): prt_cmd = ["sfdisk", "--force", device] try: subp.subp(prt_cmd, data="%s\n" % layout) - except Exception as e: + except subp.ProcessExecutionError as e: raise RuntimeError( "Failed to partition device %s\n%s" % (device, e) ) from e @@ -671,9 +674,14 @@ def exec_mkpart_gpt(device, layout): subp.subp( ["sgdisk", "-t", "{}:{}".format(index, pinput), device] ) - except Exception: + except subp.ProcessExecutionError: LOG.warning("Failed to partition device %s", device) raise + except Exception as e: + LOG.warning( + "Unhandled exception while partitioning device %s: %s", device, 
e + ) + raise read_parttbl(device) @@ -976,5 +984,5 @@ def mkfs(fs_cfg): LOG.debug("Creating file system %s on %s", label, device) try: subp.subp(fs_cmd, shell=shell) - except Exception as e: + except subp.ProcessExecutionError as e: raise RuntimeError("Failed to exec of '%s':\n%s" % (fs_cmd, e)) from e diff --git a/cloudinit/config/cc_growpart.py b/cloudinit/config/cc_growpart.py index 1d0ddcb7de0..77b1ff05703 100644 --- a/cloudinit/config/cc_growpart.py +++ b/cloudinit/config/cc_growpart.py @@ -8,6 +8,7 @@ """Growpart: Grow partitions""" import base64 +import binascii import copy import json import logging @@ -325,7 +326,18 @@ def resize_encrypted(blockdev, partition) -> Tuple[str, str]: key = keydata["key"] decoded_key = base64.b64decode(key) slot = keydata["slot"] + except ( + OSError, + UnicodeDecodeError, + binascii.Error, + json.JSONDecodeError, + ) as e: + raise RuntimeError( + "Could not load encryption key. This is expected if " + "the volume has been previously resized." + ) from e except Exception as e: + LOG.warning("Unhandled exception: %s", e) raise RuntimeError( "Could not load encryption key. This is expected if " "the volume has been previously resized." 
@@ -499,7 +511,22 @@ def resize_devices(resizer: Resizer, devices, distro: Distro): "as it is not encrypted.", ) ) + except OSError as e: + info.append( + ( + devent, + RESIZE.FAILED, + f"Resizing encrypted device ({blockdev}) failed: {e}", + ) + ) except Exception as e: + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) info.append( ( devent, diff --git a/cloudinit/config/cc_grub_dpkg.py b/cloudinit/config/cc_grub_dpkg.py index fef0f99728e..d39a4599cf3 100644 --- a/cloudinit/config/cc_grub_dpkg.py +++ b/cloudinit/config/cc_grub_dpkg.py @@ -125,7 +125,7 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: try: subp.subp(["debconf-set-selections"], data=dconf_sel) - except Exception as e: + except subp.ProcessExecutionError as e: util.logexc( LOG, "Failed to run debconf-set-selections for grub_dpkg: %s", e ) diff --git a/cloudinit/config/cc_mounts.py b/cloudinit/config/cc_mounts.py index 54c99f43ec2..4b16988572e 100644 --- a/cloudinit/config/cc_mounts.py +++ b/cloudinit/config/cc_mounts.py @@ -331,11 +331,13 @@ def handle_swapcfg(swapcfg): LOG.debug("swap file %s already in use", fname) return fname LOG.debug("swap file %s exists, but not in /proc/swaps", fname) - except Exception: + except OSError: LOG.warning( "swap file %s exists. 
Error reading /proc/swaps", fname ) - return fname + except Exception as e: + LOG.warning("Unhandled exception: %s", e) + return fname try: if isinstance(size, str) and size != "auto": @@ -344,7 +346,10 @@ def handle_swapcfg(swapcfg): maxsize = util.human2bytes(maxsize) return setup_swapfile(fname=fname, size=size, maxsize=maxsize) + except OSError as e: + LOG.warning("failed to setup swap: %s", e) except Exception as e: + LOG.warning("Unhandled exception: %s", e) LOG.warning("failed to setup swap: %s", e) return None diff --git a/cloudinit/config/cc_power_state_change.py b/cloudinit/config/cc_power_state_change.py index 5bd6b96d847..32a1a5e620b 100644 --- a/cloudinit/config/cc_power_state_change.py +++ b/cloudinit/config/cc_power_state_change.py @@ -71,7 +71,11 @@ def check_condition(cond): else: LOG.warning("%sunexpected exit %s. do not apply change.", pre, ret) return False + except OSError as e: + LOG.warning("%sUnexpected error: %s", pre, e) + return False except Exception as e: + LOG.warning("Unhandled exception: %s", e) LOG.warning("%sUnexpected error: %s", pre, e) return False diff --git a/cloudinit/config/cc_runcmd.py b/cloudinit/config/cc_runcmd.py index 2212fd36a97..9841ed1381b 100644 --- a/cloudinit/config/cc_runcmd.py +++ b/cloudinit/config/cc_runcmd.py @@ -47,4 +47,5 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: content = util.shellify(cmd) util.write_file(out_fn, content, 0o700) except Exception as e: - raise type(e)("Failed to shellify {} into file {}".format(cmd, out_fn)) + util.logexc(LOG, "Failed to shellify runcmd: %s", e) + raise diff --git a/cloudinit/config/cc_scripts_per_boot.py b/cloudinit/config/cc_scripts_per_boot.py index fa78b8a48d8..53316e8dc7f 100644 --- a/cloudinit/config/cc_scripts_per_boot.py +++ b/cloudinit/config/cc_scripts_per_boot.py @@ -37,7 +37,7 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: runparts_path = os.path.join(cloud.get_cpath(), "scripts", SCRIPT_SUBDIR) try: 
subp.runparts(runparts_path) - except Exception: + except RuntimeError: LOG.warning( "Failed to run module %s (%s in %s)", name, diff --git a/cloudinit/config/cc_scripts_per_instance.py b/cloudinit/config/cc_scripts_per_instance.py index c5c52b97cf2..8d932ffdd95 100644 --- a/cloudinit/config/cc_scripts_per_instance.py +++ b/cloudinit/config/cc_scripts_per_instance.py @@ -35,7 +35,7 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: runparts_path = os.path.join(cloud.get_cpath(), "scripts", SCRIPT_SUBDIR) try: subp.runparts(runparts_path) - except Exception: + except RuntimeError: LOG.warning( "Failed to run module %s (%s in %s)", name, diff --git a/cloudinit/config/cc_scripts_per_once.py b/cloudinit/config/cc_scripts_per_once.py index 4d9ac583c49..ee1d813edce 100644 --- a/cloudinit/config/cc_scripts_per_once.py +++ b/cloudinit/config/cc_scripts_per_once.py @@ -35,7 +35,7 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: runparts_path = os.path.join(cloud.get_cpath(), "scripts", SCRIPT_SUBDIR) try: subp.runparts(runparts_path) - except Exception: + except RuntimeError: LOG.warning( "Failed to run module %s (%s in %s)", name, diff --git a/cloudinit/config/cc_scripts_user.py b/cloudinit/config/cc_scripts_user.py index eb63f7566fc..d6d00cf0687 100644 --- a/cloudinit/config/cc_scripts_user.py +++ b/cloudinit/config/cc_scripts_user.py @@ -36,7 +36,7 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: runparts_path = os.path.join(cloud.get_ipath_cur(), SCRIPT_SUBDIR) try: subp.runparts(runparts_path) - except Exception: + except RuntimeError: LOG.warning( "Failed to run module %s (%s in %s)", name, diff --git a/cloudinit/config/cc_scripts_vendor.py b/cloudinit/config/cc_scripts_vendor.py index a102c794ab5..899e7867bca 100644 --- a/cloudinit/config/cc_scripts_vendor.py +++ b/cloudinit/config/cc_scripts_vendor.py @@ -38,7 +38,7 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: try: 
subp.runparts(runparts_path, exe_prefix=prefix) - except Exception: + except RuntimeError: LOG.warning( "Failed to run module %s (%s in %s)", name, diff --git a/cloudinit/config/cc_ssh_import_id.py b/cloudinit/config/cc_ssh_import_id.py index 14147f6ddb9..58a068ced16 100644 --- a/cloudinit/config/cc_ssh_import_id.py +++ b/cloudinit/config/cc_ssh_import_id.py @@ -9,6 +9,7 @@ import logging import pwd +from typing import List from cloudinit import subp, util from cloudinit.cloud import Cloud @@ -58,15 +59,14 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: # import for cloudinit created users (users, _groups) = ug_util.normalize_users_groups(cfg, cloud.distro) - elist = [] + elist: List[Exception] = [] for user, user_cfg in users.items(): import_ids = [] if user_cfg["default"]: import_ids = util.get_cfg_option_list(cfg, "ssh_import_id", []) else: - try: - import_ids = user_cfg["ssh_import_id"] - except Exception: + import_ids = user_cfg.get("ssh_import_id", []) + if not import_ids: LOG.debug("User %s is not configured for ssh_import_id", user) continue @@ -74,12 +74,10 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: import_ids = util.uniq_merge(import_ids) import_ids = [str(i) for i in import_ids] except Exception: - LOG.debug( - "User %s is not correctly configured for ssh_import_id", user - ) + util.logexc(LOG, "Unhandled configuration for user %s", user) continue - if not len(import_ids): + if not import_ids: continue try: diff --git a/cloudinit/config/cc_wireguard.py b/cloudinit/config/cc_wireguard.py index 0cacd1338e0..dc3a78b97ef 100644 --- a/cloudinit/config/cc_wireguard.py +++ b/cloudinit/config/cc_wireguard.py @@ -70,7 +70,7 @@ def write_config(wg_int: dict): util.write_file( wg_int["config_path"], wg_int["content"], mode=WG_CONFIG_FILE_MODE ) - except Exception as e: + except (OSError, KeyError) as e: raise RuntimeError( "Failure writing Wireguard configuration file" f' {wg_int["config_path"]}:{NL}{str(e)}' 
diff --git a/cloudinit/config/modules.py b/cloudinit/config/modules.py index bea6a8e9665..c89147fba04 100644 --- a/cloudinit/config/modules.py +++ b/cloudinit/config/modules.py @@ -10,7 +10,7 @@ import logging from inspect import signature from types import ModuleType -from typing import Dict, List, NamedTuple, Optional +from typing import Dict, List, NamedTuple, Optional, Tuple from cloudinit import ( config, @@ -46,6 +46,8 @@ "cc_ubuntu_advantage": "cc_ubuntu_pro", # Renamed 24.1 } +RunOutput = Tuple[List[str], List[Tuple[str, Exception]]] + class ModuleDetails(NamedTuple): module: ModuleType @@ -253,7 +255,7 @@ def _fixup_modules(self, raw_mods) -> List[ModuleDetails]: ) return mostly_mods - def _run_modules(self, mostly_mods: List[ModuleDetails]): + def _run_modules(self, mostly_mods: List[ModuleDetails]) -> RunOutput: cc = self.init.cloudify() # Return which ones ran # and which ones failed + the exception of why it failed @@ -308,7 +310,14 @@ def _run_modules(self, mostly_mods: List[ModuleDetails]): failures.append((name, e)) return (which_ran, failures) - def run_single(self, mod_name, args=None, freq=None): + def run_single(self, mod_name: str, args=None, freq=None) -> RunOutput: + """Run a single module + + return: a tuple containing two lists: + - string names of modules which ran + - a list of tuples of modules which failed while running and + the exception that was raised + """ # Form the users module 'specs' mod_to_be = { "mod": mod_name, @@ -320,7 +329,7 @@ def run_single(self, mod_name, args=None, freq=None): mostly_mods = self._fixup_modules(raw_mods) return self._run_modules(mostly_mods) - def run_section(self, section_name): + def run_section(self, section_name: str) -> RunOutput: """Runs all modules in the given section. 
section_name - One of the modules lists as defined in @@ -328,6 +337,11 @@ def run_section(self, section_name): - cloud_init_modules - cloud_config_modules - cloud_final_modules + + return: a tuple containing two lists: + - string names of modules which ran + - a list of tuples of modules which failed while running and + the exception that was raised """ raw_mods = self._read_modules(section_name) mostly_mods = self._fixup_modules(raw_mods) @@ -357,7 +371,7 @@ def run_section(self, section_name): skipped.append(name) continue forced.append(name) - active_mods.append([mod, name, _freq, _args]) + active_mods.append(module_details) if inapplicable_mods: LOG.info( diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py index e65cbfb5d89..77c372d1065 100644 --- a/cloudinit/distros/__init__.py +++ b/cloudinit/distros/__init__.py @@ -416,6 +416,10 @@ def update_package_sources(self, *, force=False): continue try: manager.update_package_sources(force=force) + except subp.ProcessExecutionError as e: + LOG.error( + "Failed to update package using %s: %s", manager.name, e + ) except Exception as e: LOG.error( "Failed to update package using %s: %s", manager.name, e @@ -1773,5 +1777,8 @@ def uses_systemd(): try: res = os.lstat("/run/systemd/system") return stat.S_ISDIR(res.st_mode) - except Exception: + except OSError: + return False + except Exception as e: + LOG.warning("Unhandled exception: %s", e) return False diff --git a/cloudinit/handlers/jinja_template.py b/cloudinit/handlers/jinja_template.py index 388588d8029..62203cf4f01 100644 --- a/cloudinit/handlers/jinja_template.py +++ b/cloudinit/handlers/jinja_template.py @@ -117,15 +117,17 @@ def render_jinja_payload_from_file( ) try: instance_data = load_json(load_text_file(instance_data_file)) - except Exception as e: - msg = "Loading Jinja instance data failed" - if isinstance(e, (IOError, OSError)): - if e.errno == EACCES: - msg = ( - "Cannot render jinja template vars. No read permission on" - " '%s'. 
Try sudo" % instance_data_file - ) + except PermissionError as e: + msg = ( + "Cannot render jinja template vars. No read permission on" + " '%s'. Try sudo" % instance_data_file + ) raise JinjaLoadError(msg) from e + except (OSError, ValueError, TypeError) as e: + raise JinjaLoadError("Loading Jinja instance data failed") from e + except Exception as e: + LOG.warning("Unhandled exception: %s", e) + raise JinjaLoadError("Loading Jinja instance data failed") from e rendered_payload = render_jinja_payload( payload, payload_fn, instance_data, debug diff --git a/cloudinit/net/__init__.py b/cloudinit/net/__init__.py index b06dd1f70a6..65897664787 100644 --- a/cloudinit/net/__init__.py +++ b/cloudinit/net/__init__.py @@ -209,6 +209,8 @@ def get_dev_features(devname): features = read_sys_net(devname, "device/features") except (OSError, UnicodeError): pass + except Exception as e: + LOG.warning("Unhandled exception: %s", e) return features @@ -815,11 +817,17 @@ def find_entry(mac, driver, device_id): for op, mac, new_name, params in ops + ups: try: opmap[op](*params) - except Exception as e: + except subp.ProcessExecutionError as e: errors.append( "[unknown] Error performing %s%s for %s, %s: %s" % (op, params, mac, new_name, e) ) + except Exception as e: + errors.append( + "[unknown] Unexpected exception " + "performing %s%s for %s, %s: %s" + % (op, params, mac, new_name, e) + ) if len(errors): raise RuntimeError("\n".join(errors)) diff --git a/cloudinit/net/netplan.py b/cloudinit/net/netplan.py index 50af17694d5..00e4a59619d 100644 --- a/cloudinit/net/netplan.py +++ b/cloudinit/net/netplan.py @@ -238,6 +238,7 @@ def netplan_api_write_yaml_file(net_config_content: str) -> bool: separate file or directory paths than world-readable configuration parts. 
""" try: + from netplan import NetplanException # type: ignore from netplan.parser import Parser # type: ignore from netplan.state import State # type: ignore except ImportError: @@ -263,7 +264,16 @@ def netplan_api_write_yaml_file(net_config_content: str) -> bool: state_output_file._write_yaml_file( os.path.basename(CLOUDINIT_NETPLAN_FILE) ) + except (NetplanException, OSError) as e: + LOG.warning( + "Unable to render network config using netplan python module." + " Fallback to write %s. %s", + CLOUDINIT_NETPLAN_FILE, + e, + ) + return False except Exception as e: + LOG.warning("Unhandled exception: %s") LOG.warning( "Unable to render network config using netplan python module." " Fallback to write %s. %s", diff --git a/cloudinit/netinfo.py b/cloudinit/netinfo.py index 6888693c382..67d57d32d2b 100644 --- a/cloudinit/netinfo.py +++ b/cloudinit/netinfo.py @@ -573,7 +573,16 @@ def netdev_pformat(): empty = "." try: netdev = netdev_info(empty=empty) + except (OSError, subp.ProcessExecutionError) as e: + lines.append( + util.center( + "Net device info failed ({error})".format(error=str(e)), + "!", + 80, + ) + ) except Exception as e: + LOG.warning("Unhandled exception: %s", e) lines.append( util.center( "Net device info failed ({error})".format(error=str(e)), @@ -624,7 +633,15 @@ def route_pformat(): lines = [] try: routes = route_info() + except subp.ProcessExecutionError as e: + lines.append( + util.center( + "Route info failed ({error})".format(error=str(e)), "!", 80 + ) + ) + util.logexc(LOG, "Route info failed: %s" % e) except Exception as e: + LOG.warning("Unhandled exception: %s", e) lines.append( util.center( "Route info failed ({error})".format(error=str(e)), "!", 80 diff --git a/cloudinit/reporting/handlers.py b/cloudinit/reporting/handlers.py index 1e89b12345b..0ecd1efbb49 100644 --- a/cloudinit/reporting/handlers.py +++ b/cloudinit/reporting/handlers.py @@ -50,7 +50,7 @@ def __init__(self, level="DEBUG"): input_level = level try: level = getattr(logging, 
level.upper()) - except Exception: + except AttributeError: LOG.warning("invalid level '%s', using WARN", input_level) level = logging.WARN self.level = level @@ -131,7 +131,15 @@ def process_requests(self): log_req_resp=False, ) consecutive_failed = 0 + except url_helper.UrlError as e: + LOG.warning( + "Failed posting event: %s. This was caused by: %s", + args[1], + e, + ) + consecutive_failed += 1 except Exception as e: + LOG.warning("Unhandled exception: %s", e) LOG.warning( "Failed posting event: %s. This was caused by: %s", args[1], diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index cadbfe836a8..5e4e08fc291 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -5,6 +5,7 @@ # This file is part of cloud-init. See LICENSE file for license information. import base64 +import binascii import functools import logging import os @@ -776,6 +777,7 @@ def crawl_metadata(self): ) crawled_data["files"] = {"ovf-env.xml": contents} except Exception as e: + LOG.warning("Unhandled exception: %s", e) report_diagnostic_event( "Failed to construct OVF from IMDS data %s" % e, logger_func=LOG.debug, @@ -791,7 +793,12 @@ def crawl_metadata(self): crawled_data["userdata_raw"] = base64.b64decode( "".join(imds_userdata.split()) ) - except Exception: + except binascii.Error: + report_diagnostic_event( + "Bad userdata in IMDS", logger_func=LOG.warning + ) + except Exception as e: + LOG.warning("Unhandled exception: %s", e) report_diagnostic_event( "Bad userdata in IMDS", logger_func=LOG.warning ) @@ -813,9 +820,9 @@ def crawl_metadata(self): ) try: ssh_keys = self._report_ready(pubkey_info=pubkey_info) - except Exception: + except Exception as e: + LOG.warning("Unhandled exception: %s", e) # Failed to report ready, but continue with best effort. 
-            pass
         else:
             LOG.debug("negotiating returned %s", ssh_keys)
             if ssh_keys:
@@ -900,12 +907,16 @@ def _get_data(self):
         """
         try:
             get_boot_telemetry()
+        except RuntimeError as e:
+            LOG.warning("Failed to get boot telemetry: %s", e)
         except Exception as e:
+            LOG.warning("Unhandled exception: %s", e)
             LOG.warning("Failed to get boot telemetry: %s", e)
 
         try:
             get_system_info()
         except Exception as e:
+            LOG.warning("Unhandled exception: %s", e)
             LOG.warning("Failed to get system information: %s", e)
 
         try:
@@ -1129,13 +1140,13 @@ def _report_ready_for_pps(
         """
         try:
             self._report_ready()
-        except Exception as error:
+        except UrlError as error:
             # Ignore HTTP failures for Savable PPS as the call may appear to
             # fail if the network interface is unplugged or the VM is
             # suspended before we process the response. Worst case scenario
             # is that we failed to report ready for source PPS and this VM
             # will be discarded shortly, no harm done.
-            if expect_url_error and isinstance(error, UrlError):
+            if expect_url_error:
                 report_diagnostic_event(
                     "Ignoring http call failure, it was expected.",
                     logger_func=LOG.debug,
@@ -1148,6 +1159,11 @@ def _report_ready_for_pps(
             )
             report_diagnostic_event(msg, logger_func=LOG.error)
             raise sources.InvalidMetaDataException(msg) from error
+        except Exception as e:
+            LOG.warning("Unhandled exception: %s", e)
+            msg = "Failed reporting ready while in the preprovisioning pool."
+            report_diagnostic_event(msg, logger_func=LOG.error)
+            raise sources.InvalidMetaDataException(msg) from e
 
         # Reset flag as we will need to report ready again for re-use. 
self._negotiated = False @@ -1371,6 +1387,12 @@ def _report_failure( ) self._negotiated = True return True + except UrlError as e: + report_diagnostic_event( + "Failed to report failure using " + "cached ephemeral dhcp context: %s" % e, + logger_func=LOG.error, + ) except Exception as e: report_diagnostic_event( "Failed to report failure using " @@ -1395,6 +1417,7 @@ def _report_failure( self._negotiated = True return True except Exception as e: + LOG.warning("Unhandled exception: %s", e) report_diagnostic_event( "Failed to report failure using new ephemeral dhcp: %s" % e, logger_func=LOG.debug, @@ -1440,7 +1463,7 @@ def _report_ready( def _ppstype_from_imds(self, imds_md: dict) -> Optional[str]: try: return imds_md["extended"]["compute"]["ppsType"] - except Exception as e: + except KeyError as e: report_diagnostic_event( "Could not retrieve pps configuration from IMDS: %s" % e, logger_func=LOG.debug, @@ -1864,7 +1887,11 @@ def _redact_password(cnt, fname): ): elem.text = DEF_PASSWD_REDACTION return ET.tostring(root) - except Exception: + except ET.ParseError: + LOG.critical("failed to redact userpassword in %s", fname) + return cnt + except Exception as e: + LOG.warning("Unhandled exception: %s", e) LOG.critical("failed to redact userpassword in %s", fname) return cnt diff --git a/cloudinit/sources/DataSourceCloudSigma.py b/cloudinit/sources/DataSourceCloudSigma.py index 5fdea1fba42..54a6fca0ae5 100644 --- a/cloudinit/sources/DataSourceCloudSigma.py +++ b/cloudinit/sources/DataSourceCloudSigma.py @@ -8,6 +8,8 @@ import re from base64 import b64decode +from serial import SerialException, SerialTimeoutException + from cloudinit import dmi, sources from cloudinit.sources import DataSourceHostname from cloudinit.sources.helpers.cloudsigma import SERIAL_PORT, Cepko @@ -55,7 +57,11 @@ def _get_data(self): try: server_context = self.cepko.all().result server_meta = server_context["meta"] - except Exception: + except (ValueError, SerialException, SerialTimeoutException, 
OSError): + LOG.debug("CloudSigma: Unable to read from serial port") + return False + except Exception as e: + LOG.warning("Unhandled exception: %s", e) # TODO: check for explicit "config on", and then warn # but since no explicit config is available now, just debug. LOG.debug("CloudSigma: Unable to read from serial port") diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py index 4289e65e209..edbad161e58 100644 --- a/cloudinit/sources/DataSourceEc2.py +++ b/cloudinit/sources/DataSourceEc2.py @@ -826,6 +826,7 @@ def identify_platform(): if result: return result except Exception as e: + LOG.warning("Unhandled exception: %s", e) LOG.warning( "calling %s with %s raised exception: %s", checker, data, e ) diff --git a/cloudinit/sources/DataSourceGCE.py b/cloudinit/sources/DataSourceGCE.py index aee31498e4a..68fb36ba60b 100644 --- a/cloudinit/sources/DataSourceGCE.py +++ b/cloudinit/sources/DataSourceGCE.py @@ -109,6 +109,7 @@ def _get_data(self): url_params=url_params, ) except Exception as e: + LOG.warning("Unhandled exception: %s", e) LOG.debug( "Error fetching IMD with candidate NIC %s: %s", candidate_nic, @@ -191,9 +192,7 @@ def _write_host_key_to_guest_attributes(key_type, key_value) -> bool: LOG.debug("Wrote %s host key to guest attributes.", key_type) return True else: - LOG.info( - "Unable to write %s host key to guest attributes.", key_type - ) + LOG.info("Unable to write %s host key to guest attributes.", key_type) return False diff --git a/cloudinit/sources/DataSourceNoCloud.py b/cloudinit/sources/DataSourceNoCloud.py index 18bf8902de1..dadfc41b5f9 100644 --- a/cloudinit/sources/DataSourceNoCloud.py +++ b/cloudinit/sources/DataSourceNoCloud.py @@ -369,7 +369,8 @@ def parse_cmdline_data(ds_id, fill, cmdline=None): continue try: (k, v) = item.split("=", 1) - except Exception: + except Exception as e: + LOG.warning("Unhandled exception: %s", e) k = item v = None if k in s2l: diff --git a/cloudinit/sources/DataSourceOVF.py 
b/cloudinit/sources/DataSourceOVF.py index 89fc5de8d66..cc867fb6188 100644 --- a/cloudinit/sources/DataSourceOVF.py +++ b/cloudinit/sources/DataSourceOVF.py @@ -14,6 +14,7 @@ """ import base64 +import binascii import logging import os import re @@ -158,13 +159,21 @@ def read_ovf_environment(contents, read_network=False): elif prop in network_props and read_network: try: network_config = base64.b64decode(val.encode()) - md[prop] = safeload_yaml_or_dict(network_config).get("network") - except Exception: + md[prop] = (yaml.safe_load(network_config) or {}).get( + "network" + ) + except (binascii.Error, yaml.YAMLError): + LOG.debug("Ignore network-config in wrong format") + except Exception as e: + LOG.warning("Unhandled exception: %s", e) LOG.debug("Ignore network-config in wrong format") elif prop == "user-data": try: ud = base64.b64decode(val.encode()) - except Exception: + except binascii.Error: + ud = val.encode() + except Exception as e: + LOG.warning("Unhandled exception: %s", e) ud = val.encode() return (md, ud, cfg) @@ -400,14 +409,3 @@ class XmlError(Exception): # Return a list of data sources that match this set of dependencies def get_datasource_list(depends): return sources.list_from_depends(depends, datasources) - - -def safeload_yaml_or_dict(data): - """ - The meta data could be JSON or YAML. Since YAML is a strict superset of - JSON, we will unmarshal the data as YAML. If data is None then a new - dictionary is returned. 
- """ - if not data: - return {} - return yaml.safe_load(data) diff --git a/cloudinit/sources/DataSourceVMware.py b/cloudinit/sources/DataSourceVMware.py index 0e328cf3a2a..f1e07ad1a64 100644 --- a/cloudinit/sources/DataSourceVMware.py +++ b/cloudinit/sources/DataSourceVMware.py @@ -1064,8 +1064,8 @@ def main(): """ try: loggers.setup_basic_logging() - except Exception: - pass + except Exception as e: + LOG.warning("Unhandled exception: %s", e) metadata = { WAIT_ON_NETWORK: { WAIT_ON_NETWORK_IPV4: True, diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 33944ff18c9..568b3d7235c 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -1210,7 +1210,11 @@ def pkl_load(fname: str) -> Optional[DataSource]: pickle_contents = None try: pickle_contents = util.load_binary_file(fname) + except OSError as e: + if os.path.isfile(fname): + LOG.warning("failed loading pickle in %s: %s", fname, e) except Exception as e: + LOG.warning("Unhandled exception: %s", e) if os.path.isfile(fname): LOG.warning("failed loading pickle in %s: %s", fname, e) diff --git a/cloudinit/sources/azure/errors.py b/cloudinit/sources/azure/errors.py index 53373ce5c89..ae8a56d9e48 100644 --- a/cloudinit/sources/azure/errors.py +++ b/cloudinit/sources/azure/errors.py @@ -56,8 +56,11 @@ def __init__( try: self.vm_id = identity.query_vm_id() - except Exception as id_error: - self.vm_id = f"failed to read vm id: {id_error!r}" + except (RuntimeError, OSError, ValueError) as e: + self.vm_id = f"failed to read vm id: {e!r}" + except Exception as e: + LOG.warning("Unhandled exception: %s", e) + self.vm_id = f"failed to read vm id: {e!r}" def as_encoded_report( self, diff --git a/cloudinit/sources/azure/kvp.py b/cloudinit/sources/azure/kvp.py index 735c4616be1..5a9635eee2c 100644 --- a/cloudinit/sources/azure/kvp.py +++ b/cloudinit/sources/azure/kvp.py @@ -42,8 +42,11 @@ def report_failure_to_host(error: errors.ReportableError) -> bool: def 
report_success_to_host() -> bool: try: vm_id = identity.query_vm_id() - except Exception as id_error: - vm_id = f"failed to read vm id: {id_error!r}" + except (RuntimeError, OSError, ValueError) as e: + vm_id = f"failed to read vm id: {e!r}" + except Exception as e: + LOG.warning("Unhandled exception: %s", e) + vm_id = f"failed to read vm id: {e!r}" report = errors.encode_report( [ diff --git a/cloudinit/sources/helpers/azure.py b/cloudinit/sources/helpers/azure.py index 0e5467b58e8..75e8756473d 100644 --- a/cloudinit/sources/helpers/azure.py +++ b/cloudinit/sources/helpers/azure.py @@ -207,9 +207,15 @@ def report_dmesg_to_kvp(): try: out, _ = subp.subp(["dmesg"], decode=False, capture=True) report_compressed_event("dmesg", out) - except Exception as ex: + except (zlib.error, subp.ProcessExecutionError) as e: report_diagnostic_event( - "Exception when dumping dmesg log: %s" % repr(ex), + "Exception when dumping dmesg log: %s" % repr(e), + logger_func=LOG.warning, + ) + except Exception as e: + LOG.warning("Unhandled exception: %s", e) + report_diagnostic_event( + "Exception when dumping dmesg log: %s" % repr(e), logger_func=LOG.warning, ) @@ -712,7 +718,13 @@ def eject_iso(self, iso_dev, distro: distros.Distro) -> None: LOG.debug("Ejecting the provisioning iso") try: distro.eject_media(iso_dev) + except subp.ProcessExecutionError as e: + report_diagnostic_event( + "Failed ejecting the provisioning iso: %s" % e, + logger_func=LOG.error, + ) except Exception as e: + LOG.warning("Unhandled exception: %s", e) report_diagnostic_event( "Failed ejecting the provisioning iso: %s" % e, logger_func=LOG.error, diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 97ec18faf98..9171817096d 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -10,6 +10,7 @@ import base64 import copy import functools +import json import logging import os @@ -179,15 +180,22 @@ def _read_ec2_metadata(self): 
pass def _find_working_version(self): + versions_available = [] try: versions_available = self._fetch_available_versions() + except (OSError, url_helper.UrlError) as e: + LOG.debug( + "Unable to read openstack versions from %s due to: %s", + self.base_path, + e, + ) except Exception as e: + LOG.warning("Unhandled exception: %s", e) LOG.debug( "Unable to read openstack versions from %s due to: %s", self.base_path, e, ) - versions_available = [] # openstack.OS_VERSIONS is stored in chronological order, so # reverse it to check newest first. @@ -272,7 +280,7 @@ def datafiles(version): found = False try: data = self._path_read(path) - except IOError as e: + except (OSError, url_helper.UrlError) as e: if not required: LOG.debug( "Failed reading optional path %s due to: %s", path, e @@ -288,7 +296,12 @@ def datafiles(version): if found and translator: try: data = translator(data) + except json.JSONDecodeError as e: + raise BrokenMetadata( + "Failed to process path %s: %s" % (path, e) + ) from e except Exception as e: + LOG.warning("Unhandled exception: %s", e) raise BrokenMetadata( "Failed to process path %s: %s" % (path, e) ) from e @@ -304,6 +317,11 @@ def datafiles(version): raise BrokenMetadata( "Badly formatted metadata random_seed entry: %s" % e ) from e + except Exception as e: + LOG.warning("Unhandled exception: %s", e) + raise BrokenMetadata( + "Badly formatted metadata random_seed entry: %s" % e + ) from e # load any files that were provided files = {} @@ -314,7 +332,7 @@ def datafiles(version): path = item["path"] try: files[path] = self._read_content_path(item) - except Exception as e: + except (OSError, url_helper.UrlError) as e: raise BrokenMetadata( "Failed to read provided file %s: %s" % (path, e) ) from e @@ -390,7 +408,12 @@ def _read_ec2_metadata(self): else: try: return util.load_json(self._path_read(path)) + except (OSError, ValueError) as e: + raise BrokenMetadata( + "Failed to process path %s: %s" % (path, e) + ) from e except Exception as e: + 
LOG.warning("Unhandled exception: %s", e)
                 raise BrokenMetadata(
                     "Failed to process path %s: %s" % (path, e)
                 ) from e
@@ -424,7 +447,7 @@ def read_v1(self):
                 # determine that every member of FILES_V1 has a callable in
                 # the appropriate position
                 md[key] = translator(contents)  # pylint: disable=E1102
-            except Exception as e:
+            except json.JSONDecodeError as e:
                 raise BrokenMetadata(
                     "Failed to process path %s: %s" % (path, e)
                 ) from e
diff --git a/cloudinit/sources/helpers/vmware/imc/guestcust_util.py b/cloudinit/sources/helpers/vmware/imc/guestcust_util.py
index 1be10c9708b..a761ecbdbdf 100644
--- a/cloudinit/sources/helpers/vmware/imc/guestcust_util.py
+++ b/cloudinit/sources/helpers/vmware/imc/guestcust_util.py
@@ -52,7 +52,7 @@ def send_rpc(rpc):
         # Remove the trailing newline in the output.
         if out:
             out = out.rstrip()
-    except Exception as e:
+    except subp.ProcessExecutionError as e:
         logger.debug("Failed to send RPC command")
         logger.exception(e)
 
diff --git a/cloudinit/stages.py b/cloudinit/stages.py
index 84bc0ce07a0..b464171049a 100644
--- a/cloudinit/stages.py
+++ b/cloudinit/stages.py
@@ -484,9 +484,13 @@ def _reflect_cur_instance(self):
         previous_ds = None
         ds_fn = os.path.join(idir, "datasource")
         try:
-            previous_ds = util.load_text_file(ds_fn).strip()
-        except Exception:
+            previous_ds = util.load_text_file(ds_fn, quiet=True).strip()
+        except OSError:
             pass
+        except UnicodeDecodeError:
+            LOG.warning("Invalid previous datasource in file: %s", ds_fn)
+        except Exception as e:
+            LOG.warning("Unhandled exception: %s", e)
         if not previous_ds:
             previous_ds = ds
         util.write_file(ds_fn, "%s\n" % ds)
@@ -524,10 +528,15 @@ def previous_iid(self):
 
         dp = self.paths.get_cpath("data")
         iid_fn = os.path.join(dp, "instance-id")
+        self._previous_iid = NO_PREVIOUS_INSTANCE_ID
         try:
             self._previous_iid = util.load_text_file(iid_fn).strip()
-        except Exception:
-            self._previous_iid = NO_PREVIOUS_INSTANCE_ID
+        except OSError:
+            pass
+        except UnicodeDecodeError:
+            LOG.warning("Invalid 
instance id in file: %s", iid_fn) + except Exception as e: + LOG.warning("Unhandled exception: %s", e) LOG.debug("previous iid found to be %s", self._previous_iid) return self._previous_iid @@ -1033,7 +1042,7 @@ def _apply_netcfg_names(self, netcfg): LOG.debug("applying net config names for %s", netcfg) self.distro.networking.apply_network_config_names(netcfg) except Exception as e: - LOG.warning("Failed to rename devices: %s", e) + util.logexc(LOG, "Failed to rename devices: %s", e) def _get_per_boot_network_semaphore(self): return namedtuple("Semaphore", "semaphore args")( diff --git a/cloudinit/templater.py b/cloudinit/templater.py index d43ed7b1aca..4010ea8caff 100644 --- a/cloudinit/templater.py +++ b/cloudinit/templater.py @@ -163,8 +163,6 @@ def jinja_render(content, params): raise JinjaSyntaxParsingException( error=template_syntax_error, ) from template_syntax_error - except Exception as unknown_error: - raise unknown_error from unknown_error if text.find("\n") != -1: ident, rest = text.split("\n", 1) # remove the first line diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py index a1d095ccb9b..c8fe3bfd06e 100644 --- a/cloudinit/url_helper.py +++ b/cloudinit/url_helper.py @@ -745,6 +745,7 @@ def read_url_handle_exceptions( reason = "request error [%s]" % e url_exc = e except Exception as e: + LOG.warning("Unhandled exception: %s", e) reason = "unexpected error [%s]" % e url_exc = e time_taken = int(time.monotonic() - start_time) @@ -925,7 +926,11 @@ def exception_cb(self, msg, exception): date = exception.headers["date"] try: remote_time = time.mktime(parsedate(date)) + except (ValueError, OverflowError) as e: + LOG.warning("Failed to convert datetime '%s': %s", date, e) + return except Exception as e: + LOG.warning("Unhandled exception: %s", e) LOG.warning("Failed to convert datetime '%s': %s", date, e) return diff --git a/cloudinit/user_data.py b/cloudinit/user_data.py index e99ad9a183a..e6b118e82fe 100644 --- a/cloudinit/user_data.py +++ 
b/cloudinit/user_data.py @@ -15,6 +15,8 @@ from email.mime.nonmultipart import MIMENonMultipart from email.mime.text import MIMEText +import yaml + from cloudinit import features, handlers, util from cloudinit.url_helper import UrlError, read_file_or_url @@ -177,8 +179,10 @@ def _attach_launch_index(self, msg): payload = util.load_yaml(msg.get_payload(decode=True)) if payload: payload_idx = payload.get("launch-index") - except Exception: + except yaml.YAMLError: pass + except Exception as e: + LOG.warning("Unhandled exception: %s", e) # Header overrides contents, for now (?) or the other way around? if header_idx is not None: payload_idx = header_idx diff --git a/cloudinit/util.py b/cloudinit/util.py index 60c6c67e0af..cbdc60c3eca 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -33,6 +33,7 @@ import subprocess import sys import time +import zlib from base64 import b64decode from collections import deque, namedtuple from contextlib import contextmanager, suppress @@ -407,7 +408,13 @@ def decomp_gzip(data, quiet=True, decode=True): return decode_binary(gh.read()) else: return gh.read() + except (OSError, EOFError, zlib.error) as e: + if quiet: + return data + else: + raise DecompressionError(str(e)) from e except Exception as e: + LOG.warning("Unhandled exception: %s", e) if quiet: return data else: @@ -586,8 +593,10 @@ def get_linux_distro(): try: # Was removed in 3.8 dist = platform.dist() # type: ignore # pylint: disable=W1505,E1101 - except Exception: + except AttributeError: pass + except Exception as e: + LOG.warning("Unhandled exception: %s", e) finally: found = None for entry in dist: @@ -1361,8 +1370,10 @@ def search_for_mirror(candidates): if is_resolvable_url(cand): LOG.debug("found working mirror: '%s'", cand) return cand - except Exception: - pass + except ValueError: + LOG.debug("Failed to parse url: %s", cand) + except Exception as e: + LOG.warning("Unhandled exception: %s", e) return None @@ -1590,13 +1601,24 @@ def _get_cmdline(): contents 
= load_text_file("/proc/1/cmdline")
             # replace nulls with space and drop trailing null
             cmdline = contents.replace("\x00", " ")[:-1]
+        except OSError as e:
+            LOG.warning("failed reading /proc/1/cmdline: %s", e)
+            cmdline = ""
         except Exception as e:
+            LOG.warning(
+                "Unhandled exception: %s", e
+            )
             LOG.warning("failed reading /proc/1/cmdline: %s", e)
             cmdline = ""
     else:
         try:
             cmdline = load_text_file("/proc/cmdline").strip()
+        except OSError:
+            cmdline = ""
         except Exception:
+            LOG.warning(
+                "Unhandled exception reading /proc/cmdline"
+            )
             cmdline = ""
     return cmdline
@@ -2100,7 +2122,10 @@ def copy(src, dest):
 
 def time_rfc2822():
     try:
         ts = time.strftime("%a, %d %b %Y %H:%M:%S %z", time.gmtime())
-    except Exception:
+    except ValueError:
+        ts = "??"
+    except Exception as e:
+        LOG.warning("Unhandled exception: %s", e)
         ts = "??"
     return ts
diff --git a/tests/unittests/config/test_cc_runcmd.py b/tests/unittests/config/test_cc_runcmd.py
index c99e3dfa256..ef43e13a03e 100644
--- a/tests/unittests/config/test_cc_runcmd.py
+++ b/tests/unittests/config/test_cc_runcmd.py
@@ -50,21 +50,23 @@ def test_runcmd_shellify_fails(self, cls):
         cls.side_effect = TypeError("patched shellify")
         valid_config = {"runcmd": ["echo 42"]}
         cc = get_cloud(paths=self.paths)
-        with self.assertRaises(TypeError) as cm:
+        with self.assertRaises(TypeError):
             with self.allow_subp(["/bin/sh"]):
                 handle("cc_runcmd", valid_config, cc, None)
-        self.assertIn("Failed to shellify", str(cm.exception))
+        self.assertIn(
+            "Failed to shellify",
+            self.logs.getvalue(),
+        )
 
     def test_handler_invalid_command_set(self):
         """Commands which can't be converted to shell will raise errors."""
         invalid_config = {"runcmd": 1}
         cc = get_cloud(paths=self.paths)
-        with self.assertRaises(TypeError) as cm:
+        with self.assertRaises(TypeError):
             handle("cc_runcmd", invalid_config, cc, [])
         self.assertIn(
-            "Failed to shellify 1 into file"
-            " /var/lib/cloud/instances/iid-datasource-none/scripts/runcmd",
-            str(cm.exception),
+            "Failed to shellify",
+            
self.logs.getvalue(), ) def test_handler_write_valid_runcmd_schema_to_file(self): diff --git a/tests/unittests/config/test_modules.py b/tests/unittests/config/test_modules.py index d986442e250..ae50574d0ce 100644 --- a/tests/unittests/config/test_modules.py +++ b/tests/unittests/config/test_modules.py @@ -134,7 +134,7 @@ def test_run_section(self, frequency, active, caplog, mocker): assert mods.run_section("not_matter") if active: assert [ - mock.call([list(module_details)]) + mock.call([module_details]) ] == m_run_modules.call_args_list assert not caplog.text else: @@ -171,9 +171,7 @@ def test_run_section_examples( mocker.patch.object(module, "handle") m_run_modules = mocker.patch.object(mods, "_run_modules") assert mods.run_section("not_matter") - assert [ - mock.call([list(module_details)]) - ] == m_run_modules.call_args_list + assert [mock.call([module_details])] == m_run_modules.call_args_list assert "Skipping" not in caplog.text @mock.patch(M_PATH + "signature") diff --git a/tests/unittests/sources/test_vmware.py b/tests/unittests/sources/test_vmware.py index cfeff6d53a7..54c7afb3816 100644 --- a/tests/unittests/sources/test_vmware.py +++ b/tests/unittests/sources/test_vmware.py @@ -905,6 +905,7 @@ def test_get_imc_data_vmware_customization_enabled(self): "util.del_dir": True, "guestcust_util.search_file": self.tdir, "guestcust_util.wait_for_cust_cfg_file": conf_file, + "guestcust_util.send_rpc": ("", ""), }, ds.get_imc_data_fn, ) diff --git a/tests/unittests/test_util.py b/tests/unittests/test_util.py index 221e21de5c2..960337197d9 100644 --- a/tests/unittests/test_util.py +++ b/tests/unittests/test_util.py @@ -1317,7 +1317,7 @@ def test_get_linux_distro_no_impl( ): """Verify we get an empty tuple when no information exists and Exceptions are not propagated""" - m_platform_dist.side_effect = Exception() + m_platform_dist.side_effect = AttributeError() m_platform_system.return_value = "Linux" m_path_exists.return_value = 0 dist = util.get_linux_distro() From 
02b775d077f1f0821a3bdeecca71a31173fcd072 Mon Sep 17 00:00:00 2001 From: Brett Holman Date: Tue, 1 Oct 2024 08:36:28 -0600 Subject: [PATCH 09/11] fixup! chore: Make exception handling more explicit --- cloudinit/cmd/clean.py | 2 - cloudinit/cmd/main.py | 8 ++- cloudinit/config/cc_disk_setup.py | 66 ++++++++++++++++++- cloudinit/config/cc_grub_dpkg.py | 2 +- cloudinit/config/cc_mounts.py | 8 +-- cloudinit/config/cc_power_state_change.py | 4 -- cloudinit/config/cc_scripts_per_boot.py | 2 +- cloudinit/config/cc_scripts_per_once.py | 2 +- cloudinit/config/cc_scripts_vendor.py | 2 +- cloudinit/config/cc_ssh_import_id.py | 10 ++- cloudinit/config/cc_wireguard.py | 14 +++- cloudinit/distros/__init__.py | 12 ++-- cloudinit/handlers/jinja_template.py | 11 +++- cloudinit/net/__init__.py | 14 +++- cloudinit/net/netplan.py | 5 +- cloudinit/netinfo.py | 16 ++++- cloudinit/reporting/handlers.py | 10 +-- cloudinit/sources/DataSourceAzure.py | 60 ++++++++--------- cloudinit/sources/DataSourceCloudSigma.py | 14 ++-- cloudinit/sources/DataSourceEc2.py | 1 - cloudinit/sources/DataSourceGCE.py | 17 +++-- cloudinit/sources/DataSourceNoCloud.py | 8 ++- cloudinit/sources/DataSourceOVF.py | 21 ++++-- cloudinit/sources/DataSourceVMware.py | 19 +++++- cloudinit/sources/__init__.py | 10 ++- cloudinit/sources/azure/errors.py | 11 +++- cloudinit/sources/azure/kvp.py | 10 ++- cloudinit/sources/helpers/azure.py | 12 ---- cloudinit/sources/helpers/openstack.py | 42 +++++++++--- .../helpers/vmware/imc/guestcust_util.py | 5 +- cloudinit/stages.py | 26 ++++++-- cloudinit/url_helper.py | 15 +++-- cloudinit/user_data.py | 11 +++- cloudinit/util.py | 46 ++++++++----- 34 files changed, 356 insertions(+), 160 deletions(-) diff --git a/cloudinit/cmd/clean.py b/cloudinit/cmd/clean.py index 5f7287e45f6..1d61c11d2b8 100755 --- a/cloudinit/cmd/clean.py +++ b/cloudinit/cmd/clean.py @@ -8,7 +8,6 @@ import argparse import glob -import logging import os import sys @@ -38,7 +37,6 @@ GEN_SSH_CONFIG_FILES = [ 
"/etc/ssh/sshd_config.d/50-cloud-init.conf", ] -LOG = logging.getLogger(__name__) def get_parser(parser=None): diff --git a/cloudinit/cmd/main.py b/cloudinit/cmd/main.py index e710e60e087..f5e5fea9db1 100644 --- a/cloudinit/cmd/main.py +++ b/cloudinit/cmd/main.py @@ -802,11 +802,13 @@ def status_wrapper(name, args): try: status = json.loads(util.load_text_file(status_path)) except OSError: - LOG.warning("File %s cannot be accessed %s.", status_path, mode) + LOG.warning("File %s cannot be accessed: %s", status_path, mode) except (UnicodeDecodeError, json.JSONDecodeError) as e: - LOG.warning("File %s not valid json %s: %s", status_path, mode, e) + LOG.warning("File %s not valid json: %s", status_path, e) except Exception as e: - LOG.warning("File %s not valid json %s: %s", status_path, mode, e) + LOG.warning( + "Unhandled exception loading file %s: %s", status_path, e + ) if mode not in status["v1"]: # this should never happen, but leave it just to be safe diff --git a/cloudinit/config/cc_disk_setup.py b/cloudinit/config/cc_disk_setup.py index 1459044f96d..cfe2c9da207 100644 --- a/cloudinit/config/cc_disk_setup.py +++ b/cloudinit/config/cc_disk_setup.py @@ -12,7 +12,7 @@ import shlex from pathlib import Path -from cloudinit import performance, subp, util +from cloudinit import lifecycle, performance, subp, util from cloudinit.cloud import Cloud from cloudinit.config import Config from cloudinit.config.schema import MetaSchema @@ -184,6 +184,17 @@ def enumerate_disk(device, nodeps=False): raise RuntimeError( "Failed during disk check for %s\n%s" % (device, e) ) from e + except Exception as e: + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) + raise RuntimeError( + "Failed during disk check for %s\n%s" % (device, e) + ) from e parts = [x for x in (info.strip()).splitlines() if len(x.split()) > 0] @@ -355,6 +366,15 @@ def get_hdd_size(device): sector_size, _ = 
subp.subp(["blockdev", "--getss", device]) except subp.ProcessExecutionError as e: raise RuntimeError("Failed to get %s size\n%s" % (device, e)) from e + except Exception as e: + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) + raise RuntimeError("Failed to get %s size\n%s" % (device, e)) from e return int(size_in_bytes) / int(sector_size) @@ -376,6 +396,17 @@ def check_partition_mbr_layout(device, layout): raise RuntimeError( "Error running partition command on %s\n%s" % (device, e) ) from e + except Exception as e: + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) + raise RuntimeError( + "Error running partition command on %s\n%s" % (device, e) + ) from e found_layout = [] for line in out.splitlines(): @@ -407,6 +438,17 @@ def check_partition_gpt_layout(device, layout): raise RuntimeError( "Error running partition command on %s\n%s" % (device, e) ) from e + except Exception as e: + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) + raise RuntimeError( + "Error running partition command on %s\n%s" % (device, e) + ) from e out_lines = iter(out.splitlines()) # Skip header. 
Output looks like: @@ -631,7 +673,7 @@ def read_parttbl(device): util.udevadm_settle() try: subp.subp(probe_cmd) - except subp.ProcessExecutionError as e: + except Exception as e: util.logexc(LOG, "Failed reading the partition table %s" % e) util.udevadm_settle() @@ -650,6 +692,17 @@ def exec_mkpart_mbr(device, layout): raise RuntimeError( "Failed to partition device %s\n%s" % (device, e) ) from e + except Exception as e: + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) + raise RuntimeError( + "Failed to partition device %s\n%s" % (device, e) + ) from e read_parttbl(device) @@ -986,3 +1039,12 @@ def mkfs(fs_cfg): subp.subp(fs_cmd, shell=shell) except subp.ProcessExecutionError as e: raise RuntimeError("Failed to exec of '%s':\n%s" % (fs_cmd, e)) from e + except Exception as e: + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) + raise RuntimeError("Failed to exec of '%s':\n%s" % (fs_cmd, e)) from e diff --git a/cloudinit/config/cc_grub_dpkg.py b/cloudinit/config/cc_grub_dpkg.py index d39a4599cf3..fef0f99728e 100644 --- a/cloudinit/config/cc_grub_dpkg.py +++ b/cloudinit/config/cc_grub_dpkg.py @@ -125,7 +125,7 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: try: subp.subp(["debconf-set-selections"], data=dconf_sel) - except subp.ProcessExecutionError as e: + except Exception as e: util.logexc( LOG, "Failed to run debconf-set-selections for grub_dpkg: %s", e ) diff --git a/cloudinit/config/cc_mounts.py b/cloudinit/config/cc_mounts.py index 4b16988572e..40ac7197251 100644 --- a/cloudinit/config/cc_mounts.py +++ b/cloudinit/config/cc_mounts.py @@ -331,12 +331,10 @@ def handle_swapcfg(swapcfg): LOG.debug("swap file %s already in use", fname) return fname LOG.debug("swap file %s exists, but not in /proc/swaps", fname) - except OSError: - 
LOG.warning( - "swap file %s exists. Error reading /proc/swaps", fname - ) + except OSError as e: + LOG.warning("Error reading /proc/swaps: %s", e) except Exception as e: - LOG.warning("Unhandled exception: %s", e) + LOG.warning("Unhandled exception while reading /proc/swaps: %s", e) return fname try: diff --git a/cloudinit/config/cc_power_state_change.py b/cloudinit/config/cc_power_state_change.py index 32a1a5e620b..5bd6b96d847 100644 --- a/cloudinit/config/cc_power_state_change.py +++ b/cloudinit/config/cc_power_state_change.py @@ -71,11 +71,7 @@ def check_condition(cond): else: LOG.warning("%sunexpected exit %s. do not apply change.", pre, ret) return False - except OSError as e: - LOG.warning("%sUnexpected error: %s", pre, e) - return False except Exception as e: - LOG.warning("Unhandled exception: %s", e) LOG.warning("%sUnexpected error: %s", pre, e) return False diff --git a/cloudinit/config/cc_scripts_per_boot.py b/cloudinit/config/cc_scripts_per_boot.py index 53316e8dc7f..fa78b8a48d8 100644 --- a/cloudinit/config/cc_scripts_per_boot.py +++ b/cloudinit/config/cc_scripts_per_boot.py @@ -37,7 +37,7 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: runparts_path = os.path.join(cloud.get_cpath(), "scripts", SCRIPT_SUBDIR) try: subp.runparts(runparts_path) - except RuntimeError: + except Exception: LOG.warning( "Failed to run module %s (%s in %s)", name, diff --git a/cloudinit/config/cc_scripts_per_once.py b/cloudinit/config/cc_scripts_per_once.py index ee1d813edce..4d9ac583c49 100644 --- a/cloudinit/config/cc_scripts_per_once.py +++ b/cloudinit/config/cc_scripts_per_once.py @@ -35,7 +35,7 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: runparts_path = os.path.join(cloud.get_cpath(), "scripts", SCRIPT_SUBDIR) try: subp.runparts(runparts_path) - except RuntimeError: + except Exception: LOG.warning( "Failed to run module %s (%s in %s)", name, diff --git a/cloudinit/config/cc_scripts_vendor.py 
b/cloudinit/config/cc_scripts_vendor.py index 899e7867bca..a102c794ab5 100644 --- a/cloudinit/config/cc_scripts_vendor.py +++ b/cloudinit/config/cc_scripts_vendor.py @@ -38,7 +38,7 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: try: subp.runparts(runparts_path, exe_prefix=prefix) - except RuntimeError: + except Exception: LOG.warning( "Failed to run module %s (%s in %s)", name, diff --git a/cloudinit/config/cc_ssh_import_id.py b/cloudinit/config/cc_ssh_import_id.py index 58a068ced16..fcb4e436cf6 100644 --- a/cloudinit/config/cc_ssh_import_id.py +++ b/cloudinit/config/cc_ssh_import_id.py @@ -11,7 +11,7 @@ import pwd from typing import List -from cloudinit import subp, util +from cloudinit import lifecycle, subp, util from cloudinit.cloud import Cloud from cloudinit.config import Config from cloudinit.config.schema import MetaSchema @@ -83,8 +83,12 @@ def handle(name: str, cfg: Config, cloud: Cloud, args: list) -> None: try: import_ssh_ids(import_ids, user) except Exception as exc: - util.logexc( - LOG, "ssh-import-id failed for: %s %s", user, import_ids + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg=f"ssh-import-id failed for {user} {import_ids} with: %s", + args=exc, ) elist.append(exc) diff --git a/cloudinit/config/cc_wireguard.py b/cloudinit/config/cc_wireguard.py index dc3a78b97ef..ecbbbd7862f 100644 --- a/cloudinit/config/cc_wireguard.py +++ b/cloudinit/config/cc_wireguard.py @@ -6,7 +6,7 @@ import logging import re -from cloudinit import subp, util +from cloudinit import lifecycle, subp, util from cloudinit.cloud import Cloud from cloudinit.config import Config from cloudinit.config.schema import MetaSchema @@ -75,6 +75,18 @@ def write_config(wg_int: dict): "Failure writing Wireguard configuration file" f' {wg_int["config_path"]}:{NL}{str(e)}' ) from e + except Exception as e: + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + 
requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) + raise RuntimeError( + "Unhandled failure writing Wireguard configuration file" + f' {wg_int["config_path"]}:{NL}{str(e)}' + ) from e def enable_wg(wg_int: dict, cloud: Cloud): diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py index 77c372d1065..edea4f49e5b 100644 --- a/cloudinit/distros/__init__.py +++ b/cloudinit/distros/__init__.py @@ -416,10 +416,6 @@ def update_package_sources(self, *, force=False): continue try: manager.update_package_sources(force=force) - except subp.ProcessExecutionError as e: - LOG.error( - "Failed to update package using %s: %s", manager.name, e - ) except Exception as e: LOG.error( "Failed to update package using %s: %s", manager.name, e @@ -1780,5 +1776,11 @@ def uses_systemd(): except OSError: return False except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) return False diff --git a/cloudinit/handlers/jinja_template.py b/cloudinit/handlers/jinja_template.py index 62203cf4f01..c9568519b5e 100644 --- a/cloudinit/handlers/jinja_template.py +++ b/cloudinit/handlers/jinja_template.py @@ -7,7 +7,7 @@ from errno import EACCES from typing import Optional, Type -from cloudinit import handlers +from cloudinit import handlers, lifecycle from cloudinit.atomic_helper import b64d, json_dumps from cloudinit.helpers import Paths from cloudinit.settings import PER_ALWAYS @@ -126,7 +126,14 @@ def render_jinja_payload_from_file( except (OSError, ValueError, TypeError) as e: raise JinjaLoadError("Loading Jinja instance data failed") from e except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) + raise JinjaLoadError("Loading 
Jinja instance data failed") from e rendered_payload = render_jinja_payload( diff --git a/cloudinit/net/__init__.py b/cloudinit/net/__init__.py index 65897664787..9955888d211 100644 --- a/cloudinit/net/__init__.py +++ b/cloudinit/net/__init__.py @@ -14,7 +14,7 @@ from typing import Any, Callable, Dict, List, Optional, Tuple from urllib.parse import urlparse -from cloudinit import subp, util +from cloudinit import lifecycle, subp, util from cloudinit.net.netops.iproute2 import Iproute2 from cloudinit.url_helper import UrlError, readurl @@ -209,8 +209,6 @@ def get_dev_features(devname): features = read_sys_net(devname, "device/features") except (OSError, UnicodeError): pass - except Exception as e: - LOG.warning("Unhandled exception: %s", e) return features @@ -823,6 +821,16 @@ def find_entry(mac, driver, device_id): % (op, params, mac, new_name, e) ) except Exception as e: + # This exception is later raised, but as a looser exception. + # Make sure to log a warning due to unexpected exception type + # on newer releases. + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) errors.append( "[unknown] Unexpected exception " "performing %s%s for %s, %s: %s" diff --git a/cloudinit/net/netplan.py b/cloudinit/net/netplan.py index 00e4a59619d..314a0ec474a 100644 --- a/cloudinit/net/netplan.py +++ b/cloudinit/net/netplan.py @@ -273,10 +273,9 @@ def netplan_api_write_yaml_file(net_config_content: str) -> bool: ) return False except Exception as e: - LOG.warning("Unhandled exception: %s") LOG.warning( - "Unable to render network config using netplan python module." - " Fallback to write %s. %s", + "Unhandled exception while rendering network config using " + "netplan python module. Fallback to write %s. 
%s", CLOUDINIT_NETPLAN_FILE, e, ) diff --git a/cloudinit/netinfo.py b/cloudinit/netinfo.py index 67d57d32d2b..383451c7d6a 100644 --- a/cloudinit/netinfo.py +++ b/cloudinit/netinfo.py @@ -582,7 +582,13 @@ def netdev_pformat(): ) ) except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) lines.append( util.center( "Net device info failed ({error})".format(error=str(e)), @@ -641,7 +647,13 @@ def route_pformat(): ) util.logexc(LOG, "Route info failed: %s" % e) except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) lines.append( util.center( "Route info failed ({error})".format(error=str(e)), "!", 80 diff --git a/cloudinit/reporting/handlers.py b/cloudinit/reporting/handlers.py index 0ecd1efbb49..1e89b12345b 100644 --- a/cloudinit/reporting/handlers.py +++ b/cloudinit/reporting/handlers.py @@ -50,7 +50,7 @@ def __init__(self, level="DEBUG"): input_level = level try: level = getattr(logging, level.upper()) - except AttributeError: + except Exception: LOG.warning("invalid level '%s', using WARN", input_level) level = logging.WARN self.level = level @@ -131,15 +131,7 @@ def process_requests(self): log_req_resp=False, ) consecutive_failed = 0 - except url_helper.UrlError as e: - LOG.warning( - "Failed posting event: %s. This was caused by: %s", - args[1], - e, - ) - consecutive_failed += 1 except Exception as e: - LOG.warning("Unhandled exception: %s", e) LOG.warning( "Failed posting event: %s. 
This was caused by: %s", args[1], diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index 5e4e08fc291..10aa2dd5ee5 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -5,7 +5,6 @@ # This file is part of cloud-init. See LICENSE file for license information. import base64 -import binascii import functools import logging import os @@ -20,7 +19,15 @@ import requests -from cloudinit import net, performance, sources, ssh_util, subp, util +from cloudinit import ( + lifecycle, + net, + performance, + sources, + ssh_util, + subp, + util, +) from cloudinit.event import EventScope, EventType from cloudinit.net import device_driver from cloudinit.net.dhcp import ( @@ -793,12 +800,7 @@ def crawl_metadata(self): crawled_data["userdata_raw"] = base64.b64decode( "".join(imds_userdata.split()) ) - except binascii.Error: - report_diagnostic_event( - "Bad userdata in IMDS", logger_func=LOG.warning - ) - except Exception as e: - LOG.warning("Unhandled exception: %s", e) + except Exception: report_diagnostic_event( "Bad userdata in IMDS", logger_func=LOG.warning ) @@ -821,8 +823,14 @@ def crawl_metadata(self): try: ssh_keys = self._report_ready(pubkey_info=pubkey_info) except Exception as e: - LOG.warning("Unhandled exception: %s", e) # Failed to report ready, but continue with best effort. 
+ lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Failed to report ready: %s", + args=e, + ) else: LOG.debug("negotiating returned %s", ssh_keys) if ssh_keys: @@ -907,16 +915,12 @@ def _get_data(self): """ try: get_boot_telemetry() - except RuntimeError as e: - LOG.warning("Failed to get boot telemetry: %s", e) except Exception as e: - LOG.warning("Unhandled excption: %s", e) LOG.warning("Failed to get boot telemetry: %s", e) try: get_system_info() except Exception as e: - LOG.warning("Unhandled exception: %s", e) LOG.warning("Failed to get system information: %s", e) try: @@ -1160,7 +1164,6 @@ def _report_ready_for_pps( report_diagnostic_event(msg, logger_func=LOG.error) raise sources.InvalidMetaDataException(msg) from error except Exception as e: - LOG.warning("Unhandled exception: %s", e) msg = "Failed reporting ready while in the preprovisioning pool." report_diagnostic_event(msg, logger_func=LOG.error) raise sources.InvalidMetaDataException(msg) from e @@ -1387,12 +1390,6 @@ def _report_failure( ) self._negotiated = True return True - except UrlError as e: - report_diagnostic_event( - "Failed to report failure using " - "cached ephemeral dhcp context: %s" % e, - logger_func=LOG.error, - ) except Exception as e: report_diagnostic_event( "Failed to report failure using " @@ -1417,7 +1414,13 @@ def _report_failure( self._negotiated = True return True except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) report_diagnostic_event( "Failed to report failure using new ephemeral dhcp: %s" % e, logger_func=LOG.debug, @@ -1461,14 +1464,15 @@ def _report_ready( return data def _ppstype_from_imds(self, imds_md: dict) -> Optional[str]: - try: - return imds_md["extended"]["compute"]["ppsType"] - except KeyError as e: + pps_type = ( + 
imds_md.get("extended", {}).get("compute", {}).get("ppsType") + ) + if not pps_type: report_diagnostic_event( - "Could not retrieve pps configuration from IMDS: %s" % e, + "Could not retrieve pps configuration from IMDS", logger_func=LOG.debug, ) - return None + return pps_type def _determine_pps_type(self, ovf_cfg: dict, imds_md: dict) -> PPSType: """Determine PPS type using OVF, IMDS data, and reprovision marker.""" @@ -1890,10 +1894,6 @@ def _redact_password(cnt, fname): except ET.ParseError: LOG.critical("failed to redact userpassword in %s", fname) return cnt - except Exception as e: - LOG.warning("Unhandled exception: %s", e) - LOG.critical("failed to redact userpassword in %s", fname) - return cnt if not datadir: return diff --git a/cloudinit/sources/DataSourceCloudSigma.py b/cloudinit/sources/DataSourceCloudSigma.py index 54a6fca0ae5..11f2c180c3e 100644 --- a/cloudinit/sources/DataSourceCloudSigma.py +++ b/cloudinit/sources/DataSourceCloudSigma.py @@ -10,7 +10,7 @@ from serial import SerialException, SerialTimeoutException -from cloudinit import dmi, sources +from cloudinit import dmi, lifecycle, sources from cloudinit.sources import DataSourceHostname from cloudinit.sources.helpers.cloudsigma import SERIAL_PORT, Cepko @@ -61,10 +61,16 @@ def _get_data(self): LOG.debug("CloudSigma: Unable to read from serial port") return False except Exception as e: - LOG.warning("Unhandled exception: %s", e) # TODO: check for explicit "config on", and then warn - # but since no explicit config is available now, just debug. - LOG.debug("CloudSigma: Unable to read from serial port") + # but since no explicit config is available now, just log. 
+ lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception, could not read from serial port: %s", + args=e, + ) + return False self.dsmode = self._determine_dsmode( diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py index edbad161e58..4289e65e209 100644 --- a/cloudinit/sources/DataSourceEc2.py +++ b/cloudinit/sources/DataSourceEc2.py @@ -826,7 +826,6 @@ def identify_platform(): if result: return result except Exception as e: - LOG.warning("Unhandled exception: %s", e) LOG.warning( "calling %s with %s raised exception: %s", checker, data, e ) diff --git a/cloudinit/sources/DataSourceGCE.py b/cloudinit/sources/DataSourceGCE.py index 68fb36ba60b..9bf43b4a4db 100644 --- a/cloudinit/sources/DataSourceGCE.py +++ b/cloudinit/sources/DataSourceGCE.py @@ -7,7 +7,7 @@ import logging from base64 import b64decode -from cloudinit import dmi, net, sources, url_helper, util +from cloudinit import dmi, lifecycle, net, sources, url_helper, util from cloudinit.distros import ug_util from cloudinit.event import EventScope, EventType from cloudinit.net.dhcp import NoDHCPLeaseError @@ -109,13 +109,18 @@ def _get_data(self): url_params=url_params, ) except Exception as e: - LOG.warning("Unhandled exception: %s", e) - LOG.debug( - "Error fetching IMD with candidate NIC %s: %s", - candidate_nic, - e, + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg=( + "Error fetching IMD with candidate NIC " + f"{candidate_nic}: %s" + ), + args=e, ) continue + except NoDHCPLeaseError: continue if ret["success"]: diff --git a/cloudinit/sources/DataSourceNoCloud.py b/cloudinit/sources/DataSourceNoCloud.py index dadfc41b5f9..2d56a61bc2e 100644 --- a/cloudinit/sources/DataSourceNoCloud.py +++ b/cloudinit/sources/DataSourceNoCloud.py @@ -370,7 +370,13 @@ def parse_cmdline_data(ds_id, fill, cmdline=None): try: (k, v) = item.split("=", 1) except 
Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) k = item v = None if k in s2l: diff --git a/cloudinit/sources/DataSourceOVF.py b/cloudinit/sources/DataSourceOVF.py index cc867fb6188..14a800e5d63 100644 --- a/cloudinit/sources/DataSourceOVF.py +++ b/cloudinit/sources/DataSourceOVF.py @@ -22,7 +22,7 @@ import yaml -from cloudinit import sources, subp, util +from cloudinit import lifecycle, sources, subp, util LOG = logging.getLogger(__name__) @@ -162,18 +162,27 @@ def read_ovf_environment(contents, read_network=False): md[prop] = (yaml.safe_load(network_config) or {}).get( "network" ) - except (binascii.Error, yaml.YAMLError): - LOG.debug("Ignore network-config in wrong format") except Exception as e: - LOG.warning("Unhandled exception: %s", e) - LOG.debug("Ignore network-config in wrong format") + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Ignore network-config in wrong format: %s", + args=e, + ) elif prop == "user-data": try: ud = base64.b64decode(val.encode()) except binascii.Error: ud = val.encode() except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Error encoding user-data: %s", + args=e, + ) ud = val.encode() return (md, ud, cfg) diff --git a/cloudinit/sources/DataSourceVMware.py b/cloudinit/sources/DataSourceVMware.py index f1e07ad1a64..e72ffe359a8 100644 --- a/cloudinit/sources/DataSourceVMware.py +++ b/cloudinit/sources/DataSourceVMware.py @@ -27,7 +27,15 @@ import socket import time -from cloudinit import atomic_helper, dmi, net, netinfo, sources, util +from cloudinit import ( + atomic_helper, + dmi, + lifecycle, + net, + netinfo, + sources, + util, +) from cloudinit.log import loggers from 
cloudinit.sources.helpers.vmware.imc import guestcust_util from cloudinit.subp import ProcessExecutionError, subp, which @@ -1065,7 +1073,14 @@ def main(): try: loggers.setup_basic_logging() except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception while setting up logging: %s", + args=e, + ) + metadata = { WAIT_ON_NETWORK: { WAIT_ON_NETWORK_IPV4: True, diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 568b3d7235c..a9fe7236a64 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -1214,9 +1214,13 @@ def pkl_load(fname: str) -> Optional[DataSource]: if os.path.isfile(fname): LOG.warning("failed loading pickle in %s: %s", fname, e) except Exception as e: - LOG.warning("Unhandled exception: %s", e) - if os.path.isfile(fname): - LOG.warning("failed loading pickle in %s: %s", fname, e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception while reading pickle file: %s", + args=e, + ) # This is allowed so just return nothing successfully loaded... 
if not pickle_contents: diff --git a/cloudinit/sources/azure/errors.py b/cloudinit/sources/azure/errors.py index ae8a56d9e48..b3adfecd2f1 100644 --- a/cloudinit/sources/azure/errors.py +++ b/cloudinit/sources/azure/errors.py @@ -13,7 +13,7 @@ import requests -from cloudinit import subp, version +from cloudinit import lifecycle, subp, version from cloudinit.sources.azure import identity from cloudinit.url_helper import UrlError @@ -56,10 +56,17 @@ def __init__( try: self.vm_id = identity.query_vm_id() + except (RuntimeError, OSError, ValueError) as e: self.vm_id = f"failed to read vm id: {e!r}" except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception while querying VM ID: %s", + args=e, + ) self.vm_id = f"failed to read vm id: {e!r}" def as_encoded_report( diff --git a/cloudinit/sources/azure/kvp.py b/cloudinit/sources/azure/kvp.py index 5a9635eee2c..3ac4371b6d6 100644 --- a/cloudinit/sources/azure/kvp.py +++ b/cloudinit/sources/azure/kvp.py @@ -6,7 +6,7 @@ from datetime import datetime from typing import Optional -from cloudinit import version +from cloudinit import lifecycle, version from cloudinit.reporting import handlers, instantiated_handler_registry from cloudinit.sources.azure import errors, identity @@ -45,7 +45,13 @@ def report_success_to_host() -> bool: except (RuntimeError, OSError, ValueError) as e: vm_id = f"failed to read vm id: {e!r}" except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception while querying VM ID: %s", + args=e, + ) vm_id = f"failed to read vm id: {e!r}" report = errors.encode_report( diff --git a/cloudinit/sources/helpers/azure.py b/cloudinit/sources/helpers/azure.py index 75e8756473d..0914b4256e0 100644 --- a/cloudinit/sources/helpers/azure.py +++ 
b/cloudinit/sources/helpers/azure.py @@ -207,13 +207,7 @@ def report_dmesg_to_kvp(): try: out, _ = subp.subp(["dmesg"], decode=False, capture=True) report_compressed_event("dmesg", out) - except (zlib.error, subp.ProcessExecutionError) as e: - report_diagnostic_event( "Exception when dumping dmesg log: %s" % repr(e), logger_func=LOG.warning, ) except Exception as e: - LOG.warning("Unhandled exception: %s", e) report_diagnostic_event( "Exception when dumping dmesg log: %s" % repr(e), logger_func=LOG.warning, @@ -718,13 +712,7 @@ def eject_iso(self, iso_dev, distro: distros.Distro) -> None: LOG.debug("Ejecting the provisioning iso") try: distro.eject_media(iso_dev) - except subp.ProcessExecutionError as e: - report_diagnostic_event( "Failed ejecting the provisioning iso: %s" % e, logger_func=LOG.error, ) except Exception as e: - LOG.warning("Unhandled exception: %s", e) report_diagnostic_event( "Failed ejecting the provisioning iso: %s" % e, logger_func=LOG.error, diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 9171817096d..8df6c6923a1 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -14,7 +14,7 @@ import logging import os -from cloudinit import net, sources, subp, url_helper, util +from cloudinit import lifecycle, net, sources, subp, url_helper, util from cloudinit.sources import BrokenMetadata from cloudinit.sources.helpers import ec2 @@ -190,11 +190,15 @@ def _find_working_version(self): e, ) except Exception as e: - LOG.warning("Unhandled exception: %s", e) - LOG.debug( "Unable to read openstack versions from %s due to: %s", self.base_path, e, + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg=( + "Unhandled exception while reading openstack version " + f"from {self.base_path} due to: %s" + ), + args=e, ) # openstack.OS_VERSIONS is stored in chronological order, so @@ -301,7 +305,13 @@
def datafiles(version): "Failed to process path %s: %s" % (path, e) ) from e except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg=("Unhandled exception while decoding data: %s"), + args=e, + ) raise BrokenMetadata( "Failed to process path %s: %s" % (path, e) ) from e @@ -318,7 +328,13 @@ def datafiles(version): "Badly formatted metadata random_seed entry: %s" % e ) from e except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg=("Unhandled exception while getting random seed: %s"), + args=e, + ) raise BrokenMetadata( "Badly formatted metadata random_seed entry: %s" % e ) from e @@ -413,7 +429,15 @@ def _read_ec2_metadata(self): "Failed to process path %s: %s" % (path, e) ) from e except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg=( + "Unhandled exception while reading meta-data.json: %s" + ), + args=e, + ) raise BrokenMetadata( "Failed to process path %s: %s" % (path, e) ) from e diff --git a/cloudinit/sources/helpers/vmware/imc/guestcust_util.py b/cloudinit/sources/helpers/vmware/imc/guestcust_util.py index a761ecbdbdf..1b75df83e56 100644 --- a/cloudinit/sources/helpers/vmware/imc/guestcust_util.py +++ b/cloudinit/sources/helpers/vmware/imc/guestcust_util.py @@ -52,9 +52,8 @@ def send_rpc(rpc): # Remove the trailing newline in the output. 
if out: out = out.rstrip() - except subp.ProcessExecutionError as e: - logger.debug("Failed to send RPC command") - logger.exception(e) + except Exception: + logger.exception("Failed to send RPC command") return (out, err) diff --git a/cloudinit/stages.py b/cloudinit/stages.py index b464171049a..2be50992ffd 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -487,10 +487,15 @@ def _reflect_cur_instance(self): previous_ds = util.load_text_file(ds_fn, quiet=True).strip() except OSError: pass - except UnicodeDecodeError: - LOG.warning("Invalid previous datasource in file: %s", previous_ds) except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg=f"Unhandled error loading datasource file {ds_fn}: %s", + args=e, + ) + if not previous_ds: previous_ds = ds util.write_file(ds_fn, "%s\n" % ds) @@ -533,10 +538,17 @@ def previous_iid(self): self._previous_iid = util.load_text_file(iid_fn).strip() except OSError: pass - except UnicodeDecodeError: - LOG.warning("Invalid instance id in file: %s", iid_fn) except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg=( + "Unhandled error loading previous instance id " + f"{iid_fn}: %s" + ), + args=e, + ) LOG.debug("previous iid found to be %s", self._previous_iid) return self._previous_iid @@ -1042,7 +1054,7 @@ def _apply_netcfg_names(self, netcfg): LOG.debug("applying net config names for %s", netcfg) self.distro.networking.apply_network_config_names(netcfg) except Exception as e: - util.logexc(LOG, "Failed to rename devices: %s", e) + LOG.warning("Failed to rename devices: %s", e) def _get_per_boot_network_semaphore(self): return namedtuple("Semaphore", "semaphore args")( diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py index c8fe3bfd06e..16eeb36dce8 100644 ---
a/cloudinit/url_helper.py +++ b/cloudinit/url_helper.py @@ -28,7 +28,7 @@ import requests from requests import exceptions -from cloudinit import performance, util, version +from cloudinit import lifecycle, performance, util, version LOG = logging.getLogger(__name__) @@ -745,9 +745,16 @@ def read_url_handle_exceptions( reason = "request error [%s]" % e url_exc = e except Exception as e: - LOG.warning("Unhandled exception: %s", e) reason = "unexpected error [%s]" % e url_exc = e + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception in url handler: %s", + args=e, + ) + time_taken = int(time.monotonic() - start_time) max_wait_str = "%ss" % max_wait if max_wait else "unlimited" status_msg = "Calling '%s' failed [%s/%s]: %s" % ( @@ -926,11 +933,7 @@ def exception_cb(self, msg, exception): date = exception.headers["date"] try: remote_time = time.mktime(parsedate(date)) - except (ValueError, OverflowError) as e: - LOG.warning("Failed to convert datetime '%s': %s", date, e) - return except Exception as e: - LOG.warning("Unhandled exception: %s", e) LOG.warning("Failed to convert datetime '%s': %s", date, e) return diff --git a/cloudinit/user_data.py b/cloudinit/user_data.py index e6b118e82fe..65e626ef001 100644 --- a/cloudinit/user_data.py +++ b/cloudinit/user_data.py @@ -17,7 +17,7 @@ import yaml -from cloudinit import features, handlers, util +from cloudinit import features, handlers, lifecycle, util from cloudinit.url_helper import UrlError, read_file_or_url LOG = logging.getLogger(__name__) @@ -182,7 +182,14 @@ def _attach_launch_index(self, msg): except yaml.YAMLError: pass except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception getting launch-index: %s", + args=e, + ) + # Header overrides contents, for now (?) or the other way around? 
if header_idx is not None: payload_idx = header_idx diff --git a/cloudinit/util.py b/cloudinit/util.py index cbdc60c3eca..4124e18b8aa 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -64,6 +64,7 @@ from cloudinit import ( features, importer, + lifecycle, mergers, net, performance, @@ -414,7 +415,13 @@ def decomp_gzip(data, quiet=True, decode=True): else: raise DecompressionError(str(e)) from e except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception while decoding gzip data: %s", + args=e, + ) if quiet: return data else: @@ -596,7 +603,14 @@ def get_linux_distro(): except AttributeError: pass except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception: %s", + args=e, + ) + finally: found = None for entry in dist: @@ -1601,23 +1615,19 @@ def _get_cmdline(): contents = load_text_file("/proc/1/cmdline") # replace nulls with space and drop trailing null cmdline = contents.replace("\x00", " ")[:-1] - except OSError as e: - LOG.warning("failed reading /proc/1/cmdline: %s", e) - cmdline = "" except Exception as e: - LOG.warning( - "Unhandled exception: %s", - ) LOG.warning("failed reading /proc/1/cmdline: %s", e) cmdline = "" else: try: cmdline = load_text_file("/proc/cmdline").strip() - except OSError: - cmdline = "" - except Exception: - LOG.warning( - "Unhandled exception: %s", + except Exception as e: + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception while reading /proc/cmdline: %s", + args=e, ) cmdline = "" @@ -2122,10 +2132,14 @@ def copy(src, dest): def time_rfc2822(): try: ts = time.strftime("%a, %d %b %Y %H:%M:%S %z", time.gmtime()) - except ValueError: - ts = "??" 
except Exception as e: - LOG.warning("Unhandled exception: %s", e) + lifecycle.log_with_downgradable_level( + logger=LOG, + version="24.4", + requested_level=logging.WARN, + msg="Unhandled exception getting rfc2822 time: %s", + args=e, + ) ts = "??" return ts From 5294901ec0f299b432018f9daf7d8b3e16aabfb3 Mon Sep 17 00:00:00 2001 From: Brett Holman Date: Mon, 22 Apr 2024 21:02:05 -0600 Subject: [PATCH 10/11] chore(IOError): Update IOError to OSError In Python 3.3+, IOError is an alias of OSError[1]. Some call sites use one or the other or both. Standardize on OSError. [1] https://docs.python.org/3/library/exceptions.html#IOError --- cloudinit/cmd/cloud_id.py | 2 +- cloudinit/cmd/devel/render.py | 2 +- cloudinit/cmd/query.py | 17 ++++++++--------- cloudinit/config/cc_apt_configure.py | 4 ++-- cloudinit/config/cc_mcollective.py | 4 ++-- cloudinit/config/cc_mounts.py | 2 +- cloudinit/config/cc_power_state_change.py | 6 +++--- cloudinit/config/cc_seed_random.py | 2 +- cloudinit/config/schema.py | 4 ++-- cloudinit/distros/__init__.py | 16 ++++++++-------- cloudinit/distros/alpine.py | 8 ++++---- cloudinit/distros/aosc.py | 2 +- cloudinit/distros/arch.py | 4 ++-- cloudinit/distros/debian.py | 4 ++-- cloudinit/distros/freebsd.py | 2 +- cloudinit/distros/gentoo.py | 4 ++-- cloudinit/distros/opensuse.py | 4 ++-- cloudinit/distros/parsers/hostname.py | 2 +- cloudinit/distros/parsers/resolv_conf.py | 4 ++-- cloudinit/distros/rhel_util.py | 2 +- cloudinit/helpers.py | 4 ++-- cloudinit/log/loggers.py | 4 ++-- cloudinit/net/bsd.py | 2 +- cloudinit/net/cmdline.py | 2 +- cloudinit/reporting/handlers.py | 6 +++--- cloudinit/sources/DataSourceAltCloud.py | 6 +++--- cloudinit/sources/DataSourceAzure.py | 2 +- cloudinit/sources/DataSourceBigstep.py | 2 +- cloudinit/sources/DataSourceConfigDrive.py | 4 ++-- cloudinit/sources/DataSourceIBMCloud.py | 12 +++++++++--- cloudinit/sources/DataSourceOpenNebula.py | 2 +- cloudinit/sources/DataSourceOpenStack.py | 6 +++---
cloudinit/sources/DataSourceSmartOS.py | 2 +- cloudinit/sources/DataSourceWSL.py | 18 +++++++++--------- cloudinit/sources/__init__.py | 2 +- cloudinit/sources/helpers/openstack.py | 6 +++--- cloudinit/ssh_util.py | 6 +++--- cloudinit/stages.py | 2 +- cloudinit/subp.py | 4 ++-- cloudinit/url_helper.py | 6 +++--- cloudinit/user_data.py | 2 +- cloudinit/util.py | 14 +++++++------- tests/integration_tests/instances.py | 2 +- tests/unittests/cmd/test_query.py | 2 +- tests/unittests/config/test_cc_seed_random.py | 2 +- tests/unittests/config/test_cc_yum_add_repo.py | 2 +- .../config/test_cc_zypper_add_repo.py | 2 +- tests/unittests/config/test_schema.py | 4 ++-- tests/unittests/net/test_init.py | 5 ++--- tests/unittests/sources/test_altcloud.py | 4 ++-- tests/unittests/sources/test_wsl.py | 4 ++-- tests/unittests/test_merging.py | 4 ++-- tests/unittests/test_ssh_util.py | 2 +- tests/unittests/test_util.py | 4 ++-- 54 files changed, 125 insertions(+), 121 deletions(-) diff --git a/cloudinit/cmd/cloud_id.py b/cloudinit/cmd/cloud_id.py index 650e53cdc00..22d6d607862 100644 --- a/cloudinit/cmd/cloud_id.py +++ b/cloudinit/cmd/cloud_id.py @@ -76,7 +76,7 @@ def handle_args(name, args): try: with open(args.instance_data) as file: instance_data = json.load(file) - except IOError: + except OSError: return log_util.error( "File not found '%s'. 
Provide a path to instance data json file" " using --instance-data" % args.instance_data diff --git a/cloudinit/cmd/devel/render.py b/cloudinit/cmd/devel/render.py index 152b7768197..7c556a67a5e 100755 --- a/cloudinit/cmd/devel/render.py +++ b/cloudinit/cmd/devel/render.py @@ -86,7 +86,7 @@ def render_template(user_data_path, instance_data_path=None, debug=False): try: with open(user_data_path) as stream: user_data = stream.read() - except IOError: + except OSError: LOG.error("Missing user-data file: %s", user_data_path) return 1 try: diff --git a/cloudinit/cmd/query.py b/cloudinit/cmd/query.py index 283c6069a39..35a5920f0e7 100644 --- a/cloudinit/cmd/query.py +++ b/cloudinit/cmd/query.py @@ -19,7 +19,6 @@ import logging import os import sys -from errno import EACCES from cloudinit import atomic_helper, util from cloudinit.cmd.devel import read_cfg_paths @@ -147,7 +146,7 @@ def _read_instance_data(instance_data, user_data, vendor_data) -> dict: Non-root users will have redacted INSTANCE_JSON_FILE content and redacted vendordata and userdata values. - :raise: IOError/OSError on absence of instance-data.json file or invalid + :raise: OSError on absence of instance-data.json file or invalid access perms. """ uid = os.getuid() @@ -181,11 +180,11 @@ def _read_instance_data(instance_data, user_data, vendor_data) -> dict: try: instance_json = util.load_text_file(instance_data_fn) - except (IOError, OSError) as e: - if e.errno == EACCES: - LOG.error("No read permission on '%s'. Try sudo", instance_data_fn) - else: - LOG.error("Missing instance-data file: %s", instance_data_fn) + except PermissionError: + LOG.error("No read permission on '%s'. 
Try sudo", instance_data_fn) + raise + except OSError: + LOG.error("Missing instance-data file: %s", instance_data_fn) raise instance_data = util.load_json(instance_json) @@ -193,7 +192,7 @@ def _read_instance_data(instance_data, user_data, vendor_data) -> dict: combined_cloud_config = util.load_json( util.load_text_file(combined_cloud_config_fn) ) - except (IOError, OSError): + except OSError: # File will not yet be present in init-local stage. # It's created in `init` when vendor-data and user-data are processed. combined_cloud_config = None @@ -274,7 +273,7 @@ def handle_args(name, args): instance_data = _read_instance_data( args.instance_data, args.user_data, args.vendor_data ) - except (IOError, OSError): + except OSError: return 1 if args.format: payload = "## template: jinja\n{fmt}".format(fmt=args.format) diff --git a/cloudinit/config/cc_apt_configure.py b/cloudinit/config/cc_apt_configure.py index 787270e665d..e43a847fc96 100644 --- a/cloudinit/config/cc_apt_configure.py +++ b/cloudinit/config/cc_apt_configure.py @@ -153,7 +153,7 @@ def apply_apt(cfg, cloud, gpg): try: apply_apt_config(cfg, APT_PROXY_FN, APT_CONFIG_FN) - except (IOError, OSError): + except OSError: LOG.exception("Failed to apply proxy or apt config info:") # Process 'apt_source -> sources {dict}' @@ -733,7 +733,7 @@ def add_apt_sources( omode = "w" util.write_file(sourcefn, contents, omode=omode) - except IOError as detail: + except OSError as detail: LOG.exception("failed write to file %s: %s", sourcefn, detail) raise diff --git a/cloudinit/config/cc_mcollective.py b/cloudinit/config/cc_mcollective.py index a70bd59d665..b7281018270 100644 --- a/cloudinit/config/cc_mcollective.py +++ b/cloudinit/config/cc_mcollective.py @@ -48,7 +48,7 @@ def configure( try: old_contents = util.load_binary_file(server_cfg, quiet=False) mcollective_config = ConfigObj(io.BytesIO(old_contents)) - except IOError as e: + except OSError as e: if e.errno != errno.ENOENT: raise else: @@ -85,7 +85,7 @@ def 
configure( # We got all our config as wanted we'll copy # the previous server.cfg and overwrite the old with our new one util.copy(server_cfg, "%s.old" % (server_cfg)) - except IOError as e: + except OSError as e: if e.errno == errno.ENOENT: # Doesn't exist to copy... pass diff --git a/cloudinit/config/cc_mounts.py b/cloudinit/config/cc_mounts.py index 40ac7197251..db569e21042 100644 --- a/cloudinit/config/cc_mounts.py +++ b/cloudinit/config/cc_mounts.py @@ -282,7 +282,7 @@ def setup_swapfile(fname, size=None, maxsize=None): if str(size).lower() == "auto": try: memsize = util.read_meminfo()["total"] - except IOError: + except OSError: LOG.debug("Not creating swap: failed to read meminfo") return diff --git a/cloudinit/config/cc_power_state_change.py b/cloudinit/config/cc_power_state_change.py index 5bd6b96d847..028d02481f8 100644 --- a/cloudinit/config/cc_power_state_change.py +++ b/cloudinit/config/cc_power_state_change.py @@ -48,7 +48,7 @@ def givecmdline(pid): return m.group(2) else: return util.load_text_file("/proc/%s/cmdline" % pid) - except IOError: + except OSError: return None @@ -194,11 +194,11 @@ def fatal(msg): msg = "cmdline changed for %s [now: %s]" % (pid, cmdline) break - except IOError as ioerr: + except OSError as ioerr: if ioerr.errno in known_errnos: msg = "pidfile gone [%d]" % ioerr.errno else: - fatal("IOError during wait: %s" % ioerr) + fatal("OSError during wait: %s" % ioerr) break except Exception as e: diff --git a/cloudinit/config/cc_seed_random.py b/cloudinit/config/cc_seed_random.py index edac6dbb9f3..fa935480ae0 100644 --- a/cloudinit/config/cc_seed_random.py +++ b/cloudinit/config/cc_seed_random.py @@ -39,7 +39,7 @@ def _decode(data, encoding=None): elif encoding.lower() in ["gzip", "gz"]: return util.decomp_gzip(data, quiet=False, decode=None) else: - raise IOError("Unknown random_seed encoding: %s" % (encoding)) + raise OSError("Unknown random_seed encoding: %s" % (encoding)) def handle_random_seed_command(command, required, 
update_env): diff --git a/cloudinit/config/schema.py b/cloudinit/config/schema.py index a78609ed04b..ce3df59e075 100644 --- a/cloudinit/config/schema.py +++ b/cloudinit/config/schema.py @@ -1631,7 +1631,7 @@ def get_schema(schema_type: SchemaType = SchemaType.CLOUD_CONFIG) -> dict: full_schema = None try: full_schema = json.loads(load_text_file(schema_file)) - except (IOError, OSError): + except OSError: LOG.warning( "Skipping %s schema validation. No JSON schema file found %s.", schema_type.value, @@ -1771,7 +1771,7 @@ def get_processed_or_fallback_path( try: paths = read_cfg_paths(fetch_existing_datasource="trust") - except (IOError, OSError) as e: + except OSError as e: if e.errno == EACCES: LOG.debug( "Using default instance-data/user-data paths for non-root user" diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py index edea4f49e5b..7244e5f2a25 100644 --- a/cloudinit/distros/__init__.py +++ b/cloudinit/distros/__init__.py @@ -378,7 +378,7 @@ def _write_network_state(self, network_state, renderer: Renderer): def _find_tz_file(self, tz): tz_file = os.path.join(self.tz_zone_dir, str(tz)) if not os.path.isfile(tz_file): - raise IOError( + raise OSError( "Invalid timezone %s, no file found at %s" % (tz, tz_file) ) return tz_file @@ -595,7 +595,7 @@ def update_hostname(self, hostname, fqdn, prev_hostname_fn): for fn in update_files: try: self._write_hostname(hostname, fn) - except IOError: + except OSError: util.logexc( LOG, "Failed to write hostname %s to %s", hostname, fn ) @@ -1169,14 +1169,14 @@ def write_doas_rules(self, user, rules, doas_file=None): contents = [util.make_header(), content] try: util.write_file(doas_file, "\n".join(contents), mode=0o440) - except IOError as e: + except OSError as e: util.logexc(LOG, "Failed to write doas file %s", doas_file) raise e else: if content not in util.load_text_file(doas_file): try: util.append_file(doas_file, content) - except IOError as e: + except OSError as e: util.logexc( LOG, "Failed to 
append to doas file %s", doas_file ) @@ -1231,7 +1231,7 @@ def ensure_sudo_dir(self, path, sudo_base="/etc/sudoers"): sudoers_contents = "\n".join(lines) util.append_file(sudo_base, sudoers_contents) LOG.debug("Added '#includedir %s' to %s", path, sudo_base) - except IOError as e: + except OSError as e: util.logexc(LOG, "Failed to write %s", sudo_base) raise e util.ensure_dir(path, 0o750) @@ -1264,14 +1264,14 @@ def write_sudo_rules(self, user, rules, sudo_file=None): ] try: util.write_file(sudo_file, "\n".join(contents), 0o440) - except IOError as e: + except OSError as e: util.logexc(LOG, "Failed to write sudoers file %s", sudo_file) raise e else: if content not in util.load_text_file(sudo_file): try: util.append_file(sudo_file, content) - except IOError as e: + except OSError as e: util.logexc( LOG, "Failed to append to sudoers file %s", sudo_file ) @@ -1493,7 +1493,7 @@ def _get_proc_stat_by_index(pid: int, field: int) -> Optional[int]: ) return None return int(match.group(field)) - except IOError as e: + except OSError as e: LOG.warning("Failed to load /proc/%s/stat. %s", pid, e) except IndexError: LOG.warning( diff --git a/cloudinit/distros/alpine.py b/cloudinit/distros/alpine.py index 19912d3724f..d0764f0ba56 100644 --- a/cloudinit/distros/alpine.py +++ b/cloudinit/distros/alpine.py @@ -88,7 +88,7 @@ def _write_hostname(self, hostname, filename): # Try to update the previous one # so lets see if we can read it first. 
conf = self._read_hostname_conf(filename) - except IOError: + except OSError: create_hostname_file = util.get_cfg_option_bool( self._cfg, "create_hostname_file", True ) @@ -118,7 +118,7 @@ def _read_hostname(self, filename, default=None): try: conf = self._read_hostname_conf(filename) hostname = conf.hostname - except IOError: + except OSError: pass if not hostname: return default @@ -412,7 +412,7 @@ def add_user(self, name, **kwargs) -> bool: util.write_file( shadow_file, shadow_contents, omode="w", preserve_mode=True ) - except IOError as e: + except OSError as e: util.logexc(LOG, "Failed to update %s file", shadow_file) raise e else: @@ -530,7 +530,7 @@ def expire_passwd(self, user): omode="w", preserve_mode=True, ) - except IOError as e: + except OSError as e: util.logexc(LOG, "Failed to update %s file", shadow_file) raise e else: diff --git a/cloudinit/distros/aosc.py b/cloudinit/distros/aosc.py index 96fa48b8b6e..ab66e23c0a7 100644 --- a/cloudinit/distros/aosc.py +++ b/cloudinit/distros/aosc.py @@ -121,7 +121,7 @@ def read_locale_conf(sys_path): try: contents = util.load_text_file(sys_path).splitlines() exists = True - except IOError: + except OSError: contents = [] return (exists, SysConf(contents)) diff --git a/cloudinit/distros/arch.py b/cloudinit/distros/arch.py index f09d34ca5e3..8afb0b91488 100644 --- a/cloudinit/distros/arch.py +++ b/cloudinit/distros/arch.py @@ -64,7 +64,7 @@ def _write_hostname(self, hostname, filename): # Try to update the previous one # so lets see if we can read it first. 
conf = self._read_hostname_conf(filename) - except IOError: + except OSError: create_hostname_file = util.get_cfg_option_bool( self._cfg, "create_hostname_file", True ) @@ -94,7 +94,7 @@ def _read_hostname(self, filename, default=None): try: conf = self._read_hostname_conf(filename) hostname = conf.hostname - except IOError: + except OSError: pass if not hostname: return default diff --git a/cloudinit/distros/debian.py b/cloudinit/distros/debian.py index d68bd5c44e3..ac136dc1697 100644 --- a/cloudinit/distros/debian.py +++ b/cloudinit/distros/debian.py @@ -129,7 +129,7 @@ def _write_hostname(self, hostname, filename): # Try to update the previous one # so lets see if we can read it first. conf = self._read_hostname_conf(filename) - except IOError: + except OSError: create_hostname_file = util.get_cfg_option_bool( self._cfg, "create_hostname_file", True ) @@ -159,7 +159,7 @@ def _read_hostname(self, filename, default=None): try: conf = self._read_hostname_conf(filename) hostname = conf.hostname - except IOError: + except OSError: pass if not hostname: return default diff --git a/cloudinit/distros/freebsd.py b/cloudinit/distros/freebsd.py index fc1c38a424a..f4af448db7b 100644 --- a/cloudinit/distros/freebsd.py +++ b/cloudinit/distros/freebsd.py @@ -219,7 +219,7 @@ def apply_locale(self, locale, out_fn=None): util.logexc(LOG, "Failed to apply locale %s", locale) try: util.copy(self.login_conf_fn_bak, self.login_conf_fn) - except IOError: + except OSError: util.logexc( LOG, "Failed to restore %s backup", self.login_conf_fn ) diff --git a/cloudinit/distros/gentoo.py b/cloudinit/distros/gentoo.py index 5ab41bbd9db..66bb52264c2 100644 --- a/cloudinit/distros/gentoo.py +++ b/cloudinit/distros/gentoo.py @@ -63,7 +63,7 @@ def _write_hostname(self, hostname, filename): # Try to update the previous one # so lets see if we can read it first. 
conf = self._read_hostname_conf(filename) - except IOError: + except OSError: create_hostname_file = util.get_cfg_option_bool( self._cfg, "create_hostname_file", True ) @@ -98,7 +98,7 @@ def _read_hostname(self, filename, default=None): try: conf = self._read_hostname_conf(filename) hostname = conf.hostname - except IOError: + except OSError: pass if not hostname: return default diff --git a/cloudinit/distros/opensuse.py b/cloudinit/distros/opensuse.py index 91620a98e07..3248782d5cc 100644 --- a/cloudinit/distros/opensuse.py +++ b/cloudinit/distros/opensuse.py @@ -169,7 +169,7 @@ def _read_hostname(self, filename, default=None): try: conf = self._read_hostname_conf(filename) hostname = conf.hostname - except IOError: + except OSError: pass if not hostname: return default @@ -242,7 +242,7 @@ def _write_hostname(self, hostname, filename): # Try to update the previous one # so lets see if we can read it first. conf = self._read_hostname_conf(filename) - except IOError: + except OSError: if create_hostname_file: pass else: diff --git a/cloudinit/distros/parsers/hostname.py b/cloudinit/distros/parsers/hostname.py index 7250b6a8eb2..a7ffbab752b 100644 --- a/cloudinit/distros/parsers/hostname.py +++ b/cloudinit/distros/parsers/hostname.py @@ -71,5 +71,5 @@ def _parse(self, contents): entries.append(("hostname", [head, tail])) hostnames_found.add(head) if len(hostnames_found) > 1: - raise IOError("Multiple hostnames (%s) found!" % (hostnames_found)) + raise OSError("Multiple hostnames (%s) found!" 
% (hostnames_found)) return entries diff --git a/cloudinit/distros/parsers/resolv_conf.py b/cloudinit/distros/parsers/resolv_conf.py index 6884c740989..9a84aad3e9a 100644 --- a/cloudinit/distros/parsers/resolv_conf.py +++ b/cloudinit/distros/parsers/resolv_conf.py @@ -148,7 +148,7 @@ def _parse(self, contents): try: (cfg_opt, cfg_values) = head.split(None, 1) except (IndexError, ValueError) as e: - raise IOError( + raise OSError( "Incorrectly formatted resolv.conf line %s" % (i + 1) ) from e if cfg_opt not in [ @@ -158,6 +158,6 @@ def _parse(self, contents): "sortlist", "options", ]: - raise IOError("Unexpected resolv.conf option %s" % (cfg_opt)) + raise OSError("Unexpected resolv.conf option %s" % (cfg_opt)) entries.append(("option", [cfg_opt, cfg_values, tail])) return entries diff --git a/cloudinit/distros/rhel_util.py b/cloudinit/distros/rhel_util.py index 6a1b28163fd..22813484a4f 100644 --- a/cloudinit/distros/rhel_util.py +++ b/cloudinit/distros/rhel_util.py @@ -45,6 +45,6 @@ def read_sysconfig_file(fn): try: contents = util.load_text_file(fn).splitlines() exists = True - except IOError: + except OSError: contents = [] return (exists, SysConf(contents)) diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py index 85d0fe950cf..738e8c6247d 100644 --- a/cloudinit/helpers.py +++ b/cloudinit/helpers.py @@ -70,7 +70,7 @@ def clear(self, name, freq): sem_file = self._get_path(name, freq) try: util.del_file(sem_file) - except (IOError, OSError): + except OSError: util.logexc(LOG, "Failed deleting semaphore %s", sem_file) return False return True @@ -86,7 +86,7 @@ def _acquire(self, name, freq): contents = "%s: %s\n" % (os.getpid(), time()) try: util.write_file(sem_file, contents) - except (IOError, OSError): + except OSError: util.logexc(LOG, "Failed writing semaphore file %s", sem_file) return None return FileLock(sem_file) diff --git a/cloudinit/log/loggers.py b/cloudinit/log/loggers.py index fd83c994c88..37f37c6a53b 100644 --- a/cloudinit/log/loggers.py +++ 
b/cloudinit/log/loggers.py @@ -57,7 +57,7 @@ def flush_loggers(root): return for h in root.handlers: if isinstance(h, (logging.StreamHandler)): - with suppress(IOError): + with suppress(OSError): h.flush() flush_loggers(root.parent) @@ -185,7 +185,7 @@ def setup_backup_logging(): def handleError(self, record): """A closure that emits logs on stderr when other methods fail""" - with suppress(IOError): + with suppress(OSError): fallback_handler.handle(record) fallback_handler.flush() diff --git a/cloudinit/net/bsd.py b/cloudinit/net/bsd.py index 450a6668f4e..f7e682b3518 100644 --- a/cloudinit/net/bsd.py +++ b/cloudinit/net/bsd.py @@ -172,7 +172,7 @@ def _resolve_conf(self, settings): ) ) resolvconf.parse() - except IOError: + except OSError: util.logexc( LOG, "Failed to parse %s, use new empty file", diff --git a/cloudinit/net/cmdline.py b/cloudinit/net/cmdline.py index c2c1d5af45b..b67fe8cf550 100644 --- a/cloudinit/net/cmdline.py +++ b/cloudinit/net/cmdline.py @@ -247,7 +247,7 @@ def _decomp_gzip(blob): try: gzfp = gzip.GzipFile(mode="rb", fileobj=iobuf) return gzfp.read() - except IOError: + except OSError: return blob finally: if gzfp: diff --git a/cloudinit/reporting/handlers.py b/cloudinit/reporting/handlers.py index 1e89b12345b..84679804175 100644 --- a/cloudinit/reporting/handlers.py +++ b/cloudinit/reporting/handlers.py @@ -234,7 +234,7 @@ def _truncate_guest_pool_file(cls, kvp_file): if os.path.getmtime(kvp_file) < boot_time: with open(kvp_file, "w"): pass - except (OSError, IOError) as e: + except OSError as e: LOG.warning("failed to truncate kvp pool file, %s", e) finally: cls._already_truncated_pool_file = True @@ -361,7 +361,7 @@ def write_key(self, key: str, value: str) -> None: try: self._append_kvp_item(data) - except (OSError, IOError): + except OSError: LOG.warning("failed posting kvp=%s value=%s", key, value) def _encode_event(self, event): @@ -408,7 +408,7 @@ def _publish_event_routine(self): event = None try: self._append_kvp_item(encoded_data) 
- except (OSError, IOError) as e: + except OSError as e: LOG.warning("failed posting events to kvp, %s", e) finally: for _ in range(items_from_queue): diff --git a/cloudinit/sources/DataSourceAltCloud.py b/cloudinit/sources/DataSourceAltCloud.py index aeecd15d05f..c6cf0bfad01 100644 --- a/cloudinit/sources/DataSourceAltCloud.py +++ b/cloudinit/sources/DataSourceAltCloud.py @@ -59,10 +59,10 @@ def read_user_data_callback(mount_dir): # First try deltacloud_user_data_file. On failure try user_data_file. try: user_data = util.load_text_file(deltacloud_user_data_file).strip() - except IOError: + except OSError: try: user_data = util.load_text_file(user_data_file).strip() - except IOError: + except OSError: util.logexc(LOG, "Failed accessing user data file.") return None @@ -109,7 +109,7 @@ def get_cloud_type(self): cloud_type = ( util.load_text_file(CLOUD_INFO_FILE).strip().upper() ) - except IOError: + except OSError: util.logexc( LOG, "Unable to access cloud info file at %s.", diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index 10aa2dd5ee5..b5f48f7d8c0 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -1974,7 +1974,7 @@ def _check_freebsd_cdrom(cdrom_dev): with open(cdrom_dev) as fp: fp.read(1024) return True - except IOError: + except OSError: LOG.debug("cdrom (%s) is not configured", cdrom_dev) return False diff --git a/cloudinit/sources/DataSourceBigstep.py b/cloudinit/sources/DataSourceBigstep.py index 01272780abd..043313b54d1 100644 --- a/cloudinit/sources/DataSourceBigstep.py +++ b/cloudinit/sources/DataSourceBigstep.py @@ -42,7 +42,7 @@ def _get_url_from_file(self): ) try: content = util.load_text_file(url_file) - except IOError as e: + except OSError as e: # If the file doesn't exist, then the server probably isn't a # Bigstep instance; otherwise, another problem exists which needs # investigation diff --git a/cloudinit/sources/DataSourceConfigDrive.py 
b/cloudinit/sources/DataSourceConfigDrive.py index 6ddfff429dd..847df706f7b 100644 --- a/cloudinit/sources/DataSourceConfigDrive.py +++ b/cloudinit/sources/DataSourceConfigDrive.py @@ -223,7 +223,7 @@ def get_previous_iid(paths): fname = os.path.join(paths.get_cpath("data"), "instance-id") try: return util.load_text_file(fname).rstrip("\n") - except IOError: + except OSError: return None @@ -249,7 +249,7 @@ def write_injected_files(files): filename = os.sep + filename try: util.write_file(filename, content, mode=0o660) - except IOError: + except OSError: util.logexc(LOG, "Failed writing file: %s", filename) diff --git a/cloudinit/sources/DataSourceIBMCloud.py b/cloudinit/sources/DataSourceIBMCloud.py index 9b3ce8d9f83..3091fa86d0f 100644 --- a/cloudinit/sources/DataSourceIBMCloud.py +++ b/cloudinit/sources/DataSourceIBMCloud.py @@ -370,11 +370,17 @@ def load_file(path: str, translator: Callable[[bytes], Any]) -> Any: try: raw = util.load_binary_file(path) return translator(raw) - except IOError as e: - LOG.debug("Failed reading path '%s': %s", path, e) + except OSError as e: + LOG.warning("Failed reading path '%s': %s", path, e) return None + except UnicodeError as e: + LOG.warning("Failed decoding %s", path) + raise sources.BrokenMetadata(f"Failed to decode {path}: {e}") except Exception as e: - raise sources.BrokenMetadata(f"Failed decoding {path}: {e}") + LOG.warning("Unhandled exception %s", e) + raise sources.BrokenMetadata( + f"Unhandled exception reading {path}: {e}" + ) files = [ # tuples of (results_name, path, translator) diff --git a/cloudinit/sources/DataSourceOpenNebula.py b/cloudinit/sources/DataSourceOpenNebula.py index 5ad3b6bc6c1..33ce0ff3b46 100644 --- a/cloudinit/sources/DataSourceOpenNebula.py +++ b/cloudinit/sources/DataSourceOpenNebula.py @@ -430,7 +430,7 @@ def read_context_disk_dir(source_dir, distro, asuser=None): raise BrokenContextDiskDir( "Error processing context.sh: %s" % (e) ) from e - except IOError as e: + except OSError as e: raise 
NonContextDiskDir( "Error reading context.sh: %s" % (e) ) from e diff --git a/cloudinit/sources/DataSourceOpenStack.py b/cloudinit/sources/DataSourceOpenStack.py index 019a0a122c5..5a9a3e59b47 100644 --- a/cloudinit/sources/DataSourceOpenStack.py +++ b/cloudinit/sources/DataSourceOpenStack.py @@ -212,9 +212,9 @@ def _crawl_metadata(self): raise sources.InvalidMetaDataException( "No active metadata service found" ) - except IOError as e: + except OSError as e: raise sources.InvalidMetaDataException( - "IOError contacting metadata service: {error}".format( + "OSError contacting metadata service: {error}".format( error=str(e) ) ) @@ -230,7 +230,7 @@ def _crawl_metadata(self): ) except openstack.NonReadable as e: raise sources.InvalidMetaDataException(str(e)) - except (openstack.BrokenMetadata, IOError) as e: + except (openstack.BrokenMetadata, OSError) as e: msg = "Broken metadata address {addr}".format( addr=self.metadata_address ) diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py index 38026c69a47..a7493539470 100644 --- a/cloudinit/sources/DataSourceSmartOS.py +++ b/cloudinit/sources/DataSourceSmartOS.py @@ -810,7 +810,7 @@ def write_boot_content( if content and os.path.exists(content_f): util.ensure_dir(os.path.dirname(link)) os.symlink(content_f, link) - except IOError as e: + except OSError as e: util.logexc(LOG, "failed establishing content link: %s", e) diff --git a/cloudinit/sources/DataSourceWSL.py b/cloudinit/sources/DataSourceWSL.py index 5e146ecc177..fcb34781837 100644 --- a/cloudinit/sources/DataSourceWSL.py +++ b/cloudinit/sources/DataSourceWSL.py @@ -63,7 +63,7 @@ def cmd_executable() -> PurePath: mounts = mounted_win_drives() if not mounts: - raise IOError("Windows drives are not mounted.") + raise OSError("Windows drives are not mounted.") # cmd.exe path is being stable for decades. 
candidate = "%s/Windows/System32/cmd.exe" @@ -75,7 +75,7 @@ def cmd_executable() -> PurePath: LOG.debug("Found cmd.exe at <%s>", cmd) return PurePath(cmd) - raise IOError( + raise OSError( "Couldn't find cmd.exe in any mount point: %s" % ", ".join(mounts) ) @@ -84,7 +84,7 @@ def find_home() -> PurePath: """ Finds the user's home directory path as a WSL path. - raises: IOError when no mountpoint with cmd.exe is found + raises: OSError when no mountpoint with cmd.exe is found ProcessExecutionError when either cmd.exe is unable to retrieve the user's home directory """ @@ -334,7 +334,7 @@ def find_user_data_file(self, seed_dir: PurePath) -> PurePath: ef.name.casefold(): ef.path for ef in os.scandir(seed_dir) } if not existing_files: - raise IOError("%s directory is empty" % seed_dir) + raise OSError("%s directory is empty" % seed_dir) folded_names = [ f.casefold() @@ -344,7 +344,7 @@ def find_user_data_file(self, seed_dir: PurePath) -> PurePath: if filename in existing_files.keys(): return PurePath(existing_files[filename]) - raise IOError( + raise OSError( "%s doesn't contain any of the expected user-data files" % seed_dir ) @@ -360,7 +360,7 @@ def check_instance_id(self, sys_cfg) -> bool: metadata = load_instance_metadata(data_dir, instance_name()) return current == metadata.get("instance-id") - except (IOError, ValueError) as err: + except (OSError, ValueError) as err: LOG.warning( "Unable to check_instance_id from metadata file: %s", str(err), @@ -378,7 +378,7 @@ def _get_data(self) -> bool: try: user_home = find_home() - except IOError as e: + except OSError as e: LOG.debug("Unable to detect WSL datasource: %s", e) return False @@ -391,7 +391,7 @@ def _get_data(self) -> bool: self.metadata = load_instance_metadata( seed_dir, self.instance_name ) - except (ValueError, IOError) as err: + except (ValueError, OSError) as err: LOG.error("Unable to load metadata: %s", str(err)) return False @@ -404,7 +404,7 @@ def _get_data(self) -> bool: if user_data is None and 
seed_dir is not None: user_data = ConfigData(self.find_user_data_file(seed_dir)) - except (ValueError, IOError) as err: + except (ValueError, OSError) as err: log = LOG.info if agent_data else LOG.error log( "Unable to load any user-data file in %s: %s", diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index a9fe7236a64..d2634ccd89f 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -1167,7 +1167,7 @@ def convert_vendordata(data, recurse=True): raise ValueError("Unknown data type for vendordata: %s" % type(data)) -class BrokenMetadata(IOError): +class BrokenMetadata(OSError): pass diff --git a/cloudinit/sources/helpers/openstack.py b/cloudinit/sources/helpers/openstack.py index 8df6c6923a1..411764b72c2 100644 --- a/cloudinit/sources/helpers/openstack.py +++ b/cloudinit/sources/helpers/openstack.py @@ -79,7 +79,7 @@ ) -class NonReadable(IOError): +class NonReadable(OSError): pass @@ -362,7 +362,7 @@ def datafiles(version): try: content = self._read_content_path(net_item, decode=True) results["network_config"] = content - except IOError as e: + except OSError as e: raise BrokenMetadata( "Failed to read network configuration: %s" % (e) ) from e @@ -464,7 +464,7 @@ def read_v1(self): path = found[name] try: contents = self._path_read(path) - except IOError as e: + except OSError as e: raise BrokenMetadata("Failed to read: %s" % path) from e try: # Disable not-callable pylint check; pylint isn't able to diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py index e4d17b56bfc..5b05a901455 100644 --- a/cloudinit/ssh_util.py +++ b/cloudinit/ssh_util.py @@ -193,7 +193,7 @@ def parse_authorized_keys(fnames): lines = util.load_text_file(fname).splitlines() for line in lines: contents.append(parser.parse(line)) - except (IOError, OSError): + except OSError: util.logexc(LOG, "Error reading lines from %s", fname) return contents @@ -396,7 +396,7 @@ def check_create_path(username, filename, strictmodes): ) if not 
permissions: return False - except (IOError, OSError) as e: + except OSError as e: util.logexc(LOG, str(e)) return False @@ -419,7 +419,7 @@ def extract_authorized_keys(username, sshd_cfg_file=DEF_SSHD_CFG): key_paths, pw_ent.pw_dir, username ) - except (IOError, OSError): + except OSError: # Give up and use a default key filename auth_key_fns[0] = default_authorizedkeys_file util.logexc( diff --git a/cloudinit/stages.py b/cloudinit/stages.py index 2be50992ffd..75e8429714b 100644 --- a/cloudinit/stages.py +++ b/cloudinit/stages.py @@ -870,7 +870,7 @@ def consume_data(self, frequency=PER_INSTANCE): instance_json = util.load_json( util.load_text_file(json_sensitive_file) ) - except (OSError, IOError) as e: + except OSError as e: LOG.warning( "Skipping write of system_info/features to %s." " Unable to read file: %s", diff --git a/cloudinit/subp.py b/cloudinit/subp.py index eebf009d4f0..2bb8abfe440 100644 --- a/cloudinit/subp.py +++ b/cloudinit/subp.py @@ -67,7 +67,7 @@ def prepend_base_command(base_command, commands): return fixed_commands -class ProcessExecutionError(IOError): +class ProcessExecutionError(OSError): MESSAGE_TMPL = ( "%(description)s\n" "Command: %(cmd)s\n" @@ -123,7 +123,7 @@ def __init__( "stderr": self._ensure_string(self.stderr), "reason": self._ensure_string(self.reason), } - IOError.__init__(self, message) + OSError.__init__(self, message) def _ensure_string(self, text): """ diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py index 16eeb36dce8..ede10025ae7 100644 --- a/cloudinit/url_helper.py +++ b/cloudinit/url_helper.py @@ -226,7 +226,7 @@ def _read_file(path: str, **kwargs) -> "FileResponse": return FileResponse(contents, path) except FileNotFoundError as e: raise UrlError(cause=e, code=NOT_FOUND, headers=None, url=path) from e - except IOError as e: + except OSError as e: raise UrlError(cause=e, code=e.errno, headers=None, url=path) from e @@ -333,9 +333,9 @@ def iter_content( yield from self._response.iter_content(chunk_size, 
decode_unicode) -class UrlError(IOError): +class UrlError(OSError): def __init__(self, cause, code=None, headers=None, url=None): - IOError.__init__(self, str(cause)) + OSError.__init__(self, str(cause)) self.cause = cause self.code = code self.headers = headers diff --git a/cloudinit/user_data.py b/cloudinit/user_data.py index 65e626ef001..66af3284887 100644 --- a/cloudinit/user_data.py +++ b/cloudinit/user_data.py @@ -272,7 +272,7 @@ def _do_include(self, content, append_msg): if include_url not in message: message += " for url: {0}".format(include_url) _handle_error(message, urle) - except IOError as ioe: + except OSError as ioe: error_message = "Fetching from {} resulted in {}".format( include_url, ioe ) diff --git a/cloudinit/util.py b/cloudinit/util.py index 4124e18b8aa..f88130d63a4 100644 --- a/cloudinit/util.py +++ b/cloudinit/util.py @@ -1296,7 +1296,7 @@ def get_fqdn_from_hosts(hostname, filename="/etc/hosts"): if hostname in toks[2:]: fqdn = toks[1] break - except IOError: + except OSError: pass return fqdn @@ -1646,7 +1646,7 @@ def fips_enabled() -> bool: try: contents = load_text_file(fips_proc).strip() return contents == "1" - except (IOError, OSError): + except OSError: # for BSD systems and Linux systems where the proc entry is not # available, we assume FIPS is disabled to retain the old behavior # for now. 
@@ -1995,7 +1995,7 @@ def mounts(): "opts": opts, } LOG.debug("Fetched %s mounts from %s", mounted, method) - except (IOError, OSError): + except OSError: logexc(LOG, "Failed fetching mount points") return mounted @@ -2065,7 +2065,7 @@ def mount_cb( umount = tmpd # This forces it to be unmounted (when set) mountpoint = tmpd break - except (IOError, OSError) as exc: + except OSError as exc: if log_error: LOG.debug( "Failed to mount device: '%s' with type: '%s' " @@ -2479,7 +2479,7 @@ def is_container(): return True if "LIBVIRT_LXC_UUID" in pid1env: return True - except (IOError, OSError): + except OSError: pass # Detect OpenVZ containers @@ -2494,7 +2494,7 @@ def is_container(): (_key, val) = line.strip().split(":", 1) if val != "0": return True - except (IOError, OSError): + except OSError: pass return False @@ -2520,7 +2520,7 @@ def get_proc_env( contents: Union[str, bytes] try: contents = load_binary_file(fn) - except (IOError, OSError): + except OSError: return {} env = {} diff --git a/tests/integration_tests/instances.py b/tests/integration_tests/instances.py index a5ce8f1e14f..5c6685e9677 100644 --- a/tests/integration_tests/instances.py +++ b/tests/integration_tests/instances.py @@ -115,7 +115,7 @@ def read_from_file(self, remote_path) -> str: if result.failed: # TODO: Raise here whatever pycloudlib raises when it has # a consistent error response - raise IOError( + raise OSError( "Failed reading remote file via cat: {}\n" "Return code: {}\n" "Stderr: {}\n" diff --git a/tests/unittests/cmd/test_query.py b/tests/unittests/cmd/test_query.py index 39f5c565abf..a0836e977bb 100644 --- a/tests/unittests/cmd/test_query.py +++ b/tests/unittests/cmd/test_query.py @@ -177,7 +177,7 @@ def test_handle_args_error_when_no_read_permission_instance_data( [ (OSError(errno.EACCES, "Not allowed"),), (OSError(errno.ENOENT, "Not allowed"),), - (IOError,), + (OSError,), ], ) def test_handle_args_error_when_no_read_permission_init_cfg( diff --git 
a/tests/unittests/config/test_cc_seed_random.py b/tests/unittests/config/test_cc_seed_random.py index 15c59523466..1f28179a852 100644 --- a/tests/unittests/config/test_cc_seed_random.py +++ b/tests/unittests/config/test_cc_seed_random.py @@ -85,7 +85,7 @@ def test_append_random_unknown_encoding(self): } } pytest.raises( - IOError, + OSError, cc_seed_random.handle, "test", cfg, diff --git a/tests/unittests/config/test_cc_yum_add_repo.py b/tests/unittests/config/test_cc_yum_add_repo.py index c77262f508f..92d778da0e0 100644 --- a/tests/unittests/config/test_cc_yum_add_repo.py +++ b/tests/unittests/config/test_cc_yum_add_repo.py @@ -45,7 +45,7 @@ def test_bad_config(self): self.patchUtils(self.tmp) cc_yum_add_repo.handle("yum_add_repo", cfg, None, []) self.assertRaises( - IOError, util.load_text_file, "/etc/yum.repos.d/epel_testing.repo" + OSError, util.load_text_file, "/etc/yum.repos.d/epel_testing.repo" ) def test_metalink_config(self): diff --git a/tests/unittests/config/test_cc_zypper_add_repo.py b/tests/unittests/config/test_cc_zypper_add_repo.py index 21f033afd8a..bdf1b688eb8 100644 --- a/tests/unittests/config/test_cc_zypper_add_repo.py +++ b/tests/unittests/config/test_cc_zypper_add_repo.py @@ -28,7 +28,7 @@ def test_bad_repo_config(self): self.patchUtils(self.tmp) cc_zypper_add_repo._write_repos(cfg["repos"], "/etc/zypp/repos.d") self.assertRaises( - IOError, util.load_text_file, "/etc/zypp/repos.d/foo.repo" + OSError, util.load_text_file, "/etc/zypp/repos.d/foo.repo" ) def test_write_repos(self): diff --git a/tests/unittests/config/test_schema.py b/tests/unittests/config/test_schema.py index 53ee11f4c98..49ee0a16fa8 100644 --- a/tests/unittests/config/test_schema.py +++ b/tests/unittests/config/test_schema.py @@ -2691,7 +2691,7 @@ class TestHandleSchemaArgs: "failure, expected_logs", ( ( - IOError("No permissions on /var/lib/cloud/instance"), + OSError("No permissions on /var/lib/cloud/instance"), ["Using default instance-data/user-data paths for non-root"], 
), ( @@ -2711,7 +2711,7 @@ def test_handle_schema_unable_to_read_cfg_paths( caplog, tmpdir, ): - if isinstance(failure, IOError): + if isinstance(failure, OSError): failure.errno = EACCES read_cfg_paths.side_effect = [failure, paths] user_data_fn = tmpdir.join("user-data") diff --git a/tests/unittests/net/test_init.py b/tests/unittests/net/test_init.py index d4be808aec7..bc026b6c8b3 100644 --- a/tests/unittests/net/test_init.py +++ b/tests/unittests/net/test_init.py @@ -61,13 +61,12 @@ def test_read_sys_net_strips_contents_of_sys_path(self): assert content.strip() == net.read_sys_net("dev", "attr") def test_read_sys_net_reraises_oserror(self): - """read_sys_net raises OSError/IOError when file doesn't exist.""" - # Non-specific Exception because versions of python OSError vs IOError. + """read_sys_net raises OSError when file doesn't exist.""" with pytest.raises(Exception, match="No such file or directory"): net.read_sys_net("dev", "attr") def test_read_sys_net_safe_handles_error(self): - """read_sys_net_safe handles OSError/IOError""" + """read_sys_net_safe handles OSError""" assert not net.read_sys_net_safe("dev", "attr") def test_read_sys_net_safe_returns_false_on_noent_failure(self): diff --git a/tests/unittests/sources/test_altcloud.py b/tests/unittests/sources/test_altcloud.py index dba0f6a4d71..0fb58d54fc5 100644 --- a/tests/unittests/sources/test_altcloud.py +++ b/tests/unittests/sources/test_altcloud.py @@ -98,7 +98,7 @@ def test_cloud_info_file_ioerror(self): """Return UNKNOWN when /etc/sysconfig/cloud-info exists but errors.""" self.assertEqual("/etc/sysconfig/cloud-info", dsac.CLOUD_INFO_FILE) dsrc = dsac.DataSourceAltCloud({}, None, self.paths) - # Attempting to read the directory generates IOError + # Attempting to read the directory generates OSError with mock.patch.object(dsac, "CLOUD_INFO_FILE", self.tmp): self.assertEqual("UNKNOWN", dsrc.get_cloud_type()) self.assertIn( @@ -110,7 +110,7 @@ def test_cloud_info_file(self): dsrc = 
dsac.DataSourceAltCloud({}, None, self.paths) cloud_info = self.tmp_path("cloud-info", dir=self.tmp) util.write_file(cloud_info, " OverRiDdeN CloudType ") - # Attempting to read the directory generates IOError + # Attempting to read the directory generates OSError with mock.patch.object(dsac, "CLOUD_INFO_FILE", cloud_info): self.assertEqual("OVERRIDDEN CLOUDTYPE", dsrc.get_cloud_type()) diff --git a/tests/unittests/sources/test_wsl.py b/tests/unittests/sources/test_wsl.py index 2012cd90a3c..f9fdb543af8 100644 --- a/tests/unittests/sources/test_wsl.py +++ b/tests/unittests/sources/test_wsl.py @@ -132,7 +132,7 @@ def test_cmd_not_executable(self, m_mounts, m_os_access): assert None is not cmd.relative_to(GOOD_MOUNTS["C:\\"]["mountpoint"]) m_os_access.return_value = False - with pytest.raises(IOError): + with pytest.raises(OSError): wsl.cmd_executable() @mock.patch("os.access") @@ -146,7 +146,7 @@ def test_cmd_exe_no_win_mounts(self, m_mounts, m_os_access): m_mounts.return_value = deepcopy(GOOD_MOUNTS) m_mounts.return_value.pop("C:\\") m_mounts.return_value.pop("D:\\") - with pytest.raises(IOError): + with pytest.raises(OSError): wsl.cmd_executable() @pytest.mark.parametrize( diff --git a/tests/unittests/test_merging.py b/tests/unittests/test_merging.py index efb71618ce3..f7ab222e05a 100644 --- a/tests/unittests/test_merging.py +++ b/tests/unittests/test_merging.py @@ -110,14 +110,14 @@ def _load_merge_files(self): base_fn = os.path.basename(fn) file_id = re.match(r"source(\d+)\-(\d+)[.]yaml", base_fn) if not file_id: - raise IOError( + raise OSError( "File %s does not have a numeric identifier" % (fn) ) file_id = int(file_id.group(1)) source_ids[file_id].append(fn) expected_fn = os.path.join(merge_root, EXPECTED_PAT % (file_id)) if not os.path.isfile(expected_fn): - raise IOError("No expected file found at %s" % (expected_fn)) + raise OSError("No expected file found at %s" % (expected_fn)) expected_files[file_id] = expected_fn for i in sorted(source_ids.keys()): 
source_file_contents = [] diff --git a/tests/unittests/test_ssh_util.py b/tests/unittests/test_ssh_util.py index cd78f75b739..2fdcd7370fc 100644 --- a/tests/unittests/test_ssh_util.py +++ b/tests/unittests/test_ssh_util.py @@ -378,7 +378,7 @@ class TestParseSSHConfig: "is_file, file_content", [ pytest.param(True, ("",), id="empty-file"), - pytest.param(False, IOError, id="not-a-file"), + pytest.param(False, OSError, id="not-a-file"), ], ) def test_dummy_file(self, m_is_file, m_load_file, is_file, file_content): diff --git a/tests/unittests/test_util.py b/tests/unittests/test_util.py index 960337197d9..ff0b3175cec 100644 --- a/tests/unittests/test_util.py +++ b/tests/unittests/test_util.py @@ -2063,7 +2063,7 @@ def test_fips_enabled_based_on_proc_crypto( def fake_load_file(path): assert path == "/proc/sys/crypto/fips_enabled" if fips_enabled_content is None: - raise IOError("No file exists Bob") + raise OSError("No file exists Bob") return fips_enabled_content load_file.side_effect = fake_load_file @@ -2697,7 +2697,7 @@ def test_pexec_error_indent_text(self): ) def test_pexec_error_type(self): - self.assertIsInstance(subp.ProcessExecutionError(), IOError) + self.assertIsInstance(subp.ProcessExecutionError(), OSError) def test_pexec_error_empty_msgs(self): error = subp.ProcessExecutionError() From 1786f9d47020b0c8fc70ecd6f930f9db75c394df Mon Sep 17 00:00:00 2001 From: Brett Holman Date: Tue, 23 Apr 2024 09:17:54 -0600 Subject: [PATCH 11/11] chore(exception): Replace errno with proper exceptions --- cloudinit/config/cc_mcollective.py | 25 +++++++--------------- cloudinit/config/cc_power_state_change.py | 14 ++++-------- cloudinit/config/cc_resizefs.py | 9 +++----- cloudinit/config/schema.py | 15 ++++++------- cloudinit/handlers/jinja_template.py | 1 - cloudinit/net/__init__.py | 4 +++- cloudinit/sources/DataSourceAltCloud.py | 11 ++++------ cloudinit/sources/DataSourceBigstep.py | 11 +++------- cloudinit/sources/DataSourceNoCloud.py | 6 ++---- 
cloudinit/sources/DataSourceRbxCloud.py | 6 ++---- cloudinit/sources/DataSourceSmartOS.py | 9 ++------ cloudinit/temp_utils.py | 7 ++---- tests/unittests/cmd/test_query.py | 7 +++--- tests/unittests/config/test_cc_growpart.py | 5 +---- tests/unittests/config/test_schema.py | 10 ++++----- tests/unittests/net/test_init.py | 6 ++---- tests/unittests/test_builtin_handlers.py | 3 +-- tests/unittests/test_util.py | 5 ++--- 18 files changed, 53 insertions(+), 101 deletions(-) diff --git a/cloudinit/config/cc_mcollective.py b/cloudinit/config/cc_mcollective.py index b7281018270..df8a99f0a08 100644 --- a/cloudinit/config/cc_mcollective.py +++ b/cloudinit/config/cc_mcollective.py @@ -9,9 +9,9 @@ """Mcollective: Install, configure and start mcollective""" -import errno import io import logging +from contextlib import suppress # Used since this can maintain comments # and doesn't need a top level section @@ -48,15 +48,12 @@ def configure( try: old_contents = util.load_binary_file(server_cfg, quiet=False) mcollective_config = ConfigObj(io.BytesIO(old_contents)) - except OSError as e: - if e.errno != errno.ENOENT: - raise - else: - LOG.debug( - "Did not find file %s (starting with an empty config)", - server_cfg, - ) - mcollective_config = ConfigObj() + except FileNotFoundError: + LOG.debug( + "Did not find file %s (starting with an empty config)", + server_cfg, + ) + mcollective_config = ConfigObj() for cfg_name, cfg in config.items(): if cfg_name == "public-cert": util.write_file(pubcert_file, cfg, mode=0o644) @@ -81,16 +78,10 @@ def configure( # Otherwise just try to convert it to a string mcollective_config[cfg_name] = str(cfg) - try: + with suppress(FileNotFoundError): # We got all our config as wanted we'll copy # the previous server.cfg and overwrite the old with our new one util.copy(server_cfg, "%s.old" % (server_cfg)) - except OSError as e: - if e.errno == errno.ENOENT: - # Doesn't exist to copy... 
- pass - else: - raise # Now we got the whole (new) file, write to disk... contents = io.BytesIO() diff --git a/cloudinit/config/cc_power_state_change.py b/cloudinit/config/cc_power_state_change.py index 028d02481f8..7b421a55c35 100644 --- a/cloudinit/config/cc_power_state_change.py +++ b/cloudinit/config/cc_power_state_change.py @@ -6,7 +6,6 @@ """Power State Change: Change power state""" -import errno import logging import os import re @@ -181,8 +180,6 @@ def fatal(msg): LOG.warning(msg) doexit(EXIT_FAIL) - known_errnos = (errno.ENOENT, errno.ESRCH) - while True: if time.monotonic() > end_time: msg = "timeout reached before %s ended" % pid @@ -193,14 +190,11 @@ def fatal(msg): if cmdline != pidcmdline: msg = "cmdline changed for %s [now: %s]" % (pid, cmdline) break - - except OSError as ioerr: - if ioerr.errno in known_errnos: - msg = "pidfile gone [%d]" % ioerr.errno - else: - fatal("OSError during wait: %s" % ioerr) + except (ProcessLookupError, FileNotFoundError): + msg = "pidfile gone" break - + except OSError as ioerr: + fatal("OSError during wait: %s" % ioerr) except Exception as e: fatal("Unexpected Exception: %s" % e) diff --git a/cloudinit/config/cc_resizefs.py b/cloudinit/config/cc_resizefs.py index 8a983653af7..1defe14d155 100644 --- a/cloudinit/config/cc_resizefs.py +++ b/cloudinit/config/cc_resizefs.py @@ -8,7 +8,6 @@ """Resizefs: cloud-config module which resizes the filesystem""" -import errno import logging import os import re @@ -215,19 +214,17 @@ def maybe_get_writable_device_path(devpath, info): try: statret = os.stat(devpath) - except OSError as exc: - if container and exc.errno == errno.ENOENT: + except FileNotFoundError: + if container: LOG.debug( "Device '%s' did not exist in container. cannot resize: %s", devpath, info, ) - elif exc.errno == errno.ENOENT: + else: LOG.warning( "Device '%s' did not exist. 
cannot resize: %s", devpath, info ) - else: - raise exc return None if not stat.S_ISBLK(statret.st_mode) and not stat.S_ISCHR(statret.st_mode): diff --git a/cloudinit/config/schema.py b/cloudinit/config/schema.py index ce3df59e075..b5ddcff7704 100644 --- a/cloudinit/config/schema.py +++ b/cloudinit/config/schema.py @@ -14,7 +14,6 @@ from contextlib import suppress from copy import deepcopy from enum import Enum -from errno import EACCES from functools import partial from itertools import chain from typing import ( @@ -1771,14 +1770,12 @@ def get_processed_or_fallback_path( try: paths = read_cfg_paths(fetch_existing_datasource="trust") - except OSError as e: - if e.errno == EACCES: - LOG.debug( - "Using default instance-data/user-data paths for non-root user" - ) - paths = read_cfg_paths() - else: - raise + except PermissionError: + LOG.debug( + "Using default instance-data/user-data " + "paths with insufficient permissions" + ) + paths = read_cfg_paths() except DataSourceNotFoundException: paths = read_cfg_paths() LOG.warning( diff --git a/cloudinit/handlers/jinja_template.py b/cloudinit/handlers/jinja_template.py index c9568519b5e..3e7a95215ed 100644 --- a/cloudinit/handlers/jinja_template.py +++ b/cloudinit/handlers/jinja_template.py @@ -4,7 +4,6 @@ import logging import os import re -from errno import EACCES from typing import Optional, Type from cloudinit import handlers, lifecycle diff --git a/cloudinit/net/__init__.py b/cloudinit/net/__init__.py index 9955888d211..4612c2dd780 100644 --- a/cloudinit/net/__init__.py +++ b/cloudinit/net/__init__.py @@ -78,8 +78,10 @@ def read_sys_net_safe(iface, field): iface, field, ) + except (FileNotFoundError, NotADirectoryError): + return False except OSError as e: - if e.errno in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL): + if e.errno == errno.EINVAL: return False raise diff --git a/cloudinit/sources/DataSourceAltCloud.py b/cloudinit/sources/DataSourceAltCloud.py index c6cf0bfad01..c9ade29ddd2 100644 --- 
a/cloudinit/sources/DataSourceAltCloud.py +++ b/cloudinit/sources/DataSourceAltCloud.py @@ -12,7 +12,6 @@ instance on RHEVm and vSphere. """ -import errno import logging import os import os.path @@ -213,9 +212,8 @@ def user_data_rhevm(self): try: return_str = util.mount_cb(floppy_dev, read_user_data_callback) - except OSError as err: - if err.errno != errno.ENOENT: - raise + except FileNotFoundError: + pass except util.MountFailedError: util.logexc( LOG, @@ -253,9 +251,8 @@ def user_data_vsphere(self): if return_str: self.source = cdrom_dev break - except OSError as err: - if err.errno != errno.ENOENT: - raise + except FileNotFoundError: + pass except util.MountFailedError: util.logexc( LOG, diff --git a/cloudinit/sources/DataSourceBigstep.py b/cloudinit/sources/DataSourceBigstep.py index 043313b54d1..e28b0b29dc7 100644 --- a/cloudinit/sources/DataSourceBigstep.py +++ b/cloudinit/sources/DataSourceBigstep.py @@ -4,7 +4,6 @@ # # This file is part of cloud-init. See LICENSE file for license information. -import errno import json import os @@ -41,16 +40,12 @@ def _get_url_from_file(self): self.paths.cloud_dir, "data", "seed", "bigstep", "url" ) try: - content = util.load_text_file(url_file) - except OSError as e: + return util.load_text_file(url_file) + except FileNotFoundError: # If the file doesn't exist, then the server probably isn't a # Bigstep instance; otherwise, another problem exists which needs # investigation - if e.errno == errno.ENOENT: - return None - else: - raise - return content + return None # Used to match classes to dependencies diff --git a/cloudinit/sources/DataSourceNoCloud.py b/cloudinit/sources/DataSourceNoCloud.py index 2d56a61bc2e..48a2c7f16df 100644 --- a/cloudinit/sources/DataSourceNoCloud.py +++ b/cloudinit/sources/DataSourceNoCloud.py @@ -8,7 +8,6 @@ # # This file is part of cloud-init. See LICENSE file for license information. 
-import errno import logging import os from functools import partial @@ -157,9 +156,8 @@ def _pp2d_callback(mp, data): LOG.debug("Using data from %s", dev) found.append(dev) break - except OSError as e: - if e.errno != errno.ENOENT: - raise + except FileNotFoundError: + pass except util.MountFailedError: util.logexc( LOG, "Failed to mount %s when looking for data", dev diff --git a/cloudinit/sources/DataSourceRbxCloud.py b/cloudinit/sources/DataSourceRbxCloud.py index d3679102d23..68424491df2 100644 --- a/cloudinit/sources/DataSourceRbxCloud.py +++ b/cloudinit/sources/DataSourceRbxCloud.py @@ -9,7 +9,6 @@ This file contains code used to gather the user data passed to an instance on rootbox / hyperone cloud platforms """ -import errno import logging import os import os.path @@ -96,9 +95,8 @@ def get_md(): ) if rbx_data: return rbx_data - except OSError as err: - if err.errno != errno.ENOENT: - raise + except FileNotFoundError: + pass except util.MountFailedError: util.logexc( LOG, "Failed to mount %s when looking for user data", device diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py index a7493539470..320cbf31c8c 100644 --- a/cloudinit/sources/DataSourceSmartOS.py +++ b/cloudinit/sources/DataSourceSmartOS.py @@ -22,7 +22,6 @@ import base64 import binascii -import errno import fcntl import json import logging @@ -443,12 +442,8 @@ def as_ascii(): if byte == b"\n": return as_ascii() response.append(byte) - except OSError as exc: - if exc.errno == errno.EAGAIN: - raise JoyentMetadataTimeoutException( - msg % as_ascii() - ) from exc - raise + except BlockingIOError as e: + raise JoyentMetadataTimeoutException(msg % as_ascii()) from e def _write(self, msg): self.fp.write(msg.encode("ascii")) diff --git a/cloudinit/temp_utils.py b/cloudinit/temp_utils.py index faa4aaa287a..339ac2a96b9 100644 --- a/cloudinit/temp_utils.py +++ b/cloudinit/temp_utils.py @@ -1,11 +1,11 @@ # This file is part of cloud-init. 
See LICENSE file for license information. import contextlib -import errno import logging import os import shutil import tempfile +from contextlib import suppress from cloudinit import util @@ -61,11 +61,8 @@ def ExtendedTemporaryFile(**kwargs): # file to unlink has been unlinked elsewhere.. def _unlink_if_exists(path): - try: + with suppress(FileNotFoundError): os.unlink(path) - except OSError as e: - if e.errno != errno.ENOENT: - raise e fh.unlink = _unlink_if_exists diff --git a/tests/unittests/cmd/test_query.py b/tests/unittests/cmd/test_query.py index a0836e977bb..97bee0f3920 100644 --- a/tests/unittests/cmd/test_query.py +++ b/tests/unittests/cmd/test_query.py @@ -1,6 +1,5 @@ # This file is part of cloud-init. See LICENSE file for license information. -import errno import gzip import json import os @@ -167,7 +166,7 @@ def test_handle_args_error_when_no_read_permission_instance_data( varname=None, ) with mock.patch(M_PATH + "util.load_binary_file") as m_load: - m_load.side_effect = OSError(errno.EACCES, "Not allowed") + m_load.side_effect = PermissionError("Not allowed") assert 1 == query.handle_args("anyname", args) msg = "No read permission on '%s'. Try sudo" % noread_fn assert msg in caplog.text @@ -175,8 +174,8 @@ def test_handle_args_error_when_no_read_permission_instance_data( @pytest.mark.parametrize( "exception", [ - (OSError(errno.EACCES, "Not allowed"),), - (OSError(errno.ENOENT, "Not allowed"),), + (PermissionError("Not allowed"),), + (FileNotFoundError("Not allowed"),), (OSError,), ], ) diff --git a/tests/unittests/config/test_cc_growpart.py b/tests/unittests/config/test_cc_growpart.py index 8137ac508a5..cc113cf8cbc 100644 --- a/tests/unittests/config/test_cc_growpart.py +++ b/tests/unittests/config/test_cc_growpart.py @@ -1,7 +1,6 @@ # This file is part of cloud-init. See LICENSE file for license information. 
# pylint: disable=attribute-defined-outside-init -import errno import logging import os import re @@ -361,9 +360,7 @@ def mystat(path): if path in devs: return devstat_ret if path in enoent: - e = OSError("%s: does not exist" % path) - e.errno = errno.ENOENT - raise e + raise FileNotFoundError("%s: does not exist" % path) return real_stat(path) opinfo = self.distro.device_part_info diff --git a/tests/unittests/config/test_schema.py b/tests/unittests/config/test_schema.py index 49ee0a16fa8..393c4b75b02 100644 --- a/tests/unittests/config/test_schema.py +++ b/tests/unittests/config/test_schema.py @@ -11,7 +11,6 @@ import unittest from collections import namedtuple from copy import deepcopy -from errno import EACCES from pathlib import Path from textwrap import dedent from types import ModuleType @@ -2691,8 +2690,11 @@ class TestHandleSchemaArgs: "failure, expected_logs", ( ( - OSError("No permissions on /var/lib/cloud/instance"), - ["Using default instance-data/user-data paths for non-root"], + PermissionError("No permissions on /var/lib/cloud/instance"), + [ + "Using default instance-data/user-data paths with " + "insufficient permissions" + ], ), ( DataSourceNotFoundException("No cached datasource found yet"), @@ -2711,8 +2713,6 @@ def test_handle_schema_unable_to_read_cfg_paths( caplog, tmpdir, ): - if isinstance(failure, OSError): - failure.errno = EACCES read_cfg_paths.side_effect = [failure, paths] user_data_fn = tmpdir.join("user-data") with open(user_data_fn, "w") as f: diff --git a/tests/unittests/net/test_init.py b/tests/unittests/net/test_init.py index bc026b6c8b3..af308e1804e 100644 --- a/tests/unittests/net/test_init.py +++ b/tests/unittests/net/test_init.py @@ -2,7 +2,6 @@ # pylint: disable=attribute-defined-outside-init import copy -import errno import ipaddress import os from pathlib import Path @@ -532,10 +531,9 @@ def setUp(self): def test_get_devicelist_raise_oserror(self): """get_devicelist raise any non-ENOENT OSerror.""" - error = OSError("Can 
not do it") - error.errno = errno.EPERM # Set non-ENOENT + error = PermissionError("Can not do it") self.m_sys_path.side_effect = error - with self.assertRaises(OSError) as context_manager: + with self.assertRaises(PermissionError) as context_manager: net.get_devicelist() exception = context_manager.exception self.assertEqual("Can not do it", str(exception)) diff --git a/tests/unittests/test_builtin_handlers.py b/tests/unittests/test_builtin_handlers.py index 95bdf45146d..676bc3e48dc 100644 --- a/tests/unittests/test_builtin_handlers.py +++ b/tests/unittests/test_builtin_handlers.py @@ -3,7 +3,6 @@ """Tests of the built-in user data handlers.""" import copy -import errno import os import re @@ -189,7 +188,7 @@ def test_jinja_template_handle_errors_on_unreadable_instance_data( h = JinjaTemplatePartHandler(paths, sub_handlers=[script_handler]) with mock.patch(MPATH + "load_text_file") as m_load: with pytest.raises(JinjaLoadError) as context_manager: - m_load.side_effect = OSError(errno.EACCES, "Not allowed") + m_load.side_effect = PermissionError("Not allowed") h.handle_part( data="data", ctype="!" 
+ handlers.CONTENT_START, diff --git a/tests/unittests/test_util.py b/tests/unittests/test_util.py index ff0b3175cec..d27ea8dc32e 100644 --- a/tests/unittests/test_util.py +++ b/tests/unittests/test_util.py @@ -3,7 +3,6 @@ """Tests for cloudinit.util""" import base64 -import errno import io import json import logging @@ -551,7 +550,7 @@ def test_read_conf_with_config_invalid_jinja_syntax( @mock.patch( M_PATH + "read_conf", - side_effect=(OSError(errno.EACCES, "Not allowed"), {"0": "0"}), + side_effect=(PermissionError("Not allowed"), {"0": "0"}), ) def test_read_conf_d_no_permissions( self, m_read_conf, caplog, capsys, tmpdir @@ -586,7 +585,7 @@ def test_read_conf_d_no_permissions( @mock.patch(M_PATH + "mergemanydict") @mock.patch(M_PATH + "read_conf_d", return_value={"my_config": "foo"}) @mock.patch( - M_PATH + "read_conf", side_effect=OSError(errno.EACCES, "Not allowed") + M_PATH + "read_conf", side_effect=PermissionError("Not allowed") ) def test_read_conf_with_confd_no_permissions( self,