diff --git a/cloudinit/atomic_helper.py b/cloudinit/atomic_helper.py
index 8d355b3c2433..91c0dbabfaea 100644
--- a/cloudinit/atomic_helper.py
+++ b/cloudinit/atomic_helper.py
@@ -7,35 +7,50 @@
 import tempfile
 from base64 import b64decode, b64encode
 
-from cloudinit import util
+from cloudinit import log, util
 
 _DEF_PERMS = 0o644
 LOG = logging.getLogger(__name__)
 
 
 def b64d(source):
-    # Base64 decode some data, accepting bytes or unicode/str, and returning
-    # str/unicode if the result is utf-8 compatible, otherwise returning bytes.
-    decoded = b64decode(source)
-    try:
-        return decoded.decode("utf-8")
-    except UnicodeDecodeError:
-        return decoded
+    """base64 decode data
+
+    :param source: a bytes or str to decode
+    :return: decoded str if the result is utf-8 decodable, otherwise bytes
+    """
+    with log.log_time("Base64 decoding"):
+        decoded = b64decode(source)
+        try:
+            return decoded.decode("utf-8")
+        except UnicodeDecodeError:
+            return decoded
 
 
 def b64e(source):
-    # Base64 encode some data, accepting bytes or unicode/str, and returning
-    # str/unicode if the result is utf-8 compatible, otherwise returning bytes.
-    if not isinstance(source, bytes):
-        source = source.encode("utf-8")
-    return b64encode(source).decode("utf-8")
+    """base64 encode data
+
+    :param source: a bytes or str to encode
+    :return: base64 encoded str
+    """
+    with log.log_time("Base64 encoding"):
+        if not isinstance(source, bytes):
+            source = source.encode("utf-8")
+        return b64encode(source).decode("utf-8")
 
 
 def write_file(
     filename, content, mode=_DEF_PERMS, omode="wb", preserve_mode=False
 ):
-    # open filename in mode 'omode', write content, set permissions to 'mode'
+    """open filename in mode omode, write content, set permissions to mode"""
+
+    with log.log_time(f"Writing {filename}"):
+        return _write_file(filename, content, mode, omode, preserve_mode)
+
+def _write_file(
+    filename, content, mode=_DEF_PERMS, omode="wb", preserve_mode=False
+):
     if preserve_mode:
         try:
             file_stat = os.stat(filename)
@@ -77,13 +92,14 @@ def json_serialize_default(_obj):
 
 def json_dumps(data):
     """Return data in nicely formatted json."""
-    return json.dumps(
-        data,
-        indent=1,
-        sort_keys=True,
-        separators=(",", ": "),
-        default=json_serialize_default,
-    )
+    with log.log_time("Dumping json"):
+        return json.dumps(
+            data,
+            indent=1,
+            sort_keys=True,
+            separators=(",", ": "),
+            default=json_serialize_default,
+        )
 
 
 def write_json(filename, data, mode=_DEF_PERMS):
diff --git a/cloudinit/cmd/main.py b/cloudinit/cmd/main.py
index e5964883e7e3..466c897e74f9 100644
--- a/cloudinit/cmd/main.py
+++ b/cloudinit/cmd/main.py
@@ -134,12 +134,13 @@ def close_stdin(logger: Callable[[str], None] = LOG.debug):
 
     logger: a function for logging messages
     """
-    if not os.isatty(sys.stdin.fileno()):
-        logger("Closing stdin")
-        with open(os.devnull) as fp:
-            os.dup2(fp.fileno(), sys.stdin.fileno())
-    else:
-        logger("Not closing stdin, stdin is a tty.")
+    with log.log_time("Closing stdin"):
+        if not os.isatty(sys.stdin.fileno()):
+            logger("Closing stdin")
+            with open(os.devnull) as fp:
+                os.dup2(fp.fileno(), sys.stdin.fileno())
+        else:
+            logger("Not closing stdin, stdin is a tty.")
 
 
 def extract_fns(args):
@@ -310,7 +311,7 @@ def purge_cache_on_python_version_change(init):
         init.paths.get_cpath("data"), "python-version"
     )
     if os.path.exists(python_version_path):
-        cached_python_version = open(python_version_path).read()
+        cached_python_version = util.load_text_file(python_version_path)
         # The Python version has changed out from under us, anything that was
         # pickled previously is likely useless due to API changes.
         if cached_python_version != current_python_version:
diff --git a/cloudinit/config/cc_growpart.py b/cloudinit/config/cc_growpart.py
index 6eee11b7c481..1cedb7bbc269 100644
--- a/cloudinit/config/cc_growpart.py
+++ b/cloudinit/config/cc_growpart.py
@@ -20,7 +20,7 @@
 from pathlib import Path
 from typing import Optional, Tuple
 
-from cloudinit import lifecycle, subp, temp_utils, util
+from cloudinit import lifecycle, log, subp, temp_utils, util
 from cloudinit.cloud import Cloud
 from cloudinit.config import Config
 from cloudinit.config.schema import MetaSchema
@@ -318,7 +318,7 @@ def resize_encrypted(blockdev, partition) -> Tuple[str, str]:
     if not KEYDATA_PATH.exists():
         return (RESIZE.SKIPPED, "No encryption keyfile found")
     try:
-        with KEYDATA_PATH.open() as f:
+        with log.log_time(f"Reading {KEYDATA_PATH}"), KEYDATA_PATH.open() as f:
             keydata = json.load(f)
         key = keydata["key"]
         decoded_key = base64.b64decode(key)
diff --git a/cloudinit/config/modules.py b/cloudinit/config/modules.py
index a82e1ff8e8e3..c55bfbc55f1a 100644
--- a/cloudinit/config/modules.py
+++ b/cloudinit/config/modules.py
@@ -12,7 +12,7 @@
 from types import ModuleType
 from typing import Dict, List, NamedTuple, Optional
 
-from cloudinit import config, importer, lifecycle, type_utils, util
+from cloudinit import config, importer, lifecycle, log, type_utils, util
 from cloudinit.distros import ALL_DISTROS
 from cloudinit.helpers import ConfigMerger
 from cloudinit.reporting.events import ReportEventStack
@@ -283,11 +283,15 @@ def _run_modules(self, mostly_mods: List[ModuleDetails]):
                         deprecated_version="23.2",
                     )
                     func_args.update({"log": LOG})
-                ran, _r = cc.run(
-                    run_name, mod.handle, func_args, freq=freq
-                )
+
+                timer = log.log_time("and", skip_log=True)
+                with timer:
+                    ran, _r = cc.run(
+                        run_name, mod.handle, func_args, freq=freq
+                    )
                 if ran:
-                    myrep.message = "%s ran successfully" % run_name
+                    time = f" {timer.output}" if timer.output else ""
+                    myrep.message = f"{run_name} ran successfully{time}"
                 else:
                     myrep.message = "%s previously ran" % run_name
diff --git a/cloudinit/config/schema.py b/cloudinit/config/schema.py
index 27caa0bd3065..2530257be4e6 100644
--- a/cloudinit/config/schema.py
+++ b/cloudinit/config/schema.py
@@ -31,7 +31,7 @@
 
 import yaml
 
-from cloudinit import features, importer, lifecycle, safeyaml
+from cloudinit import features, importer, lifecycle, log, safeyaml
 from cloudinit.cmd.devel import read_cfg_paths
 from cloudinit.handlers import INCLUSION_TYPES_MAP, type_from_starts_with
 from cloudinit.helpers import Paths
@@ -732,6 +732,27 @@
     @raises: ValueError on invalid schema_type not in CLOUD_CONFIG or
         NETWORK_CONFIG_V1 or NETWORK_CONFIG_V2
     """
+    with log.log_time(f"Validating {schema_type}"):
+        return _validate_cloudconfig_schema(
+            config,
+            schema,
+            schema_type,
+            strict,
+            strict_metaschema,
+            log_details,
+            log_deprecations,
+        )
+
+
+def _validate_cloudconfig_schema(
+    config: dict,
+    schema: Optional[dict] = None,
+    schema_type: SchemaType = SchemaType.CLOUD_CONFIG,
+    strict: bool = False,
+    strict_metaschema: bool = False,
+    log_details: bool = True,
+    log_deprecations: bool = False,
+) -> bool:
     from cloudinit.net.netplan import available as netplan_available
 
     if schema_type == SchemaType.NETWORK_CONFIG:
diff --git a/cloudinit/dmi.py b/cloudinit/dmi.py
index 453e8510ab47..9de7ab2a4331 100644
--- a/cloudinit/dmi.py
+++ b/cloudinit/dmi.py
@@ -5,7 +5,7 @@
 from collections import namedtuple
 from typing import Optional
 
-from cloudinit import subp
+from cloudinit import log, subp
 from cloudinit.util import (
     is_container,
     is_DragonFlyBSD,
@@ -91,7 +91,9 @@ def _read_dmi_syspath(key: str) -> Optional[str]:
         return None
 
     try:
-        with open(dmi_key_path, "rb") as fp:
+        with log.log_time(f"Reading {dmi_key_path}"), open(
+            dmi_key_path, "rb"
+        ) as fp:
             key_data = fp.read()
     except PermissionError:
         LOG.debug("Could not read %s", dmi_key_path)
diff --git a/cloudinit/log/log_util.py b/cloudinit/log/log_util.py
index 0d01f8eb20a4..d87a13b34a10 100644
--- a/cloudinit/log/log_util.py
+++ b/cloudinit/log/log_util.py
@@ -3,6 +3,8 @@
 import sys
 import time
 
+LOG = logging.getLogger(__name__)
+
 
 def logexc(
     log, msg, *args, log_level: int = logging.WARNING, exc_info=True
@@ -57,7 +59,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
 
 
 def write_to_console(conpath, text):
-    with open(conpath, "w") as wfh:
+    with log_time(f"Writing to {conpath}"), open(conpath, "w") as wfh:
         wfh.write(text)
         wfh.flush()
diff --git a/cloudinit/net/eni.py b/cloudinit/net/eni.py
index 59dc395f9229..44ac80af05c7 100644
--- a/cloudinit/net/eni.py
+++ b/cloudinit/net/eni.py
@@ -7,7 +7,7 @@
 import re
 from typing import Optional
 
-from cloudinit import subp, util
+from cloudinit import log, subp, util
 from cloudinit.net import ParserError, renderer, subnet_is_ipv6
 from cloudinit.net.network_state import NetworkState
 
@@ -208,8 +208,7 @@ def _parse_deb_config_data(ifaces, contents, src_dir, src_path):
                 )
             ]
             for entry in dir_contents:
-                with open(entry, "r") as fp:
-                    src_data = fp.read().strip()
+                src_data = util.load_text_file(entry).strip()
                 abs_entry = os.path.abspath(entry)
                 _parse_deb_config_data(
                     ifaces, src_data, os.path.dirname(abs_entry), abs_entry
@@ -310,9 +309,10 @@ def _parse_deb_config_data(ifaces, contents, src_dir, src_path):
 
 def convert_eni_data(eni_data):
     # return a network config representation of what is in eni_data
-    ifaces = {}
-    _parse_deb_config_data(ifaces, eni_data, src_dir=None, src_path=None)
-    return _ifaces_to_net_config_data(ifaces)
+    with log.log_time("Converting eni data"):
+        ifaces = {}
+        _parse_deb_config_data(ifaces, eni_data, src_dir=None, src_path=None)
+        return _ifaces_to_net_config_data(ifaces)
 
 
 def _ifaces_to_net_config_data(ifaces):
diff --git a/cloudinit/reporting/events.py b/cloudinit/reporting/events.py
index fac77435be06..05d6d840e45b 100644
--- a/cloudinit/reporting/events.py
+++ b/cloudinit/reporting/events.py
@@ -9,10 +9,12 @@
 They can be published to registered handlers with report_event.
 """
 import base64
+import logging
 import os.path
 import time
 from typing import List
 
+from cloudinit import log
 from cloudinit.reporting import (
     available_handlers,
     instantiated_handler_registry,
@@ -23,6 +25,7 @@
 START_EVENT_TYPE = "start"
 
 DEFAULT_EVENT_ORIGIN = "cloudinit"
+LOG = logging.getLogger(__name__)
 
 
 class _nameset(set):
@@ -301,7 +304,7 @@ def _collect_file_info(files):
         if not os.path.isfile(fname):
            content = None
         else:
-            with open(fname, "rb") as fp:
+            with log.log_time(f"Reading {fname}"), open(fname, "rb") as fp:
                 content = base64.b64encode(fp.read()).decode()
         ret.append({"path": fname, "content": content, "encoding": "base64"})
     return ret
diff --git a/cloudinit/reporting/handlers.py b/cloudinit/reporting/handlers.py
index 80258f3e932f..3ea80f827de2 100644
--- a/cloudinit/reporting/handlers.py
+++ b/cloudinit/reporting/handlers.py
@@ -14,7 +14,7 @@
 from threading import Event
 from typing import Union
 
-from cloudinit import url_helper, util
+from cloudinit import log, url_helper, util
 from cloudinit.registry import DictRegistry
 
 LOG = logging.getLogger(__name__)
@@ -310,7 +310,9 @@ def _decode_kvp_item(self, record_data):
         return {"key": k, "value": v}
 
     def _append_kvp_item(self, record_data):
-        with open(self._kvp_file_path, "ab") as f:
+        with log.log_time(f"Appending {self._kvp_file_path}"), open(
+            self._kvp_file_path, "ab"
+        ) as f:
             fcntl.flock(f, fcntl.LOCK_EX)
             for data in record_data:
                 f.write(data)
diff --git a/cloudinit/safeyaml.py b/cloudinit/safeyaml.py
index a4328068f66a..b448c40abda7 100644
--- a/cloudinit/safeyaml.py
+++ b/cloudinit/safeyaml.py
@@ -4,12 +4,17 @@
 #
 # This file is part of cloud-init. See LICENSE file for license information.
 
+import logging
 from collections import defaultdict
 from itertools import chain
 from typing import Any, Dict, List, Tuple
 
 import yaml
 
+from cloudinit import log
+
+LOG = logging.getLogger(__name__)
+
 # SchemaPathMarks track the path to an element within a loaded YAML file.
 # The start_mark and end_mark contain the row and column indicators
@@ -250,23 +255,25 @@ def load_with_marks(blob) -> Tuple[Any, Dict[str, int]]:
     """
-    result = yaml.load(blob, Loader=_CustomSafeLoaderWithMarks)
-    if not isinstance(result, dict):
-        schemamarks = {}
-    else:
-        schemamarks = result.pop("schemamarks")
-    return result, schemamarks
+    with log.log_time("Loading yaml"):
+        result = yaml.load(blob, Loader=_CustomSafeLoaderWithMarks)
+        if not isinstance(result, dict):
+            schemamarks = {}
+        else:
+            schemamarks = result.pop("schemamarks")
+        return result, schemamarks
 
 
 def dumps(obj, explicit_start=True, explicit_end=True, noalias=False):
     """Return data in nicely formatted yaml."""
-    return yaml.dump(
-        obj,
-        line_break="\n",
-        indent=4,
-        explicit_start=explicit_start,
-        explicit_end=explicit_end,
-        default_flow_style=False,
-        Dumper=(NoAliasSafeDumper if noalias else yaml.dumper.SafeDumper),
-    )
+    with log.log_time("Dumping yaml"):
+        return yaml.dump(
+            obj,
+            line_break="\n",
+            indent=4,
+            explicit_start=explicit_start,
+            explicit_end=explicit_end,
+            default_flow_style=False,
+            Dumper=(NoAliasSafeDumper if noalias else yaml.dumper.SafeDumper),
+        )
diff --git a/cloudinit/socket.py b/cloudinit/socket.py
index 7ef19f43798a..cdcb0c2adb87 100644
--- a/cloudinit/socket.py
+++ b/cloudinit/socket.py
@@ -4,9 +4,9 @@
 import os
 import socket
 import sys
-import time
 from contextlib import suppress
 
+from cloudinit import log
 from cloudinit.settings import DEFAULT_RUN_DIR
 
 LOG = logging.getLogger(__name__)
@@ -109,14 +109,14 @@ def __enter__(self):
             "STATUS=Waiting on external services to "
             f"complete before starting the {self.stage} stage."
         )
-        start_time = time.monotonic()
         # block until init system sends us data
        # the first value returned contains a message from the init system
         #     (should be "start")
         # the second value contains the path to a unix socket on which to
         #     reply, which is expected to be /path/to/{self.stage}-return.sock
         sock = self.sockets[self.stage]
-        chunk, self.remote = sock.recvfrom(5)
+        with log.log_time(f"Waiting to start stage {self.stage}"):
+            chunk, self.remote = sock.recvfrom(5)
 
         if b"start" != chunk:
             # The protocol expects to receive a command "start"
@@ -130,10 +130,7 @@ def __enter__(self):
             self.__exit__(None, None, None)
             raise ValueError(f"Unexpected path to unix socket: {self.remote}")
 
-        total = time.monotonic() - start_time
-        time_msg = f"took {total: .3f}s to " if total > 0.01 else ""
         sd_notify(f"STATUS=Running ({self.stage} stage)")
-        LOG.debug("sync(%s): synchronization %scomplete", self.stage, time_msg)
         return self
 
     def __exit__(self, exc_type, exc_val, exc_tb):
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index 149bb7545d4e..9e6e3707a8dc 100644
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -19,7 +19,7 @@
 
 import requests
 
-from cloudinit import net, sources, ssh_util, subp, util
+from cloudinit import log, net, sources, ssh_util, subp, util
 from cloudinit.event import EventScope, EventType
 from cloudinit.net import device_driver
 from cloudinit.net.dhcp import (
@@ -1995,7 +1995,7 @@ def load_azure_ds_dir(source_dir):
     if not os.path.isfile(ovf_file):
         raise NonAzureDataSource("No ovf-env file found")
 
-    with open(ovf_file, "rb") as fp:
+    with log.log_time("Reading ovf-env.xml"), open(ovf_file, "rb") as fp:
         contents = fp.read()
 
     md, ud, cfg = read_azure_ovf(contents)
diff --git a/cloudinit/sources/DataSourceVMware.py b/cloudinit/sources/DataSourceVMware.py
index 888060c9bd99..b4fdd24403e8 100644
--- a/cloudinit/sources/DataSourceVMware.py
+++ b/cloudinit/sources/DataSourceVMware.py
@@ -266,9 +266,10 @@ def get_instance_id(self):
         # read the file /sys/class/dmi/id/product_uuid for the instance ID.
         if self.metadata and "instance-id" in self.metadata:
             return self.metadata["instance-id"]
-        with open(PRODUCT_UUID_FILE_PATH, "r") as id_file:
-            self.metadata["instance-id"] = str(id_file.read()).rstrip().lower()
-            return self.metadata["instance-id"]
+        self.metadata["instance-id"] = (
+            util.load_text_file(PRODUCT_UUID_FILE_PATH).rstrip().lower()
+        )
+        return self.metadata["instance-id"]
 
     def check_if_fallback_is_allowed(self):
         if (
diff --git a/cloudinit/sources/helpers/azure.py b/cloudinit/sources/helpers/azure.py
index 1493afc5195e..ae6eea1b4fb8 100644
--- a/cloudinit/sources/helpers/azure.py
+++ b/cloudinit/sources/helpers/azure.py
@@ -457,7 +457,9 @@ def generate_certificate(self):
             ]
         )
         certificate = ""
-        for line in open(self.certificate_names["certificate"]):
+        for line in util.load_text_file(
+            self.certificate_names["certificate"]
+        ).split("\n"):
             if "CERTIFICATE" not in line:
                 certificate += line.rstrip()
         self.certificate = certificate
diff --git a/cloudinit/sources/helpers/vmware/imc/guestcust_util.py b/cloudinit/sources/helpers/vmware/imc/guestcust_util.py
index b7fa5e4a85e0..4a78cbc17d33 100644
--- a/cloudinit/sources/helpers/vmware/imc/guestcust_util.py
+++ b/cloudinit/sources/helpers/vmware/imc/guestcust_util.py
@@ -13,7 +13,7 @@
 
 import yaml
 
-from cloudinit import subp, util
+from cloudinit import log, subp, util
 
 from .config import Config
 from .config_custom_script import PostCustomScript, PreCustomScript
@@ -87,7 +87,9 @@ def get_nics_to_enable(nicsfilepath):
     if not os.path.exists(nicsfilepath):
         return None
 
-    with open(nicsfilepath, "r") as fp:
+    with log.log_time(f"Reading {nicsfilepath}"), open(
+        nicsfilepath, "r"
+    ) as fp:
         nics = fp.read(NICS_SIZE)
 
     return nics
diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py
index f7661929e49f..4ecbd0a587f9 100644
--- a/cloudinit/ssh_util.py
+++ b/cloudinit/ssh_util.py
@@ -546,10 +546,9 @@ def parse_ssh_config_map(fname):
 
 
 def _includes_dconf(fname: str) -> bool:
     if not os.path.isfile(fname):
         return False
-    with open(fname, "r") as f:
-        for line in f:
-            if line.startswith(f"Include {fname}.d/*.conf"):
-                return True
+    for line in util.load_text_file(fname).split("\n"):
+        if line.startswith(f"Include {fname}.d/*.conf"):
+            return True
     return False
diff --git a/cloudinit/subp.py b/cloudinit/subp.py
index 44307343937b..199970ac52e9 100644
--- a/cloudinit/subp.py
+++ b/cloudinit/subp.py
@@ -5,11 +5,12 @@
 import logging
 import os
 import subprocess
-import time
 from errno import ENOEXEC
 from io import TextIOWrapper
 from typing import List, Optional, Union
 
+from cloudinit import log
+
 LOG = logging.getLogger(__name__)
 
 SubpResult = collections.namedtuple("SubpResult", ["stdout", "stderr"])
@@ -256,24 +257,19 @@ def subp(
             x if isinstance(x, bytes) else x.encode("utf-8") for x in args
         ]
     try:
-        before = time.monotonic()
-        sp = subprocess.Popen(
-            bytes_args,
-            stdout=stdout,
-            stderr=stderr,
-            stdin=stdin,
-            env=env,
-            shell=shell,
-            cwd=cwd,
-        )
-        out, err = sp.communicate(data, timeout=timeout)
-        total = time.monotonic() - before
-        if total > 0.1:
-            LOG.debug(
-                "%s took %.3ss to run",
-                logstring if logstring else args,
-                total,
+        with log.log_time(
+            "Running {}".format(logstring if logstring else args)
+        ):
+            sp = subprocess.Popen(
+                bytes_args,
+                stdout=stdout,
+                stderr=stderr,
+                stdin=stdin,
+                env=env,
+                shell=shell,
+                cwd=cwd,
             )
+            out, err = sp.communicate(data, timeout=timeout)
     except OSError as e:
         raise ProcessExecutionError(
             cmd=args,
diff --git a/cloudinit/templater.py b/cloudinit/templater.py
index 8d83345fbac5..4b70a78a9246 100644
--- a/cloudinit/templater.py
+++ b/cloudinit/templater.py
@@ -20,6 +20,7 @@
 
 from jinja2 import TemplateSyntaxError
 
+from cloudinit import log
 from cloudinit import type_utils as tu
 from cloudinit import util
 from cloudinit.atomic_helper import write_file
@@ -138,7 +139,8 @@ def replacer(match):
             )
         return str(selected_params[key])
 
-    return BASIC_MATCHER.sub(replacer, content)
+    with log.log_time("Rendering basic template"):
+        return BASIC_MATCHER.sub(replacer, content)
 
 
 def detect_template(text):
@@ -146,15 +148,16 @@ def jinja_render(content, params):
         # keep_trailing_newline is in jinja2 2.7+, not 2.6
         add = "\n" if content.endswith("\n") else ""
         try:
-            return (
-                JTemplate(
-                    content,
-                    undefined=UndefinedJinjaVariable,
-                    trim_blocks=True,
-                    extensions=["jinja2.ext.do"],
-                ).render(**params)
-                + add
-            )
+            with log.log_time("Rendering jinja2"):
+                return (
+                    JTemplate(
+                        content,
+                        undefined=UndefinedJinjaVariable,
+                        trim_blocks=True,
+                        extensions=["jinja2.ext.do"],
+                    ).render(**params)
+                    + add
+                )
         except TemplateSyntaxError as template_syntax_error:
             template_syntax_error.lineno += 1
             raise JinjaSyntaxParsingException(
@@ -212,8 +215,7 @@ def render_string(content, params):
 
 
 def render_template(variant, template, output, is_yaml, prefix=None):
-    with open(template, "r") as fh:
-        contents = fh.read()
+    contents = util.load_text_file(template)
     tpl_params = {"variant": variant, "prefix": prefix}
     contents = (render_string(contents, tpl_params)).rstrip() + "\n"
     if is_yaml:
diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py
index f404d1130bdb..a674fe8a7956 100644
--- a/cloudinit/url_helper.py
+++ b/cloudinit/url_helper.py
@@ -28,7 +28,7 @@
 import requests
 from requests import exceptions
 
-from cloudinit import util, version
+from cloudinit import log, util, version
 
 LOG = logging.getLogger(__name__)
 
@@ -894,7 +894,9 @@ def __init__(
 
     def read_skew_file(self):
         if self.skew_data_file and os.path.isfile(self.skew_data_file):
-            with open(self.skew_data_file, mode="r") as fp:
+            with log.log_time(f"Reading {self.skew_data_file}"), open(
+                self.skew_data_file, mode="r"
+            ) as fp:
                 return json.load(fp)
         return None
 
@@ -906,7 +908,9 @@ def update_skew_file(self, host, value):
         if cur is None:
             cur = {}
         cur[host] = value
-        with open(self.skew_data_file, mode="w") as fp:
+        with log.log_time(f"Writing {self.skew_data_file}"), open(
+            self.skew_data_file, mode="w"
+        ) as fp:
             fp.write(json.dumps(cur))
 
     def exception_cb(self, msg, exception):
diff --git a/cloudinit/util.py b/cloudinit/util.py
index 3846e01e3021..eccd54f2db53 100644
--- a/cloudinit/util.py
+++ b/cloudinit/util.py
@@ -62,6 +62,7 @@
 from cloudinit import (
     features,
     importer,
+    log,
     mergers,
     net,
     settings,
@@ -154,12 +155,13 @@ def maybe_b64decode(data: bytes) -> bytes:
 
     @param data: data as bytes. TypeError is raised if not bytes.
     """
-    if not isinstance(data, bytes):
-        raise TypeError("data is '%s', expected bytes" % type(data))
-    try:
-        return b64decode(data, validate=True)
-    except binascii.Error:
-        return data
+    with log.log_time("Base64 decoding"):
+        if not isinstance(data, bytes):
+            raise TypeError("data is '%s', expected bytes" % type(data))
+        try:
+            return b64decode(data, validate=True)
+        except binascii.Error:
+            return data
 
 
 def fully_decoded_payload(part):
@@ -1558,16 +1560,18 @@ def load_binary_file(
     read_cb: Optional[Callable[[int], None]] = None,
     quiet: bool = False,
 ) -> bytes:
-    LOG.debug("Reading from %s (quiet=%s)", fname, quiet)
-    with io.BytesIO() as ofh:
-        try:
-            with open(fname, "rb") as ifh:
-                pipe_in_out(ifh, ofh, chunk_cb=read_cb)
-        except FileNotFoundError:
-            if not quiet:
-                raise
-        contents = ofh.getvalue()
-    LOG.debug("Read %s bytes from %s", len(contents), fname)
+    timer = log.log_time("", skip_log=True)
+    with timer:
+        LOG.debug("Reading from %s (quiet=%s)", fname, quiet)
+        with io.BytesIO() as ofh:
+            try:
+                with open(fname, "rb") as ifh:
+                    pipe_in_out(ifh, ofh, chunk_cb=read_cb)
+            except FileNotFoundError:
+                if not quiet:
+                    raise
+            contents = ofh.getvalue()
+    LOG.debug("Read %s bytes from %s%s", len(contents), fname, timer.output)
     return contents
@@ -1849,14 +1853,15 @@ def ensure_dirs(dirlist, mode=0o755):
 
 
 def load_json(text, root_types=(dict,)):
-    decoded = json.loads(decode_binary(text))
-    if not isinstance(decoded, tuple(root_types)):
-        expected_types = ", ".join([str(t) for t in root_types])
-        raise TypeError(
-            "(%s) root types expected, got %s instead"
-            % (expected_types, type(decoded))
-        )
-    return decoded
+    with log.log_time("Loading json"):
+        decoded = json.loads(decode_binary(text))
+        if not isinstance(decoded, tuple(root_types)):
+            expected_types = ", ".join([str(t) for t in root_types])
+            raise TypeError(
+                "(%s) root types expected, got %s instead"
+                % (expected_types, type(decoded))
+            )
+        return decoded
 
 
 def get_non_exist_parent_dir(path):
@@ -2242,9 +2247,9 @@ def write_file(
     content,
     mode=0o644,
     omode="wb",
-    preserve_mode=False,
+    preserve_mode: bool = False,
     *,
-    ensure_dir_exists=True,
+    ensure_dir_exists: bool = True,
     user=None,
     group=None,
 ):
@@ -2265,37 +2270,38 @@ def write_file(
 
     @param group: The group to set on the file.
     """
-    if preserve_mode:
-        try:
-            mode = get_permissions(filename)
-        except OSError:
-            pass
+    with log.log_time(f"Writing file {filename}"):
+        if preserve_mode:
+            try:
+                mode = get_permissions(filename)
+            except OSError:
+                pass
 
-    if ensure_dir_exists:
-        ensure_dir(os.path.dirname(filename), user=user, group=group)
-    if "b" in omode.lower():
-        content = encode_text(content)
-        write_type = "bytes"
-    else:
-        content = decode_binary(content)
-        write_type = "characters"
-    try:
-        mode_r = "%o" % mode
-    except TypeError:
-        mode_r = "%r" % mode
-    LOG.debug(
-        "Writing to %s - %s: [%s] %s %s",
-        filename,
-        omode,
-        mode_r,
-        len(content),
-        write_type,
-    )
-    with SeLinuxGuard(path=filename):
-        with open(filename, omode) as fh:
-            fh.write(content)
-            fh.flush()
-    chmod(filename, mode)
+        if ensure_dir_exists:
+            ensure_dir(os.path.dirname(filename), user=user, group=group)
+        if "b" in omode.lower():
+            content = encode_text(content)
+            write_type = "bytes"
+        else:
+            content = decode_binary(content)
+            write_type = "characters"
+        try:
+            mode_r = "%o" % mode
+        except TypeError:
+            mode_r = "%r" % mode
+        LOG.debug(
+            "Writing to %s - %s: [%s] %s %s",
+            filename,
+            omode,
+            mode_r,
+            len(content),
+            write_type,
+        )
+        with SeLinuxGuard(path=filename):
+            with open(filename, omode) as fh:
+                fh.write(content)
+                fh.flush()
+        chmod(filename, mode)
 
 
 def delete_dir_contents(dirname):
diff --git a/tests/unittests/config/test_schema.py b/tests/unittests/config/test_schema.py
index 590e0899d54e..df5692fa4f70 100644
--- a/tests/unittests/config/test_schema.py
+++ b/tests/unittests/config/test_schema.py
@@ -20,7 +20,7 @@
 import pytest
 import yaml
 
-from cloudinit import features
+from cloudinit import features, log
 from cloudinit.config.schema import (
     VERSIONED_USERDATA_SCHEMA_FILE,
     MetaSchema,
@@ -815,7 +815,9 @@ def test_validateconfig_strict_metaschema_do_not_raise_exception(
     def test_validateconfig_logs_deprecations(
        self, schema, config, expected_msg, log_deprecations, caplog
     ):
-        with mock.patch.object(features, "DEPRECATION_INFO_BOUNDARY", "devel"):
+        with mock.patch.object(
+            features, "DEPRECATION_INFO_BOUNDARY", "devel"
+        ), mock.patch.object(log, "log_time"):
             validate_cloudconfig_schema(
                 config,
                 schema=schema,