
Restructure log location

Merged: Ashwin Kumar Karnad requested to merge restructure-log-location into main
@@ -82,7 +82,7 @@ def create_log_file_names(
toolchain : str
toolchain name (only for build log file)
returns : tuple
tuple containing the strings of installer and build log file names
tuple containing the installer_log_file and build_log_file names
"""
installer_log_file = config_vars["installer_log_template"].substitute(
mpsd_release=mpsd_release,
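The file names are produced by substituting placeholders into templates stored in config_vars. A minimal sketch of that mechanism, assuming a string.Template and using an invented template string (the real one lives in config_vars["installer_log_template"]):

from string import Template

# invented template string, for illustration only
installer_log_template = Template("${mpsd_release}_${microarch}_${action}_installer.log")
name = installer_log_template.substitute(
    mpsd_release="dev-23a", microarch="haswell", action="install"
)
print(name)  # dev-23a_haswell_install_installer.log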
@@ -156,7 +156,7 @@ def set_up_logging(loglevel="warning", filename=None):
This function sets up the logging configuration for the script.
It configures the log level, log format, and log handlers
for both file and console output.
for both file and console (shell) output.
Parameters
@@ -170,34 +170,121 @@ def set_up_logging(loglevel="warning", filename=None):
- filename to save logging messages into
If loglevel is 'debug', save line numbers in log messages.
Returns
-------
None.
Logger instances are generally not passed around, but retrieved from the
logging module as shown below (they are singletons).
We provide two loggers:
1. log = logging.getLogger('')
This is the 'root' logger. It uses a RichHandler if rich is available for
output to the shell, otherwise plain text.
Typical use:
log.debug("...")
log.info("...")
log.warning("...")
Equivalent to
logging.debug("...")
logging.info("...")
2. print_log = logging.getLogger('print')
This uses the logging module to issue the message, but prints without
any further markup (i.e. no date, loglevel, line number, etc). Think
PRINT via the LOGging module.
We use this as a replacement for the print function (i.e. for messages
that should not be affected by logging levels, and which should always
be printed).
Typical and intended use:
print_log.info("Available toolchains are ...")
The major difference from the normal print command is that the output
will be sent to stdout (as for print) AND to the file with name
filename, so that these messages appear in the log file together with
the normal log output.
"""
# convert loglevel string into loglevel as number
log_level_numeric = getattr(logging, loglevel.upper(), logging.WARNING)
assert log_level_numeric
if not isinstance(log_level_numeric, int):
raise ValueError("Invalid log level: %s" % loglevel)
handlers = []
if filename:
handlers.append(logging.FileHandler(filename))
# set up the main logger ("root" logger)
logger = logging.getLogger("")
# - "logger" logs everything
# - we use loglevel at handler level to write everything to file
# - and filter using log_level_numeric (as the user provides) to
# send logging messages to the console
logger.setLevel(0)
# the handler determines where the logs go: stdout/file
if rich_available:
# set up logging as recommended for rich, see
# https://rich.readthedocs.io/en/stable/logging.html
handlers.append(rich.logging.RichHandler())
shell_handler = rich.logging.RichHandler()
# rich handler provides metadata automatically:
logging_format = "%(message)s"
else: # rich not available, define our own output
# for shell output, only show time (not date and time)
shell_formatter = logging.Formatter(logging_format, datefmt="[%X]")
else:
shell_handler = logging.StreamHandler()
# include line numbers in output if level is DEBUG
linenumbers = " %(lineno)4d" if log_level_numeric == logging.DEBUG else ""
handlers.append(logging.StreamHandler())
logging_format = "%(asctime)s %(levelname)7s" + linenumbers + " | %(message)s"
shell_formatter = logging.Formatter(logging_format)
logging.basicConfig(
level=log_level_numeric,
format=logging_format,
datefmt="[%X]",
handlers=handlers,
force=True,
)
# here we hook everything together
shell_handler.setFormatter(shell_formatter)
# use the log_level_numeric to decide how much logging is sent to shell
shell_handler.setLevel(log_level_numeric)
logger.addHandler(shell_handler)
# if filename provided, write log messages to that file, too.
if filename:
file_handler = logging.FileHandler(filename)
# if we have a file, we write all information in there.
# We could change the level, for example restrict to only DEBUG and above with
# file_handler.setLevel(logging.DEBUG)
file_logging_format = "%(asctime)s %(levelname)7s %(lineno)4d | %(message)s"
file_formatter = logging.Formatter(file_logging_format, datefmt="[%X]")
file_handler.setFormatter(file_formatter)
logger.addHandler(file_handler)
#
# new logger for printing
#
print_log = logging.getLogger("print")
print_log.setLevel(logging.INFO)
print_log.propagate = False
# create an 'empty' formatter (message only, no metadata)
formatter = logging.Formatter("%(message)s")
# create, format and add handler for shell output
ch = logging.StreamHandler()
ch.setFormatter(formatter)
print_log.addHandler(ch)
# if filename provided, write output of print_log to that file, too
if filename:
# create, format and add file handler
fh = logging.FileHandler(filename)
fh.setFormatter(formatter)
print_log.addHandler(fh)
#
# short message
#
logging.debug(
f"Logging has been set up, loglevel={loglevel.upper()} "
+ f"{filename=} {rich_available=}"
@@ -531,6 +618,61 @@ def prepare_environment(mpsd_release: str, script_dir: Path) -> List[str]:
return available_toolchains
def get_native_microarchitecture():
"""Return native microarchitecture.
On MPSD machines, there should be an environment variable "MPSD_MICROARCH".
We try to read that. If it fails, we use the 'archspec cpu' command.
If that fails, we ask the user to install it.
Returns
-------
microarch : str
the native microarchitecture of the host CPU
Example
-------
>>> get_native_microarchitecture()
'haswell'
"""
# attempt to get the microarchitecture from the MPSD_MICROARCH environment
# variable (should work on the MPSD HPC and MPSD linux laptops). If it is
# not defined, fall back to "UNKNOWN_MICROARCH"
microarch = os.environ.get("MPSD_MICROARCH", "UNKNOWN_MICROARCH")
# if we have not found the microarchitecture environment variable,
# try calling archspec
if microarch == "UNKNOWN_MICROARCH":
logging.debug(
"Couldn't find MPSD_MICROARCH environment variable. Will try archspec."
)
try:
process = run(["archspec", "cpu"], stdout=subprocess.PIPE, text=True)
except FileNotFoundError as e:
logging.debug(f"Call of 'archspec cpu' failed: {e=}")
# Presumably 'archspec' is not installed.
msg = "Please install archspec, for example via 'pipx install archspec'.\n"
msg += "The command we need to execute is 'archspec cpu'.\n"
msg += "Documentation of package: https://archspec.readthedocs.io/"
logging.error(msg)
sys.exit(1)
else: # we have found archspec and executed it
if process.returncode == 0: # sanity check
microarch = process.stdout.strip()
logging.debug(
f"Found microarchitecture from 'archspec cpu' to be '{microarch}'"
)
assert len(microarch) > 0 # sanity check
else:
raise ValueError(
f"Some error occurred when calling 'archspec cpu': {process=}"
)
# at this point, we have determined the microarchitecture
log_metadata("microarchitecture", microarch)
return microarch
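A condensed sketch of the fallback order implemented above: prefer the MPSD_MICROARCH environment variable, then ask the archspec command line tool. Error handling and metadata logging are trimmed for brevity.

import os
import subprocess

def native_microarchitecture():
    # prefer the MPSD_MICROARCH environment variable if it is set
    microarch = os.environ.get("MPSD_MICROARCH", "UNKNOWN_MICROARCH")
    if microarch == "UNKNOWN_MICROARCH":
        # fall back to the 'archspec cpu' command
        process = subprocess.run(["archspec", "cpu"], stdout=subprocess.PIPE, text=True)
        if process.returncode == 0:
            microarch = process.stdout.strip()
    return microarch

print(native_microarchitecture())  # e.g. 'haswell'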
def install_environment(
mpsd_release: str,
toolchains: List[str],
@@ -576,7 +718,7 @@ def install_environment(
# Set required variables
release_base_dir = script_dir / mpsd_release
mpsd_microarch = get_native_microarchitecture()
toolchain_dir = release_base_dir / mpsd_microarch
toolchain_dir.mkdir(parents=True, exist_ok=True)
spack_setup_script = release_base_dir / "spack-environments" / "spack_setup.sh"
@@ -596,13 +738,16 @@ def install_environment(
"No toolchains requested. Available toolchains for release "
f"{mpsd_release} are: \n {available_toolchains}"
)
print_log = logging.getLogger("print")
print_log.info(f"{available_toolchains=}")
return
for toolchain in toolchains:
if toolchain not in available_toolchains:
raise ValueError(
f"Toolchain '{toolchain}' is not available in release {mpsd_release}."
)
# TODO: add to message how toolchains can be found
msg = f"Toolchain '{toolchain}' is not available in release {mpsd_release}."
logging.error(msg)
sys.exit(1)
# Install the toolchains
with os_chdir(toolchain_dir):
@@ -611,9 +756,15 @@ def install_environment(
if not os.path.exists("logs"):
os.mkdir("logs")
for toolchain in toolchains:
# Set the install log file name to config_vars["install_log_file"]
# and replace _toolchains_ with the toolchain name and
# _mpsd_spack_ver_ with mpsd_release
# Set the install log file name from create_log_file_names
_, build_log_file_name = create_log_file_names(
mpsd_release, mpsd_microarch, "install", toolchain=toolchain
)
build_log_folder = release_base_dir / "logs"
build_log_file = build_log_folder / build_log_file_name
# if the logs folder doesn't exist, create it
if not os.path.exists(build_log_folder):
os.makedirs(build_log_folder)
logging.info(f"Installing toolchain {toolchain} to {toolchain_dir}")
@@ -621,7 +772,7 @@ def install_environment(
setup_log_cmd(
mpsd_release,
script_dir,
msg=f"installing {toolchain} and logging at {install_log_file}",
msg=f"installing {toolchain} and logging at {build_log_file}",
)
setup_log_cmd(
mpsd_release,
@@ -633,7 +784,7 @@ def install_environment(
)
run(
f"bash {spack_setup_script} {' '.join(install_flags)} {toolchain} 2>&1 "
f"| tee -a {install_log_file} ",
f"| tee -a {build_log_file} ",
shell=True,
check=True,
)
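The shell pipeline above duplicates the installer output so it reaches both the terminal and the build log. A standalone sketch of the same tee pattern, with placeholder paths and without the install flags:

import subprocess

spack_setup_script = "spack-environments/spack_setup.sh"  # placeholder path
build_log_file = "logs/build.log"                         # placeholder log file
toolchain = "foss2021a"                                   # placeholder toolchain

# 2>&1 merges stderr into stdout; 'tee -a' appends to the log while echoing to the shell
subprocess.run(
    f"bash {spack_setup_script} {toolchain} 2>&1 | tee -a {build_log_file}",
    shell=True,
    check=True,
)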
@@ -736,8 +887,8 @@ def main():
# Get machine configs
os.environ.get("MPSD_OS", "UNKNOWN_OS")
mpsd_microarch = os.environ.get("MPSD_MICROARCH", "UNKNOWN_MICROARCH")
# release `dev` in script_dir/dev-23a
mpsd_microarch = get_native_microarchitecture()
# target dir is the place where this script exists. the
script_dir = Path(os.path.dirname(os.path.realpath(__file__)))
mpsd_release = args.release
# parse logging first
@@ -745,14 +896,18 @@ def main():
installer_log_name, build_log_name = create_log_file_names(
mpsd_release=mpsd_release, mpsd_microarch=mpsd_microarch, action=args.action
)
installer_log_file = (
script_dir / mpsd_release / mpsd_microarch / "logs" / installer_log_name
)
log_folder = script_dir / mpsd_release / mpsd_microarch / "logs"
# only create the log folder (and a log file) if the command is not an
# info-only command such as 'status' or 'available'
if args.action not in ["status", "available"]:
if not os.path.exists(log_folder):
os.makedirs(log_folder)
installer_log_file = log_folder / installer_log_name
else:
installer_log_file = None
set_up_logging(args.loglevel, installer_log_file)
# target dir is the place where this script exists. the
# Check the command and run related function
if args.action == "remove":
remove_environment(args.release, args.toolchains, script_dir)
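The change to main() creates a log folder (and hence a file-based installer log) only for actions that modify an installation; read-only actions such as 'status' and 'available' log to the console only. A compressed sketch of that decision, with hypothetical values:

import os
from pathlib import Path

action = "install"                                 # hypothetical CLI action
log_folder = Path("dev-23a") / "haswell" / "logs"  # hypothetical layout
installer_log_name = "installer.log"               # hypothetical file name

if action not in ["status", "available"]:
    # actions that change the installation get a persistent log file
    os.makedirs(log_folder, exist_ok=True)
    installer_log_file = log_folder / installer_log_name
else:
    # info-only actions: console logging only
    installer_log_file = None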