Restructure log location

Merged Ashwin Kumar Karnad requested to merge restructure-log-location into main
2 files changed  +246 −51
@@ -82,7 +82,7 @@ def create_log_file_names(
     toolchain : str
         toolchain name (only for build log file)

     returns : tuple
-        tuple containing the strings of installer and build log file names
+        tuple containing the installer_log_file and build_log_file names
     """
     installer_log_file = config_vars["installer_log_template"].substitute(
         mpsd_release=mpsd_release,
@@ -151,12 +151,34 @@ def read_metadata_from_logfile(logfile: Union[str, Path]) -> dict:
     }
 
 
+def get_installer_log_file(mpsd_release: str, cmd: str, script_dir: str) -> str:
+    """Get installer log file name."""
+    # Get machine configs
+    os.environ.get("MPSD_OS", "UNKNOWN_OS")
+    mpsd_microarch = get_native_microarchitecture()
+    # parse logging first
+    # decide the log_file_name
+    installer_log_name, build_log_name = create_log_file_names(
+        mpsd_release=mpsd_release, mpsd_microarch=mpsd_microarch, action=cmd
+    )
+    log_folder = script_dir / mpsd_release / "logs"
+    # if the log_folder doesn't exist, don't create it when
+    # the command is an info-only command
+    if cmd not in ["status", "available"]:
+        if not os.path.exists(log_folder):
+            os.makedirs(log_folder)
+        installer_log_file = log_folder / installer_log_name
+    else:
+        installer_log_file = None
+    return installer_log_file
+
+
 def set_up_logging(loglevel="warning", filename=None):
     """Set up logging.
 
     This function sets up the logging configuration for the script.
     It configures the log level, log format, and log handlers
-    for both file and console output.
+    for both file and console(=shell) output.
 
     Parameters
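
Note (illustration, not part of the diff): get_installer_log_file only creates a log folder and returns a path for actions that modify the installation; for the info-only commands "status" and "available" it returns None, so set_up_logging attaches no file handler. A rough usage sketch, with a made-up script_dir and the release name dev-23a taken from the comments further down; the exact installer log file name comes from config_vars["installer_log_template"] and is not shown here:

    from pathlib import Path

    script_dir = Path("/opt/mpsd")  # hypothetical install location
    get_installer_log_file("dev-23a", "install", script_dir)
    # -> /opt/mpsd/dev-23a/logs/<installer log name>, folder created if missing
    get_installer_log_file("dev-23a", "available", script_dir)
    # -> None: info-only command, nothing is written to disk
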
@@ -170,36 +192,123 @@ def set_up_logging(loglevel="warning", filename=None):
         - filename to save logging messages into
 
     If loglevel is 'debug', save line numbers in log messages.
+
+    Returns
+    -------
+    None.
+
+    Logger instances are generally not passed around, but retrieved from the
+    logging module as shown below (they are singletons).
+
+    We provide two loggers:
+
+    1. log = logging.getLogger('')
+
+       This is the 'root' logger. It uses a RichHandler if rich is available for
+       output to the shell, otherwise plain text.
+
+       Typical use:
+
+       log.debug("...")
+       log.info("...")
+       log.warn("...")
+
+       Equivalent to
+
+       logging.debug("...")
+       logging.info("...")
+
+    2. print_log = logging.getLogger('print')
+
+       This uses the logging module to issue the message, but prints without
+       any further markup (i.e. no date, loglevel, line number, etc). Think
+       PRINT via the LOGging module.
+
+       We use this as a replacement for the print function (i.e. for messages
+       that should not be affected by logging levels, and which should always
+       be printed).
+
+       Typical and intended use:
+
+       print_log.info("Available toolchains are ...")
+
+       The major difference from the normal print command is that the output
+       will be sent to stdout (as for print) AND to the file with name
+       filename, so that these messages appear in the log file together with
+       normal log output.
     """
+    # convert loglevel string into loglevel as number
     log_level_numeric = getattr(logging, loglevel.upper(), logging.WARNING)
     assert log_level_numeric
     if not isinstance(log_level_numeric, int):
         raise ValueError("Invalid log level: %s" % loglevel)
-    handlers = []
-    if filename:
-        handlers.append(logging.FileHandler(filename))
+
+    # set up the main logger ("root" logger)
+    logger = logging.getLogger("")
+    # - "logger" logs everything
+    # - we use loglevel at handler level to write everything to file
+    # - and filter using log_level_numeric (as the user provides) to
+    #   send logging messages to the console
+    logger.setLevel(0)
+
+    # the handler determines where the logs go: stdout/file
     if rich_available:
         # set up logging as recommended for rich, see
         # https://rich.readthedocs.io/en/stable/logging.html
-        handlers.append(rich.logging.RichHandler())
+        shell_handler = rich.logging.RichHandler()
+        # rich handler provides metadata automatically:
         logging_format = "%(message)s"
-    else:  # rich not available, define our own output
+        # for shell output, only show time (not date and time)
+        shell_formatter = logging.Formatter(logging_format, datefmt="[%X]")
+    else:
+        shell_handler = logging.StreamHandler()
         # include line numbers in output if level is DEBUG
         linenumbers = " %(lineno)4d" if log_level_numeric == logging.DEBUG else ""
-        handlers.append(logging.StreamHandler())
         logging_format = "%(asctime)s %(levelname)7s" + linenumbers + " | %(message)s"
+        shell_formatter = logging.Formatter(logging_format)
-    logging.basicConfig(
-        level=log_level_numeric,
-        format=logging_format,
-        datefmt="[%X]",
-        handlers=handlers,
-        force=True,
-    )
+
+    # here we hook everything together
+    shell_handler.setFormatter(shell_formatter)
+    # use the log_level_numeric to decide how much logging is sent to shell
+    shell_handler.setLevel(log_level_numeric)
+    logger.addHandler(shell_handler)
+
+    # if filename provided, write log messages to that file, too.
+    if filename:
+        file_handler = logging.FileHandler(filename)
+        # if we have a file, we write all information in there.
+        # We could change the level, for example restrict to only DEBUG and above with
+        # file_handler.setLevel(logging.DEBUG)
+        file_logging_format = "%(asctime)s %(levelname)7s %(lineno)4d | %(message)s"
+        file_formatter = logging.Formatter(file_logging_format, datefmt="[%X]")
+        file_handler.setFormatter(file_formatter)
+        logger.addHandler(file_handler)
+
+    #
+    # new logger for printing
+    #
+    print_log = logging.getLogger("print")
+    print_log.setLevel(logging.INFO)
+    print_log.propagate = False
+    # create 'empty' formatter
+    formatter = logging.Formatter("%(message)s")
+
+    # create, format and add handler for shell output
+    ch = logging.StreamHandler()
+    ch.setFormatter(formatter)
+    print_log.addHandler(ch)
+
+    # if filename provided, write output of print_log to that file, too
+    if filename:
+        # create, format and add file handler
+        fh = logging.FileHandler(filename)
+        fh.setFormatter(formatter)
+        print_log.addHandler(fh)
+
+    #
+    # short message
+    #
     logging.debug(
-        f"Logging has been setup, loglevel={loglevel.upper()}"
+        f"Logging has been setup, loglevel={loglevel.upper()} "
         + f"{filename=} {rich_available=}"
     )
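
The docstring added above describes the split between the root logger (filtered by loglevel, rich output if available) and the 'print' logger (always emitted, no markup). A self-contained sketch of the same pattern, independent of this script and with names chosen for illustration, shows why print_log.propagate = False matters: without it each message would also be re-emitted through the root logger's handlers:

    import logging

    logging.basicConfig(level=logging.WARNING, format="%(levelname)s | %(message)s")

    print_log = logging.getLogger("print")
    print_log.setLevel(logging.INFO)
    print_log.propagate = False  # keep messages away from the root handlers
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter("%(message)s"))  # no date/level prefix
    print_log.addHandler(handler)

    logging.info("suppressed: root level is WARNING")
    logging.warning("shown with a levelname prefix")
    print_log.info("always shown, plain text only")
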
@@ -531,6 +640,61 @@ def prepare_environment(mpsd_release: str, script_dir: Path) -> List[str]:
     return available_toolchains
 
 
+def get_native_microarchitecture():
+    """Return native microarchitecture.
+
+    On MPSD machines, there should be an environment variable "MPSD_MICROARCH".
+    We try to read that. If it fails, we use the 'archspec cpu' command.
+    If that fails, we ask the user to install it.
+
+    Returns
+    -------
+    MPSD_MICROARCH : str
+
+    Example
+    -------
+    >>> get_native_microarchitecture()
+    'haswell'
+    """
+    # attempt to get MICRO_ARCH from environment variable (should work on
+    # MPSD_HPC and MPSD linux laptops). If not defined, return
+    # "UNKNOWN_MICROARCH"
+    microarch = os.environ.get("MPSD_MICROARCH", "UNKNOWN_MICROARCH")
+
+    # if we have not found the microarchitecture environment variable,
+    # try calling archspec
+    if microarch == "UNKNOWN_MICROARCH":
+        logging.debug(
+            "Couldn't find MPSD_MICROARCH environment variable. Will try archspec."
+        )
+        try:
+            process = run(["archspec", "cpu"], stdout=subprocess.PIPE, text=True)
+        except FileNotFoundError as e:
+            logging.debug(f"Call of 'archspec cpu' failed: {e=}")
+            # Presumably 'archspec' is not installed.
+            msg = "Please install archspec, for example via 'pipx install archspec'.\n"
+            msg += "The command we need to execute is 'archspec cpu'.\n"
+            msg += "Documentation of package: https://archspec.readthedocs.io/"
+
+            logging.error(msg)
+            sys.exit(1)
+        else:  # we have found archspec and executed it
+            if process.returncode == 0:  # sanity check
+                microarch = process.stdout.strip()
+                logging.debug(
+                    f"Found microarchitecture from 'archspec cpu' to be '{microarch}'"
+                )
+                assert len(microarch) > 0  # sanity check
+            else:
+                raise ValueError(
+                    f"Some error occurred when calling 'archspec cpu': {process=}"
+                )
+
+    # at this point, we have determined the microarchitecture
+    log_metadata("microarchitecture", microarch)
+    return microarch
+
+
 def install_environment(
     mpsd_release: str,
     toolchains: List[str],
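
To see the fallback order of get_native_microarchitecture in isolation: the environment variable wins, and only if it is unset does the function shell out to archspec. A hedged sketch (assumes archspec is installed, e.g. via 'pipx install archspec'; output values are examples only):

    import os
    import subprocess

    # 1. MPSD machines define MPSD_MICROARCH, so no external call is needed
    os.environ.get("MPSD_MICROARCH", "UNKNOWN_MICROARCH")  # e.g. 'haswell'

    # 2. otherwise fall back to `archspec cpu`
    proc = subprocess.run(["archspec", "cpu"], stdout=subprocess.PIPE, text=True)
    if proc.returncode == 0:
        print(proc.stdout.strip())  # microarchitecture of the current host
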
@@ -576,7 +740,7 @@ def install_environment(
     # Set required variables
     release_base_dir = script_dir / mpsd_release
+    mpsd_microarch = get_native_microarchitecture()
     toolchain_dir = release_base_dir / mpsd_microarch
     toolchain_dir.mkdir(parents=True, exist_ok=True)
     spack_setup_script = release_base_dir / "spack-environments" / "spack_setup.sh"
@@ -596,13 +760,16 @@ def install_environment(
             "No toolchains requested. Available toolchains for release "
             f"{mpsd_release} are: \n {available_toolchains}"
         )
+        print_log = logging.getLogger("print")
+        print_log.info(f"{available_toolchains=}")
         return
     for toolchain in toolchains:
         if toolchain not in available_toolchains:
-            raise ValueError(
-                f"Toolchain '{toolchain}' is not available in release {mpsd_release}."
-            )
+            # TODO: add to message how toolchains can be found
+            msg = f"Toolchain '{toolchain}' is not available in release {mpsd_release}."
+            logging.error(msg)
+            sys.exit(1)
 
     # Install the toolchains
     with os_chdir(toolchain_dir):
@@ -611,9 +778,15 @@ def install_environment(
         if not os.path.exists("logs"):
             os.mkdir("logs")
         for toolchain in toolchains:
-            # Set the install log file name to config_vars["install_log_file"]
-            # and replace _toolchains_ with the toolchain name and
-            # _mpsd_spack_ver_ with mpsd_release
+            # Set the install log file name from create_log_file_names
+            _, build_log_file_name = create_log_file_names(
+                mpsd_release, mpsd_microarch, "install", toolchain=toolchain
+            )
+            build_log_folder = release_base_dir / "logs"
+            build_log_file = build_log_folder / build_log_file_name
+            # if logs folder doesn't exist, create it
+            if not os.path.exists(build_log_folder):
+                os.makedirs(build_log_folder)
             logging.info(f"Installing toolchain {toolchain} to {toolchain_dir}")
@@ -621,19 +794,19 @@ def install_environment(
             setup_log_cmd(
                 mpsd_release,
                 script_dir,
-                msg=f"installing {toolchain} and logging at {install_log_file}",
+                msg=f"installing {toolchain} and logging at {build_log_file}",
             )
             setup_log_cmd(
                 mpsd_release,
                 script_dir,
                 msg=(
-                    f"CMD: bash {spack_setup_script} {' '.join(install_flags)}"
-                    "{toolchain}"
+                    f"CMD: bash {spack_setup_script} {' '.join(install_flags)} "
+                    f"{toolchain}"
                 ),
             )
             run(
                 f"bash {spack_setup_script} {' '.join(install_flags)} {toolchain} 2>&1 "
-                f"| tee -a {install_log_file} ",
+                f"| tee -a {build_log_file} ",
                 shell=True,
                 check=True,
             )
@@ -734,22 +907,13 @@ def main():
     # Carry out the action
     args = parser.parse_args()
 
-    # Get machine configs
-    os.environ.get("MPSD_OS", "UNKNOWN_OS")
-    os.environ.get("MPSD_MICROARCH", "UNKNOWN_MICROARCH")
-    # parse logging first
-    # decide the log_file_name
-    (
-        config_vars["build_log_file"]
-        .replace("mpsd_spack_ver_", f"{mpsd_release}_")
-        .replace("_toolchains_", f"_{toolchain}_")
-    )
-    set_up_logging(args.loglevel)
     # target dir is the place where this script exists. the
     # release `dev` in script_dir/dev-23a
     script_dir = Path(os.path.dirname(os.path.realpath(__file__)))
+
+    set_up_logging(
+        args.loglevel, get_installer_log_file(args.release, args.action, script_dir)
+    )
 
     # Check the command and run related function
     if args.action == "remove":
         remove_environment(args.release, args.toolchains, script_dir)
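
With both changes in place, the start-up sequence in main() condenses to: parse arguments, locate script_dir, derive the installer log file from release and action, then configure logging once. A condensed restatement of the hunk above (not new code), with the info-only case spelled out:

    args = parser.parse_args()
    script_dir = Path(os.path.dirname(os.path.realpath(__file__)))

    # None for "status"/"available" -> shell-only logging;
    # otherwise a file under <script_dir>/<release>/logs/ is added as well
    logfile = get_installer_log_file(args.release, args.action, script_dir)
    set_up_logging(args.loglevel, logfile)
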