Skip to content

Commit

Permalink
Merge pull request NCAR#127 from TeaganKing/logging
Browse files Browse the repository at this point in the history
[WIP] Logging
  • Loading branch information
TeaganKing authored Sep 19, 2024
2 parents c8c336f + 604c5bc commit ac54079
Show file tree
Hide file tree
Showing 6 changed files with 76 additions and 28 deletions.
4 changes: 2 additions & 2 deletions cupid/clear.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,8 @@ def clear(config_path):
Args: CONFIG_PATH - The path to the configuration file.
"""

logger = cupid.util.setup_logging(config_path)
run_dir = read_config_file(config_path)
# Delete the "computed_notebooks" folder and all the contents inside of it
shutil.rmtree(run_dir)
print(f"All contents in {run_dir} have been cleared.")
logger.info(f"All contents in {run_dir} have been cleared.")
11 changes: 6 additions & 5 deletions cupid/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
from __future__ import annotations

import os
import warnings

import click
import intake
Expand Down Expand Up @@ -74,6 +73,7 @@ def run(
# Get control structure
control = cupid.util.get_control_dict(config_path)
cupid.util.setup_book(config_path)
logger = cupid.util.setup_logging(config_path)

component_options = {
"atm": atmosphere,
Expand Down Expand Up @@ -180,6 +180,7 @@ def run(
timeseries_params[component]["level"],
num_procs,
serial,
logger,
)
# fmt: on
# pylint: enable=line-too-long
Expand Down Expand Up @@ -245,7 +246,7 @@ def run(
all_nbs[nb]["nb_path_root"] = nb_path_root + "/" + comp_name
all_nbs[nb]["output_dir"] = output_dir + "/" + comp_name
elif comp_bool and not all:
warnings.warn(
logger.warning(
f"No notebooks for {comp_name} component specified in config file.",
)

Expand All @@ -254,7 +255,7 @@ def run(
for nb, info in all_nbs.copy().items():
if not control["env_check"][info["kernel_name"]]:
bad_env = info["kernel_name"]
warnings.warn(
logger.warning(
f"Environment {bad_env} specified for {nb}.ipynb could not be found;" +
f" {nb}.ipynb will not be run." +
"See README.md for environment installation instructions.",
Expand Down Expand Up @@ -288,7 +289,7 @@ def run(
all_scripts[script] = info
all_scripts[script]["nb_path_root"] = nb_path_root + "/" + comp_name
elif comp_bool and not all:
warnings.warn(
logger.warning(
f"No scripts for {comp_name} component specified in config file.",
)

Expand All @@ -297,7 +298,7 @@ def run(
for script, info in all_scripts.copy().items():
if not control["env_check"][info["kernel_name"]]:
bad_env = info["kernel_name"]
warnings.warn(
logger.warning(
f"Environment {bad_env} specified for {script}.py could not be found;" +
f"{script}.py will not be run.",
)
Expand Down
40 changes: 22 additions & 18 deletions cupid/timeseries.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ def create_time_series(
height_dim,
num_procs,
serial,
logger,
):
"""
Generate time series versions of the history file data.
Expand Down Expand Up @@ -81,11 +82,11 @@ def create_time_series(

# Don't do anything if list of requested diagnostics is empty
if not diag_var_list:
print(f"\n No time series files requested for {component}...")
logger.info(f"\n No time series files requested for {component}...")
return

# Notify user that script has started:
print(f"\n Generating {component} time series files...")
logger.info(f"\n Generating {component} time series files...")

# Loop over cases:
for case_idx, case_name in enumerate(case_names):
Expand All @@ -95,11 +96,11 @@ def create_time_series(
"Configuration file indicates time series files have been pre-computed"
)
emsg += f" for case '{case_name}'. Will rely on those files directly."
print(emsg)
logger.info(emsg)
continue
# End if

print(f"\t Processing time series for case '{case_name}' :")
logger.info(f"\t Processing time series for case '{case_name}' :")

# Extract start and end year values:
start_year = start_years[case_idx]
Expand Down Expand Up @@ -184,7 +185,7 @@ def create_time_series(
wmsg += (
f" transferred beyond the {height_dim} dimension itself."
)
print(wmsg)
logger.warning(wmsg)

vert_coord_type = None
# End if
Expand All @@ -195,7 +196,7 @@ def create_time_series(
)
wmsg += " so no additional vertical coordinate information will be"
wmsg += f" transferred beyond the {height_dim} dimension itself."
print(wmsg)
logger.warning(wmsg)

vert_coord_type = None
# End if (long name)
Expand Down Expand Up @@ -228,13 +229,13 @@ def create_time_series(
vars_to_derive = []
# create copy of var list that can be modified for derivable variables
if diag_var_list == ["process_all"]:
print("generating time series for all variables")
logger.info("generating time series for all variables")
# TODO: this does not seem to be working for ocn...
diag_var_list = hist_file_var_list
for var in diag_var_list:
if var not in hist_file_var_list:
if component == "ocn":
print(
logger.warning(
"ocean vars seem to not be present in all files and thus cause errors",
)
continue
Expand All @@ -249,7 +250,7 @@ def create_time_series(
continue
msg = f"WARNING: {var} is not in the file {hist_files[0]}."
msg += " No time series will be generated."
print(msg)
logger.warning(msg)
continue

# Check if variable has a height_dim (eg, 'lev') dimension according to first file:
Expand All @@ -274,7 +275,7 @@ def create_time_series(
continue

# Notify user of new time series file:
print(f"\t - time series for {var}")
logger.info(f"\t - time series for {var}")

# Variable list starts with just the variable
ncrcat_var_list = f"{var}"
Expand All @@ -293,11 +294,11 @@ def create_time_series(

if "PS" in hist_file_var_list:
ncrcat_var_list = ncrcat_var_list + ",PS"
print("Adding PS to file")
logger.info("Adding PS to file")
else:
wmsg = "WARNING: PS not found in history file."
wmsg += " It might be needed at some point."
print(wmsg)
logger.warning(wmsg)
# End if

if vert_coord_type == "height":
Expand All @@ -309,11 +310,11 @@ def create_time_series(
# PMID file to each one of those targets separately. -JN
if "PMID" in hist_file_var_list:
ncrcat_var_list = ncrcat_var_list + ",PMID"
print("Adding PMID to file")
logger.info("Adding PMID to file")
else:
wmsg = "WARNING: PMID not found in history file."
wmsg += " It might be needed at some point."
print(wmsg)
logger.warning(wmsg)
# End if PMID
# End if height
# End if cam
Expand All @@ -333,6 +334,7 @@ def create_time_series(
if vars_to_derive:
if component == "atm":
derive_cam_variables(
logger,
vars_to_derive=vars_to_derive,
ts_dir=ts_dir[case_idx],
)
Expand All @@ -347,10 +349,12 @@ def create_time_series(
# End cases loop

# Notify user that script has ended:
print(f" ... {component} time series file generation has finished successfully.")
logger.info(
f" ... {component} time series file generation has finished successfully.",
)


def derive_cam_variables(vars_to_derive=None, ts_dir=None, overwrite=None):
def derive_cam_variables(logger, vars_to_derive=None, ts_dir=None, overwrite=None):
"""
Derive variables according to steps given here. Since derivations will depend on the
variable, each variable to derive will need its own set of steps below.
Expand Down Expand Up @@ -381,7 +385,7 @@ def derive_cam_variables(vars_to_derive=None, ts_dir=None, overwrite=None):
if overwrite:
Path(prect_file).unlink()
else:
print(
logger.warning(
f"[{__name__}] Warning: PRECT file was found and overwrite is False"
+ "Will use existing file.",
)
Expand Down Expand Up @@ -415,7 +419,7 @@ def derive_cam_variables(vars_to_derive=None, ts_dir=None, overwrite=None):
if overwrite:
Path(derived_file).unlink()
else:
print(
logger.warning(
f"[{__name__}] Warning: RESTOM file was found and overwrite is False."
+ "Will use existing file.",
)
Expand Down
39 changes: 38 additions & 1 deletion cupid/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@
Functions:
- get_control_dict(): Get the control dictionary from a configuration file.
- setup_book(): Setup run dir and output Jupyter book based on config.yaml
- setup_logging(): Set up logging based on configuration file log level.
- setup_book(): Setup run dir and output Jupyter book based on config.yaml.
- get_toc_files(): Return a list of files in the '_toc.yml'.
- create_ploomber_nb_task(): Create a Ploomber task for running a notebook.
- create_ploomber_script_task(): Create a Ploomber task for running a script.
Expand All @@ -15,6 +16,7 @@
"""
from __future__ import annotations

import logging
import os
import sys
import warnings
Expand Down Expand Up @@ -46,6 +48,7 @@ def execute_managed_notebook(cls, nb_man, kernel_name, **kwargs):

def get_control_dict(config_path):
"""Get control dictionary from configuration file"""

try:
with open(config_path) as fid:
control = yaml.safe_load(fid)
Expand Down Expand Up @@ -93,6 +96,40 @@ def get_control_dict(config_path):
return control


def setup_logging(config_path):
    """
    Set up logging based on the configuration file's log level.

    Reads ``computation_config.log_level`` from the config file at
    ``config_path``; valid options are debug, info, warning, and error
    (any capitalization). Defaults to "info" when the key is absent or
    its value is not a recognized level name.

    Args:
        config_path: Path to the YAML configuration file.

    Returns:
        A ``logging.Logger`` for this module, after ``logging.basicConfig``
        has been called with the resolved level.
    """
    control = get_control_dict(config_path)
    # default level is info if log level is not set in config
    log_level = control["computation_config"].get("log_level", "info")
    # Resolve the level name case-insensitively via the logging module's own
    # constants (DEBUG/INFO/WARNING/ERROR), instead of an if/elif ladder that
    # only matched all-lowercase or all-uppercase spellings.
    level = getattr(logging, str(log_level).upper(), None)
    if not isinstance(level, int):
        # Invalid level name: fall back to INFO and tell the user on stdout
        # (the logger itself is not configured yet at this point).
        print('setting log_level to "info" because invalid log level')
        level = logging.INFO
    logging.basicConfig(
        level=level,
    )

    return logging.getLogger(__name__)


def setup_book(config_path):
"""Setup run directory and output jupyter book"""

Expand Down
5 changes: 4 additions & 1 deletion examples/coupled_model/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,12 @@ computation_config:
### It must already be installed on your machine. You can also
### specify a different environment than the default for any
### notebook in NOTEBOOK CONFIG

default_kernel_name: cupid-analysis

# log_level sets how verbose the logging output will be.
# options include: debug, info, warning, error
log_level: 'info'

############# NOTEBOOK CONFIG #############

############################
Expand Down
5 changes: 4 additions & 1 deletion examples/key_metrics/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,12 @@ computation_config:
### It must already be installed on your machine. You can also
### specify a different environment than the default for any
### notebook in NOTEBOOK CONFIG

default_kernel_name: cupid-analysis

# log_level sets how verbose the logging output will be.
# options include: debug, info, warning, error
log_level: 'info'

############# NOTEBOOK CONFIG #############

############################
Expand Down

0 comments on commit ac54079

Please sign in to comment.