I've read a few posts on this but I'm still confused. I have this logging setup:

import logging

class MongoHandler(logging.Handler):
    def __init__(sel
You can set a different logging level for each handler, but the logger itself filters first: a record below the logger's level never reaches any handler, so the logger's level has to be set to the "lowest" (most verbose) level you want any handler to see. In the example below I set the logger to DEBUG, the stream handler to INFO and the TimedRotatingFileHandler to DEBUG. So the file gets DEBUG entries and above, while the stream outputs only INFO and above. You can't direct only DEBUG to one handler and only INFO to another this way; for that you'd need another logger (or a per-handler filter; see the sketch after the example).
import logging
import logging.handlers

logger = logging.getLogger("mylog")
formatter = logging.Formatter(
    '%(asctime)s | %(name)s | %(levelname)s: %(message)s')
logger.setLevel(logging.DEBUG)

# Console: INFO and above
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)
stream_handler.setFormatter(formatter)

# File: DEBUG and above, rotated at midnight, keeping 30 backups
logFilePath = "my.log"
file_handler = logging.handlers.TimedRotatingFileHandler(
    filename=logFilePath, when='midnight', backupCount=30)
file_handler.setFormatter(formatter)
file_handler.setLevel(logging.DEBUG)

logger.addHandler(file_handler)
logger.addHandler(stream_handler)

logger.info("Started")
try:
    x = 14
    y = 0
    z = x / y
except Exception as ex:
    logger.error("Operation failed.")
    logger.debug(
        "Encountered {0} when trying to perform calculation.".format(ex))
logger.info("Ended")
An addition to GrantVS's answer:
I had to use
logging.basicConfig(level=logging.DEBUG)
in order for it to work. Otherwise great answer, thanks!
Mario
PS: For some reason the system doesn't let me comment on GrantVS's answer directly.
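For context on what that one line does: logging.basicConfig(level=...) sets the root logger's level and attaches a default StreamHandler to it if it has none yet, which is why messages become visible on the console. A minimal sketch of the call in isolation:

import logging

# Must run before anything else attaches handlers to the root logger;
# it sets the root level and adds a default StreamHandler if none exist.
logging.basicConfig(level=logging.DEBUG)

logger = logging.getLogger("mylog")
logger.debug("now visible on the console via the root handler")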
Had the same problem, but the solution didn't work for IPython, as the QtConsole automatically creates a handler with no level set:
import logging
root = logging.getLogger()
root.handlers
Out: [<StreamHandler <stderr> (NOTSET)>]
As a result, IPython printed both DEBUG and INFO to the console, in spite of my file handler and stream handler having different levels.
This thread pointed out the issue for me: Logging module does not print in IPython
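The workaround is to drop (or quieten) the auto-created root handler before adding your own; a minimal sketch, assuming you are in an IPython/QtConsole session:

import logging

root = logging.getLogger()
# Remove the StreamHandler that QtConsole installed on the root logger ...
for handler in list(root.handlers):
    root.removeHandler(handler)
# ... or, if you prefer to keep it, just raise its level instead:
# for handler in root.handlers:
#     handler.setLevel(logging.WARNING)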
I made a helper module (helped greatly by this stack thread!) called custom_logging.py to make logging more convenient in other modules:
import logging
from pathlib import Path


def _add_stream_handler(logger: logging.Logger):
    stream_handler = logging.StreamHandler()
    formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
    stream_handler.setFormatter(formatter)
    stream_handler.setLevel(logging.INFO)   # console only gets INFO and above
    logger.addHandler(stream_handler)
    return logger


def _add_file_handler(logger: logging.Logger, log_path: Path):
    file_handler = logging.FileHandler(log_path, mode='w')
    formatter = logging.Formatter(
        fmt='%(asctime)s %(name)-12s %(levelname)-8s %(message)s', datefmt='%m-%d %H:%M')
    file_handler.setFormatter(formatter)
    file_handler.setLevel(logging.DEBUG)    # file gets everything from DEBUG up
    logger.addHandler(file_handler)
    return logger


def create_logger(root_dir: Path, caller: str) -> logging.Logger:
    log_path = root_dir / 'logs' / f'{caller}.log'
    log_path.parent.mkdir(parents=True, exist_ok=True)  # make sure logs/ exists
    logger = logging.getLogger(caller)
    root = logging.getLogger()
    logger.setLevel(logging.DEBUG)  # logger level must be the lowest of the handlers'

    # Only add handlers once, even if create_logger() is called repeatedly
    if not logger.handlers:
        _add_file_handler(logger=logger, log_path=log_path)
        _add_stream_handler(logger=logger)
        logger.info('Logging started.')

    # Delete the QtConsole stderr handler,
    # as it automatically logs both DEBUG & INFO to stderr
    if root.handlers:
        root.handlers = []

    return logger
def log_dataframe(df, logger: logging.Logger, name: str = "DataFrame") -> None:
    logger.debug(
        f'''{name} head:\n {df.head()}\n----------\n''')


def log_dataframes(*args, logger: logging.Logger) -> None:
    for gdf in args:
        logger.debug(
            f'''DataFrame head:\n {gdf.head()}\n----------\n''')
You can use its functions via:
from custom_logging import create_logger, log_dataframe
or via import custom_logging and then custom_logging.create_logger(), etc.
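For example, a minimal usage sketch in a calling script (the module name some_analysis and the DataFrame contents are hypothetical, and log_dataframe assumes pandas is installed):

# some_analysis.py
from pathlib import Path

import pandas as pd

from custom_logging import create_logger, log_dataframe

logger = create_logger(root_dir=Path.cwd(), caller='some_analysis')
logger.debug('Written to logs/some_analysis.log only.')
logger.info('Written to the log file and echoed to the console.')

df = pd.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6]})
log_dataframe(df, logger=logger, name='example df')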
Also see sections 'Multiple handlers and formatters' and 'Logging to multiple destinations' in the official logging cookbook at: https://docs.python.org/3/howto/logging-cookbook.html#logging-cookbook
It took me a while to understand the point.