-
-
Save kingspp/9451566a5555fb022215ca2b7b802f19 to your computer and use it in GitHub Desktop.
import os
import yaml
import logging.config
import logging
import coloredlogs
def setup_logging(default_path='logging.yaml', default_level=logging.INFO, env_key='LOG_CFG'):
    """
    | **@author:** Prathyush SP
    | Logging Setup

    Configure logging for the whole process from a YAML dictConfig file.

    :param default_path: path of the YAML config, used when ``env_key`` is unset
    :param default_level: level passed to the ``basicConfig`` fallback
    :param env_key: environment variable that, when set and non-empty,
                    overrides ``default_path``
    """
    # Environment variable wins over the default path (empty string is ignored).
    path = os.getenv(env_key) or default_path
    if not os.path.exists(path):
        # No config file at all: fall back to a plain root handler.
        logging.basicConfig(level=default_level)
        coloredlogs.install(level=default_level)
        print('Failed to load configuration file. Using default configs')
        return
    with open(path, 'rt') as f:
        try:
            # safe_load refuses arbitrary object construction from the YAML.
            config = yaml.safe_load(f)
            logging.config.dictConfig(config)
            coloredlogs.install()
        except Exception as e:
            # A malformed config must not kill the application; report the
            # problem and fall back to the default configuration.
            print(e)
            print('Error in Logging Configuration. Using default configs')
            logging.basicConfig(level=default_level)
            coloredlogs.install(level=default_level)
version: 1
disable_existing_loggers: true

formatters:
  standard:
    format: "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
  error:
    format: "%(levelname)s <PID %(process)d:%(processName)s> %(name)s.%(funcName)s(): %(message)s"

handlers:
  console:
    class: logging.StreamHandler
    level: DEBUG
    formatter: standard
    stream: ext://sys.stdout

  info_file_handler:
    class: logging.handlers.RotatingFileHandler
    level: INFO
    formatter: standard
    filename: /tmp/info.log
    maxBytes: 10485760 # 10MB
    backupCount: 20
    encoding: utf8

  error_file_handler:
    class: logging.handlers.RotatingFileHandler
    level: ERROR
    formatter: error
    filename: /tmp/errors.log
    maxBytes: 10485760 # 10MB
    backupCount: 20
    encoding: utf8

  debug_file_handler:
    class: logging.handlers.RotatingFileHandler
    level: DEBUG
    formatter: standard
    filename: /tmp/debug.log
    maxBytes: 10485760 # 10MB
    backupCount: 20
    encoding: utf8

  critical_file_handler:
    class: logging.handlers.RotatingFileHandler
    level: CRITICAL
    formatter: standard
    filename: /tmp/critical.log
    maxBytes: 10485760 # 10MB
    backupCount: 20
    encoding: utf8

  warn_file_handler:
    class: logging.handlers.RotatingFileHandler
    level: WARN
    formatter: standard
    filename: /tmp/warn.log
    maxBytes: 10485760 # 10MB
    backupCount: 20
    encoding: utf8

root:
  level: NOTSET
  handlers: [console]
  # NOTE: 'propagate' has no effect on the root logger (there is nowhere to
  # propagate to), so the original misspelled 'propogate: yes' key was dropped.

loggers:
  <module>:
    level: INFO
    handlers: [console, info_file_handler, error_file_handler, critical_file_handler, debug_file_handler, warn_file_handler]
    propagate: no   # fixed typo: was 'propogate', which dictConfig silently ignores
  <module.x>:
    level: DEBUG
    handlers: [info_file_handler, error_file_handler, critical_file_handler, debug_file_handler, warn_file_handler]
    propagate: yes  # fixed typo: was 'propogate'
propogate
-> propagate
@alexandertsema Maybe it's not the best time for you, but I spent some time trying to make it work and here is my two cents.
Hope it helps other people in the wild.
Create classes for the filters. This one logs only the selected level, INFO (on a setupLogging.py along with the configuration loading e.g.).
One must return True
for the filter
def for the result you want, this is how the filtering knows the cases it will select.
The filter
def cannot be renamed or it will not work.
import logging
import logging.config
import yaml
class infoFilter(logging.Filter):
    """Logging filter that passes only records emitted at exactly INFO level."""

    def filter(self, record):
        # The filter hook must return a truthy value to keep the record;
        # anything below or above INFO is rejected.
        return record.levelno == logging.INFO
def setupLogging():
    """Load loggingConfig.yaml and apply it to the logging system via dictConfig."""
    # Fixed: the original was missing the closing quote on the filename literal.
    with open('loggingConfig.yaml', 'rt') as file:
        config = yaml.safe_load(file.read())
    logging.config.dictConfig(config)
Then create your handlers, filters, formatters (yaml file example).
filters:
infoFilter:
(): setupLogging.infoFilter
handlers:
info_file_handler:
class: logging.handlers.TimedRotatingFileHandler
level: INFO
filename: info.log
when: 'midnight'
backupCount: 4
utc: True
filters: [infoFilter]
encoding: utf8
And don't forget to add the handlers to your loggers (same yaml file)
root:
handlers: [info_file_handler]
Now call the config on your main
import logging
from setupLogging import setupLogging
setupLogging()
logger = logging.getLogger(__name__)
logger.info("logger set :D")
And wherever you have to, this will work on others files
import logging
logger = logging.getLogger(__name__)
logger.info("another logging file :D")
I tried the code by @alexandertsema above, after adding the missing quote mark in the 'with open' line, and after simplifying by commenting out the filter code. Executing code below creates the info.log file, but it is empty. I wonder if something might be wrong with my python installation, which has trouble with all logging except the basicConfig, but seems to work fine for all other code. Suggestions?
setupLogging.py:
#!/usr/bin/env python3
import logging
import logging.config
import yaml
'''
class infoFilter(logging.Filter):
def filter(self, rec):
return rec.levelno == logging.INFO
'''
def setupLogging():
with open('loggingConfig.yaml', 'rt') as file:
config = yaml.safe_load(file.read())
logging.config.dictConfig(config)
if __name__ == "__main__":
setupLogging()
logger = logging.getLogger(__name__)
logger.info("logger set :D")
logging.yaml:
---
version: 1
disable_existing_loggers: False
#filters:
# infoFilter:
# (): setupLogging.infoFilter
handlers:
info_file_handler:
class: logging.handlers.TimedRotatingFileHandler
level: INFO
filename: info.log
when: 'midnight'
backupCount: 4
utc: True
# filters: [infoFilter]
encoding: utf8
root:
handlers: [info_file_handler]
...
My version of the code needs the line logger.setLevel(logging.DEBUG)
after getting the logger.
Refer for more details:
https://kingspp.github.io/design/2017/11/06/the-head-and-tail-of-logging.html
Thanks for this nice logging-config @kingspp. It helped a lot. And also big thanks to the filters @rochamorelli. This is exactly what I was searching!
Just one change to your code.
You set the filter in the yaml with:
filters:
infoFilter:
(): setupLogging.infoFilter
This didn't work for me because Python was complaining about that setupLogging
is not a module. Since I'm creating the filter-class in a separate module logging.filters
I changed it to the following:
filters:
infoFilter:
(): logging.filters.infoFilter
However, if you setup everything within a main-script in the root-folder of your program (also with defining the respective filter) it should be:
filters:
infoFilter:
(): __main__.infoFilter
I think your example was referring to the latter.
Hi all,
Thanks for the logging-config,
But I have some issue Executing code below creates all files, but they are empty.
Could you please help me?
Thanks
Hi all,
Thanks for the logging-config,
But I have some issue Executing code below creates all files, but they are empty.
Could you please help me?
Thanks
Could you add your code, please?
Ok, I've tried to use this method but for some reason DEBUG messages are not getting printed out in root logger.
main_file.py
import os
import yaml
import logging.config
import logging
import coloredlogs
import setup_logging
import child_module
setup_logging.setup_logging(default_level=logging.DEBUG)
logger = logging.getLogger(__name__)
logger.info("Here's the main file.")
child_module.some_function()
child_module.py
import logging
import setup_logging
setup_logging.setup_logging(default_level=logging.DEBUG)
logger = logging.getLogger(__name__)
logger.info("Imported child module")
def some_function():
logger.debug("Debug inside the function")
logger.info("Info inside the function")
logger.warning("Warning inside the function")
logger.error("Error inside the function")
logger.critical("Critical inside the function")
And it does not display the logs inside the function of child module:
2020-09-27 22:01:45 child_module[24180] INFO Imported child module
2020-09-27 22:01:45 __main__[24180] INFO Here's the main file.
Ok, so I tinker the logging.yaml file and I enable other loggers: disable_existing_loggers: false
I'm able to get the functions INFO and higher logs but DEBUG is missing:
2020-09-27 22:02:45 child_module[22932] INFO Imported child module
2020-09-27 22:02:45 __main__[22932] INFO Here's the main file.
2020-09-27 22:02:45 child_module[22932] INFO Info inside the function
2020-09-27 22:02:45 child_module[22932] WARNING Warning inside the function
2020-09-27 22:02:45 child_module[22932] ERROR Error inside the function
2020-09-27 22:02:45 child_module[22932] CRITICAL Critical inside the function
Can someone tell me what's wrong, please?
FYI:
root:
level: NOTSET
handlers: [console]
propogate: no
For some reason, one cannot set propagate
to no
in the root logger. It's always yes
.
This can be verified easily by running the following print statement after logging.config.dictConfig(config)
is made:
rl = logging.getLogger()
print(rl.propagate)
print(rl.getEffectiveLevel())
print(rl.handlers)
Python logging does not allow disabling propagation for root
logger.
Ref: root logger section in dictConfig
What changes in configuration should I make to allow different Formatters for console, based on their level
formatters:
humanDebug:
(): flask_logify.formatters.RequestFormatter
format: \x1b[91m"[%(asctime)s] [%(levelname)s] : %(message)s"\x1b[0m
humanInfo:
(): flask_logify.formatters.RequestFormatter
format: \x1b[93m"[%(asctime)s] [%(levelname)s] : %(message)s"\x1b[0m
handlers:
console:
class: logging.StreamHandler
stream: ext://sys.stdout
consoleDebug:
class: logging.StreamHandler
stream: ext://sys.stdout
formatter: humanDebug
level: DEBUG
consoleInfo:
class: logging.StreamHandler
stream: ext://sys.stdout
formatter: humanInfo
level: INFO
loggers:
flask-development:
handlers: [ consoleDebug,consoleInfo,consoleWarn]
level: DEBUG
propagate: yes
The current configuration would send out multiple messages, as a consoleDebug handler emits debug & above messages and so on and so forth. Can I limit a handler to specific level ? Not like DEBUG and above
Actually LOG_CFG is not needed, if we will use local search for logging.yml:
file = os.path.join(str(Path().absolute()), *__path__.split("/"), "logging.yaml")
I still struggle to make this work., though promising.
I did try to digest https://kingspp.github.io/design/2017/11/06/the-head-and-tail-of-logging.html which the OP has posted.
We have defined a function which can be used for configuring Root Logger, which stands atop logging hierarchy, now let see how we can use this function, Open __init__.py of main module. Why we need to use __init__.py will be discussed in upcoming posts, for the time being bear with me,
and the code therein,
`# __init__.py
import os
from logging_config_manager import setup_logging
setup_logging(default_path=os.path.join("/".join(__file__.split('/')[:-1]), 'config', 'logging_config.yaml'))`
these seem to indicate that if one calls the setup_logging() each time at each module which one wishes to log, it will treat each module as "root", hence all the lovely customisation wouldn't kick in at all (which is what I have seen playing at my side)
Does anyone had a true "working" example that one can share? please?
https://gist.github.com/glenfant/4358668
is a simple example on line. There you can see "logging" only been imported once.
but for a layered multi-module package, how would one possibly achieve that please?
any thoughts are much appreciated please.
thanks
My version of the code needs the line
logger.setLevel(logging.DEBUG)
after getting the logger.
i have the same issue of "missing the DEBUG" level. I have tried many many different ways
more importantly, here is my yaml file:
version: 1
disable_existing_loggers: True
formatters:
standard:
format: "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
error:
format: "%(levelname)s <PID %(process)d:%(processName)s> %(name)s.%(funcName)s(): %(message)s"
filters:
infoFilter:
(): setup.logging_config_manager.infoFilter
debugFilter:
(): setup.logging_config_manager.debugFilter
errorFilter:
(): setup.logging_config_manager.errorFilter
warningFilter:
(): setup.logging_config_manager.warningFilter
criticalFilter:
(): setup.logging_config_manager.criticalFilter
handlers:
console:
class: logging.StreamHandler
level: INFO
formatter: standard
stream: ext://sys.stdout
info_file_handler:
class: logging.handlers.RotatingFileHandler
level: INFO
formatter: standard
filename: /tmp/info.log
maxBytes: 10485760 # 10MB
backupCount: 20
filters: [infoFilter]
encoding: utf8
error_file_handler:
class: logging.handlers.RotatingFileHandler
level: ERROR
formatter: error
filename: /tmp/errors.log
maxBytes: 10485760 # 10MB
backupCount: 20
filters: [errorFilter]
encoding: utf8
debug_file_handler:
class: logging.handlers.RotatingFileHandler
level: DEBUG
formatter: standard
filename: /tmp/debug.log
maxBytes: 10485760 # 10MB
backupCount: 20
encoding: utf8
critical_file_handler:
class: logging.handlers.RotatingFileHandler
level: CRITICAL
formatter: standard
filename: /tmp/critical.log
maxBytes: 10485760 # 10MB
backupCount: 20
filters: [criticalFilter]
encoding: utf8
warn_file_handler:
class: logging.handlers.RotatingFileHandler
level: WARN
formatter: standard
filename: /tmp/warn.log
maxBytes: 10485760 # 10MB
backupCount: 20
filters: [warningFilter]
encoding: utf8
root:
level: DEBUG
handlers: [console, info_file_handler]
propagate: no
loggers:
<__main__>:
level: INFO
handlers: [ info_file_handler, debug_file_handler, error_file_handler, critical_file_handler, warn_file_handler]
propagate: no
<module1>:
level: INFO
handlers: [ info_file_handler, debug_file_handler, error_file_handler, critical_file_handler, warn_file_handler]
propagate: no
<module1.x>:
level: INFO
handlers: [error_file_handler]
propagate: no
so at "root" level, i should have DEBUG as starting point.
and I was trying to achieve that
for the main, module1, module1.x the logger will have different handlers. but those handlers do not seem kick in at all. in fact all that matters seems to be what i put in at "root" level. the logs are produced on console and for the Info.log file.
p.s., i have played around with the propagate and disable_existing_loggers settings, and see no effects.
pps, i have done this logging_setup ONLY at the main level. as suggested from other stackoverflow sites; to avoid not loading the logging.config.dictConfig() multiple times at different module levels. (which is what Icmtcf has done in the previous post)
version: 1
disable_existing_loggers: no
formatters:
standard:
format: "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
error:
format: "%(asctime)s - %(name)s - %(levelname)s <PID %(process)d:%(processName)s> %(name)s.%(funcName)s(): %(message)s"
handlers:
info_file_handler:
class: logging.handlers.RotatingFileHandler
level: INFO
formatter: standard
filename: logs/info.log
maxBytes: 10485760 # 10MB
backupCount: 20
encoding: utf8
warn_file_handler:
class: logging.handlers.RotatingFileHandler
level: WARN
formatter: standard
filename: logs/warn.log
maxBytes: 10485760 # 10MB
backupCount: 20
encoding: utf8
error_file_handler:
class: logging.handlers.RotatingFileHandler
level: ERROR
formatter: error
filename: logs/errors.log
maxBytes: 10485760 # 10MB
backupCount: 20
encoding: utf8
critical_file_handler:
class: logging.handlers.RotatingFileHandler
level: CRITICAL
formatter: standard
filename: logs/critical.log
maxBytes: 10485760 # 10MB
backupCount: 20
encoding: utf8
debug_file_handler:
class: logging.handlers.RotatingFileHandler
level: DEBUG
formatter: standard
filename: logs/debug.log
maxBytes: 10485760 # 10MB
backupCount: 20
encoding: utf8
root_file_handler:
class: logging.handlers.RotatingFileHandler
level: DEBUG
formatter: standard
filename: logs/logs.log
maxBytes: 10485760 # 10MB
backupCount: 20
encoding: utf8
console:
class: logging.StreamHandler
level: DEBUG
formatter: standard
stream: ext://sys.stdout
error_console:
class: logging.StreamHandler
level: ERROR
formatter: error
stream: ext://sys.stderr
root:
level: DEBUG
handlers: [console, error_console, root_file_handler]
propagate: yes
loggers:
main:
level: DEBUG
handlers: [info_file_handler, warn_file_handler, error_file_handler, critical_file_handler, debug_file_handler]
propagate: no
werkzeug:
level: DEBUG
handlers: [info_file_handler, warn_file_handler, error_file_handler, critical_file_handler, debug_file_handler]
propagate: yes
api.app_server:
level: DEBUG
handlers: [info_file_handler, warn_file_handler, error_file_handler, critical_file_handler, debug_file_handler]
propagate: yes
Can you please give an example of how to write yaml file for aiologger?
What about custom filters?