Proper way to manage logging for multiple processes.
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 26 13:16:22 2015

@author: zah
"""
import multiprocessing
import logging
from logging.handlers import QueueHandler, QueueListener


def work(q, level):
    # In the child process, replace the root logger's handlers with a
    # QueueHandler so every record is shipped back through the shared queue.
    handler = QueueHandler(q)
    root = logging.getLogger()
    root.setLevel(level)
    root.handlers = [handler]
    logging.info("process!!!")


if __name__ == '__main__':
    multiprocessing.set_start_method('spawn')
    logging.basicConfig(format="PATATA: %(message)s", level=logging.INFO)
    logging.warning('xxx')
    # http://stackoverflow.com/questions/9908781/sharing-a-result-queue-among-several-processes
    m = multiprocessing.Manager()
    q = m.Queue(-1)
    # https://docs.python.org/3/howto/logging-cookbook.html
    # The QueueListener drains the queue and re-emits each record through the
    # main process's handlers, so all processes share one output and format.
    listener = QueueListener(q, *logging.getLogger().handlers)
    listener.start()
    pool = multiprocessing.Pool(4)
    pool.apply(work, (q, logging.getLogger().level))
    pool.close()
    pool.join()
    listener.stop()
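If the pool runs many tasks rather than a single apply(), the same queue-based setup can be installed once per worker through Pool's initializer instead of inside every task. The sketch below is not part of the original gist; worker_init and task are hypothetical names, but the QueueHandler/QueueListener pattern is the same one used above.

import logging
import multiprocessing
from logging.handlers import QueueHandler, QueueListener


def worker_init(q, level):
    # Runs once in each worker process: route all records into the shared queue.
    root = logging.getLogger()
    root.setLevel(level)
    root.handlers = [QueueHandler(q)]


def task(i):
    logging.info("task %d running in %s", i, multiprocessing.current_process().name)
    return i * i


if __name__ == '__main__':
    logging.basicConfig(format="PATATA: %(message)s", level=logging.INFO)
    q = multiprocessing.Manager().Queue(-1)
    # Relay queued records through the main process's handlers.
    listener = QueueListener(q, *logging.getLogger().handlers)
    listener.start()
    with multiprocessing.Pool(4, initializer=worker_init,
                              initargs=(q, logging.getLogger().level)) as pool:
        results = pool.map(task, range(8))
    listener.stop()

With this variant each worker is configured exactly once, and every subsequent task submitted to the pool logs through the shared queue without touching the logging setup again.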