
@Zaharid
Created June 26, 2015 11:42
Proper way to manage logging for multiple processes: each worker installs a QueueHandler that forwards its log records to a shared queue, and a QueueListener in the main process picks them up and dispatches them to the handlers configured by logging.basicConfig.
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 26 13:16:22 2015
@author: zah
"""
import multiprocessing
import logging
from logging.handlers import QueueHandler, QueueListener


def work(q, level):
    # Runs in the worker process: replace the root logger's handlers with a
    # QueueHandler so every record is shipped back to the main process.
    handler = QueueHandler(q)
    root = logging.getLogger()
    root.setLevel(level)
    root.handlers = [handler]
    logging.info("process!!!")


if __name__ == '__main__':
    multiprocessing.set_start_method('spawn')
    logging.basicConfig(format="PATATA: %(message)s", level=logging.INFO)
    logging.warning('xxx')

    # Use a Manager queue so it can be shared with pool workers, see
    # http://stackoverflow.com/questions/9908781/sharing-a-result-queue-among-several-processes
    m = multiprocessing.Manager()
    q = m.Queue(-1)

    # The listener forwards queued records to the handlers configured above, see
    # https://docs.python.org/3/howto/logging-cookbook.html
    listener = QueueListener(q, *logging.getLogger().handlers)
    listener.start()

    pool = multiprocessing.Pool(4)
    pool.apply(work, (q, logging.getLogger().level))

    listener.stop()
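
The script above pushes a single task through pool.apply. Below is a minimal sketch of the same queue-based pattern with several concurrent workers; the worker name work_many, the task_id argument, and the use of pool.starmap are illustrative additions, not part of the original gist.

# Hypothetical variant: fan out several tasks while all log records still
# funnel through the one shared queue into the main process's handlers.
import multiprocessing
import logging
from logging.handlers import QueueHandler, QueueListener


def work_many(q, level, task_id):
    # Same per-process setup as work(): route records into the shared queue.
    root = logging.getLogger()
    root.setLevel(level)
    root.handlers = [QueueHandler(q)]
    logging.info("task %d done", task_id)


if __name__ == '__main__':
    multiprocessing.set_start_method('spawn')
    logging.basicConfig(format="PATATA: %(message)s", level=logging.INFO)

    m = multiprocessing.Manager()
    q = m.Queue(-1)

    listener = QueueListener(q, *logging.getLogger().handlers)
    listener.start()

    with multiprocessing.Pool(4) as pool:
        level = logging.getLogger().level
        pool.starmap(work_many, [(q, level, i) for i in range(8)])

    listener.stop()

Because every worker only writes to the queue, records are handled one at a time by the single QueueListener thread in the main process, so output from different workers cannot interleave mid-record.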