Recently I realized that my application generates fewer log records than I expected. After some experimenting I found that the problem is the combination of RotatingFileHandler and multiprocessing.
import logging
from logging import handlers
from multiprocessing import Pool
import os

log_file_name = 'log.txt'


def make_logger():
    logger = logging.getLogger('my_logger')
    logger.setLevel(logging.INFO)

    # reuse the handler if this process has already attached it
    current_handler_names = {handler.name for handler in logger.handlers}
    handler_name = 'my_handler'
    if handler_name in current_handler_names:
        return logger

    # rotate at ~10 KiB, keep no backup files
    handler = handlers.RotatingFileHandler(
        log_file_name, maxBytes=10 * 2 ** 10, backupCount=0)
    handler.setLevel(logging.INFO)
    handler.set_name(handler_name)
    logger.addHandler(handler)
    return logger


def f(x):
    logger = make_logger()
    logger.info('hey %s' % x)


if os.path.exists(log_file_name):
    os.unlink(log_file_name)

p = Pool(processes=30)
N = 1000
p.map(f, range(N))

with open(log_file_name, 'r') as f:
    print 'expected: %s, real: %s' % (N, f.read().count('hey'))
Output:
$ python main.py
expected: 1000, real: 943
What did I do wrong?