Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add "reinstall()" method to make logger usage easier with spawn multiprocessing #1069

Open
wants to merge 3 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
42 changes: 42 additions & 0 deletions loguru/_logger.py
Expand Up @@ -86,6 +86,7 @@
import contextlib
import functools
import logging
import os
import re
import sys
import warnings
Expand Down Expand Up @@ -230,6 +231,7 @@ class Logger:
def __init__(self, core, exception, depth, record, lazy, colors, raw, capture, patchers, extra):
    # Shared state container (handlers, levels, ...) common to related loggers.
    self._core = core
    # Immutable per-instance option tuple consumed by the logging methods.
    self._options = (exception, depth, record, lazy, colors, raw, capture, patchers, extra)
    # Pid of the process that constructed this instance. It travels with the
    # object when pickled to a spawned child, letting reinstall() detect
    # whether it is running in the originating process or in a child.
    self._own_pid = os.getpid()

def __repr__(self):
return "<loguru.logger handlers=%r>" % list(self._core.handlers.values())
Expand Down Expand Up @@ -1752,6 +1754,46 @@ def configure(self, *, handlers=None, levels=None, extra=None, patcher=None, act

return [self.add(**params) for params in handlers]

def _replace_core(self, core: Core):
    """Swap this instance's core for *core* (internal helper for ``reinstall()``)."""
    self._core = core

def reinstall(self):
    """Reinstall this logger's core as the core of the module-level logger.

    When using "spawn" multiprocessing, the logger passed to the child
    process is an unpickled copy whose core differs from the one held by the
    freshly imported module-level ``loguru.logger``. Calling this method once
    at the start of the child makes the module-level ``logger`` share the
    core received from the parent, so deeper functions in the same process
    can simply use ``from loguru import logger`` instead of threading the
    instance through every call.

    In the process that originally created this logger, the method is a
    no-op.

    Examples
    --------
    >>> def subworker(logger_):
    ...     logger_.reinstall()
    ...     logger.info("Child")
    ...     deeper_subworker()
    ...
    >>> def deeper_subworker():
    ...     logger.info("Grandchild")
    ...
    >>> def test_process_spawn():
    ...     spawn_context = multiprocessing.get_context("spawn")
    ...     logger.add("file.log", context=spawn_context, enqueue=True, catch=False)
    ...     process = spawn_context.Process(target=subworker, args=(logger,))
    ...     process.start()
    ...     process.join()
    ...     assert process.exitcode == 0
    ...     logger.info("Main")
    ...     logger.remove()
    """
    # "_own_pid" was recorded at construction time and is carried along when
    # the instance is pickled to a spawned child. If it matches the current
    # pid we are still in the originating process: nothing to do.
    if self._own_pid == os.getpid():
        return
    # Imported locally (not at module top) on purpose: this fetches the
    # module-level logger of *this* (child) process and avoids a circular
    # import at module load time.
    from loguru import logger

    logger._replace_core(self._core)


def _change_activation(self, name, status):
if not (name is None or isinstance(name, str)):
raise TypeError(
Expand Down
14 changes: 12 additions & 2 deletions tests/test_multiprocessing.py
Expand Up @@ -36,6 +36,16 @@ def subworker(logger_):
logger_.info("Child")


def subworker_spawn(logger_):
    # Re-bind the pickled logger's core onto this child process's
    # module-level logger before using it.
    logger_.reinstall()
    logger.info("Child")
    # Exercises logging through the module-level logger from a nested call,
    # which only works in the spawned child because reinstall() ran first.
    deeper_subworker()


def deeper_subworker():
    # Uses the module-level logger directly, without receiving an instance;
    # in a spawned child this relies on a prior reinstall() by the caller.
    logger.info("Grandchild")


def subworker_inheritance():
    # Fork-based contexts inherit the parent's handlers, so no reinstall()
    # is required before logging.
    logger.info("Child")

Expand Down Expand Up @@ -209,7 +219,7 @@ def test_process_spawn(spawn_context):

logger.add(writer, context=spawn_context, format="{message}", enqueue=True, catch=False)

process = spawn_context.Process(target=subworker, args=(logger,))
process = spawn_context.Process(target=subworker_spawn, args=(logger,))
monchin marked this conversation as resolved.
Show resolved Hide resolved
process.start()
process.join()

Expand All @@ -218,7 +228,7 @@ def test_process_spawn(spawn_context):
logger.info("Main")
logger.remove()

assert writer.read() == "Child\nMain\n"
assert writer.read() == "Child\nGrandchild\nMain\n"


@pytest.mark.skipif(os.name == "nt", reason="Windows does not support forking")
Expand Down