From bc9dad4f7cc33901a3d6ca903a06719326da7fcc Mon Sep 17 00:00:00 2001 From: Douglas Raillard Date: Wed, 13 Dec 2023 16:37:21 +0000 Subject: [PATCH] lisa._unshare: Ensure logging is set up before unpickling function FIX Ensure the environment (logging) is set up correctly before attempting to unpickle the function and its arguments. Otherwise, unpickling the arguments will trigger __init__/__new__ calls that will run with the default logging conf rather than what we want. --- lisa/_unshare.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/lisa/_unshare.py b/lisa/_unshare.py index a82f4da450..1dfb68644a 100644 --- a/lisa/_unshare.py +++ b/lisa/_unshare.py @@ -33,6 +33,7 @@ import multiprocessing import threading import logging +import logging.handlers import queue from importlib.util import module_from_spec from importlib.machinery import ModuleSpec @@ -90,7 +91,7 @@ def _do_unshare(): libc.mount(b"none", b"/", ffi.NULL, mount_flags, ffi.NULL); -def _unshare_wrapper(args): +def _unshare_wrapper(configure, f): # If we are already root, we don't need to do anything. This will increase # the odds of all that working in a CI environment inside an existing # container. @@ -102,12 +103,17 @@ def _unshare_wrapper(args): # pickle would import all the necessary modules to deserialize the objects, # leading to importing modules like pyarrow that create a background # thread, preventing the unshare(CLONE_NEWUSER) syscall from working. - f, log_configure, args, kwargs = pickle.loads(args) + configure = pickle.loads(configure) # Configure logging module to get the records back in the parent thread - # where they will be processed as usual. - log_configure() - return f(*args, **kwargs) + # where they will be processed as usual.
We need to do this before + # unpickling the function and its data, as unpickling will trigger some + # __new__/__init__ calls that otherwise would run with the wrong logging + # setup + configure() + + f = pickle.loads(f) + return f() @@ contextlib.contextmanager @@ -190,10 +196,11 @@ def _with_unshare(f, args=tuple(), kwargs={}): # are unpickled (triggering imports) by pickling them ourselves. ctx = multiprocessing.get_context('spawn') with _empty_main(), ctx.Pool(processes=1) as pool: - data = pickle.dumps((f, configure, args, kwargs)) + configure = pickle.dumps(configure) + f = pickle.dumps(functools.partial(f, *args, **kwargs)) return pool.apply( _unshare_wrapper, - args=(data,), + args=(configure, f), )