Compare commits

..

6 Commits

Author SHA1 Message Date
Tyler Goodlet 5cdb2ca68a Hide `._rpc._invoke()` frame, again.. 2025-09-05 12:35:35 -04:00
Tyler Goodlet 5c0924bdea Explain the `infect_asyncio: bool` param to pass in RTE msg 2025-09-05 12:33:43 -04:00
Tyler Goodlet 79eb9be69f Add "ignore-case-handling" to exc unmasker
Since it turns out there are even case(s) in `trio` core that are guilty
(of implementing things like checkpoints inside exc handlers), this adds
a facility for ignoring explicit cases via `inspect.FrameInfo`-field
matching against the unmasked `exc_ctx` within
`maybe_raise_from_masking_exc()`.

Impl deats,
- use `inspect.getinnerframes()/getmodule()` to extract the
  "guilty place in code" which raised the masked error that we'd like
  to ignore and **not unmask**.
- start a `_mask_cases: dict` which describes the entries to ignore
  by matching against a specific `FrameInfo`'s fields, indexed from
  `getinnerframes()`.
- describe in that table the case I hit where a `trio.WouldBlock` is
  always masked by a `Cancelled` due to the way `trio.Lock.acquire()`
  implements the blocking case in its would-block handler..
- always call into a new `is_expected_masking_case()` predicate (from
  `maybe_raise_from_masking_exc()`) on matching `exc_ctx` types.
2025-09-04 14:51:25 -04:00
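
For illustration, a minimal sketch of the `FrameInfo`-field matching idea
described above; the names, table layout and frame index here are
illustrative only, not the exact impl (which appears in the `trionics`
diff further down):

import inspect
from types import TracebackType

# masked-exc-type -> {inner-frame index: {`FrameInfo` field: substring}}
mask_cases: dict[type[BaseException], dict[int, dict[str, str]]] = {
    BlockingIOError: {  # stand-in for `trio.WouldBlock` in this sketch
        -1: {
            'filename': '_sync.py',
            'function': 'acquire',
        },
    },
}

def matches_known_masker(
    exc_ctx: BaseException,
    exc_match: BaseException,
) -> bool:
    '''
    Return `True` when the masked `exc_ctx` was raised from a frame
    listed as a known (and thus ignored) masking case.

    '''
    cases: dict|None = mask_cases.get(type(exc_ctx))
    tb: TracebackType|None = exc_match.__traceback__
    if not cases or tb is None:
        return False

    frames: list[inspect.FrameInfo] = inspect.getinnerframes(tb)
    for iframe, fields in cases.items():
        try:
            frame_info = frames[iframe]
        except IndexError:
            continue
        if all(
            needle in str(getattr(frame_info, field, ''))
            for field, needle in fields.items()
        ):
            return True
    return False
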
Tyler Goodlet 6ada6fab77 Use new `pkg_name` in log-sys test suites 2025-09-04 11:55:38 -04:00
Tyler Goodlet a379aa5975 Implicitly name sub-logs by caller's mod
That is, when no `name` is passed to `get_logger()`, try to introspect
the caller's `module.__name__` and use it to infer/get the "namespace
path" to that module, the same as if using `name=__name__` as in the
most common usage.

Further, change the `_root_name` to be `pkg_name: str`, a public and
more obvious param name, and deprecate the former. This obviously adds
the necessary impl to make the new
`test_sys_log::test_implicit_mod_name_applied_for_child` test pass.

Impl details for `get_logger()`,
- add `pkg_name` and deprecate `_root_name`, include fallback logic
  and a deprecation warning.
- implement calling-module introspection using
  `inspect.stack()/getmodule()` to get both the `.__name__` and
  `.__package__` info, alongside adjusted logic to set the `name`
  when not provided, but only when the new `mk_sublog: bool` is set.
- tweak the `name` processing for the implicitly-set case,
  - rename `sub_name` -> `pkg_path: str` which is the path
    to the calling module minus that module's name component.
  - only partition `name` if `pkg_name` is `in` it.
  - use the `_root_log` for `pkg_name` duplication warnings.

Other/related,
- add types to various public mod vars missing them.
- rename `.log.log` -> `.log._root_log`.
2025-09-04 11:54:54 -04:00
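
As a rough sketch of the implicit-naming logic described above (a
simplified, hypothetical signature; the real `get_logger()` also handles
the `_root_name` deprecation, the `StackLevelAdapter` wrapping and the
duplicate-token warnings):

import inspect
import logging
from types import ModuleType

def get_sublogger(
    pkg_name: str,
    name: str|None = None,
) -> logging.Logger:
    root: logging.Logger = logging.getLogger(pkg_name)
    if name is None:
        # frame 0 is this function, frame 1 is the caller
        caller: inspect.FrameInfo = inspect.stack()[1]
        mod: ModuleType|None = inspect.getmodule(caller.frame)
        if mod is not None:
            name = mod.__name__  # ex. 'mypylib.subpkg.submod'

    if (
        not name
        or
        pkg_name not in name
    ):
        return root

    # drop the (duplicated) root-pkg token and the leaf-module name,
    # leaving only the sub-pkg path, ex. 'subpkg'
    _, _, sub_path = name.partition('.')
    sub_path, _, _leaf_mod = sub_path.rpartition('.')
    return root.getChild(sub_path) if sub_path else root

Called from a hypothetical `mypylib/subpkg/submod.py` as
`get_sublogger('mypylib')` this yields a logger named 'mypylib.subpkg',
the behaviour described in the test commit below.
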
Tyler Goodlet 3137654453 Add an implicit-pkg-path-as-logger-name test
A bit of test-driven dev to anticipate support of `.log.get_logger()`
usage such that it can be called from arbitrary sub-modules, themselves
embedded in arbitrary sub-pkgs, of some project; when no `name` is
provided, the `sub_name` passed to `Logger.getChild(<sub_name>)` will be
set as the sub-pkg path "down to" the calling module.

IOW if you call something like,

`log = tractor.log.get_logger(pkg_name='mypylib')`

from some `submod.py` in a project-dir that looks like,

mypylib/
  mod.py
  subpkg/
    submod.py  <- calling module

the `log: StackLevelAdapter` child-`Logger` instance will have a
`.name: str = 'mypylib.subpkg'`, excluding the `submod` part since that
is already rendered as the `{filename}` header in `log.LOG_FORMAT`.

Previously, similar behaviour was obtained by passing
`get_logger(name=__name__)` in the calling module; that pattern is
common enough that it motivated making this the default, presuming we
can introspect for the info.

Impl deats,
- duplicated a `load_module_from_path()` from `modden` to load the
  `testdir` rendered py project dir from its path.
 |_should prolly factor it down to this lib anyway bc we're going to
   need it for hot code reload? (well that and `watchfiles` Bp)
- in each of `mod.py` and `submod.py` render the `get_logger()` code
  without a `name`, expecting the (coming shortly) implicit-introspection
  feat to fill it in.
- do `.name` and `.parent` checks against expected sub-logger values
  from `StackLevelAdapter.logger.getChildren()`.
2025-09-04 10:03:11 -04:00
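
For reference, a rough sketch (plain stdlib `logging`, illustrative
names; `Logger.getChildren()` requires Python 3.12+) of the parent/child
relationships the test asserts on:

import logging

root = logging.getLogger('mypylib')
sub = logging.getLogger('mypylib.subpkg')

# only *direct* children are returned by `getChildren()`
assert sub in root.getChildren()
assert sub.parent is root
assert sub.name == 'mypylib.subpkg'
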
5 changed files with 337 additions and 47 deletions

View File

@ -2,28 +2,143 @@
`tractor.log`-wrapping unit tests.
'''
import importlib
from pathlib import Path
import shutil
import sys
from types import ModuleType
import pytest
import tractor
def test_root_pkg_not_duplicated():
def test_root_pkg_not_duplicated_in_logger_name():
'''
When both `pkg_name` and `name` are passed and they have
a common `<root_name>.< >` prefix, ensure that it is not
duplicated in the child's `StackLevelAdapter.name: str`.
'''
project_name: str = 'pylib'
pkg_path: str = 'pylib.subpkg.mod'
log = tractor.log.get_logger(
_root_name=project_name,
proj_log = tractor.log.get_logger(
pkg_name=project_name,
mk_sublog=False,
)
sublog = tractor.log.get_logger(
_root_name=project_name,
pkg_name=project_name,
name=pkg_path,
)
assert log is not sublog
assert sublog.name.count(log.name) == 1
assert proj_log is not sublog
assert sublog.name.count(proj_log.name) == 1
assert 'mod' not in sublog.name
# ?TODO, move this into internal libs?
# -[ ] we already use it in `modden.config._pymod` as well
def load_module_from_path(
path: Path,
module_name: str|None = None,
) -> ModuleType:
'''
Taken from SO,
https://stackoverflow.com/a/67208147
which is based on stdlib docs,
https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
'''
module_name = module_name or path.stem
spec = importlib.util.spec_from_file_location(
module_name,
str(path),
)
module = importlib.util.module_from_spec(spec)
sys.modules[module_name] = module
spec.loader.exec_module(module)
return module
def test_implicit_mod_name_applied_for_child(
testdir: pytest.Pytester,
loglevel: str,
):
'''
Verify that when `.log.get_logger(pkg_name='pylib')` is called
from a given sub-mod from within the `pylib` pkg-path, we
implicitly set the equiv of `name=__name__` from the caller's
module.
'''
# tractor.log.get_console_log(level=loglevel)
proj_name: str = 'snakelib'
mod_code: str = (
f'import tractor\n'
f'\n'
f'log = tractor.log.get_logger(pkg_name="{proj_name}")\n'
)
# create a sub-module for each pkg layer
_lib = testdir.mkpydir(proj_name)
pkg: Path = Path(_lib)
subpkg: Path = pkg / 'subpkg'
subpkg.mkdir()
pkgmod: Path = subpkg / "__init__.py"
pkgmod.touch()
_submod: Path = testdir.makepyfile(
_mod=mod_code,
)
pkg_mod = pkg / 'mod.py'
pkg_subpkg_submod = subpkg / 'submod.py'
shutil.copyfile(
_submod,
pkg_mod,
)
shutil.copyfile(
_submod,
pkg_subpkg_submod,
)
testdir.chdir()
# XXX NOTE, once the "top level" pkg mod has been
# imported, we can then use `import` syntax to
# import it's sub-pkgs and modules.
pkgmod = load_module_from_path(
Path(pkg / '__init__.py'),
module_name=proj_name,
)
pkg_root_log = tractor.log.get_logger(
pkg_name=proj_name,
mk_sublog=False,
)
assert pkg_root_log.name == proj_name
assert not pkg_root_log.logger.getChildren()
from snakelib import mod
assert mod.log.name == proj_name
from snakelib.subpkg import submod
assert (
submod.log.name
==
submod.__package__ # ?TODO, use this in `.get_logger()` instead?
==
f'{proj_name}.subpkg'
)
sub_logs = pkg_root_log.logger.getChildren()
assert len(sub_logs) == 1 # only one nested sub-pkg module
assert submod.log.logger in sub_logs
# breakpoint()
# TODO, moar tests against existing feats:
# ------ - ------
# - [ ] color settings?

View File

@ -469,7 +469,7 @@ async def _invoke(
kwargs: dict[str, Any],
is_rpc: bool = True,
hide_tb: bool = False,
hide_tb: bool = True,
return_msg_type: Return|CancelAck = Return,
task_status: TaskStatus[

View File

@ -14,11 +14,22 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Log like a forester!
'''
An enhanced logging subsys.
"""
An extended logging layer using (for now) the stdlib's `logging`
+ `colorlog` which embeds concurrency-primitive/runtime info into
records (headers) to help you better grok your distributed systems
built on `tractor`.
'''
from collections.abc import Mapping
from inspect import (
FrameInfo,
getmodule,
stack,
)
import sys
import logging
from logging import (
@ -26,8 +37,10 @@ from logging import (
Logger,
StreamHandler,
)
import colorlog # type: ignore
from types import ModuleType
import warnings
import colorlog # type: ignore
import trio
from ._state import current_actor
@ -39,7 +52,7 @@ _default_loglevel: str = 'ERROR'
# Super sexy formatting thanks to ``colorlog``.
# (NOTE: we use the '{' format style)
# Here, `thin_white` is just the layperson's gray.
LOG_FORMAT = (
LOG_FORMAT: str = (
# "{bold_white}{log_color}{asctime}{reset}"
"{log_color}{asctime}{reset}"
" {bold_white}{thin_white}({reset}"
@ -51,7 +64,7 @@ LOG_FORMAT = (
" {reset}{bold_white}{thin_white}{message}"
)
DATE_FORMAT = '%b %d %H:%M:%S'
DATE_FORMAT: str = '%b %d %H:%M:%S'
# FYI, ERROR is 40
# TODO: use a `bidict` to avoid the :155 check?
@ -75,7 +88,10 @@ STD_PALETTE = {
'TRANSPORT': 'cyan',
}
BOLD_PALETTE = {
BOLD_PALETTE: dict[
str,
dict[int, str],
] = {
'bold': {
level: f"bold_{color}" for level, color in STD_PALETTE.items()}
}
@ -290,11 +306,10 @@ class ActorContextInfo(Mapping):
def get_logger(
# ?TODO, could we just grab the caller's `mod.__name__`?
# -[ ] do it with `inspect` says SO,
# |_https://stackoverflow.com/a/1095621
name: str|None = None,
_root_name: str = _proj_name,
pkg_name: str = _proj_name,
# XXX, deprecated, use ^
_root_name: str|None = None,
logger: Logger|None = None,
@ -303,23 +318,89 @@ def get_logger(
# |_https://stackoverflow.com/questions/7507825/where-is-a-complete-example-of-logging-config-dictconfig
# |_https://docs.python.org/3/library/logging.config.html#configuration-dictionary-schema
subsys_spec: str|None = None,
mk_sublog: bool = True,
) -> StackLevelAdapter:
'''
Return the `tractor`-library root logger or a sub-logger for
`name` if provided.
When `name` is left null we try to auto-detect the caller's
`mod.__name__` and use that as the sub-logger key.
This allows for example creating a module level instance like,
.. code:: python
log = tractor.log.get_logger(_root_name='mylib')
and by default all console record headers will show the caller's
(of any `log.<level>()`-method) correct sub-pkg's
+ py-module-file.
'''
if _root_name:
msg: str = (
'The `_root_name: str` param of `get_logger()` is now deprecated.\n'
'Use the new `pkg_name: str` instead, it is the same usage.\n'
)
warnings.warn(
msg,
DeprecationWarning,
stacklevel=2,
)
pkg_name: str = _root_name or pkg_name
log: Logger
log = rlog = logger or logging.getLogger(_root_name)
log = rlog = logger or logging.getLogger(pkg_name)
# Implicitly introspect the caller's module-name whenever `name`
# is left as the null default.
#
# When the `pkg_name` is `in` the `mod.__name__` we presume
# this instance can be created as a sub-`StackLevelAdapter` and
# that the intention is to get free module-path tracing and
# filtering (well once we implement that) oriented around the
# py-module code hierarchy of the consuming project.
if (
pkg_name != _proj_name
and
name is None
and
mk_sublog
):
callstack: list[FrameInfo] = stack()
caller_fi: FrameInfo = callstack[1]
caller_mod: ModuleType = getmodule(caller_fi.frame)
if caller_mod:
# ?how is this `mod.__name__` defined?
# -> well by how the mod is imported..
# |_https://stackoverflow.com/a/15883682
mod_name: str = caller_mod.__name__
mod_pkg: str = caller_mod.__package__
log.info(
f'Generating sub-logger name,\n'
f'{mod_pkg}.{mod_name}\n'
)
# if pkg_name in caller_mod.__package__:
# from tractor.devx.debug import mk_pdb
# mk_pdb().set_trace()
if (
pkg_name
# and
# pkg_name in mod_name
):
name = mod_name
# XXX, lowlevel debuggin..
# if pkg_name != _proj_name:
# from tractor.devx.debug import mk_pdb
# mk_pdb().set_trace()
if (
name != _proj_name
and
name
# ^TODO? see caller_mod.__name__ as default above?
):
# NOTE: for handling for modules that use `get_logger(__name__)`
# we make the following stylistic choice:
# - always avoid duplicate project-package token
@ -329,21 +410,23 @@ def get_logger(
# since in python the {filename} is always this same
# module-file.
sub_name: None|str = None
rname: str = pkg_name
pkg_path: str = name
# ex. modden.runtime.progman
# -> rname='modden', _, sub_name='runtime.progman'
rname, _, sub_name = name.partition('.')
# -> rname='modden', _, pkg_path='runtime.progman'
if pkg_name in name:
rname, _, pkg_path = name.partition('.')
# ex. modden.runtime.progman
# -> pkgpath='runtime', _, leaf_mod='progman'
subpkg_path, _, leaf_mod = sub_name.rpartition('.')
subpkg_path, _, leaf_mod = pkg_path.rpartition('.')
# NOTE: special usage for passing `name=__name__`,
#
# - remove duplication of any root-pkg-name in the
# (sub/child-)logger name; i.e. never include the
# `_root_name` *twice* in the top-most-pkg-name/level
# `pkg_name` *twice* in the top-most-pkg-name/level
#
# -> this happens normally since it is added to `.getChild()`
# and as the name of its root-logger.
@ -353,35 +436,37 @@ def get_logger(
# only includes the first 2 sub-pkg name-tokens in the
# child-logger's name; the colored "pkg-namespace" header
# will then correctly show the same value as `name`.
if rname == _root_name:
sub_name = subpkg_path
if rname == pkg_name:
pkg_path = subpkg_path
# XXX, do some double-checks for duplication of,
# - root-pkg-name, already in root logger
# - leaf-module-name already in `{filename}` header-field
if _root_name in sub_name:
_duplicate, _, sub_name = sub_name.partition('.')
if pkg_name in pkg_path:
_duplicate, _, pkg_path = pkg_path.partition('.')
if _duplicate:
assert _duplicate == rname
log.warning(
# assert _duplicate == rname
_root_log.warning(
f'Duplicate pkg-name in sub-logger key?\n'
f'_root_name = {_root_name!r}\n'
f'sub_name = {sub_name!r}\n'
f'pkg_name = {pkg_name!r}\n'
f'pkg_path = {pkg_path!r}\n'
)
if leaf_mod in sub_name:
# XXX, for debuggin..
# import pdbp; pdbp.set_trace()
log.warning(
if (
leaf_mod
and
leaf_mod in pkg_path
):
_root_log.warning(
f'Duplicate leaf-module-name in sub-logger key?\n'
f'leaf_mod = {leaf_mod!r}\n'
f'sub_name = {sub_name!r}\n'
f'pkg_path = {pkg_path!r}\n'
)
if not sub_name:
if not pkg_path:
log = rlog
else:
log = rlog.getChild(sub_name)
elif mk_sublog:
log = rlog.getChild(pkg_path)
log.level = rlog.level
@ -396,8 +481,13 @@ def get_logger(
for name, val in CUSTOM_LEVELS.items():
logging.addLevelName(val, name)
# ensure customs levels exist as methods
assert getattr(logger, name.lower()), f'Logger does not define {name}'
# ensure our custom adapter levels exist as methods
assert getattr(
logger,
name.lower()
), (
f'Logger does not define {name}'
)
return logger
@ -471,4 +561,4 @@ def get_loglevel() -> str:
# global module logger for tractor itself
log: StackLevelAdapter = get_logger('tractor')
_root_log: StackLevelAdapter = get_logger('tractor')

View File

@ -337,9 +337,12 @@ def _run_asyncio_task(
'''
__tracebackhide__: bool = hide_tb
if not tractor.current_actor().is_infected_aio():
if not (actor := tractor.current_actor()).is_infected_aio():
raise RuntimeError(
"`infect_asyncio` mode is not enabled!?"
f'`infect_asyncio: bool` mode is not enabled ??\n'
f'Ensure you pass `ActorNursery.start_actor(infect_asyncio=True)`\n'
f'\n'
f'{actor}\n'
)
# ITC (inter task comms), these channel/queue names are mostly from

View File

@ -22,6 +22,10 @@ from __future__ import annotations
from contextlib import (
asynccontextmanager as acm,
)
import inspect
from types import (
TracebackType,
)
from typing import (
Type,
TYPE_CHECKING,
@ -63,6 +67,66 @@ def find_masked_excs(
return None
_mask_cases: dict[
Type[Exception], # masked exc type
dict[
int, # inner-frame index into `inspect.getinnerframes()`
# `FrameInfo.function/filename: str`s to match
tuple[str, str],
],
] = {
trio.WouldBlock: {
# `trio.Lock.acquire()` has a checkpoint inside the
# `WouldBlock`-no_wait path's handler..
-5: { # "5th frame up" from checkpoint
'filename': 'trio/_sync.py',
'function': 'acquire',
# 'lineno': 605, # matters?
},
}
}
def is_expected_masking_case(
cases: dict,
exc_ctx: Exception,
exc_match: BaseException,
) -> bool|inspect.FrameInfo:
'''
Determine whether the provided masked exception is from a known
bug/special/unintentional-`trio`-impl case which we do not wish
to unmask.
Return any guilty `inspect.FrameInfo` ow `False`.
'''
exc_tb: TracebackType = exc_match.__traceback__
if cases := _mask_cases.get(type(exc_ctx)):
inner: list[inspect.FrameInfo] = inspect.getinnerframes(exc_tb)
# from tractor.devx.debug import mk_pdb
# mk_pdb().set_trace()
for iframe, matchon in cases.items():
try:
masker_frame: inspect.FrameInfo = inner[iframe]
except IndexError:
continue
for field, in_field in matchon.items():
val = getattr(
masker_frame,
field,
)
if in_field not in val:
break
else:
return masker_frame
return False
# XXX, relevant discussion @ `trio`-core,
# https://github.com/python-trio/trio/issues/455
#
@ -197,6 +261,24 @@ async def maybe_raise_from_masking_exc(
raise_unmasked
):
if len(masked) < 2:
# don't unmask already known "special" cases..
if (
(cases := _mask_cases.get(type(exc_ctx)))
and
(masker_frame := is_expected_masking_case(
cases,
exc_ctx,
exc_match,
))
):
log.warning(
f'Ignoring already-known/non-ideally-valid masker code @\n'
f'{masker_frame}\n'
f'\n'
f'NOT raising {exc_ctx} from masker {exc_match!r}\n'
)
raise exc_match
raise exc_ctx from exc_match
# ??TODO, see above but, possibly unmasking sub-exc