Compare commits
17 Commits
a26fa1e743...69a52d6fb1

| Author | SHA1 | Date |
|---|---|---|
| | 69a52d6fb1 | |
| | 095d712443 | |
| | cb2974f405 | |
| | 5e7dac2b90 | |
| | 9e877e0666 | |
| | 8aaaeb19ed | |
| | c5125e7955 | |
| | 7bb80e85b0 | |
| | 46004b2fc3 | |
| | 0b63a73954 | |
| | 8fb47f761a | |
| | ad37ebabb2 | |
| | 5020266bd5 | |
| | d6a56d87bf | |
| | e8152b8534 | |
| | bb81c74353 | |
| | 7eaf28479c | |

@@ -19,10 +19,8 @@
 for tendiez.

 '''
-from piker.log import (
-    get_console_log,
-    get_logger,
-)
+from ..log import get_logger
 from .calc import (
     iter_by_dt,
 )

@@ -53,17 +51,7 @@ from ._allocate import (


 log = get_logger(__name__)
-# ?TODO, enable console on import
-# [ ] necessary? or `open_brokerd_dialog()` doing it is sufficient?
-#
-# bc might as well enable whenev imported by
-# other sub-sys code (namely `.clearing`).
-get_console_log(
-    level='warning',
-    name=__name__,
-)

-# TODO, the `as <samename>` style?
 __all__ = [
     'Account',
     'Allocator',

@@ -60,16 +60,12 @@ from ..clearing._messages import (
     BrokerdPosition,
 )
 from piker.types import Struct
-from piker.log import (
-    get_logger,
-)
+from piker.log import get_logger

 if TYPE_CHECKING:
     from piker.data._symcache import SymbologyCache

-log = get_logger(
-    name=__name__,
-)
+log = get_logger(__name__)


 class Position(Struct):

@@ -21,6 +21,7 @@ CLI front end for trades ledger and position tracking management.
 from __future__ import annotations
 from pprint import pformat

+
 from rich.console import Console
 from rich.markdown import Markdown
 import polars as pl

@@ -28,10 +29,7 @@ import tractor
 import trio
 import typer

-from piker.log import (
-    get_console_log,
-    get_logger,
-)
+from ..log import get_logger
 from ..service import (
     open_piker_runtime,
 )

@@ -47,7 +45,6 @@ from .calc import (
     open_ledger_dfs,
 )

-log = get_logger(name=__name__)

 ledger = typer.Typer()


@@ -82,10 +79,7 @@ def sync(
         "-l",
     ),
 ):
-    log = get_console_log(
-        level=loglevel,
-        name=__name__,
-    )
+    log = get_logger(loglevel)
     console = Console()

     pair: tuple[str, str]

@@ -25,16 +25,15 @@ from types import ModuleType

 from tractor.trionics import maybe_open_context

-from piker.log import (
-    get_logger,
-)
 from ._util import (
+    log,
     BrokerError,
     SymbolNotFound,
     NoData,
     DataUnavailable,
     DataThrottle,
     resproc,
+    get_logger,
 )

 __all__: list[str] = [

@@ -44,6 +43,7 @@ __all__: list[str] = [
     'DataUnavailable',
     'DataThrottle',
     'resproc',
+    'get_logger',
 ]

 __brokers__: list[str] = [

@@ -65,10 +65,6 @@ __brokers__: list[str] = [
     # bitso
 ]

-log = get_logger(
-    name=__name__,
-)
-

 def get_brokermod(brokername: str) -> ModuleType:
     '''

@@ -33,18 +33,12 @@ import exceptiongroup as eg
 import tractor
 import trio

-from piker.log import (
-    get_logger,
-    get_console_log,
-)
 from . import _util
 from . import get_brokermod

 if TYPE_CHECKING:
     from ..data import _FeedsBus

-log = get_logger(name=__name__)
-
 # `brokerd` enabled modules
 # TODO: move this def to the `.data` subpkg..
 # NOTE: keeping this list as small as possible is part of our caps-sec

@@ -65,7 +59,7 @@ _data_mods: str = [
 async def _setup_persistent_brokerd(
     ctx: tractor.Context,
     brokername: str,
-    loglevel: str|None = None,
+    loglevel: str | None = None,

 ) -> None:
     '''

@@ -78,14 +72,13 @@ async def _setup_persistent_brokerd(
     # since all hosted daemon tasks will reference this same
     # log instance's (actor local) state and thus don't require
     # any further (level) configuration on their own B)
-    actor: tractor.Actor = tractor.current_actor()
-    tll: str = actor.loglevel
-    log = get_console_log(
-        level=loglevel or tll,
+    log = _util.get_console_log(
+        loglevel or tractor.current_actor().loglevel,
         name=f'{_util.subsys}.{brokername}',
-        with_tractor_log=bool(tll),
     )
-    assert log.name == _util.subsys
+
+    # set global for this actor to this new process-wide instance B)
+    _util.log = log

     # further, set the log level on any broker broker specific
     # logger instance.

@@ -104,7 +97,7 @@ async def _setup_persistent_brokerd(
     # NOTE: see ep invocation details inside `.data.feed`.
     try:
         async with (
-            # tractor.trionics.collapse_eg(),
+            tractor.trionics.collapse_eg(),
             trio.open_nursery() as service_nursery
         ):
             bus: _FeedsBus = feed.get_feed_bus(

@@ -200,6 +193,7 @@ def broker_init(

+
 async def spawn_brokerd(

     brokername: str,
     loglevel: str | None = None,


@@ -207,10 +201,8 @@ async def spawn_brokerd(

 ) -> bool:

-    log.info(
-        f'Spawning broker-daemon,\n'
-        f'backend: {brokername!r}'
-    )
+    from piker.service._util import log  # use service mngr log
+    log.info(f'Spawning {brokername} broker daemon')

     (
         brokermode,

@@ -257,7 +249,7 @@ async def spawn_brokerd(
 async def maybe_spawn_brokerd(

     brokername: str,
-    loglevel: str|None = None,
+    loglevel: str | None = None,

     **pikerd_kwargs,


@@ -272,11 +264,6 @@ async def maybe_spawn_brokerd(
     '''
     from piker.service import maybe_spawn_daemon

-    # if (
-    #     loglevel != 'info'
-    # ):
-    #     await tractor.pause()
-
     async with maybe_spawn_daemon(

         f'brokerd.{brokername}',

@@ -19,13 +19,15 @@ Handy cross-broker utils.

 """
 from __future__ import annotations
-# from functools import partial
+from functools import partial

 import json
 import httpx
 import logging

-from piker.log import (
+from ..log import (
+    get_logger,
+    get_console_log,
     colorize_json,
 )
 subsys: str = 'piker.brokers'

@@ -33,22 +35,12 @@ subsys: str = 'piker.brokers'
 # NOTE: level should be reset by any actor that is spawned
 # as well as given a (more) explicit name/key such
 # as `piker.brokers.binance` matching the subpkg.
-# log = get_logger(subsys)
+log = get_logger(subsys)

-# ?TODO?? we could use this approach, but we need to be able
-# to pass multiple `name=` values so for example we can include the
-# emissions in `.accounting._pos` and others!
-# [ ] maybe we could do the `log = get_logger()` above,
-# then cycle through the list of subsys mods we depend on
-# and then get all their loggers and pass them to
-# `get_console_log(logger=)`??
-# [ ] OR just write THIS `get_console_log()` as a hook which does
-# that based on who calls it?.. i dunno
-#
-# get_console_log = partial(
-#     get_console_log,
-#     name=subsys,
-# )
+get_console_log = partial(
+    get_console_log,
+    name=subsys,
+)


 class BrokerError(Exception):

@@ -37,9 +37,8 @@ import trio
 from piker.accounting import (
     Asset,
 )
-from piker.log import (
+from piker.brokers._util import (
     get_logger,
-    get_console_log,
 )
 from piker.data._web_bs import (
     open_autorecon_ws,

@@ -70,9 +69,7 @@ from .venues import (
 )
 from .api import Client

-log = get_logger(
-    name=__name__,
-)
+log = get_logger('piker.brokers.binance')


 # Fee schedule template, mostly for paper engine fees modelling.

@@ -248,16 +245,9 @@ async def handle_order_requests(
 @tractor.context
 async def open_trade_dialog(
     ctx: tractor.Context,
-    loglevel: str = 'warning',

 ) -> AsyncIterator[dict[str, Any]]:

-    # enable piker.clearing console log for *this* `brokerd` subactor
-    get_console_log(
-        level=loglevel,
-        name=__name__,
-    )
-
     # TODO: how do we set this from the EMS such that
     # positions are loaded from the correct venue on the user
     # stream at startup? (that is in an attempt to support both

@@ -64,9 +64,9 @@ from piker.data._web_bs import (
     open_autorecon_ws,
     NoBsWs,
 )
-from piker.log import get_logger
 from piker.brokers._util import (
     DataUnavailable,
+    get_logger,
 )

 from .api import (

@@ -78,7 +78,7 @@ from .venues import (
     get_api_eps,
 )

-log = get_logger(name=__name__)
+log = get_logger('piker.brokers.binance')


 class L1(Struct):

@@ -275,15 +275,9 @@ async def open_history_client(
                 f'{times}'
             )

-            # XXX, debug any case where the latest 1m bar we get is
-            # already another "sample's-step-old"..
             if end_dt is None:
                 inow: int = round(time.time())
-                if (
-                    _time_step := (inow - times[-1])
-                    >
-                    timeframe * 2
-                ):
+                if (inow - times[-1]) > 60:
                     await tractor.pause()

             start_dt = from_timestamp(times[0])

@@ -27,12 +27,14 @@ import click
 import trio
 import tractor

-from piker.cli import cli
-from piker import watchlists as wl
-from piker.log import (
+from ..cli import cli
+from .. import watchlists as wl
+from ..log import (
     colorize_json,
+)
+from ._util import (
+    log,
     get_console_log,
-    get_logger,
 )
 from ..service import (
     maybe_spawn_brokerd,

@@ -43,15 +45,12 @@ from ..brokers import (
     get_brokermod,
     data,
 )

-log = get_logger(
-    name=__name__,
-)

 DEFAULT_BROKER = 'binance'

 _config_dir = click.get_app_dir('piker')
 _watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')


 OK = '\033[92m'
 WARNING = '\033[93m'
 FAIL = '\033[91m'

@@ -346,10 +345,7 @@ def contracts(ctx, loglevel, broker, symbol, ids):

    '''
    brokermod = get_brokermod(broker)
-    get_console_log(
-        level=loglevel,
-        name=__name__,
-    )
+    get_console_log(loglevel)

    contracts = trio.run(partial(core.contracts, brokermod, symbol))
    if not ids:

@@ -481,12 +477,11 @@ def search(
     # the `piker --pdb` XD ..
     # -[ ] pull from the parent click ctx's values..dumdum
     # assert pdb
-    loglevel: str = config['loglevel']

     # define tractor entrypoint
     async def main(func):
         async with maybe_open_pikerd(
-            loglevel=loglevel,
+            loglevel=config['loglevel'],
             debug_mode=pdb,
         ):
             return await func()

@@ -499,7 +494,6 @@ def search(
             core.symbol_search,
             brokermods,
             pattern,
-            loglevel=loglevel,
         ),
     )


@@ -28,14 +28,12 @@ from typing import (

 import trio

-from piker.log import get_logger
+from ._util import log
 from . import get_brokermod
 from ..service import maybe_spawn_brokerd
 from . import open_cached_client
 from ..accounting import MktPair

-log = get_logger(name=__name__)
-

 async def api(brokername: str, methname: str, **kwargs) -> dict:
     '''

@@ -149,7 +147,6 @@ async def search_w_brokerd(
 async def symbol_search(
     brokermods: list[ModuleType],
     pattern: str,
-    loglevel: str = 'warning',
     **kwargs,

 ) -> dict[str, dict[str, dict[str, Any]]]:

@@ -179,7 +176,6 @@ async def symbol_search(
                 '_infect_asyncio',
                 False,
             ),
-            loglevel=loglevel
         ) as portal:

             results.append((

@@ -41,15 +41,12 @@ import tractor
 from tractor.experimental import msgpub
 from async_generator import asynccontextmanager

-from piker.log import (
-    get_logger,
+from ._util import (
+    log,
     get_console_log,
 )
 from . import get_brokermod

-log = get_logger(
-    name='piker.brokers.binance',
-)

 async def wait_for_network(
     net_func: Callable,

@@ -246,10 +243,7 @@ async def start_quote_stream(

     '''
     # XXX: why do we need this again?
-    get_console_log(
-        level=tractor.current_actor().loglevel,
-        name=__name__,
-    )
+    get_console_log(tractor.current_actor().loglevel)

     # pull global vars from local actor
     symbols = list(symbols)

@@ -34,13 +34,13 @@ import subprocess

 import tractor

-from piker.log import get_logger
+from piker.brokers._util import get_logger

 if TYPE_CHECKING:
     from .api import Client
     import i3ipc

-log = get_logger(name=__name__)
+log = get_logger('piker.brokers.ib')

 _reset_tech: Literal[
     'vnc',

@@ -250,9 +250,7 @@ async def vnc_click_hack(
         'connection': 'r'
     }[reset_type]

-    with tractor.devx.open_crash_handler(
-        ignore={TimeoutError,},
-    ):
+    with tractor.devx.open_crash_handler():
         client = await AsyncVNCClient.connect(
             VNCConfig(
                 host=host,

@@ -333,14 +331,7 @@ def i3ipc_xdotool_manual_click_hack() -> None:

     '''
     focussed, matches = i3ipc_fin_wins_titled()
-    try:
-        orig_win_id = focussed.window
-    except AttributeError:
-        # XXX if .window cucks we prolly aren't intending to
-        # use this and/or just woke up from suspend..
-        log.exception('xdotool invalid usage ya ??\n')
-        return
+    orig_win_id = focussed.window

     try:
         for name, con in matches:
             print(f'Resetting data feed for {name}')

@@ -1187,7 +1187,7 @@ async def load_aio_clients(
     # the API TCP in `ib_insync` connection can be flaky af so instead
     # retry a few times to get the client going..
     connect_retries: int = 3,
-    connect_timeout: float = 30,  # in case a remote-host
+    connect_timeout: float = 10,
     disconnect_on_exit: bool = True,

 ) -> dict[str, Client]:

@@ -50,10 +50,6 @@ from ib_insync.objects import (
 )

 from piker import config
-from piker.log import (
-    get_logger,
-    get_console_log,
-)
 from piker.types import Struct
 from piker.accounting import (
     Position,

@@ -81,6 +77,7 @@ from piker.clearing._messages import (
     BrokerdFill,
     BrokerdError,
 )
+from ._util import log
 from .api import (
     _accounts2clients,
     get_config,

@@ -98,10 +95,6 @@ from .ledger import (
     update_ledger_from_api_trades,
 )

-log = get_logger(
-    name=__name__,
-)
-

 def pack_position(
     pos: IbPosition,

@@ -543,15 +536,9 @@ class IbAcnt(Struct):
 @tractor.context
 async def open_trade_dialog(
     ctx: tractor.Context,
-    loglevel: str = 'warning',

 ) -> AsyncIterator[dict[str, Any]]:

-    get_console_log(
-        level=loglevel,
-        name=__name__,
-    )
-
     # task local msg dialog tracking
     flows = OrderDialogs()
     accounts_def = config.load_accounts(['ib'])

@@ -56,11 +56,11 @@ from piker.brokers._util import (
     NoData,
     DataUnavailable,
 )
-from piker.log import get_logger
 from .api import (
     # _adhoc_futes_set,
     Client,
     con2fqme,
+    log,
     load_aio_clients,
     MethodProxy,
     open_client_proxies,

@@ -78,9 +78,6 @@ from .symbols import get_mkt_info
 if TYPE_CHECKING:
     from trio._core._run import Task

-log = get_logger(
-    name=__name__,
-)

 # XXX NOTE: See available types table docs:
 # https://interactivebrokers.github.io/tws-api/tick_types.html

@@ -181,8 +178,8 @@ async def open_history_client(

     async def get_hist(
         timeframe: float,
-        end_dt: datetime|None = None,
-        start_dt: datetime|None = None,
+        end_dt: datetime | None = None,
+        start_dt: datetime | None = None,

     ) -> tuple[np.ndarray, str]:


@@ -265,38 +262,7 @@ async def open_history_client(
         vlm = bars_array['volume']
         vlm[vlm < 0] = 0

-        # XXX, if a start-limit was passed ensure we only
-        # return history that far back!
-        if (
-            start_dt
-            and
-            first_dt < start_dt
-        ):
-            trimmed_bars = bars_array[
-                bars_array['time'] >= start_dt.timestamp()
-            ]
-            if (
-                trimmed_first_dt := from_timestamp(trimmed_bars['time'][0])
-                !=
-                start_dt
-            ):
-                # TODO! rm this once we're more confident it never hits!
-                breakpoint()
-                raise RuntimeError(
-                    f'OHLC-bars array start is gt `start_dt` limit !!\n'
-                    f'start_dt: {start_dt}\n'
-                    f'first_dt: {first_dt}\n'
-                    f'trimmed_first_dt: {trimmed_first_dt}\n'
-                )
-
-            # XXX, overwrite with start_dt-limited frame
-            bars_array = trimmed_bars
-
-        return (
-            bars_array,
-            first_dt,
-            last_dt,
-        )
+        return bars_array, first_dt, last_dt

         # TODO: it seems like we can do async queries for ohlc
         # but getting the order right still isn't working and I'm not

@@ -431,7 +397,7 @@ async def get_bars(

     # blank to start which tells ib to look up the latest datum
     end_dt: str = '',
-    start_dt: str|None = '',
+    start_dt: str | None = '',

     # TODO: make this more dynamic based on measured frame rx latency?
     # how long before we trigger a feed reset (seconds)

@@ -485,8 +451,6 @@ async def get_bars(
         dt_duration,
     ) = await proxy.bars(
         fqme=fqme,
-        # XXX TODO! lol we're not using this..
-        # start_dt=start_dt,
         end_dt=end_dt,
         sample_period_s=timeframe,


@@ -1118,7 +1082,6 @@ async def stream_quotes(

     con: Contract = details.contract
     first_ticker: Ticker|None = None
-    first_quote: dict[str, Any] = {}

     timeout: float = 1.6
     with trio.move_on_after(timeout) as quote_cs:

@@ -1171,14 +1134,15 @@ async def stream_quotes(
             first_quote,
         ))

+        # it's not really live but this will unblock
+        # the brokerd feed task to tell the ui to update?
+        feed_is_live.set()
+
         # block and let data history backfill code run.
         # XXX obvi given the venue is closed, we never expect feed
         # to come up; a taskc should be the only way to
         # terminate this task.
         await trio.sleep_forever()
-        #
-        # ^^XXX^^TODO! INSTEAD impl a `trio.sleep()` for the
-        # duration until the venue opens!!

         # ?TODO, we could instead spawn a task that waits on a feed
         # to start and let it wait indefinitely..instead of this

@@ -1202,9 +1166,6 @@ async def stream_quotes(
             'Rxed init quote:\n'
             f'{pformat(first_quote)}'
         )
-        # signal `.data.feed` layer that mkt quotes are LIVE
-        feed_is_live.set()
-
         cs: trio.CancelScope|None = None
         startup: bool = True
         iter_quotes: trio.abc.Channel

@@ -1252,12 +1213,55 @@ async def stream_quotes(
         tn.start_soon(reset_on_feed)

         async with aclosing(iter_quotes):
+            # if syminfo.get('no_vlm', False):
+            if not init_msg.shm_write_opts['has_vlm']:
+
+                # generally speaking these feeds don't
+                # include vlm data.
+                atype: str = mkt.dst.atype
+                log.info(
+                    f'No-vlm {mkt.fqme}@{atype}, skipping quote poll'
+                )
+
+            else:
+                # wait for real volume on feed (trading might be
+                # closed)
+                while True:
+                    ticker = await iter_quotes.receive()
+
+                    # for a real volume contract we rait for
+                    # the first "real" trade to take place
+                    if (
+                        # not calc_price
+                        # and not ticker.rtTime
+                        False
+                        # not ticker.rtTime
+                    ):
+                        # spin consuming tickers until we
+                        # get a real market datum
+                        log.debug(f"New unsent ticker: {ticker}")
+                        continue
+
+                    else:
+                        log.debug("Received first volume tick")
+                        # ugh, clear ticks since we've
+                        # consumed them (ahem, ib_insync is
+                        # truly stateful trash)
+                        # ticker.ticks = []
+
+                        # XXX: this works because we don't use
+                        # ``aclosing()`` above?
+                        break
+
+                quote = normalize(ticker)
+                log.debug(f"First ticker received {quote}")
+
             # tell data-layer spawner-caller that live
             # quotes are now active desptie not having
             # necessarily received a first vlm/clearing
             # tick.
             ticker = await iter_quotes.receive()
-            quote = normalize(ticker)
+            feed_is_live.set()
             fqme: str = quote['fqme']
             await send_chan.send({fqme: quote})

@@ -44,7 +44,6 @@ from ib_insync import (
     CommissionReport,
 )

-from piker.log import get_logger
 from piker.types import Struct
 from piker.data import (
     SymbologyCache,

@@ -58,6 +57,7 @@ from piker.accounting import (
     iter_by_dt,
 )
 from ._flex_reports import parse_flex_dt
+from ._util import log

 if TYPE_CHECKING:
     from .api import (

@@ -65,9 +65,6 @@ if TYPE_CHECKING:
         MethodProxy,
     )

-log = get_logger(
-    name=__name__,
-)

 tx_sort: Callable = partial(
     iter_by_dt,

@@ -42,7 +42,10 @@ from piker.accounting import (
 from piker._cacheables import (
     async_lifo_cache,
 )
-from piker.log import get_logger
+from ._util import (
+    log,
+)

 if TYPE_CHECKING:
     from .api import (

@@ -50,10 +53,6 @@ if TYPE_CHECKING:
     Client,
 )

-log = get_logger(
-    name=__name__,
-)
-
 _futes_venues = (
     'GLOBEX',
     'NYMEX',

@@ -62,12 +62,9 @@ from piker.clearing._messages import (
 from piker.brokers import (
     open_cached_client,
 )
-from piker.log import (
-    get_console_log,
-    get_logger,
-)
 from piker.data import open_symcache
 from .api import (
+    log,
     Client,
     BrokerError,
 )

@@ -81,8 +78,6 @@ from .ledger import (
     verify_balances,
 )

-log = get_logger(name=__name__)
-
 MsgUnion = Union[
     BrokerdCancel,
     BrokerdError,

@@ -436,15 +431,9 @@ def trades2pps(
 @tractor.context
 async def open_trade_dialog(
     ctx: tractor.Context,
-    loglevel: str = 'warning',

 ) -> AsyncIterator[dict[str, Any]]:

-    get_console_log(
-        level=loglevel,
-        name=__name__,
-    )
-
     async with (
         # TODO: maybe bind these together and deliver
         # a tuple from `.open_cached_client()`?

@@ -50,19 +50,13 @@ from . import open_cached_client
 from piker._cacheables import async_lifo_cache
 from .. import config
 from ._util import resproc, BrokerError, SymbolNotFound
-from piker.log import (
+from ..log import (
     colorize_json,
+)
+from ._util import (
+    log,
     get_console_log,
 )
-from piker.log import (
-    get_logger,
-)
-
-
-log = get_logger(
-    name=__name__,
-)


 _use_practice_account = False
 _refresh_token_ep = 'https://{}login.questrade.com/oauth2/'

@@ -1211,10 +1205,7 @@ async def stream_quotes(
     # feed_type: str = 'stock',
 ) -> AsyncGenerator[str, Dict[str, Any]]:
     # XXX: required to propagate ``tractor`` loglevel to piker logging
-    get_console_log(
-        level=loglevel,
-        name=__name__,
-    )
+    get_console_log(loglevel)

     async with open_cached_client('questrade') as client:
         if feed_type == 'stock':

@@ -30,16 +30,9 @@ import asks
 from ._util import (
     resproc,
     BrokerError,
+    log,
 )
-from piker.calc import percent_change
-from piker.log import (
-    get_logger,
-)
-
-log = get_logger(
-    name=__name__,
-)
+from ..calc import percent_change


 _service_ep = 'https://api.robinhood.com'


@@ -215,7 +215,7 @@ async def relay_orders_from_sync_code(
 async def open_ems(
     fqme: str,
     mode: str = 'live',
-    loglevel: str = 'warning',
+    loglevel: str = 'error',

 ) -> tuple[
     OrderClient,  # client

@@ -47,7 +47,6 @@ from tractor import trionics
 from ._util import (
     log,  # sub-sys logger
     get_console_log,
-    subsys,
 )
 from ..accounting._mktinfo import (
     unpack_fqme,

@@ -137,7 +136,7 @@ class DarkBook(Struct):
         tuple[
             Callable[[float], bool],  # predicate
             tuple[str, ...],  # tickfilter
-            dict|Order,  # cmd / msg type
+            dict | Order,  # cmd / msg type

             # live submission constraint parameters
             float,  # percent_away max price diff

@@ -279,7 +278,7 @@ async def clear_dark_triggers(

                 # remove exec-condition from set
                 log.info(f'Removing trigger for {oid}')
-                trigger: tuple|None = execs.pop(oid, None)
+                trigger: tuple | None = execs.pop(oid, None)
                 if not trigger:
                     log.warning(
                         f'trigger for {oid} was already removed!?'

@@ -337,8 +336,8 @@ async def open_brokerd_dialog(
     brokermod: ModuleType,
     portal: tractor.Portal,
     exec_mode: str,
-    fqme: str|None = None,
-    loglevel: str|None = None,
+    fqme: str | None = None,
+    loglevel: str | None = None,

 ) -> tuple[
     tractor.MsgStream,

@@ -352,21 +351,9 @@ async def open_brokerd_dialog(
     broker backend, configuration, or client code usage.

     '''
-    get_console_log(
-        level=loglevel,
-        name='clearing',
-    )
-    # enable `.accounting` console since normally used by
-    # each `brokerd`.
-    get_console_log(
-        level=loglevel,
-        name='piker.accounting',
-    )
     broker: str = brokermod.name

-    def mk_paper_ep(
-        loglevel: str,
-    ):
+    def mk_paper_ep():
         from . import _paper_engine as paper_mod

         nonlocal brokermod, exec_mode

@@ -418,21 +405,17 @@ async def open_brokerd_dialog(

     if (
         trades_endpoint is not None
-        or
-        exec_mode != 'paper'
+        or exec_mode != 'paper'
     ):
         # open live brokerd trades endpoint
         open_trades_endpoint = portal.open_context(
             trades_endpoint,
-            loglevel=loglevel,
         )

     @acm
     async def maybe_open_paper_ep():
         if exec_mode == 'paper':
-            async with mk_paper_ep(
-                loglevel=loglevel,
-            ) as msg:
+            async with mk_paper_ep() as msg:
                 yield msg
                 return


@@ -443,9 +426,7 @@ async def open_brokerd_dialog(
         # runtime indication that the backend can't support live
         # order ctrl yet, so boot the paperboi B0
         if first == 'paper':
-            async with mk_paper_ep(
-                loglevel=loglevel,
-            ) as msg:
+            async with mk_paper_ep() as msg:
                 yield msg
                 return
         else:

@@ -780,16 +761,12 @@ _router: Router = None
 @tractor.context
 async def _setup_persistent_emsd(
     ctx: tractor.Context,
-    loglevel: str|None = None,
+    loglevel: str | None = None,

 ) -> None:

     if loglevel:
-        _log = get_console_log(
-            level=loglevel,
-            name=subsys,
-        )
-        assert _log.name == 'piker.clearing'
+        get_console_log(loglevel)

     global _router


@@ -845,7 +822,7 @@ async def translate_and_relay_brokerd_events(
             f'Rx brokerd trade msg:\n'
             f'{fmsg}'
         )
-        status_msg: Status|None = None
+        status_msg: Status | None = None

         match brokerd_msg:
             # BrokerdPosition

@@ -1306,7 +1283,7 @@ async def process_client_order_cmds(
             and status.resp == 'dark_open'
         ):
             # remove from dark book clearing
-            entry: tuple|None = dark_book.triggers[fqme].pop(oid, None)
+            entry: tuple | None = dark_book.triggers[fqme].pop(oid, None)
             if entry:
                 (
                     pred,

@@ -59,9 +59,9 @@ from piker.data import (
     open_symcache,
 )
 from piker.types import Struct
-from piker.log import (
+from ._util import (
+    log,  # sub-sys logger
     get_console_log,
-    get_logger,
 )
 from ._messages import (
     BrokerdCancel,

@@ -73,8 +73,6 @@ from ._messages import (
     BrokerdError,
 )

-log = get_logger(name=__name__)
-

 class PaperBoi(Struct):
     '''

@@ -552,18 +550,16 @@ _sells: defaultdict[

 @tractor.context
 async def open_trade_dialog(

     ctx: tractor.Context,
     broker: str,
-    fqme: str|None = None,  # if empty, we only boot broker mode
+    fqme: str | None = None,  # if empty, we only boot broker mode
     loglevel: str = 'warning',

 ) -> None:

-    # enable piker.clearing console log for *this* `brokerd` subactor
-    get_console_log(
-        level=loglevel,
-        name=__name__,
-    )
+    # enable piker.clearing console log for *this* subactor
+    get_console_log(loglevel)

     symcache: SymbologyCache
     async with open_symcache(get_brokermod(broker)) as symcache:

@@ -28,14 +28,12 @@ from ..log import (
 from piker.types import Struct
 subsys: str = 'piker.clearing'

-log = get_logger(
-    name='piker.clearing',
-)
+log = get_logger(subsys)

 # TODO, oof doesn't this ignore the `loglevel` then???
 get_console_log = partial(
     get_console_log,
-    name='clearing',
+    name=subsys,
 )


@@ -130,10 +130,7 @@ def pikerd(
     '''
     # from tractor.devx import maybe_open_crash_handler
     # with maybe_open_crash_handler(pdb=False):
-    log = get_console_log(
-        level=loglevel,
-        with_tractor_log=tl,
-    )
+    log = get_console_log(loglevel, name='cli')

     if pdb:
         log.warning((

@@ -354,11 +351,7 @@ def services(
     async with (
         open_piker_runtime(
             name='service_query',
-            loglevel=(
-                config['loglevel']
-                if tl
-                else None
-            ),
+            loglevel=config['loglevel'] if tl else None,
         ),
         tractor.get_registry(
             addr=addr,

@@ -80,20 +80,20 @@ class Sampler:
     This non-instantiated type is meant to be a singleton within
     a `samplerd` actor-service spawned once by the user wishing to
     time-step-sample (real-time) quote feeds, see
-    `.service.maybe_open_samplerd()` and the below
-    `register_with_sampler()`.
+    ``.service.maybe_open_samplerd()`` and the below
+    ``register_with_sampler()``.

     '''
-    service_nursery: None|trio.Nursery = None
+    service_nursery: None | trio.Nursery = None

-    # TODO: we could stick these in a composed type to avoid angering
-    # the "i hate module scoped variables crowd" (yawn).
+    # TODO: we could stick these in a composed type to avoid
+    # angering the "i hate module scoped variables crowd" (yawn).
     ohlcv_shms: dict[float, list[ShmArray]] = {}

     # holds one-task-per-sample-period tasks which are spawned as-needed by
     # data feed requests with a given detected time step usually from
     # history loading.
-    incr_task_cs: trio.CancelScope|None = None
+    incr_task_cs: trio.CancelScope | None = None

     bcast_errors: tuple[Exception] = (
         trio.BrokenResourceError,

@@ -249,8 +249,8 @@ class Sampler:
     async def broadcast(
         self,
         period_s: float,
-        time_stamp: float|None = None,
-        info: dict|None = None,
+        time_stamp: float | None = None,
+        info: dict | None = None,

     ) -> None:
         '''

@@ -315,7 +315,7 @@ class Sampler:
     @classmethod
     async def broadcast_all(
         self,
-        info: dict|None = None,
+        info: dict | None = None,
     ) -> None:

         # NOTE: take a copy of subs since removals can happen

@@ -332,22 +332,14 @@ class Sampler:
 async def register_with_sampler(
     ctx: Context,
     period_s: float,
-    shms_by_period: dict[float, dict]|None = None,
+    shms_by_period: dict[float, dict] | None = None,

     open_index_stream: bool = True,  # open a 2way stream for sample step msgs?
     sub_for_broadcasts: bool = True,  # sampler side to send step updates?
-    loglevel: str|None = None,

-) -> set[int]:
+) -> None:

-    get_console_log(
-        level=(
-            loglevel
-            or
-            tractor.current_actor().loglevel
-        ),
-        name=__name__,
-    )
+    get_console_log(tractor.current_actor().loglevel)
     incr_was_started: bool = False

     try:

@@ -372,12 +364,7 @@ async def register_with_sampler(

     # insert the base 1s period (for OHLC style sampling) into
     # the increment buffer set to update and shift every second.
-    if (
-        shms_by_period is not None
-        # and
-        # feed_is_live.is_set()
-        # ^TODO? pass it in instead?
-    ):
+    if shms_by_period is not None:
         from ._sharedmem import (
             attach_shm_array,
             _Token,

@@ -391,17 +378,12 @@ async def register_with_sampler(
                 readonly=False,
             )
             shms_by_period[period] = shm
-            Sampler.ohlcv_shms.setdefault(
-                period,
-                [],
-            ).append(shm)
+            Sampler.ohlcv_shms.setdefault(period, []).append(shm)

         assert Sampler.ohlcv_shms

     # unblock caller
-    await ctx.started(
-        set(Sampler.ohlcv_shms.keys())
-    )
+    await ctx.started(set(Sampler.ohlcv_shms.keys()))

     if open_index_stream:
         try:

@@ -447,7 +429,7 @@ async def register_with_sampler(

 async def spawn_samplerd(

-    loglevel: str|None = None,
+    loglevel: str | None = None,
     **extra_tractor_kwargs

 ) -> bool:

@@ -484,7 +466,6 @@ async def spawn_samplerd(
             register_with_sampler,
             period_s=1,
             sub_for_broadcasts=False,
-            loglevel=loglevel,
         )
         return True


@@ -493,7 +474,8 @@ async def spawn_samplerd(

 @acm
 async def maybe_open_samplerd(
-    loglevel: str|None = None,
+
+    loglevel: str | None = None,
     **pikerd_kwargs,

 ) -> tractor.Portal:  # noqa

@@ -518,13 +500,13 @@ async def maybe_open_samplerd(
 @acm
 async def open_sample_stream(
     period_s: float,
-    shms_by_period: dict[float, dict]|None = None,
+    shms_by_period: dict[float, dict] | None = None,
     open_index_stream: bool = True,
     sub_for_broadcasts: bool = True,
-    loglevel: str|None = None,

-    # cache_key: str|None = None,
-    # allow_new_sampler: bool = True,
+    cache_key: str | None = None,
+    allow_new_sampler: bool = True,

     ensure_is_active: bool = False,

 ) -> AsyncIterator[dict[str, float]]:

@@ -553,15 +535,11 @@ async def open_sample_stream(
     #         yield bistream
     # else:

-    ctx: tractor.Context
-    shm_periods: set[int]  # in `int`-seconds
     async with (
         # XXX: this should be singleton on a host,
         # a lone broker-daemon per provider should be
         # created for all practical purposes
-        maybe_open_samplerd(
-            loglevel=loglevel,
-        ) as portal,
+        maybe_open_samplerd() as portal,

         portal.open_context(
             register_with_sampler,

@@ -570,12 +548,11 @@ async def open_sample_stream(
                 'shms_by_period': shms_by_period,
                 'open_index_stream': open_index_stream,
                 'sub_for_broadcasts': sub_for_broadcasts,
-                'loglevel': loglevel,
             },
-        ) as (ctx, shm_periods)
+        ) as (ctx, first)
     ):
         if ensure_is_active:
-            assert len(shm_periods) > 1
+            assert len(first) > 1

         async with (
             ctx.open_stream(

@@ -520,10 +520,7 @@ def open_shm_array(

     # "unlink" created shm on process teardown by
     # pushing teardown calls onto actor context stack
-    stack = tractor.current_actor(
-        err_on_no_runtime=False,
-    ).lifetime_stack
-    if stack:
+    stack = tractor.current_actor().lifetime_stack
     stack.callback(shmarr.close)
     stack.callback(shmarr.destroy)


@@ -610,10 +607,7 @@ def attach_shm_array(
     _known_tokens[key] = token

     # "close" attached shm on actor teardown
-    if (actor := tractor.current_actor(
-        err_on_no_runtime=False,
-    )):
-        actor.lifetime_stack.callback(sha.close)
+    tractor.current_actor().lifetime_stack.callback(sha.close)

     return sha


@@ -26,9 +26,7 @@ from ..log import (
 )
 subsys: str = 'piker.data'

-log = get_logger(
-    name=subsys,
-)
+log = get_logger(subsys)

 get_console_log = partial(
     get_console_log,

@@ -239,6 +239,7 @@ async def allocate_persistent_feed(

     brokername: str,
     symstr: str,
+
     loglevel: str,
     start_stream: bool = True,
     init_timeout: float = 616,

@@ -277,7 +278,7 @@ async def allocate_persistent_feed(
     # ``stream_quotes()``, a required broker backend endpoint.
     init_msgs: (
         list[FeedInit]  # new
-        |dict[str, dict[str, str]]  # legacy / deprecated
+        | dict[str, dict[str, str]]  # legacy / deprecated
     )

     # TODO: probably make a struct msg type for this as well
@ -347,14 +348,11 @@ async def allocate_persistent_feed(
|
||||||
izero_rt,
|
izero_rt,
|
||||||
rt_shm,
|
rt_shm,
|
||||||
) = await bus.nursery.start(
|
) = await bus.nursery.start(
|
||||||
partial(
|
|
||||||
manage_history,
|
manage_history,
|
||||||
mod=mod,
|
mod,
|
||||||
mkt=mkt,
|
mkt,
|
||||||
some_data_ready=some_data_ready,
|
some_data_ready,
|
||||||
feed_is_live=feed_is_live,
|
feed_is_live,
|
||||||
loglevel=loglevel,
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# yield back control to starting nursery once we receive either
|
# yield back control to starting nursery once we receive either
|
||||||
|
|
@ -462,6 +460,7 @@ async def allocate_persistent_feed(
|
||||||
|
|
||||||
@tractor.context
|
@tractor.context
|
||||||
async def open_feed_bus(
|
async def open_feed_bus(
|
||||||
|
|
||||||
ctx: tractor.Context,
|
ctx: tractor.Context,
|
||||||
brokername: str,
|
brokername: str,
|
||||||
symbols: list[str], # normally expected to the broker-specific fqme
|
symbols: list[str], # normally expected to the broker-specific fqme
|
||||||
|
|
@ -482,16 +481,13 @@ async def open_feed_bus(
|
||||||
|
|
||||||
'''
|
'''
|
||||||
if loglevel is None:
|
if loglevel is None:
|
||||||
loglevel: str = tractor.current_actor().loglevel
|
loglevel = tractor.current_actor().loglevel
|
||||||
|
|
||||||
# XXX: required to propagate ``tractor`` loglevel to piker
|
# XXX: required to propagate ``tractor`` loglevel to piker
|
||||||
# logging
|
# logging
|
||||||
get_console_log(
|
get_console_log(
|
||||||
level=(loglevel
|
loglevel
|
||||||
or
|
or tractor.current_actor().loglevel
|
||||||
tractor.current_actor().loglevel
|
|
||||||
),
|
|
||||||
name=__name__,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# local state sanity checks
|
# local state sanity checks
|
||||||
|
|
@ -803,7 +799,7 @@ async def install_brokerd_search(
|
||||||
@acm
|
@acm
|
||||||
async def maybe_open_feed(
|
async def maybe_open_feed(
|
||||||
fqmes: list[str],
|
fqmes: list[str],
|
||||||
loglevel: str|None = None,
|
loglevel: str | None = None,
|
||||||
|
|
||||||
**kwargs,
|
**kwargs,
|
||||||
|
|
||||||
|
|
@ -819,11 +815,6 @@ async def maybe_open_feed(
|
||||||
'''
|
'''
|
||||||
fqme = fqmes[0]
|
fqme = fqmes[0]
|
||||||
|
|
||||||
# if (
|
|
||||||
# loglevel != 'info'
|
|
||||||
# ):
|
|
||||||
# await tractor.pause()
|
|
||||||
|
|
||||||
async with trionics.maybe_open_context(
|
async with trionics.maybe_open_context(
|
||||||
acm_func=open_feed,
|
acm_func=open_feed,
|
||||||
kwargs={
|
kwargs={
|
||||||
|
|
@ -890,12 +881,9 @@ async def open_feed(
|
||||||
providers.setdefault(mod, []).append(bs_fqme)
|
providers.setdefault(mod, []).append(bs_fqme)
|
||||||
feed.mods[mod.name] = mod
|
feed.mods[mod.name] = mod
|
||||||
|
|
||||||
if (
|
|
||||||
loglevel != 'info'
|
|
||||||
):
|
|
||||||
await tractor.pause()
|
|
||||||
# one actor per brokerd for now
|
# one actor per brokerd for now
|
||||||
brokerd_ctxs = []
|
brokerd_ctxs = []
|
||||||
|
|
||||||
for brokermod, bfqmes in providers.items():
|
for brokermod, bfqmes in providers.items():
|
||||||
|
|
||||||
# if no `brokerd` for this backend exists yet we spawn
|
# if no `brokerd` for this backend exists yet we spawn
|
||||||
|
|
|
||||||
|
|
@ -484,8 +484,7 @@ async def cascade(
|
||||||
# open a data feed stream with requested broker
|
# open a data feed stream with requested broker
|
||||||
feed: Feed
|
feed: Feed
|
||||||
async with data.feed.maybe_open_feed(
|
async with data.feed.maybe_open_feed(
|
||||||
fqmes=[fqme],
|
[fqme],
|
||||||
loglevel=loglevel,
|
|
||||||
|
|
||||||
# TODO throttle tick outputs from *this* daemon since
|
# TODO throttle tick outputs from *this* daemon since
|
||||||
# it'll emit tons of ticks due to the throttle only
|
# it'll emit tons of ticks due to the throttle only
|
||||||
|
|
@ -583,8 +582,7 @@ async def cascade(
|
||||||
# on every step msg received from the global `samplerd`
|
# on every step msg received from the global `samplerd`
|
||||||
# service.
|
# service.
|
||||||
async with open_sample_stream(
|
async with open_sample_stream(
|
||||||
period_s=float(delay_s),
|
float(delay_s)
|
||||||
loglevel=loglevel,
|
|
||||||
) as istream:
|
) as istream:
|
||||||
|
|
||||||
profiler(f'{func_name}: sample stream up')
|
profiler(f'{func_name}: sample stream up')
|
||||||
|
|
|
||||||
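
The `open_feed_bus()` hunk above shows the loglevel-propagation idiom these daemons rely on; a distilled sketch, assuming only that each actor carries a `.loglevel` attribute as in `tractor`:

    def effective_loglevel(
        requested: str | None,
        actor_level: str,
    ) -> str:
        # an explicit caller request wins, else inherit the spawning
        # actor's level so sub-daemons log at a consistent severity
        return requested or actor_level

    assert effective_loglevel(None, 'warning') == 'warning'
    assert effective_loglevel('debug', 'warning') == 'debug'
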
piker/log.py (71 changed lines)

@@ -37,84 +37,35 @@ _proj_name: str = 'piker'
 
 
 def get_logger(
-    name: str|None = None,
-    **tractor_log_kwargs,
+    name: str = None,
 
 ) -> logging.Logger:
     '''
-    Return the package log or a sub-logger if a `name=` is provided,
-    which defaults to the calling module's pkg-namespace path.
-
-    See `tractor.log.get_logger()` for details.
+    Return the package log or a sub-log for `name` if provided.
 
     '''
-    pkg_name: str = _proj_name
-    if (
-        name
-        and
-        pkg_name in name
-    ):
-        name: str = name.lstrip(f'{_proj_name}.')
-
     return tractor.log.get_logger(
         name=name,
-        pkg_name=pkg_name,
-        **tractor_log_kwargs,
+        _root_name=_proj_name,
     )
 
 
 def get_console_log(
-    level: str|None = None,
-    name: str|None = None,
-    pkg_name: str|None = None,
-    with_tractor_log: bool = False,
-
-    # ?TODO, support a "log-spec" style `str|dict[str, str]` which
-    # dictates both the sublogger-key and a level?
-    # -> see similar idea in `modden`'s usage.
-    **tractor_log_kwargs,
+    level: str | None = None,
+    name: str | None = None,
 
 ) -> logging.Logger:
     '''
-    Get the package logger and enable a handler which writes to
-    stderr.
+    Get the package logger and enable a handler which writes to stderr.
 
-    Yeah yeah, i know we can use `DictConfig`.
-    You do it.. Bp
+    Yeah yeah, i know we can use ``DictConfig``. You do it...
 
     '''
-    pkg_name: str = _proj_name
-    if (
-        name
-        and
-        pkg_name in name
-    ):
-        name: str = name.lstrip(f'{_proj_name}.')
-
-    tll: str|None = None
-    if (
-        with_tractor_log is not False
-    ):
-        tll = level
-
-    elif maybe_actor := tractor.current_actor(
-        err_on_no_runtime=False,
-    ):
-        tll = maybe_actor.loglevel
-
-    if tll:
-        t_log = tractor.log.get_console_log(
-            level=tll,
-            name='tractor',  # <- XXX, force root tractor log!
-            **tractor_log_kwargs,
-        )
-        # TODO/ allow only enabling certain tractor sub-logs?
-        assert t_log.name == 'tractor'
-
     return tractor.log.get_console_log(
-        level=level,
+        level,
         name=name,
-        pkg_name=pkg_name,
-        **tractor_log_kwargs,
-    )
+        _root_name=_proj_name,
+    )  # our root logger
 
 
 def colorize_json(
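
A tiny usage sketch of the removed-side `get_logger()` helper, assuming only what the hunk itself shows (a module passes its own `__name__` to get a project-namespaced sub-logger):

    # e.g. in some module under the `piker` pkg-namespace:
    from piker.log import get_logger

    log = get_logger(name=__name__)  # -> a 'piker.<subpkg>.<mod>' sub-logger
    log.warning('sub-sys coming up..')
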
@@ -21,6 +21,7 @@
 from __future__ import annotations
 import os
 from typing import (
+    Optional,
     Any,
     ClassVar,
 )
@@ -31,11 +32,8 @@ from contextlib import (
 import tractor
 import trio
 
-from piker.log import (
-    get_console_log,
-)
 from ._util import (
-    subsys,
+    get_console_log,
 )
 from ._mngr import (
     Services,
@@ -61,7 +59,7 @@ async def open_piker_runtime(
     registry_addrs: list[tuple[str, int]] = [],
 
     enable_modules: list[str] = [],
-    loglevel: str|None = None,
+    loglevel: Optional[str] = None,
 
     # XXX NOTE XXX: you should pretty much never want debug mode
     # for data daemons when running in production.
@@ -71,7 +69,7 @@ async def open_piker_runtime(
     # and spawn the service tree distributed per that.
     start_method: str = 'trio',
 
-    tractor_runtime_overrides: dict|None = None,
+    tractor_runtime_overrides: dict | None = None,
     **tractor_kwargs,
 
 ) -> tuple[
@@ -99,8 +97,7 @@ async def open_piker_runtime(
         # setting it as the root actor on localhost.
         registry_addrs = (
             registry_addrs
-            or
-            [_default_reg_addr]
+            or [_default_reg_addr]
         )
 
         if ems := tractor_kwargs.pop('enable_modules', None):
@@ -166,7 +163,8 @@ _root_modules: list[str] = [
 @acm
 async def open_pikerd(
     registry_addrs: list[tuple[str, int]],
-    loglevel: str|None = None,
+
+    loglevel: str | None = None,
 
     # XXX: you should pretty much never want debug mode
     # for data daemons when running in production.
@@ -194,6 +192,7 @@ async def open_pikerd(
 
     async with (
         open_piker_runtime(
+
             name=_root_dname,
             loglevel=loglevel,
             debug_mode=debug_mode,
@@ -274,10 +273,7 @@ async def maybe_open_pikerd(
 
     '''
     if loglevel:
-        get_console_log(
-            name=subsys,
-            level=loglevel
-        )
+        get_console_log(loglevel)
 
     # subtle, we must have the runtime up here or portal lookup will fail
     query_name = kwargs.pop(
@@ -49,15 +49,13 @@ from requests.exceptions import (
     ReadTimeout,
 )
 
-from piker.log import (
-    get_console_log,
-    get_logger,
-)
 from ._mngr import Services
+from ._util import (
+    log,  # sub-sys logger
+    get_console_log,
+)
 from .. import config
 
-log = get_logger(name=__name__)
-
 
 class DockerNotStarted(Exception):
     'Prolly you dint start da daemon bruh'
@@ -338,16 +336,13 @@ class Container:
 async def open_ahabd(
     ctx: tractor.Context,
     endpoint: str,  # ns-pointer str-msg-type
-    loglevel: str = 'cancel',
+    loglevel: str | None = None,
 
     **ep_kwargs,
 
 ) -> None:
 
-    log = get_console_log(
-        level=loglevel,
-        name='piker.service',
-    )
+    log = get_console_log(loglevel or 'cancel')
 
     async with open_docker() as client:
 
@@ -30,9 +30,8 @@ from contextlib import (
 import tractor
 from trio.lowlevel import current_task
 
-from piker.log import (
-    get_console_log,
-    get_logger,
+from ._util import (
+    log,  # sub-sys logger
 )
 from ._mngr import (
     Services,
@@ -40,17 +39,16 @@ from ._mngr import (
 from ._actor_runtime import maybe_open_pikerd
 from ._registry import find_service
 
-log = get_logger(name=__name__)
 
 
 @acm
 async def maybe_spawn_daemon(
 
     service_name: str,
     service_task_target: Callable,
 
     spawn_args: dict[str, Any],
 
-    loglevel: str|None = None,
+    loglevel: str | None = None,
     singleton: bool = False,
 
     **pikerd_kwargs,
@@ -68,12 +66,6 @@ async def maybe_spawn_daemon(
     clients.
 
     '''
-    log = get_console_log(
-        level=loglevel,
-        name=__name__,
-    )
-    assert log.name == 'piker.service'
-
     # serialize access to this section to avoid
     # 2 or more tasks racing to create a daemon
     lock = Services.locks[service_name]
@@ -160,7 +152,8 @@ async def maybe_spawn_daemon(
 
 
 async def spawn_emsd(
-    loglevel: str|None = None,
+
+    loglevel: str | None = None,
     **extra_tractor_kwargs
 
 ) -> bool:
@@ -197,8 +190,9 @@ async def spawn_emsd(
 
 @acm
 async def maybe_open_emsd(
+
     brokername: str,
-    loglevel: str|None = None,
+    loglevel: str | None = None,
 
     **pikerd_kwargs,
 
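
`maybe_spawn_daemon()` above serializes its check-then-spawn section via `Services.locks[service_name]`; a standalone sketch of that per-service locking pattern, assuming a plain `defaultdict` of `trio.Lock`s keyed by service name:

    from collections import defaultdict
    import trio

    locks: defaultdict[str, trio.Lock] = defaultdict(trio.Lock)

    async def maybe_spawn(service_name: str) -> None:
        # only one task at a time may check for, and possibly spawn,
        # a daemon of this name; others queue on the same lock
        async with locks[service_name]:
            ...  # find-service, else spawn it
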
@@ -34,9 +34,9 @@ from tractor import (
     Portal,
 )
 
-from piker.log import get_logger
-
-log = get_logger(name=__name__)
+from ._util import (
+    log,  # sub-sys logger
+)
 
 
 # TODO: we need remote wrapping and a general soln:
@@ -27,29 +27,15 @@ from typing import (
 )
 
 import tractor
-from tractor import (
-    msg,
-    Actor,
-    Portal,
-)
+from tractor import Portal
 
-from piker.log import get_logger
-
-log = get_logger(name=__name__)
-
-# TODO? default path-space for UDS registry?
-# [ ] needs to be Xplatform tho!
-# _default_registry_path: Path = (
-#     Path(os.environ['XDG_RUNTIME_DIR'])
-#     /'piker'
-# )
+from ._util import (
+    log,  # sub-sys logger
+)
 
 _default_registry_host: str = '127.0.0.1'
 _default_registry_port: int = 6116
-_default_reg_addr: tuple[
-    str,
-    int,  # |str TODO, once we support UDS, see above.
-] = (
+_default_reg_addr: tuple[str, int] = (
     _default_registry_host,
     _default_registry_port,
 )
@@ -89,22 +75,16 @@ async def open_registry(
 
     '''
     global _tractor_kwargs
-    actor: Actor = tractor.current_actor()
-    aid: msg.Aid = actor.aid
-    uid: tuple[str, str] = aid.uid
-    preset_reg_addrs: list[
-        tuple[str, int]
-    ] = Registry.addrs
+    actor = tractor.current_actor()
+    uid = actor.uid
+    preset_reg_addrs: list[tuple[str, int]] = Registry.addrs
     if (
         preset_reg_addrs
-        and
-        addrs
+        and addrs
     ):
         if preset_reg_addrs != addrs:
             # if any(addr in preset_reg_addrs for addr in addrs):
-            diff: set[
-                tuple[str, int]
-            ] = set(preset_reg_addrs) - set(addrs)
+            diff: set[tuple[str, int]] = set(preset_reg_addrs) - set(addrs)
             if diff:
                 log.warning(
                     f'`{uid}` requested only subset of registrars: {addrs}\n'
@@ -118,6 +98,7 @@ async def open_registry(
         )
 
     was_set: bool = False
+
     if (
         not tractor.is_root_process()
         and
@@ -134,23 +115,16 @@ async def open_registry(
                 f"`{uid}` registry should already exist but doesn't?"
             )
 
-        if not Registry.addrs:
+        if (
+            not Registry.addrs
+        ):
             was_set = True
-            Registry.addrs = (
-                addrs
-                or
-                [_default_reg_addr]
-            )
+            Registry.addrs = addrs or [_default_reg_addr]
 
     # NOTE: only spot this seems currently used is inside
     # `.ui._exec` which is the (eventual qtloops) bootstrapping
     # with guest mode.
-    reg_addrs: list[tuple[str, str|int]] = Registry.addrs
-    # !TODO, a struct-API to stringently allow this only in special
-    # cases?
-    # -> better would be to have some way to (atomically) rewrite
-    # and entire `RuntimeVars`?? ideas welcome obvi..
-    _tractor_kwargs['registry_addrs'] = reg_addrs
+    _tractor_kwargs['registry_addrs'] = Registry.addrs
 
     try:
         yield Registry.addrs
@@ -175,7 +149,7 @@ async def find_service(
     | None
 ):
     # try:
-    reg_addrs: list[tuple[str, int|str]]
+    reg_addrs: list[tuple[str, int]]
     async with open_registry(
         addrs=(
             registry_addrs
@@ -198,13 +172,15 @@ async def find_service(
         only_first=first_only,  # if set only returns single ref
     ) as maybe_portals:
         if not maybe_portals:
-            log.info(
+            # log.info(
+            print(
                 f'Could NOT find service {service_name!r} -> {maybe_portals!r}'
             )
             yield None
             return
 
-        log.info(
+        # log.info(
+        print(
             f'Found service {service_name!r} -> {maybe_portals}'
         )
         yield maybe_portals
@@ -219,7 +195,8 @@ async def find_service(
 
 async def check_for_service(
     service_name: str,
-) -> None|tuple[str, int]:
+
+) -> None | tuple[str, int]:
     '''
     Service daemon "liveness" predicate.
 
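
The registry hunk above reduces the address bootstrap to a one-liner; a toy sketch of that fallback semantics, reusing only names visible in the diff (`_default_reg_addr` = the localhost registrar):

    _default_reg_addr: tuple[str, int] = ('127.0.0.1', 6116)

    def resolve_reg_addrs(
        addrs: list[tuple[str, int]] | None,
    ) -> list[tuple[str, int]]:
        # an empty or `None` input falls back to the default registrar
        return addrs or [_default_reg_addr]

    assert resolve_reg_addrs(None) == [('127.0.0.1', 6116)]
    assert resolve_reg_addrs([('10.0.0.2', 6116)]) == [('10.0.0.2', 6116)]
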
@@ -14,12 +14,20 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 """
-Sub-sys module commons (if any ?? Bp).
+Sub-sys module commons.
 
 """
+from functools import partial
+
+from ..log import (
+    get_logger,
+    get_console_log,
+)
 subsys: str = 'piker.service'
 
-# ?TODO, if we were going to keep a `get_console_log()` in here to be
-# invoked at `import`-time, how do we dynamically hand in the
-# `level=` value? seems too early in the runtime to be injected
-# right?
+log = get_logger(subsys)
+
+get_console_log = partial(
+    get_console_log,
+    name=subsys,
+)
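
This `_util.py` pattern (a `subsys` name, a module-level `log`, and a `partial`-bound `get_console_log`) recurs across every sub-system touched by this diff; a standalone sketch of the same idea using only the stdlib, with a hypothetical `get_console_log` standing in for the piker/tractor helper:

    import logging
    from functools import partial

    subsys: str = 'piker.service'

    # one shared sub-system logger that sibling modules import
    log: logging.Logger = logging.getLogger(subsys)

    def get_console_log(level: str, name: str) -> logging.Logger:
        # stand-in for the real helper: enable a stderr handler
        logger = logging.getLogger(name)
        logger.setLevel(level.upper())
        logger.addHandler(logging.StreamHandler())
        return logger

    # pre-bind the sub-system name, exactly as the diff does with `partial()`
    get_console_log = partial(get_console_log, name=subsys)
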
@@ -16,7 +16,6 @@
 
 from __future__ import annotations
 from contextlib import asynccontextmanager as acm
-from pprint import pformat
 from typing import (
     Any,
     TYPE_CHECKING,
@@ -27,17 +26,12 @@ import asks
 if TYPE_CHECKING:
     import docker
     from ._ahab import DockerContainer
-    from . import (
-        Services,
-    )
 
-from piker.log import (
+from ._util import log  # sub-sys logger
+from ._util import (
     get_console_log,
-    get_logger,
 )
 
-log = get_logger(name=__name__)
-
 
 # container level config
 _config = {
@@ -73,10 +67,7 @@ def start_elasticsearch(
     elastic
 
     '''
-    get_console_log(
-        level='info',
-        name=__name__,
-    )
+    get_console_log('info', name=__name__)
 
     dcntr: DockerContainer = client.containers.run(
         'piker:elastic',
@@ -52,18 +52,17 @@ import pendulum
 # TODO: import this for specific error set expected by mkts client
 # import purerpc
 
-from piker.data.feed import maybe_open_feed
+from ..data.feed import maybe_open_feed
 from . import Services
-from piker.log import (
+from ._util import (
+    log,  # sub-sys logger
     get_console_log,
-    get_logger,
 )
 
 if TYPE_CHECKING:
     import docker
     from ._ahab import DockerContainer
 
-log = get_logger(name=__name__)
-
 
 # ahabd-supervisor and container level config
@@ -43,6 +43,7 @@ from typing import (
 
 import numpy as np
 
+
 from .. import config
 from ..service import (
     check_for_service,
@@ -151,10 +152,7 @@ class StorageConnectionError(ConnectionError):
     '''
 
-def get_storagemod(
-    name: str,
-
-) -> ModuleType:
+def get_storagemod(name: str) -> ModuleType:
     mod: ModuleType = import_module(
         '.' + name,
         'piker.storage',
@@ -167,12 +165,9 @@ def get_storagemod(
 
 @acm
 async def open_storage_client(
-    backend: str|None = None,
+    backend: str | None = None,
 
-) -> tuple[
-    ModuleType,
-    StorageClient,
-]:
+) -> tuple[ModuleType, StorageClient]:
     '''
     Load the ``StorageClient`` for named backend.
 
@@ -272,10 +267,7 @@ async def open_tsdb_client(
     from ..data.feed import maybe_open_feed
 
     async with (
-        open_storage_client() as (
-            _,
-            storage,
-        ),
+        open_storage_client() as (_, storage),
 
         maybe_open_feed(
             [fqme],
@@ -283,7 +275,7 @@ async def open_tsdb_client(
 
         ) as feed,
     ):
-        profiler(f'opened feed for {fqme!r}')
+        profiler(f'opened feed for {fqme}')
 
         # to_append = feed.hist_shm.array
         # to_prepend = None
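
`get_storagemod()` above resolves a backend by name via a relative import; a minimal sketch of the same stdlib mechanism, assuming the 'piker.storage.<name>' sub-module layout the diff implies:

    from importlib import import_module
    from types import ModuleType

    def load_backend(name: str) -> ModuleType:
        # '.<name>' resolved relative to the 'piker.storage' package,
        # i.e. equivalent to `import piker.storage.<name>`
        return import_module('.' + name, 'piker.storage')
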
@@ -19,10 +19,16 @@ Storage middle-ware CLIs.
 
 """
 from __future__ import annotations
+# from datetime import datetime
+# from contextlib import (
+#     AsyncExitStack,
+# )
 from pathlib import Path
+from math import copysign
 import time
 from types import ModuleType
 from typing import (
+    Any,
     TYPE_CHECKING,
 )
 
@@ -41,6 +47,7 @@ from piker.data import (
     ShmArray,
 )
 from piker import tsp
+from piker.data._formatters import BGM
 from . import log
 from . import (
     __tsdbs__,
@@ -235,12 +242,122 @@ def anal(
     trio.run(main)
 
 
+async def markup_gaps(
+    fqme: str,
+    timeframe: float,
+    actl: AnnotCtl,
+    wdts: pl.DataFrame,
+    gaps: pl.DataFrame,
+
+) -> dict[int, dict]:
+    '''
+    Remote annotate time-gaps in a dt-fielded ts (normally OHLC)
+    with rectangles.
+
+    '''
+    aids: dict[int] = {}
+    for i in range(gaps.height):
+
+        row: pl.DataFrame = gaps[i]
+
+        # the gap's RIGHT-most bar's OPEN value
+        # at that time (sample) step.
+        iend: int = row['index'][0]
+        # dt: datetime = row['dt'][0]
+        # dt_prev: datetime = row['dt_prev'][0]
+        # dt_end_t: float = dt.timestamp()
+
+        # TODO: can we eventually remove this
+        # once we figure out why the epoch cols
+        # don't match?
+        # TODO: FIX HOW/WHY these aren't matching
+        # and are instead off by 4hours (EST
+        # vs. UTC?!?!)
+        # end_t: float = row['time']
+        # assert (
+        #     dt.timestamp()
+        #     ==
+        #     end_t
+        # )
+
+        # the gap's LEFT-most bar's CLOSE value
+        # at that time (sample) step.
+        prev_r: pl.DataFrame = wdts.filter(
+            pl.col('index') == iend - 1
+        )
+        # XXX: probably a gap in the (newly sorted or de-duplicated)
+        # dt-df, so we might need to re-index first..
+        if prev_r.is_empty():
+            await tractor.pause()
+
+        istart: int = prev_r['index'][0]
+        # dt_start_t: float = dt_prev.timestamp()
+
+        # start_t: float = prev_r['time']
+        # assert (
+        #     dt_start_t
+        #     ==
+        #     start_t
+        # )
+
+        # TODO: implement px-col width measure
+        # and ensure at least as many px-cols
+        # shown per rect as configured by user.
+        # gap_w: float = abs((iend - istart))
+        # if gap_w < 6:
+        #     margin: float = 6
+        #     iend += margin
+        #     istart -= margin
+
+        rect_gap: float = BGM*3/8
+        opn: float = row['open'][0]
+        ro: tuple[float, float] = (
+            # dt_end_t,
+            iend + rect_gap + 1,
+            opn,
+        )
+        cls: float = prev_r['close'][0]
+        lc: tuple[float, float] = (
+            # dt_start_t,
+            istart - rect_gap,  # + 1 ,
+            cls,
+        )
+
+        color: str = 'dad_blue'
+        diff: float = cls - opn
+        sgn: float = copysign(1, diff)
+        color: str = {
+            -1: 'buy_green',
+            1: 'sell_red',
+        }[sgn]
+
+        rect_kwargs: dict[str, Any] = dict(
+            fqme=fqme,
+            timeframe=timeframe,
+            start_pos=lc,
+            end_pos=ro,
+            color=color,
+        )
+
+        aid: int = await actl.add_rect(**rect_kwargs)
+        assert aid
+        aids[aid] = rect_kwargs
+
+    # tell chart to redraw all its
+    # graphics view layers Bo
+    await actl.redraw(
+        fqme=fqme,
+        timeframe=timeframe,
+    )
+    return aids
+
+
 @store.command()
 def ldshm(
     fqme: str,
     write_parquet: bool = True,
     reload_parquet_to_shm: bool = True,
-    pdb: bool = False,  # --pdb passed?
 
 ) -> None:
     '''
@@ -260,7 +377,7 @@ def ldshm(
         open_piker_runtime(
             'polars_boi',
             enable_modules=['piker.data._sharedmem'],
-            debug_mode=pdb,
+            debug_mode=True,
         ),
         open_storage_client() as (
             mod,
@@ -280,9 +397,6 @@ def ldshm(
 
             times: np.ndarray = shm.array['time']
             d1: float = float(times[-1] - times[-2])
-            d2: float = 0
-            # XXX, take a median sample rate if sufficient data
-            if times.size > 2:
             d2: float = float(times[-2] - times[-3])
             med: float = np.median(np.diff(times))
             if (
@@ -293,6 +407,7 @@ def ldshm(
                 raise ValueError(
                     f'Something is wrong with time period for {shm}:\n{times}'
                 )
+
             period_s: float = float(max(d1, d2, med))
 
             null_segs: tuple = tsp.get_null_segs(
@@ -302,8 +417,6 @@ def ldshm(
 
             # TODO: call null-seg fixer somehow?
             if null_segs:
-
-                if tractor._state.is_debug_mode():
                 await tractor.pause()
                 # async with (
                 #     trio.open_nursery() as tn,
@@ -328,35 +441,9 @@ def ldshm(
                 wdts,
                 deduped,
                 diff,
-                valid_races,
-                dq_issues,
-            ) = tsp.dedupe_ohlcv_smart(
+            ) = tsp.dedupe(
                 shm_df,
+                period=period_s,
             )
-
-            # Report duplicate analysis
-            if diff > 0:
-                log.info(
-                    f'Removed {diff} duplicate timestamp(s)\n'
-                )
-                if valid_races is not None:
-                    identical: int = (
-                        valid_races
-                        .filter(pl.col('identical_bars'))
-                        .height
-                    )
-                    monotonic: int = valid_races.height - identical
-                    log.info(
-                        f'Valid race conditions: {valid_races.height}\n'
-                        f' - Identical bars: {identical}\n'
-                        f' - Volume monotonic: {monotonic}\n'
-                    )
-
-            if dq_issues is not None:
-                log.warning(
-                    f'DATA QUALITY ISSUES from provider: '
-                    f'{dq_issues.height} timestamp(s)\n'
-                    f'{dq_issues}\n'
-                )
 
             # detect gaps from in expected (uniform OHLC) sample period
@@ -373,8 +460,7 @@ def ldshm(
 
                 # TODO: actually pull the exact duration
                 # expected for each venue operational period?
-                # gap_dt_unit='day',
-                gap_dt_unit='day',
+                gap_dt_unit='days',
                 gap_thresh=1,
             )
 
@@ -385,11 +471,8 @@ def ldshm(
             if (
                 not venue_gaps.is_empty()
                 or (
-                    not step_gaps.is_empty()
-                    # XXX, i presume i put this bc i was guarding
-                    # for ib venue gaps?
-                    # and
-                    # period_s < 60
+                    period_s < 60
+                    and not step_gaps.is_empty()
                 )
             ):
                 # write repaired ts to parquet-file?
@@ -438,7 +521,7 @@ def ldshm(
                 do_markup_gaps: bool = True
                 if do_markup_gaps:
                     new_df: pl.DataFrame = tsp.np2pl(new)
-                    aids: dict = await tsp._annotate.markup_gaps(
+                    aids: dict = await markup_gaps(
                         fqme,
                         period_s,
                         actl,
@@ -447,23 +530,12 @@ def ldshm(
                     )
                     # last chance manual overwrites in REPL
                     # await tractor.pause()
-                    if not aids:
-                        log.warning(
-                            f'No gaps were found !?\n'
-                            f'fqme: {fqme!r}\n'
-                            f'timeframe: {period_s!r}\n'
-                            f"WELL THAT'S GOOD NOOZ!\n"
-                        )
+                    assert aids
                     tf2aids[period_s] = aids
 
             else:
-                # No significant gaps to handle, but may have had
-                # duplicates removed (valid race conditions are ok)
-                if diff > 0 and dq_issues is not None:
-                    log.warning(
-                        'Found duplicates with data quality issues '
-                        'but no significant time gaps!\n'
-                    )
+                # allow interaction even when no ts problems.
+                assert not diff
 
                 await tractor.pause()
                 log.info('Exiting TSP shm anal-izer!')
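
The gap-direction coloring in `markup_gaps()` above hinges on one sign trick; a tiny sketch of just that piece, reusing the color names from the diff (which are assumed to be piker palette keys):

    from math import copysign

    def gap_color(open_right: float, close_left: float) -> str:
        # close of the left bar minus open of the right bar:
        # negative diff => up-gap, positive => down-gap
        sgn: float = copysign(1, close_left - open_right)
        return {
            -1: 'buy_green',
            1: 'sell_red',
        }[sgn]

    assert gap_color(open_right=101.0, close_left=100.0) == 'buy_green'
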
(file diff suppressed because it is too large)

@@ -54,10 +54,10 @@ from ..log import (
 # for "time series processing"
 subsys: str = 'piker.tsp'
 
-log = get_logger(name=__name__)
+log = get_logger(subsys)
 get_console_log = partial(
     get_console_log,
-    name=subsys,  # activate for subsys-pkg "downward"
+    name=subsys,
 )
 
 # NOTE: union type-defs to handle generic `numpy` and `polars` types
@@ -275,18 +275,6 @@ def get_null_segs(
     # diff of abs index steps between each zeroed row
     absi_zdiff: np.ndarray = np.diff(absi_zeros)
 
-    if zero_t.size < 2:
-        try:
-            breakpoint()
-        except RuntimeError:
-            # XXX, if greenback not active from
-            # piker store ldshm cmd..
-            log.exception(
-                "Can't debug single-sample null!\n"
-            )
-
-        return None
-
     # scan for all frame-indices where the
     # zeroed-row-abs-index-step-diff is greater then the
     # expected increment of 1.
@@ -446,8 +434,8 @@ def get_null_segs(
 
 def iter_null_segs(
     timeframe: float,
-    frame: Frame|None = None,
-    null_segs: tuple|None = None,
+    frame: Frame | None = None,
+    null_segs: tuple | None = None,
 
 ) -> Generator[
     tuple[
@@ -499,8 +487,7 @@ def iter_null_segs(
     start_dt = None
     if (
         absi_start is not None
-        and
-        start_t != 0
+        and start_t != 0
     ):
         fi_start: int = absi_start - absi_first
         start_row: Seq = frame[fi_start]
@@ -514,8 +501,8 @@ def iter_null_segs(
     yield (
         absi_start, absi_end,  # abs indices
         fi_start, fi_end,  # relative "frame" indices
-        start_t, end_t,  # epoch times
-        start_dt, end_dt,  # dts
+        start_t, end_t,
+        start_dt, end_dt,
     )
 
 
@@ -591,22 +578,11 @@ def detect_time_gaps(
     # NOTE: this flag is to indicate that on this (sampling) time
     # scale we expect to only be filtering against larger venue
     # closures-scale time gaps.
-    #
-    # Map to total_ method since `dt_diff` is a duration type,
-    # not datetime - modern polars requires `total_*` methods
-    # for duration types (e.g. `total_days()` not `day()`)
-    # Ensure plural form for polars API (e.g. 'day' -> 'days')
-    unit_plural: str = (
-        gap_dt_unit
-        if gap_dt_unit.endswith('s')
-        else f'{gap_dt_unit}s'
-    )
-    duration_method: str = f'total_{unit_plural}'
     return step_gaps.filter(
         # Second by an arbitrary dt-unit step size
         getattr(
             pl.col('dt_diff').dt,
-            duration_method,
+            gap_dt_unit,
        )().abs() > gap_thresh
    )
 
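
The removed comments in `detect_time_gaps()` above explain why the unit string was being mapped to a `total_*` method: on modern polars, Duration columns expose `total_days()`, `total_hours()`, etc. rather than `day()`. A minimal runnable sketch of that filtering, assuming a recent polars release:

    import polars as pl
    from datetime import datetime

    df = pl.DataFrame({
        'dt': [datetime(2024, 1, 1), datetime(2024, 1, 5)],
    }).with_columns(
        dt_diff=pl.col('dt').diff(),  # a Duration column
    )
    # rows whose step from the prior sample exceeds 1 day
    gaps = df.filter(
        pl.col('dt_diff').dt.total_days().abs() > 1
    )
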
@@ -1,306 +0,0 @@
-# piker: trading gear for hackers
-# Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
-
-"""
-Time-series (remote) annotation APIs.
-
-"""
-from __future__ import annotations
-from math import copysign
-from typing import (
-    Any,
-    TYPE_CHECKING,
-)
-
-import polars as pl
-import tractor
-
-from piker.data._formatters import BGM
-from piker.storage import log
-from piker.ui._style import get_fonts
-
-if TYPE_CHECKING:
-    from piker.ui._remote_ctl import AnnotCtl
-
-
-def humanize_duration(
-    seconds: float,
-) -> str:
-    '''
-    Convert duration in seconds to short human-readable form.
-
-    Uses smallest appropriate time unit:
-    - d: days
-    - h: hours
-    - m: minutes
-    - s: seconds
-
-    Examples:
-    - 86400 -> "1d"
-    - 28800 -> "8h"
-    - 180 -> "3m"
-    - 45 -> "45s"
-
-    '''
-    abs_secs: float = abs(seconds)
-
-    if abs_secs >= 86400:
-        days: float = abs_secs / 86400
-        if days >= 10 or days == int(days):
-            return f'{int(days)}d'
-        return f'{days:.1f}d'
-
-    elif abs_secs >= 3600:
-        hours: float = abs_secs / 3600
-        if hours >= 10 or hours == int(hours):
-            return f'{int(hours)}h'
-        return f'{hours:.1f}h'
-
-    elif abs_secs >= 60:
-        mins: float = abs_secs / 60
-        if mins >= 10 or mins == int(mins):
-            return f'{int(mins)}m'
-        return f'{mins:.1f}m'
-
-    else:
-        if abs_secs >= 10 or abs_secs == int(abs_secs):
-            return f'{int(abs_secs)}s'
-        return f'{abs_secs:.1f}s'
-
-
-async def markup_gaps(
-    fqme: str,
-    timeframe: float,
-    actl: AnnotCtl,
-    wdts: pl.DataFrame,
-    gaps: pl.DataFrame,
-
-    # XXX, switch on to see txt showing a "humanized" label of each
-    # gap's duration.
-    show_txt: bool = False,
-
-) -> dict[int, dict]:
-    '''
-    Remote annotate time-gaps in a dt-fielded ts (normally OHLC)
-    with rectangles.
-
-    '''
-    # XXX: force chart redraw FIRST to ensure PlotItem coordinate
-    # system is properly initialized before we position annotations!
-    # Without this, annotations may be misaligned on first creation
-    # due to Qt/pyqtgraph initialization race conditions.
-    await actl.redraw(
-        fqme=fqme,
-        timeframe=timeframe,
-    )
-
-    aids: dict[int] = {}
-    for i in range(gaps.height):
-        row: pl.DataFrame = gaps[i]
-
-        # the gap's RIGHT-most bar's OPEN value
-        # at that time (sample) step.
-        iend: int = row['index'][0]
-
-        # dt: datetime = row['dt'][0]
-        # dt_prev: datetime = row['dt_prev'][0]
-        # dt_end_t: float = dt.timestamp()
-
-        # TODO: can we eventually remove this
-        # once we figure out why the epoch cols
-        # don't match?
-        # TODO: FIX HOW/WHY these aren't matching
-        # and are instead off by 4hours (EST
-        # vs. UTC?!?!)
-        # end_t: float = row['time']
-        # assert (
-        #     dt.timestamp()
-        #     ==
-        #     end_t
-        # )
-
-        # the gap's LEFT-most bar's CLOSE value
-        # at that time (sample) step.
-        prev_r: pl.DataFrame = wdts.filter(
-            pl.col('index') == iend - 1
-        )
-        # XXX: probably a gap in the (newly sorted or de-duplicated)
-        # dt-df, so we might need to re-index first..
-        dt: pl.Series = row['dt']
-        dt_prev: pl.Series = row['dt_prev']
-        if prev_r.is_empty():
-
-            # XXX, filter out any special ignore cases,
-            # - UNIX-epoch stamped datums
-            # - first row
-            if (
-                dt_prev.dt.epoch()[0] == 0
-                or
-                dt.dt.epoch()[0] == 0
-            ):
-                log.warning('Skipping row with UNIX epoch timestamp ??')
-                continue
-
-            if wdts[0]['index'][0] == iend:  # first row
-                log.warning('Skipping first-row (has no previous obvi) !!')
-                continue
-
-            # XXX, if the previous-row by shm-index is missing,
-            # meaning there is a missing sample (set), get the prior
-            # row by df index and attempt to use it?
-            i_wdts: pl.DataFrame = wdts.with_row_index(name='i')
-            i_row: int = i_wdts.filter(pl.col('index') == iend)['i'][0]
-            prev_row_by_i = wdts[i_row]
-            prev_r: pl.DataFrame = prev_row_by_i
-
-            # debug any missing pre-row
-            if tractor._state.is_debug_mode():
-                await tractor.pause()
-
-        istart: int = prev_r['index'][0]
-        # TODO: implement px-col width measure
-        # and ensure at least as many px-cols
-        # shown per rect as configured by user.
-        # gap_w: float = abs((iend - istart))
-        # if gap_w < 6:
-        #     margin: float = 6
-        #     iend += margin
-        #     istart -= margin
-
-        opn: float = row['open'][0]
-        cls: float = prev_r['close'][0]
-
-        # get gap duration for humanized label
-        gap_dur_s: float = row['s_diff'][0]
-        gap_label: str = humanize_duration(gap_dur_s)
-
-        # XXX: get timestamps for server-side index lookup
-        start_time: float = prev_r['time'][0]
-        end_time: float = row['time'][0]
-
-        # BGM=0.16 is the normal diff from overlap between bars, SO
-        # just go slightly "in" from that "between them".
-        from_idx: int = BGM - .06  # = .10
-        lc: tuple[float, float] = (
-            istart + 1 - from_idx,
-            cls,
-        )
-        ro: tuple[float, float] = (
-            iend + from_idx,
-            opn,
-        )
-
-        diff: float = cls - opn
-        sgn: float = copysign(1, diff)
-        up_gap: bool = sgn == -1
-        down_gap: bool = sgn == 1
-        flat: bool = sgn == 0
-
-        color: str = 'dad_blue'
-        # TODO? mks more sense to have up/down coloring?
-        # color: str = {
-        #     -1: 'lilypad_green',  # up-gap
-        #     1: 'wine',  # down-gap
-        # }[sgn]
-
-        rect_kwargs: dict[str, Any] = dict(
-            fqme=fqme,
-            timeframe=timeframe,
-            start_pos=lc,
-            end_pos=ro,
-            color=color,
-            start_time=start_time,
-            end_time=end_time,
-        )
-
-        # add up/down rects
-        aid: int|None = await actl.add_rect(**rect_kwargs)
-        if aid is None:
-            log.error(
-                f'Failed to add rect for,\n'
-                f'{rect_kwargs!r}\n'
-                f'\n'
-                f'Skipping to next gap!\n'
-            )
-            continue
-
-        assert aid
-        aids[aid] = rect_kwargs
-        direction: str = (
-            'down' if down_gap
-            else 'up'
-        )
-        # TODO! mk this a `msgspec.Struct` which we deserialize
-        # on the server side!
-        # XXX: send timestamp for server-side index lookup
-        # to ensure alignment with current shm state
-        gap_time: float = row['time'][0]
-        arrow_kwargs: dict[str, Any] = dict(
-            fqme=fqme,
-            timeframe=timeframe,
-            x=iend,  # fallback if timestamp lookup fails
-            y=cls,
-            time=gap_time,  # for server-side index lookup
-            color=color,
-            alpha=169,
-            pointing=direction,
-            # TODO: expose these as params to markup_gaps()?
-            headLen=10,
-            headWidth=2.222,
-            pxMode=True,
-        )
-
-        aid: int = await actl.add_arrow(
-            **arrow_kwargs
-        )
-
-        # add duration label to RHS of arrow
-        if up_gap:
-            anchor = (0, 0)
-            # ^XXX? i dun get dese dims.. XD
-        elif down_gap:
-            anchor = (0, 1)  # XXX y, x?
-        else:  # no-gap?
-            assert flat
-            anchor = (0, 0)  # up from bottom
-
-        # use a slightly smaller font for gap label txt.
-        font, small_font = get_fonts()
-        font_size: int = small_font.px_size - 1
-        assert isinstance(font_size, int)
-
-        if show_txt:
-            text_aid: int = await actl.add_text(
-                fqme=fqme,
-                timeframe=timeframe,
-                text=gap_label,
-                x=iend + 1,  # fallback if timestamp lookup fails
-                y=cls,
-                time=gap_time,  # server-side index lookup
-                color=color,
-                anchor=anchor,
-                font_size=font_size,
-            )
-            aids[text_aid] = {'text': gap_label}
-
-    # tell chart to redraw all its
-    # graphics view layers Bo
-    await actl.redraw(
-        fqme=fqme,
-        timeframe=timeframe,
-    )
-    return aids
@ -1,206 +0,0 @@
|
||||||
'''
|
|
||||||
Smart OHLCV deduplication with data quality validation.
|
|
||||||
|
|
||||||
Handles concurrent write conflicts by keeping the most complete bar
|
|
||||||
(highest volume) while detecting data quality anomalies.
|
|
||||||
|
|
||||||
'''
|
|
||||||
import polars as pl
|
|
||||||
|
|
||||||
from ._anal import with_dts
|
|
||||||
|
|
||||||
|
|
||||||
def dedupe_ohlcv_smart(
|
|
||||||
src_df: pl.DataFrame,
|
|
||||||
time_col: str = 'time',
|
|
||||||
volume_col: str = 'volume',
|
|
||||||
sort: bool = True,
|
|
||||||
|
|
||||||
) -> tuple[
|
|
||||||
pl.DataFrame, # with dts
|
|
||||||
pl.DataFrame, # deduped (keeping higher volume bars)
|
|
||||||
int, # count of dupes removed
|
|
||||||
pl.DataFrame|None, # valid race conditions
|
|
||||||
pl.DataFrame|None, # data quality violations
|
|
||||||
]:
|
|
||||||
'''
|
|
||||||
Smart OHLCV deduplication keeping most complete bars.
|
|
||||||
|
|
||||||
For duplicate timestamps, keeps bar with highest volume under
|
|
||||||
the assumption that higher volume indicates more complete/final
|
|
||||||
data from backfill vs partial live updates.
|
|
||||||
|
|
||||||
Returns
|
|
||||||
-------
|
|
||||||
Tuple of:
|
|
||||||
- wdts: original dataframe with datetime columns added
|
|
||||||
- deduped: deduplicated frame keeping highest-volume bars
|
|
||||||
- diff: number of duplicate rows removed
|
|
||||||
- valid_races: duplicates meeting expected race condition pattern
|
|
||||||
(volume monotonic, OHLC ranges valid)
|
|
||||||
- data_quality_issues: duplicates violating expected relationships
|
|
||||||
indicating provider data problems
|
|
||||||
|
|
||||||
'''
|
|
||||||
wdts: pl.DataFrame = with_dts(src_df)
|
|
||||||
|
|
||||||
# Find duplicate timestamps
|
|
||||||
dupes: pl.DataFrame = wdts.filter(
|
|
||||||
pl.col(time_col).is_duplicated()
|
|
||||||
)
|
|
||||||
|
|
||||||
if dupes.is_empty():
|
|
||||||
# No duplicates, return as-is
|
|
||||||
return (wdts, wdts, 0, None, None)
|
|
||||||
|
|
||||||
# Analyze duplicate groups for validation
|
|
||||||
dupe_analysis: pl.DataFrame = (
|
|
||||||
dupes
|
|
||||||
.sort([time_col, 'index'])
|
|
||||||
.group_by(time_col, maintain_order=True)
|
|
||||||
.agg([
|
|
||||||
pl.col('index').alias('indices'),
|
|
||||||
pl.col('volume').alias('volumes'),
|
|
||||||
pl.col('high').alias('highs'),
|
|
||||||
pl.col('low').alias('lows'),
|
|
||||||
pl.col('open').alias('opens'),
|
|
||||||
pl.col('close').alias('closes'),
|
|
||||||
pl.col('dt').first().alias('dt'),
|
|
||||||
-            pl.len().alias('count'),
-        ])
-    )
-
-    # Validate OHLCV monotonicity for each duplicate group
-    def check_ohlcv_validity(row) -> dict[str, bool]:
-        '''
-        Check if duplicate bars follow expected race condition pattern.
-
-        For a valid live-update → backfill race:
-        - volume should be monotonically increasing
-        - high should be monotonically non-decreasing
-        - low should be monotonically non-increasing
-        - open should be identical (fixed at bar start)
-
-        Returns dict of violation flags.
-
-        '''
-        vols: list = row['volumes']
-        highs: list = row['highs']
-        lows: list = row['lows']
-        opens: list = row['opens']
-
-        violations: dict[str, bool] = {
-            'volume_non_monotonic': False,
-            'high_decreased': False,
-            'low_increased': False,
-            'open_mismatch': False,
-            'identical_bars': False,
-        }
-
-        # Check if all bars are identical (pure duplicate)
-        if (
-            len(set(vols)) == 1
-            and len(set(highs)) == 1
-            and len(set(lows)) == 1
-            and len(set(opens)) == 1
-        ):
-            violations['identical_bars'] = True
-            return violations
-
-        # Check volume monotonicity
-        for i in range(1, len(vols)):
-            if vols[i] < vols[i-1]:
-                violations['volume_non_monotonic'] = True
-                break
-
-        # Check high monotonicity (can only increase or stay same)
-        for i in range(1, len(highs)):
-            if highs[i] < highs[i-1]:
-                violations['high_decreased'] = True
-                break
-
-        # Check low monotonicity (can only decrease or stay same)
-        for i in range(1, len(lows)):
-            if lows[i] > lows[i-1]:
-                violations['low_increased'] = True
-                break
-
-        # Check open consistency (should be fixed)
-        if len(set(opens)) > 1:
-            violations['open_mismatch'] = True
-
-        return violations
-
-    # Apply validation
-    dupe_analysis = dupe_analysis.with_columns([
-        pl.struct(['volumes', 'highs', 'lows', 'opens'])
-        .map_elements(
-            check_ohlcv_validity,
-            return_dtype=pl.Struct([
-                pl.Field('volume_non_monotonic', pl.Boolean),
-                pl.Field('high_decreased', pl.Boolean),
-                pl.Field('low_increased', pl.Boolean),
-                pl.Field('open_mismatch', pl.Boolean),
-                pl.Field('identical_bars', pl.Boolean),
-            ])
-        )
-        .alias('validity')
-    ])
-
-    # Unnest validity struct
-    dupe_analysis = dupe_analysis.unnest('validity')
-
-    # Separate valid races from data quality issues
-    valid_races: pl.DataFrame|None = (
-        dupe_analysis
-        .filter(
-            # Valid if no violations OR just identical bars
-            ~pl.col('volume_non_monotonic')
-            & ~pl.col('high_decreased')
-            & ~pl.col('low_increased')
-            & ~pl.col('open_mismatch')
-        )
-    )
-    if valid_races.is_empty():
-        valid_races = None
-
-    data_quality_issues: pl.DataFrame|None = (
-        dupe_analysis
-        .filter(
-            # Issues if any non-identical violation exists
-            (
-                pl.col('volume_non_monotonic')
-                | pl.col('high_decreased')
-                | pl.col('low_increased')
-                | pl.col('open_mismatch')
-            )
-            & ~pl.col('identical_bars')
-        )
-    )
-    if data_quality_issues.is_empty():
-        data_quality_issues = None
-
-    # Deduplicate: keep highest volume bar for each timestamp
-    deduped: pl.DataFrame = (
-        wdts
-        .sort([time_col, volume_col])
-        .unique(
-            subset=[time_col],
-            keep='last',
-            maintain_order=False,
-        )
-    )
-
-    # Re-sort by time or index
-    if sort:
-        deduped = deduped.sort(by=time_col)
-
-    diff: int = wdts.height - deduped.height
-
-    return (
-        wdts,
-        deduped,
-        diff,
-        valid_races,
-        data_quality_issues,
-    )
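
NOTE: for intuition, a minimal standalone sketch (not from the diff) of
the keep-highest-volume dedup step removed above; the toy 'time' and
'volume' names stand in for the function's `time_col`/`volume_col`
parameters:

    import polars as pl

    wdts = pl.DataFrame({
        'time':   [1, 1, 2, 3, 3],
        'volume': [5., 9., 4., 1., 3.],
    })

    deduped = (
        wdts
        # sort so the highest-volume row per timestamp lands last ...
        .sort(['time', 'volume'])
        # ... then keep only that last row for each timestamp
        .unique(subset=['time'], keep='last', maintain_order=False)
        .sort('time')
    )
    assert deduped['volume'].to_list() == [9., 4., 3.]
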
File diff suppressed because it is too large
@@ -21,7 +21,6 @@ this module ties together quote and computational (fsp) streams with
 graphics update methods via our custom ``pyqtgraph`` charting api.

 '''
-from functools import partial
 import itertools
 from math import floor
 import time

@@ -209,7 +208,6 @@ class DisplayState(Struct):
 async def increment_history_view(
     # min_istream: tractor.MsgStream,
     ds: DisplayState,
-    loglevel: str = 'warning',
 ):
     hist_chart: ChartPlotWidget = ds.hist_chart
     hist_viz: Viz = ds.hist_viz

@@ -231,10 +229,7 @@ async def increment_history_view(
     hist_viz.reset_graphics()
     # hist_viz.update_graphics(force_redraw=True)

-    async with open_sample_stream(
-        period_s=1.,
-        loglevel=loglevel,
-    ) as min_istream:
+    async with open_sample_stream(1.) as min_istream:
         async for msg in min_istream:

             profiler = Profiler(

@@ -315,6 +310,7 @@ async def increment_history_view(


+
 async def graphics_update_loop(

     dss: dict[str, DisplayState],
     nurse: trio.Nursery,
     godwidget: GodWidget,

@@ -323,7 +319,6 @@ async def graphics_update_loop(

     pis: dict[str, list[pgo.PlotItem, pgo.PlotItem]] = {},
     vlm_charts: dict[str, ChartPlotWidget] = {},
-    loglevel: str = 'warning',

 ) -> None:
     '''

@@ -467,12 +462,9 @@ async def graphics_update_loop(
         # })

         nurse.start_soon(
-            partial(
-                increment_history_view,
-                # min_istream,
-                ds=ds,
-                loglevel=loglevel,
-            ),
+            increment_history_view,
+            # min_istream,
+            ds,
         )
         await trio.sleep(0)

@@ -519,19 +511,14 @@ async def graphics_update_loop(
             fast_chart.linked.isHidden()
             or not rt_pi.isVisible()
         ):
-            log.debug(
-                f'{fqme} skipping update for HIDDEN CHART'
-            )
+            print(f'{fqme} skipping update for HIDDEN CHART')
             fast_chart.pause_all_feeds()
             continue

         ic = fast_chart.view._in_interact
         if ic:
             fast_chart.pause_all_feeds()
-            log.debug(
-                f'Pausing chart updaates during interaction\n'
-                f'fqme: {fqme!r}'
-            )
+            print(f'{fqme} PAUSING DURING INTERACTION')
             await ic.wait()
             fast_chart.resume_all_feeds()

@@ -1604,18 +1591,15 @@ async def display_symbol_data(
     # start update loop task
     dss: dict[str, DisplayState] = {}
     ln.start_soon(
-        partial(
-            graphics_update_loop,
-            dss=dss,
-            nurse=ln,
-            godwidget=godwidget,
-            feed=feed,
-            # min_istream,
-
-            pis=pis,
-            vlm_charts=vlm_charts,
-            loglevel=loglevel,
-        )
+        graphics_update_loop,
+        dss,
+        ln,
+        godwidget,
+        feed,
+        # min_istream,
+
+        pis,
+        vlm_charts,
     )

     # boot order-mode
@@ -21,7 +21,6 @@ Higher level annotation editors.
 from __future__ import annotations
 from collections import defaultdict
 from typing import (
-    Literal,
     Sequence,
     TYPE_CHECKING,
 )

@@ -72,18 +71,9 @@ log = get_logger(__name__)


 class ArrowEditor(Struct):
-    '''
-    Annotate a chart-view with arrows most often used for indicating,
-    - order txns/clears,
-    - positions directions,
-    - general points-of-interest like nooz events.
-
-    '''
     godw: GodWidget = None  # type: ignore # noqa
-    _arrows: dict[
-        str,
-        list[pg.ArrowItem]
-    ] = {}
+    _arrows: dict[str, list[pg.ArrowItem]] = {}

     def add(
         self,

@@ -91,19 +81,8 @@ class ArrowEditor(Struct):
         uid: str,
         x: float,
         y: float,
-        color: str|None = None,
-        pointing: Literal[
-            'up',
-            'down',
-            None,
-        ] = None,
-        alpha: int = 255,
-        zval: float = 1e9,
-        headLen: float|None = None,
-        headWidth: float|None = None,
-        tailLen: float|None = None,
-        tailWidth: float|None = None,
-        pxMode: bool = True,
+        color: str = 'default',
+        pointing: str | None = None,

     ) -> pg.ArrowItem:
         '''

@@ -119,83 +98,29 @@
         # scale arrow sizing to dpi-aware font
         size = _font.font.pixelSize() * 0.8

-        # allow caller override of head dimensions
-        if headLen is None:
-            headLen = size
-        if headWidth is None:
-            headWidth = size/2
-        # tail params default to None (no tail)
-        if tailWidth is None:
-            tailWidth = 3
-
-        color = color or 'default'
-        color = QColor(hcolor(color))
-        color.setAlpha(alpha)
-        pen = fn.mkPen(color, width=1)
-        brush = fn.mkBrush(color)
         arrow = pg.ArrowItem(
             angle=angle,
             baseAngle=0,
-            headLen=headLen,
-            headWidth=headWidth,
-            tailLen=tailLen,
-            tailWidth=tailWidth,
-            pxMode=pxMode,
-            # coloring
-            pen=pen,
-            brush=brush,
-        )
-        arrow.setZValue(zval)
-        arrow.setPos(x, y)
-        plot.addItem(arrow)  # render to view
-
-        # register for removal
-        arrow._uid = uid
-        self._arrows.setdefault(
-            uid, []
-        ).append(arrow)
+            headLen=size,
+            headWidth=size/2,
+            tailLen=None,
+            pxMode=True,
+
+            # coloring
+            pen=pg.mkPen(hcolor('papas_special')),
+            brush=pg.mkBrush(hcolor(color)),
+        )
+        arrow.setPos(x, y)
+        self._arrows.setdefault(uid, []).append(arrow)
+
+        # render to view
+        plot.addItem(arrow)

         return arrow

-    def remove(
-        self,
-        arrow: pg.ArrowItem,
-    ) -> None:
-        '''
-        Remove a *single arrow* from all chart views to which it was
-        added.
-
-        '''
-        uid: str = arrow._uid
-        arrows: list[pg.ArrowItem] = self._arrows[uid]
-        log.info(
-            f'Removing arrow from views\n'
-            f'uid: {uid!r}\n'
-            f'{arrow!r}\n'
-        )
+    def remove(self, arrow) -> bool:
         for linked in self.godw.iter_linked():
-            if not (chart := linked.chart):
-                continue
-
-            chart.plotItem.removeItem(arrow)
-            try:
-                arrows.remove(arrow)
-            except ValueError:
-                log.warning(
-                    f'Arrow was already removed?\n'
-                    f'uid: {uid!r}\n'
-                    f'{arrow!r}\n'
-                )
-
-    def remove_all(self) -> set[pg.ArrowItem]:
-        '''
-        Remove all arrows added by this editor from all
-        chart-views.
-
-        '''
-        for uid, arrows in self._arrows.items():
-            for arrow in arrows:
-                self.remove(arrow)
+            linked.chart.plotItem.removeItem(arrow)


 class LineEditor(Struct):

@@ -341,9 +266,6 @@ class LineEditor(Struct):

         return lines

-    # compat with ArrowEditor
-    remove = remove_line
-

 def as_point(
     pair: Sequence[float, float] | QPointF,

@@ -376,7 +298,7 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
     def __init__(
         self,
         viewbox: ViewBox,
-        color: str|None = None,
+        color: str | None = None,
     ) -> None:
         super().__init__(0, 0, 1, 1)

@@ -692,6 +614,3 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
         ):
             scen.removeItem(self._label_proxy)
-
-    # compat with ArrowEditor
-    remove = delete
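
NOTE: a hypothetical standalone sketch of the DPI-scaled arrow styling
used by `ArrowEditor.add()` above; the hard-coded `size` stands in for
piker's `_font` pixel size and nothing here is prescribed by the diff:

    import pyqtgraph as pg

    app = pg.mkQApp()
    plt = pg.plot([1, 3, 2, 4], title='arrow annotation sketch')

    size: float = 12 * 0.8  # stand-in for `_font.font.pixelSize() * 0.8`
    arrow = pg.ArrowItem(
        angle=-90,  # point up at the anchor
        headLen=size,
        headWidth=size/2,
        pxMode=True,  # size in screen px, not data units
        pen=pg.mkPen('w'),
        brush=pg.mkBrush('g'),
    )
    arrow.setPos(2, 2)  # data coords on the plot
    plt.addItem(arrow)
    # pg.exec()  # uncomment to actually show the window
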
@@ -183,17 +183,13 @@ async def open_fsp_sidepane(

 @acm
 async def open_fsp_actor_cluster(
-    names: list[str] = [
-        'fsp_0',
-        'fsp_1',
-    ],
-
+    names: list[str] = ['fsp_0', 'fsp_1'],
 ) -> AsyncGenerator[
     int,
     dict[str, tractor.Portal]
 ]:

-    # TODO! change to .experimental!
     from tractor._clustering import open_actor_cluster

     # profiler = Profiler(

@@ -201,7 +197,7 @@ async def open_fsp_actor_cluster(
     #     disabled=False
     # )
     async with open_actor_cluster(
-        count=len(names),
+        count=2,
         names=names,
         modules=['piker.fsp._engine'],

@@ -501,8 +497,7 @@ class FspAdmin:

         portal: tractor.Portal = (
             self.cluster.get(worker_name)
-            or
-            self.rr_next_portal()
+            or self.rr_next_portal()
         )

         # TODO: this should probably be turned into a
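
NOTE: a hedged sketch of the cluster-spawn pattern above using
`tractor._clustering.open_actor_cluster` exactly as the diff does;
assumes it yields a `dict[str, tractor.Portal]` keyed by worker name:

    from tractor._clustering import open_actor_cluster

    async def spawn_fsp_workers() -> None:
        names: list[str] = ['fsp_0', 'fsp_1']
        async with open_actor_cluster(
            count=len(names),  # the old kwarg; the new code hardcodes 2
            names=names,
            modules=['piker.fsp._engine'],
        ) as portals:
            # one portal per named worker actor
            for name, portal in portals.items():
                print(name, portal)
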
@@ -27,12 +27,10 @@ from contextlib import (
 from functools import partial
 from pprint import pformat
 from typing import (
+    # Any,
     AsyncContextManager,
-    Literal,
 )
-from uuid import uuid4

-import pyqtgraph as pg
 import tractor
 import trio
 from tractor import trionics

@@ -49,16 +47,12 @@ from piker.brokers import SymbolNotFound
 from piker.ui.qt import (
     QGraphicsItem,
 )
-from PyQt6.QtGui import QFont
 from ._display import DisplayState
 from ._interaction import ChartView
-from ._editors import (
-    SelectRect,
-    ArrowEditor,
-)
+from ._editors import SelectRect
 from ._chart import ChartPlotWidget
 from ._dataviz import Viz
-from ._style import hcolor

 log = get_logger(__name__)

@@ -89,40 +83,8 @@ _ctxs: IpcCtxTable = {}
 # the "annotations server" which actually renders to a Qt canvas).
 # type AnnotsTable = dict[int, QGraphicsItem]
 AnnotsTable = dict[int, QGraphicsItem]
-EditorsTable = dict[int, ArrowEditor]

 _annots: AnnotsTable = {}
-_editors: EditorsTable = {}
-
-
-def rm_annot(
-    annot: ArrowEditor|SelectRect|pg.TextItem
-) -> bool:
-    global _editors
-    match annot:
-        case pg.ArrowItem():
-            editor = _editors[annot._uid]
-            editor.remove(annot)
-            # ^TODO? only remove each arrow or all?
-            # if editor._arrows:
-            #     editor.remove_all()
-            # else:
-            #     log.warning(
-            #         f'Annot already removed!\n'
-            #         f'{annot!r}\n'
-            #     )
-            return True
-
-        case SelectRect():
-            annot.delete()
-            return True
-
-        case pg.TextItem():
-            scene = annot.scene()
-            if scene:
-                scene.removeItem(annot)
-            return True
-
-    return False


 async def serve_rc_annots(

@@ -133,12 +95,6 @@ async def serve_rc_annots(
     annots: AnnotsTable,

 ) -> None:
-    '''
-    A small viz(ualization) server for remote ctl of chart
-    annotations.
-
-    '''
-    global _editors
     async for msg in annot_req_stream:
         match msg:
             case {

@@ -148,77 +104,14 @@ async def serve_rc_annots(
                 'meth': str(meth),
                 'kwargs': dict(kwargs),
             }:

                 ds: DisplayState = _dss[fqme]
-                try:
                 chart: ChartPlotWidget = {
                     60: ds.hist_chart,
                     1: ds.chart,
                 }[timeframe]
-                except KeyError:
-                    msg: str = (
-                        f'No chart for timeframe={timeframe}s, '
-                        f'skipping rect annotation'
-                    )
-                    log.exeception(msg)
-                    await annot_req_stream.send({'error': msg})
-                    continue
-
                 cv: ChartView = chart.cv
-
-                # NEW: if timestamps provided, lookup current indices
-                # from shm to ensure alignment with current buffer
-                # state
-                start_time = kwargs.pop('start_time', None)
-                end_time = kwargs.pop('end_time', None)
-                if (
-                    start_time is not None
-                    and end_time is not None
-                ):
-                    viz: Viz = chart.get_viz(fqme)
-                    shm = viz.shm
-                    arr = shm.array
-
-                    # lookup start index
-                    start_matches = arr[arr['time'] == start_time]
-                    if len(start_matches) == 0:
-                        msg: str = (
-                            f'No shm entry for start_time={start_time}, '
-                            f'skipping rect'
-                        )
-                        log.error(msg)
-                        await annot_req_stream.send({'error': msg})
-                        continue
-
-                    # lookup end index
-                    end_matches = arr[arr['time'] == end_time]
-                    if len(end_matches) == 0:
-                        msg: str = (
-                            f'No shm entry for end_time={end_time}, '
-                            f'skipping rect'
-                        )
-                        log.error(msg)
-                        await annot_req_stream.send({'error': msg})
-                        continue
-
-                    # get close price from start bar, open from end
-                    # bar
-                    start_idx = float(start_matches[0]['index'])
-                    end_idx = float(end_matches[0]['index'])
-                    start_close = float(start_matches[0]['close'])
-                    end_open = float(end_matches[0]['open'])
-
-                    # reconstruct start_pos and end_pos with
-                    # looked-up indices
-                    from_idx: float = 0.16 - 0.06  # BGM offset
-                    kwargs['start_pos'] = (
-                        start_idx + 1 - from_idx,
-                        start_close,
-                    )
-                    kwargs['end_pos'] = (
-                        end_idx + from_idx,
-                        end_open,
-                    )
-
                 # annot type lookup from cmd
                 rect = SelectRect(
                     viewbox=cv,
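
NOTE: the timestamp -> buffer-index lookup removed above boils down to
a field match on the shm's numpy structured array; a self-contained
sketch (the dtype below is an assumption, not piker's real layout):

    import numpy as np

    arr = np.array(
        [(100, 1.0), (160, 2.0), (220, 3.0)],
        dtype=[('time', 'i8'), ('index', 'f8')],
    )

    def index_for_time(arr: np.ndarray, stamp: int) -> float | None:
        # mirror the server: an empty match means report + skip
        matches = arr[arr['time'] == stamp]
        if len(matches) == 0:
            return None
        return float(matches[0]['index'])

    assert index_for_time(arr, 160) == 2.0
    assert index_for_time(arr, 999) is None
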
@@ -237,207 +130,21 @@ async def serve_rc_annots(
                 # delegate generically to the requested method
                 getattr(rect, meth)(**kwargs)
                 rect.show()

-                # XXX: store absolute coords for repositioning
-                # during viz redraws (eg backfill updates)
-                rect._meth = meth
-                rect._kwargs = kwargs
-
                 aid: int = id(rect)
                 annots[aid] = rect
                 aids: set[int] = ctxs[ipc_key][1]
                 aids.add(aid)
                 await annot_req_stream.send(aid)

-            case {
-                'cmd': 'ArrowEditor',
-                'fqme': fqme,
-                'timeframe': timeframe,
-                'meth': 'add'|'remove' as meth,
-                'kwargs': {
-                    'x': float(x),
-                    'y': float(y),
-                    'pointing': pointing,
-                    'color': color,
-                    'aid': str()|None as aid,
-                    'alpha': int(alpha),
-                    'headLen': int()|float()|None as headLen,
-                    'headWidth': int()|float()|None as headWidth,
-                    'tailLen': int()|float()|None as tailLen,
-                    'tailWidth': int()|float()|None as tailWidth,
-                    'pxMode': bool(pxMode),
-                    'time': int()|float()|None as timestamp,
-                },
-                # ?TODO? split based on method fn-sigs?
-                # 'pointing',
-            }:
-                ds: DisplayState = _dss[fqme]
-                try:
-                    chart: ChartPlotWidget = {
-                        60: ds.hist_chart,
-                        1: ds.chart,
-                    }[timeframe]
-                except KeyError:
-                    log.warning(
-                        f'No chart for timeframe={timeframe}s, '
-                        f'skipping arrow annotation'
-                    )
-                    # return -1 to indicate failure
-                    await annot_req_stream.send(-1)
-                    continue
-                cv: ChartView = chart.cv
-                godw = chart.linked.godwidget
-
-                # NEW: if timestamp provided, lookup current index
-                # from shm to ensure alignment with current buffer
-                # state
-                if timestamp is not None:
-                    viz: Viz = chart.get_viz(fqme)
-                    shm = viz.shm
-                    arr = shm.array
-                    # find index where time matches timestamp
-                    matches = arr[arr['time'] == timestamp]
-                    if len(matches) == 0:
-                        log.error(
-                            f'No shm entry for timestamp={timestamp}, '
-                            f'skipping arrow annotation'
-                        )
-                        await annot_req_stream.send(-1)
-                        continue
-                    # use the matched row's index as x
-                    x = float(matches[0]['index'])
-
-                arrows = ArrowEditor(godw=godw)
-                # `.add/.remove()` API
-                if meth != 'add':
-                    # await tractor.pause()
-                    raise ValueError(
-                        f'Invalid arrow-edit request ?\n'
-                        f'{msg!r}\n'
-                    )
-
-                aid: str = str(uuid4())
-                arrow: pg.ArrowItem = arrows.add(
-                    plot=chart.plotItem,
-                    uid=aid,
-                    x=x,
-                    y=y,
-                    pointing=pointing,
-                    color=color,
-                    alpha=alpha,
-                    headLen=headLen,
-                    headWidth=headWidth,
-                    tailLen=tailLen,
-                    tailWidth=tailWidth,
-                    pxMode=pxMode,
-                )
-                # XXX: store absolute coords for repositioning
-                # during viz redraws (eg backfill updates)
-                arrow._abs_x = x
-                arrow._abs_y = y
-
-                annots[aid] = arrow
-                _editors[aid] = arrows
-                aids: set[int] = ctxs[ipc_key][1]
-                aids.add(aid)
-                await annot_req_stream.send(aid)
-
-            case {
-                'cmd': 'TextItem',
-                'fqme': fqme,
-                'timeframe': timeframe,
-                'kwargs': {
-                    'text': str(text),
-                    'x': int()|float() as x,
-                    'y': int()|float() as y,
-                    'color': color,
-                    'anchor': list(anchor),
-                    'font_size': int()|None as font_size,
-                    'time': int()|float()|None as timestamp,
-                },
-            }:
-                ds: DisplayState = _dss[fqme]
-                try:
-                    chart: ChartPlotWidget = {
-                        60: ds.hist_chart,
-                        1: ds.chart,
-                    }[timeframe]
-                except KeyError:
-                    log.warning(
-                        f'No chart for timeframe={timeframe}s, '
-                        f'skipping text annotation'
-                    )
-                    await annot_req_stream.send(-1)
-                    continue
-
-                # NEW: if timestamp provided, lookup current index
-                # from shm to ensure alignment with current buffer
-                # state
-                if timestamp is not None:
-                    viz: Viz = chart.get_viz(fqme)
-                    shm = viz.shm
-                    arr = shm.array
-                    # find index where time matches timestamp
-                    matches = arr[arr['time'] == timestamp]
-                    if len(matches) == 0:
-                        log.error(
-                            f'No shm entry for timestamp={timestamp}, '
-                            f'skipping text annotation'
-                        )
-                        await annot_req_stream.send(-1)
-                        continue
-                    # use the matched row's index as x, +1 for text
-                    # offset
-                    x = float(matches[0]['index']) + 1
-
-                # convert named color to hex
-                color_hex: str = hcolor(color)
-
-                # create text item
-                text_item: pg.TextItem = pg.TextItem(
-                    text=text,
-                    color=color_hex,
-                    anchor=anchor,
-
-                    # ?TODO, pin to github:main for this?
-                    # legacy, can have scaling ish?
-                    # ensureInBounds=True,
-                )
-
-                # apply font size (default to DpiAwareFont if not
-                # provided)
-                if font_size is None:
-                    from ._style import get_fonts
-                    font, font_small = get_fonts()
-                    font_size = font_small.px_size - 1
-
-                qfont: QFont = text_item.textItem.font()
-                qfont.setPixelSize(font_size)
-                text_item.setFont(qfont)
-
-                text_item.setPos(x, y)
-                chart.plotItem.addItem(text_item)
-
-                # XXX: store absolute coords for repositioning
-                # during viz redraws (eg backfill updates)
-                text_item._abs_x = x
-                text_item._abs_y = y
-
-                aid: str = str(uuid4())
-                annots[aid] = text_item
-                aids: set[int] = ctxs[ipc_key][1]
-                aids.add(aid)
-                await annot_req_stream.send(aid)
-
             case {
                 'cmd': 'remove',
-                'aid': int(aid)|str(aid),
+                'aid': int(aid),
             }:
                 # NOTE: this is normally entered on
                 # a client's annotation de-alloc normally
                 # prior to detach or modify.
                 annot: QGraphicsItem = annots[aid]
-                assert rm_annot(annot)
+                annot.delete()

                 # respond to client indicating annot
                 # was indeed deleted.
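
NOTE: for reference, the wire-msg shape the removed `ArrowEditor` case
matched on; every field value below is a made-up example, not real
piker data:

    msg: dict = {
        'cmd': 'ArrowEditor',
        'fqme': 'xmrusdt.usdtm.perp.binance',  # example fqme
        'timeframe': 60,
        'meth': 'add',
        'kwargs': {
            'x': 1234.0,
            'y': 50_000.0,
            'pointing': 'up',
            'color': 'dad_blue',
            'aid': None,
            'alpha': 116,
            'headLen': None,
            'headWidth': None,
            'tailLen': None,
            'tailWidth': None,
            'pxMode': True,
            'time': 1700000000,  # optional server-side index lookup
        },
    }
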
@@ -468,38 +175,6 @@ async def serve_rc_annots(
                 )
                 viz.reset_graphics()

-                # XXX: reposition all annotations to ensure they
-                # stay aligned with viz data after reset (eg during
-                # backfill when abs-index range changes)
-                n_repositioned: int = 0
-                for aid, annot in annots.items():
-                    # arrows and text items use abs x,y coords
-                    if (
-                        hasattr(annot, '_abs_x')
-                        and
-                        hasattr(annot, '_abs_y')
-                    ):
-                        annot.setPos(
-                            annot._abs_x,
-                            annot._abs_y,
-                        )
-                        n_repositioned += 1
-
-                    # rects use method + kwargs
-                    elif (
-                        hasattr(annot, '_meth')
-                        and
-                        hasattr(annot, '_kwargs')
-                    ):
-                        getattr(annot, annot._meth)(**annot._kwargs)
-                        n_repositioned += 1
-
-                if n_repositioned:
-                    log.info(
-                        f'Repositioned {n_repositioned} annotation(s) '
-                        f'after viz redraw'
-                    )
-
             case _:
                 log.error(
                     'Unknown remote annotation cmd:\n'
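
NOTE: a toy distillation of the removed repositioning pass's
duck-typed dispatch (the `setPos`-bearing objects are stand-ins for
the real Qt graphics items):

    def reposition_annots(annots: dict) -> int:
        n: int = 0
        for aid, annot in annots.items():
            # arrows/text items pin absolute (x, y) coords
            if (
                hasattr(annot, '_abs_x')
                and hasattr(annot, '_abs_y')
            ):
                annot.setPos(annot._abs_x, annot._abs_y)
                n += 1
            # rects replay their original draw call
            elif (
                hasattr(annot, '_meth')
                and hasattr(annot, '_kwargs')
            ):
                getattr(annot, annot._meth)(**annot._kwargs)
                n += 1
        return n
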
@@ -513,12 +188,6 @@ async def remote_annotate(
 ) -> None:

     global _dss, _ctxs
-    if not _dss:
-        raise RuntimeError(
-            'Race condition on chart-init state ??\n'
-            'Anoter actor is trying to annoate this chart '
-            'before it has fully spawned.\n'
-        )
     assert _dss

     _ctxs[ctx.cid] = (ctx, set())

@@ -543,7 +212,7 @@ async def remote_annotate(
         assert _ctx is ctx
         for aid in aids:
             annot: QGraphicsItem = _annots[aid]
-            assert rm_annot(annot)
+            annot.delete()


 class AnnotCtl(Struct):

@@ -588,18 +257,13 @@ class AnnotCtl(Struct):

         from_acm: bool = False,

-        # NEW: optional timestamps for server-side index lookup
-        start_time: float|None = None,
-        end_time: float|None = None,
-
-    ) -> int|None:
+    ) -> int:
         '''
         Add a `SelectRect` annotation to the target view, return
         the instances `id(obj)` from the remote UI actor.

         '''
         ipc: MsgStream = self._get_ipc(fqme)
-        with trio.fail_after(3):
         await ipc.send({
             'fqme': fqme,
             'cmd': 'SelectRect',

@@ -611,15 +275,9 @@ class AnnotCtl(Struct):
                 'end_pos': tuple(end_pos),
                 'color': color,
                 'update_label': False,
-                'start_time': start_time,
-                'end_time': end_time,
             },
         })
-        aid: int|dict = await ipc.receive()
-        match aid:
-            case {'error': str(msg)}:
-                log.error(msg)
-                return None
+        aid: int = await ipc.receive()
         self._ipcs[aid] = ipc
         if not from_acm:
             self._annot_stack.push_async_callback(

@@ -676,130 +334,20 @@ class AnnotCtl(Struct):
             'timeframe': timeframe,
         })

-    async def add_arrow(
-        self,
-        fqme: str,
-        timeframe: float,
-        x: float,
-        y: float,
-        pointing: Literal[
-            'up',
-            'down',
-        ],
-        # TODO: a `Literal['view', 'scene']` for this?
-        # domain: str = 'view',  # or 'scene'
-        color: str = 'dad_blue',
-        alpha: int = 116,
-        headLen: float|None = None,
-        headWidth: float|None = None,
-        tailLen: float|None = None,
-        tailWidth: float|None = None,
-        pxMode: bool = True,
-
-        from_acm: bool = False,
-
-        # NEW: optional timestamp for server-side index lookup
-        time: float|None = None,
-
-    ) -> int|None:
-        '''
-        Add a `SelectRect` annotation to the target view, return
-        the instances `id(obj)` from the remote UI actor.
-
-        '''
-        ipc: MsgStream = self._get_ipc(fqme)
-        with trio.fail_after(3):
-            await ipc.send({
-                'fqme': fqme,
-                'cmd': 'ArrowEditor',
-                'timeframe': timeframe,
-                # 'meth': str(meth),
-                'meth': 'add',
-                'kwargs': {
-                    'x': float(x),
-                    'y': float(y),
-                    'color': color,
-                    'pointing': pointing,  # up|down
-                    'alpha': alpha,
-                    'aid': None,
-                    'headLen': headLen,
-                    'headWidth': headWidth,
-                    'tailLen': tailLen,
-                    'tailWidth': tailWidth,
-                    'pxMode': pxMode,
-                    'time': time,  # for server-side index lookup
-                },
-            })
-        aid: int|dict = await ipc.receive()
-        match aid:
-            case {'error': str(msg)}:
-                log.error(msg)
-                return None
-
-        self._ipcs[aid] = ipc
-        if not from_acm:
-            self._annot_stack.push_async_callback(
-                partial(
-                    self.remove,
-                    aid,
-                )
-            )
-        return aid
-
-    async def add_text(
-        self,
-        fqme: str,
-        timeframe: float,
-        text: str,
-        x: float,
-        y: float,
-        color: str|tuple = 'dad_blue',
-        anchor: tuple[float, float] = (0, 1),
-        font_size: int|None = None,
-
-        from_acm: bool = False,
-
-        # NEW: optional timestamp for server-side index lookup
-        time: float|None = None,
-
-    ) -> int|None:
-        '''
-        Add a `pg.TextItem` annotation to the target view.
-
-        anchor: (x, y) where (0,0) is upper-left, (1,1) is lower-right
-        font_size: pixel size for font, defaults to `_font.font.pixelSize()`
-
-        '''
-        ipc: MsgStream = self._get_ipc(fqme)
-        with trio.fail_after(3):
-            await ipc.send({
-                'fqme': fqme,
-                'cmd': 'TextItem',
-                'timeframe': timeframe,
-                'kwargs': {
-                    'text': text,
-                    'x': float(x),
-                    'y': float(y),
-                    'color': color,
-                    'anchor': tuple(anchor),
-                    'font_size': font_size,
-                    'time': time,  # for server-side index lookup
-                },
-            })
-        aid: int|dict = await ipc.receive()
-        match aid:
-            case {'error': str(msg)}:
-                log.error(msg)
-                return None
-        self._ipcs[aid] = ipc
-        if not from_acm:
-            self._annot_stack.push_async_callback(
-                partial(
-                    self.remove,
-                    aid,
-                )
-            )
-        return aid
+    # TODO: do we even need this?
+    # async def modify(
+    #     self,
+    #     aid: int,  # annotation id
+    #     meth: str,  # far end graphics object method to invoke
+    #     params: dict[str, Any],  # far end `meth(**kwargs)`
+    # ) -> bool:
+    #     '''
+    #     Modify an existing (remote) annotation's graphics
+    #     paramters, thus changing it's appearance / state in real
+    #     time.
+    #
+    #     '''
+    #     raise NotImplementedError


 @acm

@@ -826,9 +374,7 @@ async def open_annot_ctl(
     # TODO: print the current discoverable actor UID set
     # here as well?
     if not maybe_portals:
-        raise RuntimeError(
-            'No chart actors found in service domain?'
-        )
+        raise RuntimeError('No chart UI actors found in service domain?')

     for portal in maybe_portals:
         ctx_mngrs.append(
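
NOTE: a hedged client-side usage sketch of the `AnnotCtl` API per this
diff; the `add_rect()` method name and the fqme are assumptions, and
`open_annot_ctl()` is presumed to yield an `AnnotCtl` instance:

    async def annotate_once() -> None:
        async with open_annot_ctl() as actl:
            aid: int = await actl.add_rect(
                fqme='xmrusdt.usdtm.perp.binance',  # example fqme
                timeframe=60,
                start_pos=(100, 50_000.0),
                end_pos=(120, 51_250.0),
            )
            # `aid` indexes the remote `QGraphicsItem` table until
            # explicitly removed (or auto-removed on ctl teardown)
            await actl.remove(aid)
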
@@ -61,7 +61,7 @@ class DpiAwareFont:
     ) -> None:

         self._font_size_calc_key: str = _font_size_key
-        self._font_size: int|None = None
+        self._font_size: int | None = None

         # Read preferred font size from main config file if it exists
         conf, path = config.load('conf', touch_if_dne=True)

@@ -107,22 +107,7 @@ class DpiAwareFont:

     @property
     def px_size(self) -> int:
-        size: int = self._qfont.pixelSize()
-
-        # XXX, when no Qt app has been spawned this will always be
-        # invalid..
-        # SO, just return any conf.toml value.
-        if size == -1:
-            if (conf_size := self._font_size) is None:
-                raise ValueError(
-                    f'No valid `{type(_font).__name__}.px_size` set?\n'
-                    f'\n'
-                    f'-> `ui.font_size` is NOT set in `conf.toml`\n'
-                    f'-> no Qt app is active ??\n'
-                )
-            return conf_size
-
-        return size
+        return self._qfont.pixelSize()

     def configure_to_dpi(self, screen: QtGui.QScreen | None = None):
         '''

@@ -236,20 +221,6 @@ def _config_fonts_to_screen() -> None:
     _font_small.configure_to_dpi()


-def get_fonts() -> tuple[
-    DpiAwareFont,
-    DpiAwareFont,
-]:
-    '''
-    Get the singleton font pair (of instances) from which all other
-    UI/UX should be "scaled around".
-
-    See `DpiAwareFont` for (internal) deats.
-
-    '''
-    return _font, _font_small
-
-
 # TODO: re-compute font size when main widget switches screens?
 # https://forum.qt.io/topic/54136/how-do-i-get-the-qscreen-my-widget-is-on-qapplication-desktop-screen-returns-a-qwidget-and-qobject_cast-qscreen-returns-null/3
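
NOTE: the removed fallback hinges on Qt's convention that
`QFont.pixelSize()` returns -1 when no pixel size is set (eg. before
any Qt app exists); a Qt-free sketch of the same resolution logic:

    def resolve_px_size(
        qt_px_size: int,        # what `QFont.pixelSize()` returned
        conf_size: int | None,  # `ui.font_size` from conf.toml, if set
    ) -> int:
        if qt_px_size == -1:
            if conf_size is None:
                raise ValueError('no valid px_size set')
            return conf_size
        return qt_px_size

    assert resolve_px_size(-1, 16) == 16
    assert resolve_px_size(12, 16) == 12
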
@@ -300,10 +300,7 @@ class GodWidget(QWidget):
         getattr(widget, 'on_resize')
         self._widgets[widget.mode_name] = widget

-    def on_win_resize(
-        self,
-        event: QtCore.QEvent,
-    ) -> None:
+    def on_win_resize(self, event: QtCore.QEvent) -> None:
         '''
         Top level god widget handler from window (the real yaweh) resize
         events such that any registered widgets which wish to be

@@ -318,10 +315,7 @@ class GodWidget(QWidget):

         self._resizing = True

-        log.debug(
-            f'God widget resize\n'
-            f'{event}\n'
-        )
+        log.info('God widget resize')
         for name, widget in self._widgets.items():
             widget.on_resize()

@@ -255,16 +255,8 @@ class MainWindow(QMainWindow):
         current: QWidget,

     ) -> None:
-        '''
-        Focus handler.
-
-        For now updates the "current mode" name.
-
-        '''
-        log.debug(
-            f'widget focus changed from,\n'
-            f'{last} -> {current}'
-        )
+        log.info(f'widget focus changed from {last} -> {current}')

         if current is not None:
             # cursor left window?
@@ -177,7 +177,7 @@ def chart(
         return

     # global opts
-    # brokernames: list[str] = config['brokers']
+    brokernames = config['brokers']
     brokermods = config['brokermods']
     assert brokermods
     tractorloglevel = config['tractorloglevel']

@@ -216,7 +216,6 @@ def chart(
             layers['tcp']['port'],
         ))

-    # breakpoint()
     from tractor.devx import maybe_open_crash_handler
     pdb: bool = config['pdb']
     with maybe_open_crash_handler(pdb=pdb):
@@ -513,14 +513,13 @@ class OrderMode:
     def on_submit(
         self,
         uuid: str,
-        order: Order|None = None,
+        order: Order | None = None,

-    ) -> Dialog|None:
+    ) -> Dialog | None:
         '''
         Order submitted status event handler.

-        Commit the order line and registered order uuid, store ack
-        time stamp.
+        Commit the order line and registered order uuid, store ack time stamp.

         '''
         lines = self.lines.commit_line(uuid)
@@ -116,6 +116,7 @@ uis = [
 dev = [
     # https://docs.astral.sh/uv/concepts/projects/dependencies/#development-dependencies
     "cython >=3.0.0, <4.0.0",
+
     # nested deps-groups
     # https://docs.astral.sh/uv/concepts/projects/dependencies/#nesting-groups
     {include-group = 'uis'},

@@ -133,10 +134,6 @@ repl = [
     "prompt-toolkit ==3.0.40",
     "pyperclip>=1.9.0",

-    # for @claude's `snippets/claude_debug_helper.py` it uses to do
-    # "offline" debug/crash REPL-in alongside a dev.
-    "pexpect>=4.9.0",
-
     # ?TODO, new stuff to consider..
     # "visidata"  # console numerics
     # "xxh"  # for remote `xonsh`-ing

@@ -194,15 +191,10 @@ pyqtgraph = { git = "https://github.com/pikers/pyqtgraph.git" }
 tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch ="piker_pin" }
 pyvnc = { git = "https://github.com/regulad/pyvnc.git" }

-# to get fancy next-cmd/suggestion feats prior to 0.22.2 B)
-# https://github.com/xonsh/xonsh/pull/6037
-# https://github.com/xonsh/xonsh/pull/6048
-xonsh = { git = 'https://github.com/xonsh/xonsh.git', branch = 'main' }
-
 # XXX since, we're like, always hacking new shite all-the-time. Bp
-# tractor = { git = "https://github.com/goodboy/tractor.git", branch ="piker_pin" }
+tractor = { git = "https://github.com/goodboy/tractor.git", branch ="piker_pin" }
 # tractor = { git = "https://pikers.dev/goodboy/tractor", branch = "piker_pin" }
 # tractor = { git = "https://pikers.dev/goodboy/tractor", branch = "main" }
 # ------ goodboy ------
 # hackin dev-envs, usually there's something new he's hackin in..
-tractor = { path = "../tractor", editable = true }
+# tractor = { path = "../tractor", editable = true }
@@ -1,256 +0,0 @@
-#!/usr/bin/env python
-'''
-Programmatic debugging helper for `pdbp` REPL human-like
-interaction but built to allow `claude` to interact with
-crashes and `tractor.pause()` breakpoints along side a human dev.
-
-Originally written by `clauded` during a backfiller inspection
-session with @goodboy trying to resolve duplicate/gappy ohlcv ts
-issues discovered while testing the new `nativedb` tsdb.
-
-Allows `claude` to run `pdb` commands and capture output in an "offline"
-manner but generating similar output as if it was iteracting with
-the debug REPL.
-
-The use of `pexpect` is heavily based on tractor's REPL UX test
-suite(s), namely various `tests/devx/test_debugger.py` patterns.
-
-'''
-import sys
-import os
-import time
-
-import pexpect
-from pexpect.exceptions import (
-    TIMEOUT,
-    EOF,
-)
-
-
-PROMPT: str = r'\(Pdb\+\)'
-
-
-def expect(
-    child: pexpect.spawn,
-    patt: str,
-    **kwargs,
-) -> None:
-    '''
-    Expect wrapper that prints last console data before failing.
-
-    '''
-    try:
-        child.expect(
-            patt,
-            **kwargs,
-        )
-    except TIMEOUT:
-        before: str = (
-            str(child.before.decode())
-            if isinstance(child.before, bytes)
-            else str(child.before)
-        )
-        print(
-            f'TIMEOUT waiting for pattern: {patt}\n'
-            f'Last seen output:\n{before}'
-        )
-        raise
-
-
-def run_pdb_commands(
-    commands: list[str],
-    initial_cmd: str = 'piker store ldshm xmrusdt.usdtm.perp.binance',
-    timeout: int = 30,
-    print_output: bool = True,
-) -> dict[str, str]:
-    '''
-    Spawn piker process, wait for pdb prompt, execute commands.
-
-    Returns dict mapping command -> output.
-
-    '''
-    results: dict[str, str] = {}
-
-    # Disable colored output for easier parsing
-    os.environ['PYTHON_COLORS'] = '0'
-
-    # Spawn the process
-    if print_output:
-        print(f'Spawning: {initial_cmd}')
-
-    child: pexpect.spawn = pexpect.spawn(
-        initial_cmd,
-        timeout=timeout,
-        encoding='utf-8',
-        echo=False,
-    )
-
-    # Wait for pdb prompt
-    try:
-        expect(child, PROMPT, timeout=timeout)
-        if print_output:
-            print('Reached pdb prompt!')
-
-        # Execute each command
-        for cmd in commands:
-            if print_output:
-                print(f'\n>>> {cmd}')
-
-            child.sendline(cmd)
-            time.sleep(0.1)
-
-            # Wait for next prompt
-            expect(child, PROMPT, timeout=timeout)
-
-            # Capture output (everything before the prompt)
-            output: str = (
-                str(child.before.decode())
-                if isinstance(child.before, bytes)
-                else str(child.before)
-            )
-            results[cmd] = output
-
-            if print_output:
-                print(output)
-
-        # Quit debugger gracefully
-        child.sendline('quit')
-        try:
-            child.expect(EOF, timeout=5)
-        except (TIMEOUT, EOF):
-            pass
-
-    except TIMEOUT as e:
-        print(f'Timeout: {e}')
-        if child.before:
-            before: str = (
-                str(child.before.decode())
-                if isinstance(child.before, bytes)
-                else str(child.before)
-            )
-            print(f'Buffer:\n{before}')
-        results['_error'] = str(e)
-
-    finally:
-        if child.isalive():
-            child.close(force=True)
-
-    return results
-
-
-class InteractivePdbSession:
-    '''
-    Interactive pdb session manager for incremental debugging.
-
-    '''
-    def __init__(
-        self,
-        cmd: str = 'piker store ldshm xmrusdt.usdtm.perp.binance',
-        timeout: int = 30,
-    ):
-        self.cmd: str = cmd
-        self.timeout: int = timeout
-        self.child: pexpect.spawn|None = None
-        self.history: list[tuple[str, str]] = []
-
-    def start(self) -> None:
-        '''
-        Start the piker process and wait for first prompt.
-
-        '''
-        os.environ['PYTHON_COLORS'] = '0'
-
-        print(f'Starting: {self.cmd}')
-        self.child = pexpect.spawn(
-            self.cmd,
-            timeout=self.timeout,
-            encoding='utf-8',
-            echo=False,
-        )
-
-        # Wait for initial prompt
-        expect(self.child, PROMPT, timeout=self.timeout)
-        print('Ready at pdb prompt!')
-
-    def run(
-        self,
-        cmd: str,
-        print_output: bool = True,
-    ) -> str:
-        '''
-        Execute a single pdb command and return output.
-
-        '''
-        if not self.child or not self.child.isalive():
-            raise RuntimeError('Session not started or dead')
-
-        if print_output:
-            print(f'\n>>> {cmd}')
-
-        self.child.sendline(cmd)
-        time.sleep(0.1)
-
-        # Wait for next prompt
-        expect(self.child, PROMPT, timeout=self.timeout)
-
-        output: str = (
-            str(self.child.before.decode())
-            if isinstance(self.child.before, bytes)
-            else str(self.child.before)
-        )
-        self.history.append((cmd, output))
-
-        if print_output:
-            print(output)
-
-        return output
-
-    def quit(self) -> None:
-        '''
-        Exit the debugger and cleanup.
-
-        '''
-        if self.child and self.child.isalive():
-            self.child.sendline('quit')
-            try:
-                self.child.expect(EOF, timeout=5)
-            except (TIMEOUT, EOF):
-                pass
-            self.child.close(force=True)
-
-    def __enter__(self):
-        self.start()
-        return self
-
-    def __exit__(self, *args):
-        self.quit()
-
-
-if __name__ == '__main__':
-    # Example inspection commands
-    inspect_cmds: list[str] = [
-        'locals().keys()',
-        'type(deduped)',
-        'deduped.shape',
-        (
-            'step_gaps.shape '
-            'if "step_gaps" in locals() '
-            'else "N/A"'
-        ),
-        (
-            'venue_gaps.shape '
-            'if "venue_gaps" in locals() '
-            'else "N/A"'
-        ),
-    ]
-
-    # Allow commands from CLI args
-    if len(sys.argv) > 1:
-        inspect_cmds = sys.argv[1:]
-
-    # Interactive session example
-    with InteractivePdbSession() as session:
-        for cmd in inspect_cmds:
-            session.run(cmd)
-
-    print('\n=== Session Complete ===')
uv.lock
@@ -1000,18 +1000,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/6e/23/e98758924d1b3aac11a626268eabf7f3cf177e7837c28d47bf84c64532d0/pendulum-3.1.0-py3-none-any.whl", hash = "sha256:f9178c2a8e291758ade1e8dd6371b1d26d08371b4c7730a6e9a3ef8b16ebae0f", size = 111799, upload-time = "2025-04-19T14:02:34.739Z" },
 ]

-[[package]]
-name = "pexpect"
-version = "4.9.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "ptyprocess" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" },
-]
-
 [[package]]
 name = "piker"
 version = "0.1.0a0.dev0"

@@ -1059,7 +1047,6 @@ dev = [
     { name = "greenback" },
     { name = "i3ipc" },
     { name = "pdbp" },
-    { name = "pexpect" },
     { name = "prompt-toolkit" },
     { name = "pyperclip" },
     { name = "pyqt6" },

@@ -1075,7 +1062,6 @@ lint = [
 repl = [
     { name = "greenback" },
     { name = "pdbp" },
-    { name = "pexpect" },
     { name = "prompt-toolkit" },
     { name = "pyperclip" },
     { name = "xonsh" },

@@ -1113,7 +1099,7 @@ requires-dist = [
     { name = "tomli", specifier = ">=2.0.1,<3.0.0" },
     { name = "tomli-w", specifier = ">=1.0.0,<2.0.0" },
     { name = "tomlkit", git = "https://github.com/pikers/tomlkit.git?branch=piker_pin" },
-    { name = "tractor", editable = "../tractor" },
+    { name = "tractor", git = "https://github.com/goodboy/tractor.git?branch=piker_pin" },
     { name = "trio", specifier = ">=0.27" },
     { name = "trio-typing", specifier = ">=0.10.0" },
     { name = "trio-util", specifier = ">=0.7.0,<0.8.0" },

@@ -1130,7 +1116,6 @@ dev = [
     { name = "greenback", specifier = ">=1.1.1,<2.0.0" },
     { name = "i3ipc", specifier = ">=2.2.1" },
     { name = "pdbp", specifier = ">=1.8.2,<2.0.0" },
-    { name = "pexpect", specifier = ">=4.9.0" },
     { name = "prompt-toolkit", specifier = "==3.0.40" },
     { name = "pyperclip", specifier = ">=1.9.0" },
     { name = "pyqt6", specifier = ">=6.7.0,<7.0.0" },

@@ -1138,16 +1123,15 @@ dev = [
     { name = "pytest" },
     { name = "qdarkstyle", specifier = ">=3.0.2,<4.0.0" },
     { name = "rapidfuzz", specifier = ">=3.2.0,<4.0.0" },
-    { name = "xonsh", git = "https://github.com/xonsh/xonsh.git?branch=main" },
+    { name = "xonsh" },
 ]
 lint = [{ name = "ruff", specifier = ">=0.9.6" }]
 repl = [
     { name = "greenback", specifier = ">=1.1.1,<2.0.0" },
     { name = "pdbp", specifier = ">=1.8.2,<2.0.0" },
-    { name = "pexpect", specifier = ">=4.9.0" },
     { name = "prompt-toolkit", specifier = "==3.0.40" },
     { name = "pyperclip", specifier = ">=1.9.0" },
-    { name = "xonsh", git = "https://github.com/xonsh/xonsh.git?branch=main" },
+    { name = "xonsh" },
 ]
 testing = [{ name = "pytest" }]
 uis = [

@@ -1159,11 +1143,11 @@ uis = [

 [[package]]
 name = "platformdirs"
-version = "4.6.0"
+version = "4.5.1"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/20/e5/474d0a8508029286b905622e6929470fb84337cfa08f9d09fbb624515249/platformdirs-4.6.0.tar.gz", hash = "sha256:4a13c2db1071e5846c3b3e04e5b095c0de36b2a24be9a3bc0145ca66fce4e328", size = 23433, upload-time = "2026-02-12T14:36:21.288Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/da/10/1b0dcf51427326f70e50d98df21b18c228117a743a1fc515a42f8dc7d342/platformdirs-4.6.0-py3-none-any.whl", hash = "sha256:dd7f808d828e1764a22ebff09e60f175ee3c41876606a6132a688d809c7c9c73", size = 19549, upload-time = "2026-02-12T14:36:19.743Z" },
+    { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" },
 ]

@@ -1313,15 +1297,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" },
 ]

-[[package]]
-name = "ptyprocess"
-version = "0.7.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" },
-]
-
 [[package]]
 name = "pyarrow"
 version = "22.0.0"

@@ -1868,7 +1843,7 @@ source = { git = "https://github.com/pikers/tomlkit.git?branch=piker_pin#8e0239a" }
 [[package]]
 name = "tractor"
 version = "0.1.0a6.dev0"
-source = { editable = "../tractor" }
+source = { git = "https://github.com/goodboy/tractor.git?branch=piker_pin#e232d9dd06f41b8dca997f0647f2083d27cc34f2" }
 dependencies = [
     { name = "bidict" },
     { name = "cffi" },

@@ -1881,48 +1856,6 @@ dependencies = [
     { name = "wrapt" },
 ]

-[package.metadata]
-requires-dist = [
-    { name = "bidict", specifier = ">=0.23.1" },
-    { name = "cffi", specifier = ">=1.17.1" },
-    { name = "colorlog", specifier = ">=6.8.2,<7" },
-    { name = "msgspec", specifier = ">=0.19.0" },
-    { name = "pdbp", specifier = ">=1.8.2,<2" },
-    { name = "platformdirs", specifier = ">=4.4.0" },
-    { name = "tricycle", specifier = ">=0.4.1,<0.5" },
-    { name = "trio", specifier = ">0.27" },
-    { name = "wrapt", specifier = ">=1.16.0,<2" },
|
||||||
]
|
|
||||||
|
|
||||||
[package.metadata.requires-dev]
|
|
||||||
dev = [
|
|
||||||
{ name = "greenback", specifier = ">=1.2.1,<2" },
|
|
||||||
{ name = "pexpect", specifier = ">=4.9.0,<5" },
|
|
||||||
{ name = "prompt-toolkit", specifier = ">=3.0.50" },
|
|
||||||
{ name = "psutil", specifier = ">=7.0.0" },
|
|
||||||
{ name = "pyperclip", specifier = ">=1.9.0" },
|
|
||||||
{ name = "pytest", specifier = ">=8.3.5" },
|
|
||||||
{ name = "stackscope", specifier = ">=0.2.2,<0.3" },
|
|
||||||
{ name = "typing-extensions", specifier = ">=4.14.1" },
|
|
||||||
{ name = "xonsh", specifier = ">=0.22.2" },
|
|
||||||
]
|
|
||||||
devx = [
|
|
||||||
{ name = "greenback", specifier = ">=1.2.1,<2" },
|
|
||||||
{ name = "stackscope", specifier = ">=0.2.2,<0.3" },
|
|
||||||
{ name = "typing-extensions", specifier = ">=4.14.1" },
|
|
||||||
]
|
|
||||||
lint = [{ name = "ruff", specifier = ">=0.9.6" }]
|
|
||||||
repl = [
|
|
||||||
{ name = "prompt-toolkit", specifier = ">=3.0.50" },
|
|
||||||
{ name = "psutil", specifier = ">=7.0.0" },
|
|
||||||
{ name = "pyperclip", specifier = ">=1.9.0" },
|
|
||||||
{ name = "xonsh", specifier = ">=0.22.2" },
|
|
||||||
]
|
|
||||||
testing = [
|
|
||||||
{ name = "pexpect", specifier = ">=4.9.0,<5" },
|
|
||||||
{ name = "pytest", specifier = ">=8.3.5" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tricycle"
|
name = "tricycle"
|
||||||
version = "0.4.1"
|
version = "0.4.1"
|
||||||
|
|
@ -2162,8 +2095,14 @@ wheels = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "xonsh"
|
name = "xonsh"
|
||||||
version = "0.22.3"
|
version = "0.20.0"
|
||||||
source = { git = "https://github.com/xonsh/xonsh.git?branch=main#b446946fd94c3913e002318db1d1b41ee4fa1f9a" }
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/56/af/7e2ba3885da44cbe03c7ff46f90ea917ba10d91dc74d68604001ea28055f/xonsh-0.20.0.tar.gz", hash = "sha256:d44a50ee9f288ff96bd0456f0a38988ef6d4985637140ea793beeef5ec5d2d38", size = 811907, upload-time = "2025-11-24T07:50:50.847Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e8/db/1c5c057c0b2a89b8919477726558685720ae0849ea1a98a3803e93550824/xonsh-0.20.0-py311-none-any.whl", hash = "sha256:65d27ba31d558f79010d6c652751449fd3ed4df1f1eda78040a6427fa0a0f03e", size = 646312, upload-time = "2025-11-24T07:50:49.488Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d2/a2/d6f7534f31489a4b8b54bd2a2496248f86f7c21a6a6ce9bfdcdd389fe4e7/xonsh-0.20.0-py312-none-any.whl", hash = "sha256:3148900e67b9c2796bef6f2eda003b0a64d4c6f50a0db23324f786d9e1af9353", size = 646323, upload-time = "2025-11-24T07:50:43.028Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/bd/48/bcb1e4d329c3d522bc29b066b0b6ee86938ec392376a29c36fac0ad1c586/xonsh-0.20.0-py313-none-any.whl", hash = "sha256:c83daaf6eb2960180fc5a507459dbdf6c0d6d63e1733c43f4e43db77255c7278", size = 646830, upload-time = "2025-11-24T07:50:45.078Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "yapic-json"
|
name = "yapic-json"
|
||||||
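For reference, the source swaps recorded in this lock diff (tractor moving from a local editable checkout to a branch pin, xonsh moving from a git branch back to PyPI) would be driven from the pyproject.toml side and re-resolved by uv. A minimal sketch, assuming the pins are routed through uv's [tool.uv.sources] table; this is a hypothetical excerpt, not the repo's actual file:

# Hypothetical pyproject.toml excerpt mirroring the lock changes above.
[project]
name = "piker"
dependencies = [
    "tractor",
    "tomlkit",
    "xonsh",  # resolved from PyPI again, rather than the xonsh.git main branch
]

[tool.uv.sources]
# previously (assumed): tractor = { path = "../tractor", editable = true }
tractor = { git = "https://github.com/goodboy/tractor.git", branch = "piker_pin" }
tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch = "piker_pin" }

Re-running uv lock after such an edit rewrites the corresponding source = { ... } entries and records the resolved commit hash, which is what the tractor and xonsh hunks above reflect.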