Compare commits


No commits in common. "ib_async_CONT" and "main" have entirely different histories.

26 changed files with 1526 additions and 1454 deletions


@ -203,13 +203,9 @@ async def stream_messages(
yield 'trade', piker_quote
def make_sub(
pairs: list[str],
sub_name: str,
uid: int,
) -> dict[str, str]:
def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]:
'''
Create a request subscription packet `dict`.
Create a request subscription packet dict.
- spot:
https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams
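A minimal sketch of the subscribe packet `make_sub()` builds (the body is elided by this hunk; the field names follow the Binance spot ws docs linked above, so treat them as assumed):
def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict:
    return {
        'method': 'SUBSCRIBE',  # per the linked Binance spot ws docs
        'params': [f'{pair.lower()}@{sub_name}' for pair in pairs],
        'id': uid,  # client-chosen id echoed back by the server
    }
# eg. make_sub(['BTCUSDT'], 'aggTrade', 1)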
@ -336,8 +332,7 @@ async def get_mkt_info(
# TODO: handle coinm futes which have a margin asset that
# is some crypto token!
# https://binance-docs.github.io/apidocs/delivery/en/#exchange-information
or
'btc' in venue_lower
or 'btc' in venue_lower
):
return None
@ -348,14 +343,12 @@ async def get_mkt_info(
if (
venue
and
'spot' not in venue_lower
and 'spot' not in venue_lower
# XXX: catch all in case user doesn't know which
# venue they want (usdtm vs. coinm) and we can choose
# a default (via config?) once we support coin-m APIs.
or
'perp' in venue_lower
or 'perp' in venue_lower
):
if not mkt_mode:
mkt_mode: str = f'{venue_lower}_futes'


@ -2,7 +2,7 @@
--------------
more or less the "everything broker" for traditional and international
markets. they are the "go to" provider for automatic retail trading
and we interface to their APIs using the `ib_async` project.
and we interface to their APIs using the `ib_insync` project.
status
******


@ -22,7 +22,7 @@ Sub-modules within break into the core functionalities:
- ``broker.py`` part for orders / trading endpoints
- ``feed.py`` for real-time data feed endpoints
- ``api.py`` for the core API machinery which is ``trio``-ized
wrapping around `ib_async`.
wrapping around ``ib_insync``.
"""
from .api import (


@ -111,7 +111,7 @@ def load_flex_trades(
) -> dict[str, Any]:
from ib_async import flexreport, util
from ib_insync import flexreport, util
conf = get_config()
@ -154,7 +154,8 @@ def load_flex_trades(
trade_entries,
)
ledger_dict: dict|None
ledger_dict: dict | None = None
for acctid in trades_by_account:
trades_by_id = trades_by_account[acctid]


@ -20,7 +20,6 @@ runnable script-programs.
'''
from __future__ import annotations
import asyncio
from datetime import ( # noqa
datetime,
date,
@ -141,8 +140,7 @@ async def data_reset_hack(
except (
OSError, # no VNC server avail..
PermissionError, # asyncvnc pw fail..
) as _vnc_err:
vnc_err = _vnc_err
):
try:
import i3ipc # noqa (since a deps dynamic check)
except ModuleNotFoundError:
@ -168,22 +166,14 @@ async def data_reset_hack(
# localhost but no vnc-client or it borked..
else:
log.error(
'VNC CLICK HACK FAILE with,\n'
f'{vnc_err!r}\n'
)
# breakpoint()
# try_xdo_manual(client)
try_xdo_manual(client)
case 'i3ipc_xdotool':
try_xdo_manual(client)
# i3ipc_xdotool_manual_click_hack()
case _ as tech:
raise RuntimeError(
f'{tech!r} is not supported for reset tech!?'
)
raise RuntimeError(f'{tech} is not supported for reset tech!?')
# we don't really need the ``xdotool`` approach any more B)
return True
@ -279,35 +269,10 @@ async def vnc_click_hack(
500,
)
)
# in case a prior dialog win is open/active.
await client.press('ISO_Enter')
# ensure the ib-gw window is active
await client.click(MOUSE_BUTTON_LEFT)
# send the hotkeys combo B)
await client.press(
'Ctrl',
'Alt',
key,
) # NOTE, keys are stacked
# XXX, sometimes a dialog asking if you want to "simulate
# a reset" will show, in which case we want to select
# "Yes" (by tabbing) and then hit enter.
iters: int = 1
delay: float = 0.3
await asyncio.sleep(delay)
for i in range(iters):
log.info(f'Sending TAB {i}')
await client.press('Tab')
await asyncio.sleep(delay)
for i in range(iters):
log.info(f'Sending ENTER {i}')
await client.press('KP_Enter')
await asyncio.sleep(delay)
await client.press('Ctrl', 'Alt', key) # keys are stacked
def i3ipc_fin_wins_titled(

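A condensed sketch of the reset sequence `vnc_click_hack()` drives above; `client` and `MOUSE_BUTTON_LEFT` are assumed to match the (asyncvnc-style) API already used in this file:
import asyncio

async def send_reset_combo(client, key: str) -> None:
    await client.press('ISO_Enter')  # dismiss any prior dialog win
    await client.click(MOUSE_BUTTON_LEFT)  # focus the ib-gw window
    await client.press('Ctrl', 'Alt', key)  # NOTE, keys are stacked
    # a 'simulate reset?' dialog may pop up: tab to 'Yes' then enter
    delay: float = 0.3
    await asyncio.sleep(delay)
    await client.press('Tab')
    await asyncio.sleep(delay)
    await client.press('KP_Enter')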

@ -15,8 +15,7 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
Core API client machinery; mostly sane/useful wrapping around
`ib_async`..
Core API client machinery; mostly sane/useful wrapping around `ib_insync`..
'''
from __future__ import annotations
@ -58,7 +57,7 @@ from pendulum import (
Interval,
)
from eventkit import Event
from ib_async import (
from ib_insync import (
client as ib_client,
IB,
Contract,
@ -144,7 +143,7 @@ _bar_sizes = {
_show_wap_in_history: bool = False
# overrides to sidestep pretty questionable design decisions in
# ``ib_async``:
# ``ib_insync``:
class NonShittyWrapper(Wrapper):
def tcpDataArrived(self):
"""Override time stamps to be floats for now.
@ -184,7 +183,7 @@ class NonShittyIB(IB):
'''
def __init__(self):
# override `ib_async` internal loggers so we can see wtf
# override `ib_insync` internal loggers so we can see wtf
# it's doing..
self._logger = get_logger(
name=__name__,
@ -195,7 +194,7 @@ class NonShittyIB(IB):
self.wrapper = NonShittyWrapper(self)
self.client = ib_client.Client(self.wrapper)
self.client._logger = get_logger(
name='ib_async.client',
name='ib_insync.client',
)
# self.errorEvent += self._onError
@ -561,7 +560,7 @@ class Client:
# f'Recursing for more bars:\n'
)
# XXX, debug!
# breakpoint()
breakpoint()
# XXX ? TODO? recursively try to re-request?
# => i think *NO* right?
#
@ -768,48 +767,25 @@ class Client:
expiry: str = '',
front: bool = False,
) -> Contract|list[Contract]:
) -> Contract:
'''
Get an unqualifed contract for the current "continous"
future.
When input params result in a so called "ambiguous contract"
situation, we return the list of all matches provided by,
`IB.qualifyContractsAsync(..., returnAll=True)`
'''
# it's the "front" contract returned here
if front:
cons = (
await self.ib.qualifyContractsAsync(
ContFuture(symbol, exchange=exchange),
returnAll=True,
)
)
con = (await self.ib.qualifyContractsAsync(
ContFuture(symbol, exchange=exchange)
))[0]
else:
cons = (
await self.ib.qualifyContractsAsync(
con = (await self.ib.qualifyContractsAsync(
Future(
symbol,
exchange=exchange,
lastTradeDateOrContractMonth=expiry,
),
returnAll=True,
)
)
con = cons[0]
if isinstance(con, list):
log.warning(
f'{len(con)!r} futes cons matched for input params,\n'
f'symbol={symbol!r}\n'
f'exchange={exchange!r}\n'
f'expiry={expiry!r}\n'
f'\n'
f'cons:\n'
f'{con!r}\n'
)
))[0]
return con
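A sketch of the "ambiguous contract" handling described in the docstring above: with `returnAll=True` (the `ib_async` extension used here) each input maps to either a single qualified `Contract` or a `list` of all matches; the ES/CME params are illustrative only:
from ib_async import IB, Future

async def get_es_fute(ib: IB):
    cons = await ib.qualifyContractsAsync(
        Future('ES', exchange='CME'),  # no expiry -> maybe ambiguous
        returnAll=True,
    )
    con = cons[0]
    if isinstance(con, list):
        # multiple expiries matched; caller must disambiguate
        print(f'{len(con)} futes matched input params')
    return con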
@ -902,7 +878,7 @@ class Client:
currency='USD',
exchange='PAXOS',
)
# XXX, on `ib_async` when first tried this,
# XXX, on `ib_insync` when first tried this,
# > Error 10299, reqId 141: Expected what to show is
# > AGGTRADES, please use that instead of TRADES.,
# > contract: Crypto(conId=479624278, symbol='BTC',
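A sketch of the workaround error 10299 prescribes above, assuming the standard `ib_insync` history endpoint and the PAXOS BTC contract from the comment:
from ib_insync import IB, Crypto

async def btc_bars(ib: IB):
    return await ib.reqHistoricalDataAsync(
        Crypto('BTC', 'PAXOS', 'USD'),
        endDateTime='',
        durationStr='1 D',
        barSizeSetting='1 min',
        whatToShow='AGGTRADES',  # 'TRADES' raises error 10299
        useRTH=False,
    )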
@ -934,17 +910,11 @@ class Client:
)
exch = 'SMART' if not exch else exch
if isinstance(con, list):
contracts: list[Contract] = con
else:
contracts: list[Contract] = [con]
if qualify:
try:
contracts: list[Contract] = (
await self.ib.qualifyContractsAsync(
*contracts
)
await self.ib.qualifyContractsAsync(con)
)
except RequestError as err:
msg = err.message
@ -1022,6 +992,7 @@ class Client:
async def get_sym_details(
self,
fqme: str,
) -> tuple[
Contract,
ContractDetails,
@ -1121,7 +1092,7 @@ class Client:
size: int,
account: str, # if blank the "default" tws account is used
# XXX: by default 0 tells ``ib_async`` methods that there is no
# XXX: by default 0 tells ``ib_insync`` methods that there is no
# existing order so ask the client to create a new one (which it
# seems to do by allocating an int counter - collision prone..)
reqid: int = None,
@ -1310,7 +1281,7 @@ async def load_aio_clients(
port: int = None,
client_id: int = 6116,
# the API TCP in `ib_async` connection can be flaky af so instead
# the API TCP in `ib_insync` connection can be flaky af so instead
# retry a few times to get the client going..
connect_retries: int = 3,
connect_timeout: float = 30, # in case a remote-host
@ -1318,7 +1289,7 @@ async def load_aio_clients(
) -> dict[str, Client]:
'''
Return an ``ib_async.IB`` instance wrapped in our client API.
Return an ``ib_insync.IB`` instance wrapped in our client API.
Client instances are cached for later use.
@ -1660,7 +1631,6 @@ async def open_aio_client_method_relay(
) -> None:
# with tractor.devx.maybe_open_crash_handler() as _bxerr:
# sync with `open_client_proxy()` caller
chan.started_nowait(client)
@ -1670,11 +1640,7 @@ async def open_aio_client_method_relay(
# relay all method requests to ``asyncio``-side client and deliver
# back results
while not chan._to_trio._closed: # <- TODO, better check like `._web_bs`?
msg: (
None
|tuple[str, dict]
|dict
) = await chan.get()
msg: tuple[str, dict]|dict|None = await chan.get()
match msg:
case None: # termination sentinel
log.info('asyncio `Client` method-proxy SHUTDOWN!')
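A sketch of the sentinel-dispatch pattern above (the call-relay case is elided by the hunk); `chan` and `client` are assumed to be the linked-task channel and client from this function:
async def relay_once(chan, client) -> bool:
    msg: tuple[str, dict]|dict|None = await chan.get()
    match msg:
        case None:  # termination sentinel
            return False
        case (meth_name, kwargs):
            # forward the method request to the asyncio-side client
            await getattr(client, meth_name)(**kwargs)
    return True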
@ -1776,7 +1742,7 @@ async def get_client(
) -> Client:
'''
Init the ``ib_async`` client in another actor and return
Init the ``ib_insync`` client in another actor and return
a method proxy to it.
'''


@ -35,14 +35,14 @@ from trio_typing import TaskStatus
import tractor
from tractor.to_asyncio import LinkedTaskChannel
from tractor import trionics
from ib_async.contract import (
from ib_insync.contract import (
Contract,
)
from ib_async.order import (
from ib_insync.order import (
Trade,
OrderStatus,
)
from ib_async.objects import (
from ib_insync.objects import (
Fill,
Execution,
CommissionReport,
@ -181,7 +181,7 @@ async def handle_order_requests(
# validate
order = BrokerdOrder(**request_msg)
# XXX: by default 0 tells ``ib_async`` methods that
# XXX: by default 0 tells ``ib_insync`` methods that
# there is no existing order so ask the client to create
# a new one (which it seems to do by allocating an int
# counter - collision prone..)
@ -237,7 +237,7 @@ async def recv_trade_updates(
) -> None:
'''
Receive and relay order control and positioning related events
from `ib_async`, pack as tuples and push over mem-chan to our
from `ib_insync`, pack as tuples and push over mem-chan to our
trio relay task for processing and relay to EMS.
'''
@ -303,7 +303,7 @@ async def recv_trade_updates(
# much more then a few more pnl fields..
# 'updatePortfolioEvent',
# XXX: these all seem to be weird ib_async internal
# XXX: these all seem to be weird ib_insync internal
# events that we probably don't care that much about
# given the internal design is wonky af..
# 'newOrderEvent',
@ -499,7 +499,7 @@ async def open_trade_event_stream(
] = trio.TASK_STATUS_IGNORED,
):
'''
Proxy wrapper for starting trade event stream from ib_async
Proxy wrapper for starting trade event stream from ib_insync
which spawns an asyncio task that registers an internal closure
(`push_tradies()`) which in turn relays trading events through
a `tractor.to_asyncio.LinkedTaskChannel` which the parent
@ -991,9 +991,6 @@ _statuses: dict[str, str] = {
# TODO: see a current ``ib_insync`` issue around this:
# https://github.com/erdewit/ib_insync/issues/363
'Inactive': 'pending',
# XXX, uhh wut the heck is this?
'ValidationError': 'error',
}
_action_map = {
@ -1066,19 +1063,8 @@ async def deliver_trade_events(
# TODO: for some reason we can receive a ``None`` here when the
# ib-gw goes down? Not sure exactly how that's happening looking
# at the eventkit code above but we should probably handle it...
event_name: str
item: (
Trade
|tuple[Trade, Fill]
|CommissionReport
|IbPosition
|dict
)
async for event_name, item in trade_event_stream:
log.info(
f'Relaying {event_name!r}:\n'
f'{pformat(item)}\n'
)
log.info(f'Relaying `{event_name}`:\n{pformat(item)}')
match event_name:
case 'orderStatusEvent':
@ -1089,12 +1075,11 @@ async def deliver_trade_events(
trade: Trade = item
reqid: str = str(trade.order.orderId)
status: OrderStatus = trade.orderStatus
status_str: str = _statuses.get(
status.status,
'error',
)
status_str: str = _statuses[status.status]
remaining: float = status.remaining
if status_str == 'filled':
if (
status_str == 'filled'
):
fill: Fill = trade.fills[-1]
execu: Execution = fill.execution
@ -1125,12 +1110,6 @@ async def deliver_trade_events(
# all units were cleared.
status_str = 'closed'
elif status_str == 'error':
log.error(
f'IB reported error status for order ??\n'
f'{status.status!r}\n'
)
# skip duplicate filled updates - we get the deats
# from the execution details event
msg = BrokerdStatus(
@ -1291,24 +1270,14 @@ async def deliver_trade_events(
case 'error':
# NOTE: see impl deats in
# `Client.inline_errors()::push_err()`
err: dict|str = item
err: dict = item
# std case, never relay errors for non-order-control
# related issues.
# never relay errors for non-broker related issues
# https://interactivebrokers.github.io/tws-api/message_codes.html
if isinstance(err, dict):
code: int = err['error_code']
reason: str = err['reason']
reqid: str = str(err['reqid'])
# XXX, sometimes you'll get just a `str` of the form,
# '[code 104] connection failed' or something..
elif isinstance(err, str):
code_part, _, reason = err.rpartition(']')
if code_part:
_, _, code = code_part.partition('[code')
reqid: str = '<unknown>'
# "Warning:" msg codes,
# https://interactivebrokers.github.io/tws-api/message_codes.html#warning_codes
# - 2109: 'Outside Regular Trading Hours'
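A worked example of the `str`-form error parsing shown above:
err = '[code 104] connection failed'
code_part, _, reason = err.rpartition(']')
# code_part == '[code 104', reason == ' connection failed'
_, _, code = code_part.partition('[code')
assert code.strip() == '104'
assert reason.strip() == 'connection failed'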


@ -36,7 +36,7 @@ from typing import (
)
from async_generator import aclosing
import ib_async as ibis
import ib_insync as ibis
import numpy as np
from pendulum import (
now,
@ -100,7 +100,7 @@ tick_types = {
5: 'size',
8: 'volume',
# `ib_async` already packs these into
# ``ib_insync`` already packs these into
# quotes under the following fields.
55: 'trades_per_min', # `'tradeRate'`
56: 'vlm_per_min', # `'volumeRate'`
@ -201,15 +201,6 @@ async def open_history_client(
fqme,
timeframe,
end_dt=end_dt,
# XXX WARNING, we don't actually use this inside
# `Client.bars()` since it isn't really supported,
# the API instead supports a "duration" of time style
# from the `end_dt` (or at least that was the best
# way to get it working sanely)..
#
# SO, with that in mind be aware that any downstream
# logic based on this may be mostly futile Xp
start_dt=start_dt,
)
latency = time.time() - query_start
@ -287,27 +278,19 @@ async def open_history_client(
trimmed_bars = bars_array[
bars_array['time'] >= start_dt.timestamp()
]
# XXX, should NEVER get HERE!
if trimmed_bars.size:
trimmed_first_dt: datetime = from_timestamp(trimmed_bars['time'][0])
if (
trimmed_first_dt
>=
trimmed_first_dt := from_timestamp(trimmed_bars['time'][0])
!=
start_dt
):
msg: str = (
# TODO! rm this once we're more confident it never hits!
# breakpoint()
raise RuntimeError(
f'OHLC-bars array start is gt `start_dt` limit !!\n'
f'start_dt: {start_dt}\n'
f'first_dt: {first_dt}\n'
f'trimmed_first_dt: {trimmed_first_dt}\n'
f'\n'
f'Delivering shorted frame of {trimmed_bars.size!r}\n'
)
log.warning(msg)
# TODO! rm this once we're more confident it
# never breaks anything (in the caller)!
# breakpoint()
# raise RuntimeError(msg)
# XXX, overwrite with start_dt-limited frame
bars_array = trimmed_bars
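A runnable sketch of the `start_dt` trimming above on a structured OHLCV-style array:
import numpy as np

bars_array = np.array(
    [(1.0, 10.0), (2.0, 11.0), (3.0, 12.0)],
    dtype=[('time', 'f8'), ('close', 'f8')],
)
start_ts: float = 2.0  # stand-in for `start_dt.timestamp()`
trimmed_bars = bars_array[bars_array['time'] >= start_ts]
assert trimmed_bars['time'][0] == start_ts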
@ -321,7 +304,7 @@ async def open_history_client(
# TODO: it seems like we can do async queries for ohlc
# but getting the order right still isn't working and I'm not
# quite sure why.. needs some tinkering and probably
# a lookthrough of the `ib_async` machinery, for eg. maybe
# a lookthrough of the `ib_insync` machinery, for eg. maybe
# we have to do the batch queries on the `asyncio` side?
yield (
get_hist,
@ -1068,21 +1051,6 @@ def normalize(
# ticker.rtTime.timestamp) / 1000.
data.pop('rtTime')
# XXX, `ib_async` seems to set a
# `'timezone': datetime.timezone.utc` in this `dict`
# which is NOT IPC serializeable sin codec!
#
# pretty sure we don't need any of this field for now anyway?
data.pop('defaults')
if lts := data.get('lastTimeStamp'):
lts.replace(tzinfo=None)
log.warning(
f'Stripping `.tzinfo` from datetime\n'
f'{lts}\n'
)
# breakpoint()
return data
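A minimal sketch of the serialization scrub above, assuming the non-serializable tz object lives under the popped 'defaults' entry:
from datetime import datetime, timezone

data = {
    'defaults': {'timezone': timezone.utc},  # not IPC serializable
    'lastTimeStamp': datetime.now(timezone.utc),
}
data.pop('defaults', None)
if lts := data.get('lastTimeStamp'):
    # NB: `.replace()` returns a new naive dt, it does not mutate
    data['lastTimeStamp'] = lts.replace(tzinfo=None)
assert data['lastTimeStamp'].tzinfo is None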
@ -1259,7 +1227,7 @@ async def stream_quotes(
):
# ?TODO? can we rm this - particularly for `ib_async`?
# ugh, clear ticks since we've consumed them
# (ahem, ib_async is stateful trash)
# (ahem, ib_insync is stateful trash)
# first_ticker.ticks = []
# only on first entry at feed boot up


@ -36,7 +36,7 @@ from pendulum import (
parse,
from_timestamp,
)
from ib_async import (
from ib_insync import (
Contract,
Commodity,
Fill,


@ -23,7 +23,6 @@ from contextlib import (
nullcontext,
)
from decimal import Decimal
from functools import partial
import time
from typing import (
Awaitable,
@ -31,9 +30,8 @@ from typing import (
)
from rapidfuzz import process as fuzzy
import ib_async as ibis
import ib_insync as ibis
import tractor
from tractor.devx.pformat import ppfmt
import trio
from piker.accounting import (
@ -217,19 +215,18 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
f'{ib_client}\n'
)
last: float = time.time()
last = time.time()
async for pattern in stream:
log.info(f'received {pattern}')
now: float = time.time()
# TODO? check this is no longer true?
# this causes tractor hang...
# assert 0
assert pattern, 'IB can not accept blank search pattern'
# throttle search requests to no faster then 1Hz
diff: float = now - last
diff = now - last
if diff < 1.0:
log.debug('throttle sleeping')
await trio.sleep(diff)
@ -240,12 +237,11 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
if (
not pattern
or
pattern.isspace()
or
or pattern.isspace()
# XXX: not sure if this is a bad assumption but it
# seems to make search snappier?
len(pattern) < 1
or len(pattern) < 1
):
log.warning('empty pattern received, skipping..')
@ -258,58 +254,36 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
# XXX: this unblocks the far end search task which may
# hold up a multi-search nursery block
await stream.send({})
continue
log.info(
f'Searching for FQME with,\n'
f'pattern: {pattern!r}\n'
)
log.info(f'searching for {pattern}')
last: float = time.time()
last = time.time()
# async batch search using api stocks endpoint and
# module defined adhoc symbol set.
stock_results: list[dict] = []
# async batch search using api stocks endpoint and module
# defined adhoc symbol set.
stock_results = []
async def extend_results(
# ?TODO, how to type async-fn!?
target: Awaitable[list],
pattern: str,
**kwargs,
target: Awaitable[list]
) -> None:
try:
results = await target(
pattern=pattern,
**kwargs,
)
client_repr: str = proxy._aio_ns.ib.client.__class__.__name__
meth_repr: str = target.keywords["meth"]
log.info(
f'Search query,\n'
f'{client_repr}.{meth_repr}(\n'
f' pattern={pattern!r}\n'
f' **kwargs={kwargs!r},\n'
f') = {ppfmt(list(results))}'
# XXX ^ just the keys since that's what
# shows in UI results table.
)
results = await target
except tractor.trionics.Lagged:
log.exception(
'IB SYM-SEARCH OVERRUN?!?\n'
)
print("IB SYM-SEARCH OVERRUN?!?")
return
stock_results.extend(results)
for _ in range(10):
with trio.move_on_after(3) as cs:
async with trio.open_nursery() as tn:
tn.start_soon(
partial(
async with trio.open_nursery() as sn:
sn.start_soon(
extend_results,
proxy.search_symbols(
pattern=pattern,
target=proxy.search_symbols,
upto=10,
upto=5,
),
)
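A sketch of the deadline-bounded batch-search pattern above using stock `trio` primitives; `targets` stands in for the proxy search methods:
import trio

async def batch_search(targets: list, pattern: str) -> list:
    results: list = []

    async def extend(target) -> None:
        results.extend(await target(pattern=pattern))

    with trio.move_on_after(3):  # whole batch shares one deadline
        async with trio.open_nursery() as tn:
            for target in targets:
                tn.start_soon(extend, target)
    return results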
@ -339,9 +313,7 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
# adhoc_match_results = {i[0]: {} for i in
# adhoc_matches}
log.debug(
f'fuzzy matching stocks {ppfmt(stock_results)}'
)
log.debug(f'fuzzy matching stocks {stock_results}')
stock_matches = fuzzy.extract(
pattern,
stock_results,
@ -355,10 +327,7 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
# TODO: we used to deliver contract details
# {item[2]: item[0] for item in stock_matches}
log.debug(
f'Sending final matches\n'
f'{matches.keys()}'
)
log.debug(f"sending matches: {matches.keys()}")
await stream.send(matches)
@ -553,11 +522,7 @@ async def get_mkt_info(
if atype == 'commodity':
venue: str = 'cmdty'
else:
venue: str = (
con.primaryExchange
or
con.exchange
)
venue = con.primaryExchange or con.exchange
price_tick: Decimal = Decimal(str(details.minTick))
ib_min_tick_gt_2: Decimal = Decimal('0.01')


@ -41,9 +41,8 @@ from pendulum import (
)
if TYPE_CHECKING:
from ib_async import (
from ib_insync import (
TradingSession,
Contract,
ContractDetails,
)
from exchange_calendars.exchange_calendars import (
@ -83,20 +82,8 @@ def has_holiday(
'''
tz: str = con_deats.timeZoneId
con: Contract = con_deats.contract
exch: str = (
con.primaryExchange
or
con.exchange
)
# XXX, ad-hoc handle any IB exchange which are non-std
# via lookup table..
std_exch: dict = {
'ARCA': 'ARCX',
}.get(exch, exch)
cal: ExchangeCalendar = xcals.get_calendar(std_exch)
exch: str = con_deats.contract.primaryExchange
cal: ExchangeCalendar = xcals.get_calendar(exch)
end: datetime = period.end
# _start: datetime = period.start
# ?TODO, can rm ya?
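A sketch of the calendar lookup above: IB venue codes don't always match the ISO-10383 MICs that `exchange_calendars` expects, hence the ad-hoc 'ARCA' -> 'ARCX' table:
import exchange_calendars as xcals

exch: str = 'ARCA'  # IB's code for NYSE Arca
std_exch: str = {'ARCA': 'ARCX'}.get(exch, exch)
cal = xcals.get_calendar(std_exch)
print(cal.is_session('2024-12-25'))  # False, xmas closure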
@ -249,7 +236,7 @@ def is_venue_closure(
#
# NOTE, this was generated by @guille from a gpt5 prompt
# and was originally thot to be needed before learning about
# `ib_async.contract.ContractDetails._parseSessions()` and
# `ib_insync.contract.ContractDetails._parseSessions()` and
# it's downstream meths..
#
# This is still likely useful to keep for now to parse the


@ -19,6 +19,7 @@ Platform configuration (files) mgmt.
"""
import platform
import sys
import os
import shutil
from typing import (
@ -28,7 +29,6 @@ from typing import (
from pathlib import Path
from bidict import bidict
import platformdirs
import tomlkit
try:
import tomllib
@ -41,7 +41,7 @@ from .log import get_logger
log = get_logger('broker-config')
# XXX NOTE: orig impl was taken from `click`
# XXX NOTE: taken from `click`
# |_https://github.com/pallets/click/blob/main/src/click/utils.py#L449
#
# (since apparently they have some super weirdness with SIGINT and
@ -54,21 +54,44 @@ def get_app_dir(
force_posix: bool = False,
) -> str:
'''
Returns the config folder for the application. The default behavior
r"""Returns the config folder for the application. The default behavior
is to return whatever is most appropriate for the operating system.
----
NOTE, below is originally from `click` impl fn, we can prolly remove?
----
To give you an idea, for an app called ``"Foo Bar"``, something like
the following folders could be returned:
Mac OS X:
``~/Library/Application Support/Foo Bar``
Mac OS X (POSIX):
``~/.foo-bar``
Unix:
``~/.config/foo-bar``
Unix (POSIX):
``~/.foo-bar``
Win XP (roaming):
``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo``
Win XP (not roaming):
``C:\Documents and Settings\<user>\Application Data\Foo Bar``
Win 7 (roaming):
``C:\Users\<user>\AppData\Roaming\Foo Bar``
Win 7 (not roaming):
``C:\Users\<user>\AppData\Local\Foo Bar``
.. versionadded:: 2.0
:param app_name: the application name. This should be properly capitalized
and can contain whitespace.
:param roaming: controls if the folder should be roaming or not on Windows.
Has no affect otherwise.
:param force_posix: if this is set to `True` then on any POSIX system the
folder will be stored in the home folder with a leading
dot instead of the XDG config home or darwin's
application support folder.
'''
"""
def _posixify(name):
return "-".join(name.split()).lower()
# NOTE: for testing with `pytest` we leverage the `tmp_dir`
# fixture to generate (and clean up) a test-request-specific
# directory for isolated configuration files such that,
@ -94,30 +117,23 @@ def get_app_dir(
# assert testdirpath.exists(), 'piker test harness might be borked!?'
# app_name = str(testdirpath)
os_name: str = platform.system()
conf_dir: Path = platformdirs.user_config_path()
app_dir: Path = conf_dir / app_name
# ?TODO, from `click`; can remove?
if platform.system() == 'Windows':
key = "APPDATA" if roaming else "LOCALAPPDATA"
folder = os.environ.get(key)
if folder is None:
folder = os.path.expanduser("~")
return os.path.join(folder, app_name)
if force_posix:
def _posixify(name):
return "-".join(name.split()).lower()
return os.path.join(
os.path.expanduser(
"~/.{}".format(
_posixify(app_name)
os.path.expanduser("~/.{}".format(_posixify(app_name))))
if sys.platform == "darwin":
return os.path.join(
os.path.expanduser("~/Library/Application Support"), app_name
)
return os.path.join(
os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
_posixify(app_name),
)
)
log.info(
f'Using user config directory,\n'
f'platform.system(): {os_name!r}\n'
f'conf_dir: {conf_dir!r}\n'
f'app_dir: {conf_dir!r}\n'
)
return app_dir
_click_config_dir: Path = Path(get_app_dir('piker'))
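A minimal sketch of the `platformdirs`-based dir resolution shown above, which replaces the per-OS branches from the `click` impl:
from pathlib import Path
import platformdirs

conf_dir: Path = platformdirs.user_config_path()
app_dir: Path = conf_dir / 'piker'
# linux: ~/.config/piker (XDG); macOS: ~/Library/Application Support/piker
print(app_dir)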
@ -234,9 +250,7 @@ def repodir() -> Path:
repodir: Path = Path(os.environ.get('GITHUB_WORKSPACE'))
confdir: Path = repodir / 'config'
assert confdir.is_dir(), (
f'{confdir} DNE, {repodir} is likely incorrect!'
)
assert confdir.is_dir(), f'{confdir} DNE, {repodir} is likely incorrect!'
return repodir


@ -973,6 +973,9 @@ async def open_feed(
# assert flume.mkt.fqme == fqme
feed.flumes[fqme] = flume
# TODO: do we need this?
flume.feed = feed
# attach and cache shm handles
rt_shm = flume.rt_shm
assert rt_shm


@ -22,6 +22,9 @@ real-time data processing data-structures.
"""
from __future__ import annotations
from typing import (
TYPE_CHECKING,
)
import tractor
import pendulum
@ -35,6 +38,9 @@ from ._sharedmem import (
)
from piker.accounting import MktPair
if TYPE_CHECKING:
from piker.data.feed import Feed
class Flume(Struct):
'''
@ -74,6 +80,10 @@ class Flume(Struct):
izero_rt: int = 0
throttle_rate: int | None = None
# TODO: do we need this really if we can pull the `Portal` from
# ``tractor``'s internals?
feed: Feed|None = None
@property
def rt_shm(self) -> ShmArray:
@ -146,6 +156,7 @@ class Flume(Struct):
# will get instead some kind of msg-compat version
# that it can load.
msg.pop('stream')
msg.pop('feed')
msg.pop('_rt_shm')
msg.pop('_hist_shm')


@ -294,11 +294,6 @@ def ldshm(
f'Something is wrong with time period for {shm}:\n{times}'
)
period_s: float = float(max(d1, d2, med))
log.info(
f'Processing shm buffer:\n'
f' file: {shmfile.name}\n'
f' period: {period_s}s\n'
)
null_segs: tuple = tsp.get_null_segs(
frame=shm.array,


@ -276,41 +276,14 @@ def get_null_segs(
absi_zdiff: np.ndarray = np.diff(absi_zeros)
if zero_t.size < 2:
idx: int = zero_t['index'][0]
idx_before: int = idx - 1
idx_after: int = idx + 1
index = frame['index']
before_cond = idx_before <= index
after_cond = index <= idx_after
bars: np.ndarray = frame[
before_cond
&
after_cond
]
time: np.ndarray = bars['time']
from pendulum import (
from_timestamp,
Interval,
try:
breakpoint()
except RuntimeError:
# XXX, if greenback not active from
# piker store ldshm cmd..
log.exception(
"Can't debug single-sample null!\n"
)
gap: Interval = (
from_timestamp(time[-1])
-
from_timestamp(time[0])
)
log.warning(
f'Single OHLCV-bar null-segment detected??\n'
f'gap -> {gap}\n'
)
# ^^XXX, if you want to debug the above bar-gap^^
# try:
# breakpoint()
# except RuntimeError:
# # XXX, if greenback not active from
# # piker store ldshm cmd..
# log.exception(
# "Can't debug single-sample null!\n"
# )
return None
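A sketch of the gap measurement above: subtracting two `pendulum` datetimes yields an `Interval` spanning the single-bar null segment:
from pendulum import from_timestamp

time = [1700000000.0, 1700000060.0]  # example bar timestamps
gap = from_timestamp(time[-1]) - from_timestamp(time[0])
print(gap.in_words())  # '1 minute'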


@ -30,11 +30,6 @@ import tractor
from piker.data._formatters import BGM
from piker.storage import log
from piker.toolz.profile import (
Profiler,
pg_profile_enabled,
ms_slower_then,
)
from piker.ui._style import get_fonts
if TYPE_CHECKING:
@ -97,22 +92,12 @@ async def markup_gaps(
# gap's duration.
show_txt: bool = False,
# A/B comparison: render individual arrows alongside batch
# for visual comparison
show_individual_arrows: bool = False,
) -> dict[int, dict]:
'''
Remote annotate time-gaps in a dt-fielded ts (normally OHLC)
with rectangles.
'''
profiler = Profiler(
msg=f'markup_gaps() for {gaps.height} gaps',
disabled=False,
ms_threshold=0.0,
)
# XXX: force chart redraw FIRST to ensure PlotItem coordinate
# system is properly initialized before we position annotations!
# Without this, annotations may be misaligned on first creation
@ -121,19 +106,6 @@ async def markup_gaps(
fqme=fqme,
timeframe=timeframe,
)
profiler('first `.redraw()` before annot creation')
log.info(
f'markup_gaps() called:\n'
f' fqme: {fqme}\n'
f' timeframe: {timeframe}s\n'
f' gaps.height: {gaps.height}\n'
)
# collect all annotation specs for batch submission
rect_specs: list[dict] = []
arrow_specs: list[dict] = []
text_specs: list[dict] = []
aids: dict[int] = {}
for i in range(gaps.height):
@ -245,38 +217,56 @@ async def markup_gaps(
# 1: 'wine', # down-gap
# }[sgn]
# collect rect spec (no fqme/timeframe, added by batch
# API)
rect_spec: dict[str, Any] = dict(
meth='set_view_pos',
rect_kwargs: dict[str, Any] = dict(
fqme=fqme,
timeframe=timeframe,
start_pos=lc,
end_pos=ro,
color=color,
update_label=False,
start_time=start_time,
end_time=end_time,
)
rect_specs.append(rect_spec)
# add up/down rects
aid: int|None = await actl.add_rect(**rect_kwargs)
if aid is None:
log.error(
f'Failed to add rect for,\n'
f'{rect_kwargs!r}\n'
f'\n'
f'Skipping to next gap!\n'
)
continue
assert aid
aids[aid] = rect_kwargs
direction: str = (
'down' if down_gap
else 'up'
)
# collect arrow spec
# TODO! mk this a `msgspec.Struct` which we deserialize
# on the server side!
# XXX: send timestamp for server-side index lookup
# to ensure alignment with current shm state
gap_time: float = row['time'][0]
arrow_spec: dict[str, Any] = dict(
arrow_kwargs: dict[str, Any] = dict(
fqme=fqme,
timeframe=timeframe,
x=iend, # fallback if timestamp lookup fails
y=cls,
time=gap_time, # for server-side index lookup
color=color,
alpha=169,
pointing=direction,
# TODO: expose these as params to markup_gaps()?
headLen=10,
headWidth=2.222,
pxMode=True,
)
arrow_specs.append(arrow_spec)
aid: int = await actl.add_arrow(
**arrow_kwargs
)
# add duration label to RHS of arrow
if up_gap:
@ -288,12 +278,15 @@ async def markup_gaps(
assert flat
anchor = (0, 0) # up from bottom
# collect text spec if enabled
if show_txt:
# use a slightly smaller font for gap label txt.
font, small_font = get_fonts()
font_size: int = small_font.px_size - 1
assert isinstance(font_size, int)
text_spec: dict[str, Any] = dict(
if show_txt:
text_aid: int = await actl.add_text(
fqme=fqme,
timeframe=timeframe,
text=gap_label,
x=iend + 1, # fallback if timestamp lookup fails
y=cls,
@ -302,46 +295,12 @@ async def markup_gaps(
anchor=anchor,
font_size=font_size,
)
text_specs.append(text_spec)
aids[text_aid] = {'text': gap_label}
# submit all annotations in single batch IPC msg
log.info(
f'Submitting batch annotations:\n'
f' rects: {len(rect_specs)}\n'
f' arrows: {len(arrow_specs)}\n'
f' texts: {len(text_specs)}\n'
)
profiler('built all annotation specs')
result: dict[str, list[int]] = await actl.add_batch(
fqme=fqme,
timeframe=timeframe,
rects=rect_specs,
arrows=arrow_specs,
texts=text_specs,
show_individual_arrows=show_individual_arrows,
)
profiler('batch `.add_batch()` IPC call complete')
# build aids dict from batch results
for aid in result['rects']:
aids[aid] = {'type': 'rect'}
for aid in result['arrows']:
aids[aid] = {'type': 'arrow'}
for aid in result['texts']:
aids[aid] = {'type': 'text'}
log.info(
f'Batch submission complete: {len(aids)} annotation(s) '
f'created'
)
profiler('built aids result dict')
# tell chart to redraw all its graphics view layers
# tell chart to redraw all its
# graphics view layers Bo
await actl.redraw(
fqme=fqme,
timeframe=timeframe,
)
profiler('final `.redraw()` after annot creation')
return aids
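A sketch of the client-side batch submission built above; field values are illustrative, the spec keys mirror those collected in this function:
async def submit_gaps(actl, fqme: str, timeframe: float) -> dict:
    rect_specs: list[dict] = [dict(
        meth='set_view_pos',
        start_pos=(100.0, 4123.25),  # (abs-index, price) left corner
        end_pos=(101.0, 4120.50),
        color='dad_blue',
        update_label=False,
        start_time=1700000000.0,  # for server-side index lookup
        end_time=1700000060.0,
    )]
    return await actl.add_batch(
        fqme=fqme,
        timeframe=timeframe,
        rects=rect_specs,
        arrows=[],
        texts=[],
    )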


@ -58,10 +58,7 @@ from piker.brokers import NoData
from piker.accounting import (
MktPair,
)
from piker.log import (
get_logger,
get_console_log,
)
from piker.log import get_logger
from ..data._sharedmem import (
maybe_open_shm_array,
ShmArray,
@ -251,20 +248,10 @@ async def maybe_fill_null_segments(
end_dt=end_dt,
)
if array.size == 0:
log.warning(
f'Valid gap from backend ??\n'
f'{end_dt} -> {start_dt}\n'
)
# ?TODO? do we want to remove the nulls and push
# the close price here for the gap duration?
await tractor.pause()
break
if (
frame_start_dt := (from_timestamp(array['time'][0]))
<
backfill_until_dt
frame_start_dt := (
from_timestamp(array['time'][0])
) < backfill_until_dt
):
log.error(
f'Invalid frame_start !?\n'
@ -626,17 +613,10 @@ async def start_backfill(
else:
log.warning(
f'0 BARS TO PUSH after diff!?\n'
'0 BARS TO PUSH after diff!?\n'
f'{next_start_dt} -> {last_start_dt}'
f'\n'
f'This might mean we rxed a gap frame which starts BEFORE,\n'
f'backfill_until_dt: {backfill_until_dt}\n'
f'end_dt_param: {end_dt_param}\n'
)
# XXX, to debug it and be sure.
# await tractor.pause()
break
await tractor.pause()
# Check if we're about to exceed buffer capacity BEFORE
# attempting the push
@ -739,21 +719,12 @@ async def start_backfill(
# including the dst[/src] source asset token. SO,
# 'tsla.nasdaq.ib' over 'tsla/usd.nasdaq.ib' for
# historical reasons ONLY.
if (
mkt.dst.atype not in {
if mkt.dst.atype not in {
'crypto',
'crypto_currency',
'fiat', # a "forex pair"
'perpetual_future', # stupid "perps" from cex land
}
and not (
mkt.src.atype == 'crypto_currency'
and
mkt.dst.atype in {
'future',
}
)
):
}:
col_sym_key: str = mkt.get_fqme(
delim_char='',
without_src=True,
@ -1397,10 +1368,6 @@ async def manage_history(
engages.
'''
get_console_log(
name=__name__,
level=loglevel,
)
# TODO: is there a way to make each shm file key
# actor-tree-discovery-addr unique so we avoid collisions
# when doing tests which also allocate shms for certain instruments


@ -24,11 +24,8 @@ from pyqtgraph import (
Point,
functions as fn,
Color,
GraphicsObject,
)
from pyqtgraph.Qt import internals
import numpy as np
import pyqtgraph as pg
from piker.ui.qt import (
QtCore,
@ -38,10 +35,6 @@ from piker.ui.qt import (
QRectF,
QGraphicsPathItem,
)
from piker.ui._style import hcolor
from piker.log import get_logger
log = get_logger(__name__)
def mk_marker_path(
@ -111,7 +104,7 @@ def mk_marker_path(
class LevelMarker(QGraphicsPathItem):
'''
An arrow marker path graphic which redraws itself
An arrow marker path graphich which redraws itself
to the specified view coordinate level on each paint cycle.
'''
@ -258,9 +251,9 @@ def qgo_draw_markers(
) -> float:
'''
Paint markers in ``pg.GraphicsItem`` style by first removing the
view transform for the painter, drawing the markers in scene
coords, then restoring the view coords.
Paint markers in ``pg.GraphicsItem`` style by first
removing the view transform for the painter, drawing the markers
in scene coords, then restoring the view coords.
'''
# paint markers in native coordinate system
@ -302,449 +295,3 @@ def qgo_draw_markers(
p.setTransform(orig_tr)
return max(sizes)
class GapAnnotations(GraphicsObject):
'''
Batch-rendered gap annotations using Qt's efficient drawing
APIs.
Instead of creating individual `QGraphicsItem` instances per
gap (which is very slow for 1000+ gaps), this class stores all
gap rectangles and arrows in numpy-backed arrays and renders
them in single batch paint calls.
Performance: ~1000x faster than individual items for large gap
counts.
Based on patterns from:
- `pyqtgraph.BarGraphItem` (batch rect rendering)
- `pyqtgraph.ScatterPlotItem` (fragment rendering)
- `piker.ui._curve.FlowGraphic` (single path pattern)
'''
def __init__(
self,
gap_specs: list[dict],
array: np.ndarray|None = None,
color: str = 'dad_blue',
alpha: int = 169,
arrow_size: float = 10.0,
fqme: str|None = None,
timeframe: float|None = None,
) -> None:
'''
gap_specs: list of dicts with keys:
- start_pos: (x, y) tuple for left corner of rect
- end_pos: (x, y) tuple for right corner of rect
- arrow_x: x position for arrow
- arrow_y: y position for arrow
- pointing: 'up' or 'down' for arrow direction
- start_time: (optional) timestamp for repositioning
- end_time: (optional) timestamp for repositioning
array: optional OHLC numpy array for repositioning on
backfill updates (when abs-index changes)
fqme: symbol name for these gaps (for logging/debugging)
timeframe: period in seconds that these gaps were
detected on (used to skip reposition when
called with wrong timeframe's array)
'''
super().__init__()
self._gap_specs = gap_specs
self._array = array
self._fqme = fqme
self._timeframe = timeframe
n_gaps = len(gap_specs)
# shared pen/brush matching original SelectRect/ArrowItem style
base_color = pg.mkColor(hcolor(color))
# rect pen: base color, fully opaque for outline
self._rect_pen = pg.mkPen(base_color, width=1)
# rect brush: base color with alpha=66 (SelectRect default)
rect_fill = pg.mkColor(hcolor(color))
rect_fill.setAlpha(66)
self._rect_brush = pg.functions.mkBrush(rect_fill)
# arrow pen: same as rects
self._arrow_pen = pg.mkPen(base_color, width=1)
# arrow brush: base color with user-specified alpha (default 169)
arrow_fill = pg.mkColor(hcolor(color))
arrow_fill.setAlpha(alpha)
self._arrow_brush = pg.functions.mkBrush(arrow_fill)
# allocate rect array using Qt's efficient storage
self._rectarray = internals.PrimitiveArray(
QtCore.QRectF,
4,
)
self._rectarray.resize(n_gaps)
rect_memory = self._rectarray.ndarray()
# fill rect array from gap specs
for (
i,
spec,
) in enumerate(gap_specs):
(
start_x,
start_y,
) = spec['start_pos']
(
end_x,
end_y,
) = spec['end_pos']
# QRectF expects (x, y, width, height)
rect_memory[i, 0] = start_x
rect_memory[i, 1] = min(start_y, end_y)
rect_memory[i, 2] = end_x - start_x
rect_memory[i, 3] = abs(end_y - start_y)
# build single QPainterPath for all arrows
self._arrow_path = QtGui.QPainterPath()
self._arrow_size = arrow_size
for spec in gap_specs:
arrow_x = spec['arrow_x']
arrow_y = spec['arrow_y']
pointing = spec['pointing']
# create arrow polygon
if pointing == 'down':
# arrow points downward
arrow_poly = QtGui.QPolygonF([
QPointF(arrow_x, arrow_y), # tip
QPointF(
arrow_x - arrow_size/2,
arrow_y - arrow_size,
), # left
QPointF(
arrow_x + arrow_size/2,
arrow_y - arrow_size,
), # right
])
else: # up
# arrow points upward
arrow_poly = QtGui.QPolygonF([
QPointF(arrow_x, arrow_y), # tip
QPointF(
arrow_x - arrow_size/2,
arrow_y + arrow_size,
), # left
QPointF(
arrow_x + arrow_size/2,
arrow_y + arrow_size,
), # right
])
self._arrow_path.addPolygon(arrow_poly)
self._arrow_path.closeSubpath()
# cache bounding rect
self._br: QRectF|None = None
def boundingRect(self) -> QRectF:
'''
Compute bounding rect from rect array and arrow path.
'''
if self._br is not None:
return self._br
# get rect bounds
rect_memory = self._rectarray.ndarray()
if len(rect_memory) == 0:
self._br = QRectF()
return self._br
x_min = rect_memory[:, 0].min()
y_min = rect_memory[:, 1].min()
x_max = (rect_memory[:, 0] + rect_memory[:, 2]).max()
y_max = (rect_memory[:, 1] + rect_memory[:, 3]).max()
# expand for arrow path
arrow_br = self._arrow_path.boundingRect()
x_min = min(x_min, arrow_br.left())
y_min = min(y_min, arrow_br.top())
x_max = max(x_max, arrow_br.right())
y_max = max(y_max, arrow_br.bottom())
self._br = QRectF(
x_min,
y_min,
x_max - x_min,
y_max - y_min,
)
return self._br
def paint(
self,
p: QtGui.QPainter,
opt: QtWidgets.QStyleOptionGraphicsItem,
w: QtWidgets.QWidget,
) -> None:
'''
Batch render all rects and arrows in minimal paint calls.
'''
# draw all rects in single batch call (data coordinates)
p.setPen(self._rect_pen)
p.setBrush(self._rect_brush)
drawargs = self._rectarray.drawargs()
p.drawRects(*drawargs)
# draw arrows in scene/pixel coordinates so they maintain
# size regardless of zoom level
orig_tr = p.transform()
p.resetTransform()
# rebuild arrow path in scene coordinates
arrow_path_scene = QtGui.QPainterPath()
# arrow geometry matching pg.ArrowItem defaults
# headLen=10, headWidth=2.222
# headWidth is the half-width (center to edge distance)
head_len = self._arrow_size
head_width = head_len * 0.2222 # 2.222 at size=10
for spec in self._gap_specs:
if 'arrow_x' not in spec:
continue
arrow_x = spec['arrow_x']
arrow_y = spec['arrow_y']
pointing = spec['pointing']
# transform data coords to scene coords
scene_pt = orig_tr.map(QPointF(arrow_x, arrow_y))
sx = scene_pt.x()
sy = scene_pt.y()
# create arrow polygon in scene/pixel coords
# matching pg.ArrowItem geometry but rotated for up/down
if pointing == 'down':
# tip points downward (negative y direction)
arrow_poly = QtGui.QPolygonF([
QPointF(sx, sy), # tip
QPointF(
sx - head_width,
sy - head_len,
), # left base
QPointF(
sx + head_width,
sy - head_len,
), # right base
])
else: # up
# tip points upward (positive y direction)
arrow_poly = QtGui.QPolygonF([
QPointF(sx, sy), # tip
QPointF(
sx - head_width,
sy + head_len,
), # left base
QPointF(
sx + head_width,
sy + head_len,
), # right base
])
arrow_path_scene.addPolygon(arrow_poly)
arrow_path_scene.closeSubpath()
p.setPen(self._arrow_pen)
p.setBrush(self._arrow_brush)
p.drawPath(arrow_path_scene)
# restore original transform
p.setTransform(orig_tr)
def reposition(
self,
array: np.ndarray|None = None,
fqme: str|None = None,
timeframe: float|None = None,
) -> None:
'''
Reposition all annotations based on timestamps.
Used when viz is updated (eg during backfill) and abs-index
range changes - we need to lookup new indices from timestamps.
'''
# skip reposition if timeframe doesn't match
# (e.g., 1s gaps being repositioned with 60s array)
if (
timeframe is not None
and
self._timeframe is not None
and
timeframe != self._timeframe
):
log.debug(
f'Skipping reposition for {self._fqme} gaps:\n'
f' gap timeframe: {self._timeframe}s\n'
f' array timeframe: {timeframe}s\n'
)
return
if array is None:
array = self._array
if array is None:
log.warning(
'GapAnnotations.reposition() called but no array '
'provided'
)
return
# collect all unique timestamps we need to lookup
timestamps: set[float] = set()
for spec in self._gap_specs:
if spec.get('start_time') is not None:
timestamps.add(spec['start_time'])
if spec.get('end_time') is not None:
timestamps.add(spec['end_time'])
if spec.get('time') is not None:
timestamps.add(spec['time'])
# vectorized timestamp -> row lookup using binary search
time_to_row: dict[float, dict] = {}
if timestamps:
import numpy as np
time_arr = array['time']
ts_array = np.array(list(timestamps))
search_indices = np.searchsorted(
time_arr,
ts_array,
)
# vectorized bounds check and exact match verification
valid_mask = (
(search_indices < len(array))
& (time_arr[search_indices] == ts_array)
)
valid_indices = search_indices[valid_mask]
valid_timestamps = ts_array[valid_mask]
matched_rows = array[valid_indices]
time_to_row = {
float(ts): {
'index': float(row['index']),
'open': float(row['open']),
'close': float(row['close']),
}
for ts, row in zip(
valid_timestamps,
matched_rows,
)
}
# rebuild rect array from gap specs with new indices
rect_memory = self._rectarray.ndarray()
for (
i,
spec,
) in enumerate(self._gap_specs):
start_time = spec.get('start_time')
end_time = spec.get('end_time')
if (
start_time is None
or end_time is None
):
continue
start_row = time_to_row.get(start_time)
end_row = time_to_row.get(end_time)
if (
start_row is None
or end_row is None
):
log.warning(
f'Timestamp lookup failed for gap[{i}] during '
f'reposition:\n'
f' fqme: {fqme}\n'
f' timeframe: {timeframe}s\n'
f' start_time: {start_time}\n'
f' end_time: {end_time}\n'
f' array time range: '
f'{array["time"][0]} -> {array["time"][-1]}\n'
)
continue
start_idx = start_row['index']
end_idx = end_row['index']
start_close = start_row['close']
end_open = end_row['open']
from_idx: float = 0.16 - 0.06
start_x = start_idx + 1 - from_idx
end_x = end_idx + from_idx
# update rect in array
rect_memory[i, 0] = start_x
rect_memory[i, 1] = min(start_close, end_open)
rect_memory[i, 2] = end_x - start_x
rect_memory[i, 3] = abs(end_open - start_close)
# rebuild arrow path with new indices
self._arrow_path.clear()
for spec in self._gap_specs:
time_val = spec.get('time')
if time_val is None:
continue
arrow_row = time_to_row.get(time_val)
if arrow_row is None:
continue
arrow_x = arrow_row['index']
arrow_y = arrow_row['close']
pointing = spec['pointing']
# create arrow polygon
if pointing == 'down':
arrow_poly = QtGui.QPolygonF([
QPointF(arrow_x, arrow_y),
QPointF(
arrow_x - self._arrow_size/2,
arrow_y - self._arrow_size,
),
QPointF(
arrow_x + self._arrow_size/2,
arrow_y - self._arrow_size,
),
])
else: # up
arrow_poly = QtGui.QPolygonF([
QPointF(arrow_x, arrow_y),
QPointF(
arrow_x - self._arrow_size/2,
arrow_y + self._arrow_size,
),
QPointF(
arrow_x + self._arrow_size/2,
arrow_y + self._arrow_size,
),
])
self._arrow_path.addPolygon(arrow_poly)
self._arrow_path.closeSubpath()
# invalidate bounding rect cache
self._br = None
self.prepareGeometryChange()
self.update()
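A runnable sketch of the vectorized timestamp -> row lookup used in `reposition()` above (binary search plus an exact-match mask):
import numpy as np

arr = np.array(
    [(0.0, 1.0), (60.0, 1.1), (120.0, 1.2)],
    dtype=[('time', 'f8'), ('close', 'f8')],
)
ts_array = np.array([60.0, 90.0])  # 90.0 has no exact match
idxs = np.searchsorted(arr['time'], ts_array)
valid = (idxs < len(arr)) & (arr['time'][idxs] == ts_array)
matched = arr[idxs[valid]]
assert matched['close'].tolist() == [1.1]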


@ -168,7 +168,7 @@ class ArrowEditor(Struct):
'''
uid: str = arrow._uid
arrows: list[pg.ArrowItem] = self._arrows[uid]
log.debug(
log.info(
f'Removing arrow from views\n'
f'uid: {uid!r}\n'
f'{arrow!r}\n'
@ -286,9 +286,7 @@ class LineEditor(Struct):
for line in lines:
line.show_labels()
line.hide_markers()
log.debug(
f'Line active @ level: {line.value()!r}'
)
log.debug(f'Level active for level: {line.value()}')
# TODO: other flashy things to indicate the order is active
return lines
@ -331,11 +329,7 @@ class LineEditor(Struct):
if line in hovered:
hovered.remove(line)
log.debug(
f'Deleting level-line\n'
f'line: {line!r}\n'
f'oid: {uuid!r}\n'
)
log.debug(f'deleting {line} with oid: {uuid}')
line.delete()
# make sure the xhair doesn't get left off
@ -343,11 +337,7 @@ class LineEditor(Struct):
cursor.show_xhair()
else:
log.warning(
f'Could not find line for removal ??\n'
f'\n'
f'{line!r}\n'
)
log.warning(f'Could not find line for {line}')
return lines
@ -579,11 +569,11 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
if update_label:
self.init_label(view_rect)
log.debug(
f'SelectRect modify,\n'
print(
'SelectRect modify:\n'
f'QRectF: {view_rect}\n'
f'start_pos: {start_pos!r}\n'
f'end_pos: {end_pos!r}\n'
f'start_pos: {start_pos}\n'
f'end_pos: {end_pos}\n'
)
self.show()
@ -650,11 +640,8 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
dmn=dmn,
))
# tracing
# log.info(
# f'x2, y2: {(x2, y2)}\n'
# f'xmn, ymn: {(xmn, ymx)}\n'
# )
# print(f'x2, y2: {(x2, y2)}')
# print(f'xmn, ymn: {(xmn, ymx)}')
label_anchor = Point(
xmx + 2,


@ -38,6 +38,7 @@ from piker.ui.qt import (
QtGui,
QGraphicsPathItem,
QStyleOptionGraphicsItem,
QGraphicsItem,
QGraphicsScene,
QWidget,
QPointF,


@ -22,7 +22,6 @@ a chart from some other actor.
from __future__ import annotations
from contextlib import (
asynccontextmanager as acm,
contextmanager as cm,
AsyncExitStack,
)
from functools import partial
@ -47,7 +46,6 @@ from piker.log import get_logger
from piker.types import Struct
from piker.service import find_service
from piker.brokers import SymbolNotFound
from piker.toolz import Profiler
from piker.ui.qt import (
QGraphicsItem,
)
@ -100,8 +98,6 @@ def rm_annot(
annot: ArrowEditor|SelectRect|pg.TextItem
) -> bool:
global _editors
from piker.ui._annotate import GapAnnotations
match annot:
case pg.ArrowItem():
editor = _editors[annot._uid]
@ -126,35 +122,9 @@ def rm_annot(
scene.removeItem(annot)
return True
case GapAnnotations():
scene = annot.scene()
if scene:
scene.removeItem(annot)
return True
return False
@cm
def no_qt_updates(*items):
'''
Disable Qt widget/item updates during context to batch
render operations and only trigger single repaint on exit.
Accepts both QWidgets and QGraphicsItems.
'''
for item in items:
if hasattr(item, 'setUpdatesEnabled'):
item.setUpdatesEnabled(False)
try:
yield
finally:
for item in items:
if hasattr(item, 'setUpdatesEnabled'):
item.setUpdatesEnabled(True)
async def serve_rc_annots(
ipc_key: str,
annot_req_stream: MsgStream,
@ -459,333 +429,6 @@ async def serve_rc_annots(
aids.add(aid)
await annot_req_stream.send(aid)
case {
'cmd': 'batch',
'fqme': fqme,
'timeframe': timeframe,
'rects': list(rect_specs),
'arrows': list(arrow_specs),
'texts': list(text_specs),
'show_individual_arrows': bool(show_individual_arrows),
}:
# batch submission handler - process multiple
# annotations in single IPC round-trip
ds: DisplayState = _dss[fqme]
try:
chart: ChartPlotWidget = {
60: ds.hist_chart,
1: ds.chart,
}[timeframe]
except KeyError:
msg: str = (
f'No chart for timeframe={timeframe}s, '
f'skipping batch annotation'
)
log.error(msg)
await annot_req_stream.send({'error': msg})
continue
cv: ChartView = chart.cv
viz: Viz = chart.get_viz(fqme)
shm = viz.shm
arr = shm.array
result: dict[str, list[int]] = {
'rects': [],
'arrows': [],
'texts': [],
}
profiler = Profiler(
msg=(
f'Batch annotate {len(rect_specs)} gaps '
f'on {fqme}@{timeframe}s'
),
disabled=False,
delayed=False,
)
aids_set: set[int] = ctxs[ipc_key][1]
# build unified gap_specs for GapAnnotations class
from piker.ui._annotate import GapAnnotations
gap_specs: list[dict] = []
n_gaps: int = max(
len(rect_specs),
len(arrow_specs),
)
profiler('setup batch annot creation')
# collect all unique timestamps for vectorized lookup
timestamps: list[float] = []
for rect_spec in rect_specs:
if start_time := rect_spec.get('start_time'):
timestamps.append(start_time)
if end_time := rect_spec.get('end_time'):
timestamps.append(end_time)
for arrow_spec in arrow_specs:
if time_val := arrow_spec.get('time'):
timestamps.append(time_val)
profiler('collect `timestamps: list` complet!')
# build timestamp -> row mapping using binary search
# O(m log n) instead of O(n*m) with np.isin
time_to_row: dict[float, dict] = {}
if timestamps:
import numpy as np
time_arr = arr['time']
ts_array = np.array(timestamps)
# binary search for each timestamp in sorted time array
search_indices = np.searchsorted(
time_arr,
ts_array,
)
profiler('`np.searchsorted()` complete!')
# vectorized bounds check and exact match verification
valid_mask = (
(search_indices < len(arr))
& (time_arr[search_indices] == ts_array)
)
# get all valid indices and timestamps
valid_indices = search_indices[valid_mask]
valid_timestamps = ts_array[valid_mask]
# use fancy indexing to get all rows at once
matched_rows = arr[valid_indices]
# extract fields to plain arrays BEFORE dict building
indices_arr = matched_rows['index'].astype(float)
opens_arr = matched_rows['open'].astype(float)
closes_arr = matched_rows['close'].astype(float)
profiler('extracted field arrays')
# build dict from plain arrays (much faster)
time_to_row: dict[float, dict] = {
float(ts): {
'index': idx,
'open': opn,
'close': cls,
}
for (
ts,
idx,
opn,
cls,
) in zip(
valid_timestamps,
indices_arr,
opens_arr,
closes_arr,
)
}
profiler('`time_to_row` creation complete!')
profiler(f'built timestamp lookup for {len(timestamps)} times')
# build gap_specs from rect+arrow specs
for i in range(n_gaps):
gap_spec: dict = {}
# get rect spec for this gap
if i < len(rect_specs):
rect_spec: dict = rect_specs[i].copy()
start_time = rect_spec.get('start_time')
end_time = rect_spec.get('end_time')
if (
start_time is not None
and end_time is not None
):
# lookup from pre-built mapping
start_row = time_to_row.get(start_time)
end_row = time_to_row.get(end_time)
if (
start_row is None
or end_row is None
):
log.warning(
f'Timestamp lookup failed for '
f'gap[{i}], skipping'
)
continue
start_idx = start_row['index']
end_idx = end_row['index']
start_close = start_row['close']
end_open = end_row['open']
from_idx: float = 0.16 - 0.06
gap_spec['start_pos'] = (
start_idx + 1 - from_idx,
start_close,
)
gap_spec['end_pos'] = (
end_idx + from_idx,
end_open,
)
gap_spec['start_time'] = start_time
gap_spec['end_time'] = end_time
gap_spec['color'] = rect_spec.get(
'color',
'dad_blue',
)
# get arrow spec for this gap
if i < len(arrow_specs):
arrow_spec: dict = arrow_specs[i].copy()
x: float = float(arrow_spec.get('x', 0))
y: float = float(arrow_spec.get('y', 0))
time_val: float|None = arrow_spec.get('time')
# timestamp-based index lookup (only for x, NOT y!)
# y is already set to the PREVIOUS bar's close
if time_val is not None:
arrow_row = time_to_row.get(time_val)
if arrow_row is not None:
x = arrow_row['index']
# NOTE: do NOT update y! it's the
# previous bar's close, not current
else:
log.warning(
f'Arrow timestamp {time_val} not '
f'found for gap[{i}], using x={x}'
)
gap_spec['arrow_x'] = x
gap_spec['arrow_y'] = y
gap_spec['time'] = time_val
gap_spec['pointing'] = arrow_spec.get(
'pointing',
'down',
)
gap_spec['alpha'] = arrow_spec.get('alpha', 169)
gap_specs.append(gap_spec)
profiler(f'built {len(gap_specs)} gap_specs')
# create single GapAnnotations item for all gaps
if gap_specs:
gaps_item = GapAnnotations(
gap_specs=gap_specs,
array=arr,
color=gap_specs[0].get('color', 'dad_blue'),
alpha=gap_specs[0].get('alpha', 169),
arrow_size=10.0,
fqme=fqme,
timeframe=timeframe,
)
chart.plotItem.addItem(gaps_item)
# register single item for repositioning
aid: int = id(gaps_item)
annots[aid] = gaps_item
aids_set.add(aid)
result['rects'].append(aid)
profiler(
f'created GapAnnotations item for {len(gap_specs)} '
f'gaps'
)
# A/B comparison: optionally create individual arrows
# alongside batch for visual comparison
if show_individual_arrows:
godw = chart.linked.godwidget
arrows: ArrowEditor = ArrowEditor(godw=godw)
for i, spec in enumerate(gap_specs):
if 'arrow_x' not in spec:
continue
aid_str: str = str(uuid4())
arrow: pg.ArrowItem = arrows.add(
plot=chart.plotItem,
uid=aid_str,
x=spec['arrow_x'],
y=spec['arrow_y'],
pointing=spec['pointing'],
color='bracket', # different color
alpha=spec.get('alpha', 169),
headLen=10.0,
headWidth=2.222,
pxMode=True,
)
arrow._abs_x = spec['arrow_x']
arrow._abs_y = spec['arrow_y']
annots[aid_str] = arrow
_editors[aid_str] = arrows
aids_set.add(aid_str)
result['arrows'].append(aid_str)
profiler(
f'created {len(gap_specs)} individual arrows '
f'for comparison'
)
# handle text items separately (less common, keep
# individual items)
n_texts: int = 0
for text_spec in text_specs:
kwargs: dict = text_spec.copy()
text: str = kwargs.pop('text')
x: float = float(kwargs.pop('x'))
y: float = float(kwargs.pop('y'))
time_val: float|None = kwargs.pop('time', None)
# timestamp-based index lookup
if time_val is not None:
matches = arr[arr['time'] == time_val]
if len(matches) > 0:
x = float(matches[0]['index'])
y = float(matches[0]['close'])
color = kwargs.pop('color', 'dad_blue')
anchor = kwargs.pop('anchor', (0, 1))
font_size = kwargs.pop('font_size', None)
text_item: pg.TextItem = pg.TextItem(
text,
color=hcolor(color),
anchor=anchor,
)
if font_size is None:
from ._style import get_fonts
font, font_small = get_fonts()
font_size = font_small.px_size - 1
qfont: QFont = text_item.textItem.font()
qfont.setPixelSize(font_size)
text_item.setFont(qfont)
text_item.setPos(float(x), float(y))
chart.plotItem.addItem(text_item)
text_item._abs_x = float(x)
text_item._abs_y = float(y)
aid: str = str(uuid4())
annots[aid] = text_item
aids_set.add(aid)
result['texts'].append(aid)
n_texts += 1
profiler(
f'created text annotations: {n_texts} texts'
)
profiler.finish()
await annot_req_stream.send(result)
case {
'cmd': 'remove',
'aid': int(aid)|str(aid),
@ -828,26 +471,10 @@ async def serve_rc_annots(
# XXX: reposition all annotations to ensure they
# stay aligned with viz data after reset (eg during
# backfill when abs-index range changes)
chart: ChartPlotWidget = {
60: ds.hist_chart,
1: ds.chart,
}[timeframe]
viz: Viz = chart.get_viz(fqme)
arr = viz.shm.array
n_repositioned: int = 0
for aid, annot in annots.items():
# GapAnnotations batch items have .reposition()
if hasattr(annot, 'reposition'):
annot.reposition(
array=arr,
fqme=fqme,
timeframe=timeframe,
)
n_repositioned += 1
# arrows and text items use abs x,y coords
elif (
if (
hasattr(annot, '_abs_x')
and
hasattr(annot, '_abs_y')
@ -912,21 +539,12 @@ async def remote_annotate(
finally:
# ensure all annots for this connection are deleted
# on any final teardown
profiler = Profiler(
msg=f'Annotation teardown for ctx {ctx.cid}',
disabled=False,
ms_threshold=0.0,
)
(_ctx, aids) = _ctxs[ctx.cid]
assert _ctx is ctx
profiler(f'got {len(aids)} aids to remove')
for aid in aids:
annot: QGraphicsItem = _annots[aid]
assert rm_annot(annot)
profiler(f'removed all {len(aids)} annotations')
class AnnotCtl(Struct):
'''
@ -1128,64 +746,6 @@ class AnnotCtl(Struct):
)
return aid
async def add_batch(
self,
fqme: str,
timeframe: float,
rects: list[dict]|None = None,
arrows: list[dict]|None = None,
texts: list[dict]|None = None,
show_individual_arrows: bool = False,
from_acm: bool = False,
) -> dict[str, list[int]]:
'''
Batch submit multiple annotations in single IPC msg for
much faster remote annotation vs. per-annot round-trips.
Returns dict of annotation IDs:
{
'rects': [aid1, aid2, ...],
'arrows': [aid3, aid4, ...],
'texts': [aid5, aid6, ...],
}
'''
ipc: MsgStream = self._get_ipc(fqme)
with trio.fail_after(10):
await ipc.send({
'fqme': fqme,
'cmd': 'batch',
'timeframe': timeframe,
'rects': rects or [],
'arrows': arrows or [],
'texts': texts or [],
'show_individual_arrows': show_individual_arrows,
})
result: dict = await ipc.receive()
match result:
case {'error': str(msg)}:
log.error(msg)
return {
'rects': [],
'arrows': [],
'texts': [],
}
# register all AIDs with their IPC streams
for aid_list in result.values():
for aid in aid_list:
self._ipcs[aid] = ipc
if not from_acm:
self._annot_stack.push_async_callback(
partial(
self.remove,
aid,
)
)
return result
async def add_text(
self,
fqme: str,
@ -1321,14 +881,3 @@ async def open_annot_ctl(
_annot_stack=annots_stack,
)
yield client
# client exited, measure teardown time
teardown_profiler = Profiler(
msg='Client AnnotCtl teardown',
disabled=False,
ms_threshold=0.0,
)
teardown_profiler('exiting annots_stack')
teardown_profiler('annots_stack exited')
teardown_profiler('exiting gather_contexts')
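Both teardown paths above use the same checkpoint-call pattern: instantiate a `Profiler`, then call it with a label at each step. A minimal stand-in with the semantics implied by this diff (not piker's actual implementation) might look like:

    import time

    class MiniProfiler:
        '''
        Checkpoint-style timer: each call logs the delta since
        the previous checkpoint when it exceeds `ms_threshold`.

        '''
        def __init__(
            self,
            msg: str,
            disabled: bool = True,
            ms_threshold: float = 0.0,
        ):
            self.msg = msg
            self.disabled = disabled
            self.ms_threshold = ms_threshold
            self._last: float = time.perf_counter()

        def __call__(self, checkpoint: str) -> None:
            if self.disabled:
                return
            now: float = time.perf_counter()
            delta_ms: float = (now - self._last) * 1e3
            self._last = now
            if delta_ms >= self.ms_threshold:
                print(f'{self.msg}: {checkpoint} +{delta_ms:.3f}ms')

        def finish(self) -> None:
            self('done')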
View File
@ -34,7 +34,6 @@ import uuid
from bidict import bidict
import tractor
from tractor.devx.pformat import ppfmt
import trio
from piker import config
@ -1208,10 +1207,11 @@ async def process_trade_msg(
f'\n'
f'=> CANCELLING ORDER DIALOG <=\n'
# from tractor.devx.pformat import ppfmt
# !TODO LOL, wtf the msg is causing
# a recursion bug!
# -[ ] get this shit on msgspec stat!
f'{ppfmt(broker_msg)}'
# f'{ppfmt(broker_msg)}'
)
# do all the things for a cancel:
# - drop order-msg dialog from client table
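As the TODO above hints, one way to sidestep pretty-printer recursion on raw broker msgs is to decode them into a typed `msgspec.Struct` first; a minimal sketch where `StatusMsg` and its fields are made-up illustrations, not piker's actual wire schema:

    import msgspec

    class StatusMsg(msgspec.Struct, frozen=True):
        # illustrative fields only, not the real piker schema
        name: str             # eg 'status'
        reqid: str | int
        status: str           # eg 'canceled'
        account: str | None = None

    msg = msgspec.convert(
        {'name': 'status', 'reqid': 42, 'status': 'canceled'},
        type=StatusMsg,
    )
    print(msg)  # structs render with a bounded, typed repr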
poetry.lock (generated, mode 100644): 1263 changed lines

File diff suppressed because it is too large.

View File
@ -52,6 +52,7 @@ dependencies = [
"bidict >=0.23.1",
"colorama >=0.4.6, <0.5.0",
"colorlog >=6.7.0, <7.0.0",
"ib-insync >=0.9.86, <0.10.0",
"numpy>=2.0",
"polars >=0.20.6",
"polars-fuzzy-match>=0.1.5",
@ -75,8 +76,6 @@ dependencies = [
"numba>=0.61.0",
"pyvnc",
"exchange-calendars>=4.13.1",
"ib-async>=2.1.0",
"aeventkit>=2.1.0", # XXX, imports as eventkit?
]
# ------ dependencies ------
# NOTE, by default we ship only a "headless" deps set bc
@ -106,7 +105,7 @@ default-groups = [
[dependency-groups]
uis = [
"pyqtgraph >= 0.14.0",
"pyqtgraph",
"qdarkstyle >=3.0.2, <4.0.0",
"pyqt6 >=6.7.0, <7.0.0",
@ -193,12 +192,9 @@ include = ["piker"]
[tool.uv.sources]
pyqtgraph = { git = "https://github.com/pikers/pyqtgraph.git" }
tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch = "piker_pin" }
pyvnc = { git = "https://github.com/regulad/pyvnc.git" }
# pyqtgraph = { git = "https://github.com/pyqtgraph/pyqtgraph.git", branch = 'master' }
# pyqtgraph = { path = '../pyqtgraph', editable = true }
# ?TODO, resync our fork?
# pyqtgraph = { git = "https://github.com/pikers/pyqtgraph.git" }
# to get fancy next-cmd/suggestion feats prior to 0.22.2 B)
# https://github.com/xonsh/xonsh/pull/6037
uv.lock: 53 changed lines

View File
@ -10,18 +10,6 @@ resolution-markers = [
"python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'",
]
[[package]]
name = "aeventkit"
version = "2.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5c/8c/c08db1a1910f8d04ec6a524de522edd0bac181bdf94dbb01183f7685cd77/aeventkit-2.1.0.tar.gz", hash = "sha256:4e7d81bb0a67227121da50a23e19e5bbf13eded541a9f4857eeb6b7b857b738a", size = 24703, upload-time = "2025-06-22T15:54:03.961Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8d/8c/2a4b912b1afa201b25bdd0f5bccf96d5a8b5dccb6131316a8dd2d9cabcc1/aeventkit-2.1.0-py3-none-any.whl", hash = "sha256:962d43f79e731ac43527f2d0defeed118e6dbaa85f1487f5667540ebb8f00729", size = 26678, upload-time = "2025-06-22T15:54:02.141Z" },
]
[[package]]
name = "aiodns"
version = "3.6.0"
@ -408,6 +396,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/56/01/6f77d042b83260ef9ed73ea9647dfa0ef8414eba0a3fc57a509a088ad39b/elasticsearch-8.19.2-py3-none-any.whl", hash = "sha256:c16ba20c4c76cf6952e836dae7f4e724e00ba7bf31b94b79472b873683accdd4", size = 949706, upload-time = "2025-10-28T16:36:41.003Z" },
]
[[package]]
name = "eventkit"
version = "1.0.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/16/1e/0fac4e45d71ace143a2673ec642701c3cd16f833a0e77a57fa6a40472696/eventkit-1.0.3.tar.gz", hash = "sha256:99497f6f3c638a50ff7616f2f8cd887b18bbff3765dc1bd8681554db1467c933", size = 28320, upload-time = "2023-12-11T11:41:35.339Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/93/d9/7497d650b69b420e1a913329a843e16c715dac883750679240ef00a921e2/eventkit-1.0.3-py3-none-any.whl", hash = "sha256:0e199527a89aff9d195b9671ad45d2cc9f79ecda0900de8ecfb4c864d67ad6a2", size = 31837, upload-time = "2023-12-11T11:41:33.358Z" },
]
[[package]]
name = "exceptiongroup"
version = "1.3.1"
@ -630,17 +630,16 @@ wheels = [
]
[[package]]
name = "ib-async"
version = "2.1.0"
name = "ib-insync"
version = "0.9.86"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aeventkit" },
{ name = "eventkit" },
{ name = "nest-asyncio" },
{ name = "tzdata" },
]
sdist = { url = "https://files.pythonhosted.org/packages/30/4d/dfc1da8224c3ffcdcd668da7283c4e5f14239a07f83ea66af99700296fc3/ib_async-2.1.0.tar.gz", hash = "sha256:6a03a87d6c06acacb0217a5bea60a8a168ecd5b5a7e86e1c73678d5b48cbc796", size = 87678, upload-time = "2025-12-08T01:42:32.004Z" }
sdist = { url = "https://files.pythonhosted.org/packages/55/bb/733d5c81c8c2f54e90898afc7ff3a99f4d53619e6917c848833f9cc1ab56/ib_insync-0.9.86.tar.gz", hash = "sha256:73af602ca2463f260999970c5bd937b1c4325e383686eff301743a4de08d381e", size = 69859, upload-time = "2023-07-02T12:43:31.968Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/80/e7/8f33801788c66f15e9250957ff7f53a8000843f79af1a3ed7a96def0e96b/ib_async-2.1.0-py3-none-any.whl", hash = "sha256:f6d8b991bdbd6dd38e700c61b3dced06ebe0f14be4e5263e2ef10ba10b88d434", size = 88876, upload-time = "2025-12-08T01:42:30.883Z" },
{ url = "https://files.pythonhosted.org/packages/8f/f3/28ea87be30570f4d6b8fd24380d12fa74e59467ee003755e76aeb29082b8/ib_insync-0.9.86-py3-none-any.whl", hash = "sha256:a61fbe56ff405d93d211dad8238d7300de76dd6399eafc04c320470edec9a4a4", size = 72980, upload-time = "2023-07-02T12:43:29.928Z" },
]
[[package]]
@ -1100,7 +1099,6 @@ name = "piker"
version = "0.1.0a0.dev0"
source = { editable = "." }
dependencies = [
{ name = "aeventkit" },
{ name = "async-generator" },
{ name = "attrs" },
{ name = "bidict" },
@ -1109,7 +1107,7 @@ dependencies = [
{ name = "cryptofeed" },
{ name = "exchange-calendars" },
{ name = "httpx" },
{ name = "ib-async" },
{ name = "ib-insync" },
{ name = "msgspec" },
{ name = "numba" },
{ name = "numpy" },
@ -1177,7 +1175,6 @@ uis = [
[package.metadata]
requires-dist = [
{ name = "aeventkit", specifier = ">=2.1.0" },
{ name = "async-generator", specifier = ">=1.10,<2.0.0" },
{ name = "attrs", specifier = ">=23.1.0,<24.0.0" },
{ name = "bidict", specifier = ">=0.23.1" },
@ -1186,7 +1183,7 @@ requires-dist = [
{ name = "cryptofeed", specifier = ">=2.4.0,<3.0.0" },
{ name = "exchange-calendars", specifier = ">=4.13.1" },
{ name = "httpx", specifier = ">=0.27.0,<0.28.0" },
{ name = "ib-async", specifier = ">=2.1.0" },
{ name = "ib-insync", specifier = ">=0.9.86,<0.10.0" },
{ name = "msgspec", specifier = ">=0.19.0,<0.20" },
{ name = "numba", specifier = ">=0.61.0" },
{ name = "numpy", specifier = ">=2.0" },
@ -1221,7 +1218,7 @@ dev = [
{ name = "prompt-toolkit", specifier = "==3.0.40" },
{ name = "pyperclip", specifier = ">=1.9.0" },
{ name = "pyqt6", specifier = ">=6.7.0,<7.0.0" },
{ name = "pyqtgraph", specifier = ">=0.14.0" },
{ name = "pyqtgraph", git = "https://github.com/pikers/pyqtgraph.git" },
{ name = "pytest" },
{ name = "qdarkstyle", specifier = ">=3.0.2,<4.0.0" },
{ name = "rapidfuzz", specifier = ">=3.2.0,<4.0.0" },
@ -1239,7 +1236,7 @@ repl = [
testing = [{ name = "pytest" }]
uis = [
{ name = "pyqt6", specifier = ">=6.7.0,<7.0.0" },
{ name = "pyqtgraph", specifier = ">=0.14.0" },
{ name = "pyqtgraph", git = "https://github.com/pikers/pyqtgraph.git" },
{ name = "qdarkstyle", specifier = ">=3.0.2,<4.0.0" },
{ name = "rapidfuzz", specifier = ">=3.2.0,<4.0.0" },
]
@ -1606,15 +1603,11 @@ wheels = [
[[package]]
name = "pyqtgraph"
version = "0.14.0"
source = { registry = "https://pypi.org/simple" }
version = "0.12.3"
source = { git = "https://github.com/pikers/pyqtgraph.git#373f9561ea8ec4fef9b4e8bdcdd4bbf372dd6512" }
dependencies = [
{ name = "colorama" },
{ name = "numpy" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/32/36/4c242f81fdcbfa4fb62a5645f6af79191f4097a0577bd5460c24f19cc4ef/pyqtgraph-0.14.0-py3-none-any.whl", hash = "sha256:7abb7c3e17362add64f8711b474dffac5e7b0e9245abdf992e9a44119b7aa4f5", size = 1924755, upload-time = "2025-11-16T19:43:22.251Z" },
]
[[package]]
name = "pyreadline3"