diff --git a/piker/brokers/ib/README.rst b/piker/brokers/ib/README.rst
index d56b52ca..301ab208 100644
--- a/piker/brokers/ib/README.rst
+++ b/piker/brokers/ib/README.rst
@@ -2,7 +2,7 @@
--------------
more or less the "everything broker" for traditional and international
markets. they are the "go to" provider for automatic retail trading
-and we interface to their APIs using the `ib_insync` project.
+and we interface to their APIs using the `ib_async` project.
status
******
diff --git a/piker/brokers/ib/__init__.py b/piker/brokers/ib/__init__.py
index e792eb25..e07ad482 100644
--- a/piker/brokers/ib/__init__.py
+++ b/piker/brokers/ib/__init__.py
@@ -22,7 +22,7 @@ Sub-modules within break into the core functionalities:
- ``broker.py`` part for orders / trading endpoints
- ``feed.py`` for real-time data feed endpoints
- ``api.py`` for the core API machinery which is ``trio``-ized
- wrapping around ``ib_insync``.
+ wrapping around ``ib_async``.
"""
from .api import (
diff --git a/piker/brokers/ib/_flex_reports.py b/piker/brokers/ib/_flex_reports.py
index e8c22ecb..4b8110cc 100644
--- a/piker/brokers/ib/_flex_reports.py
+++ b/piker/brokers/ib/_flex_reports.py
@@ -111,7 +111,7 @@ def load_flex_trades(
) -> dict[str, Any]:
- from ib_insync import flexreport, util
+ from ib_async import flexreport, util
conf = get_config()
@@ -154,8 +154,7 @@ def load_flex_trades(
trade_entries,
)
- ledger_dict: dict | None = None
-
+ ledger_dict: dict|None
for acctid in trades_by_account:
trades_by_id = trades_by_account[acctid]
diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py
index adb1bb89..b467ee0e 100644
--- a/piker/brokers/ib/api.py
+++ b/piker/brokers/ib/api.py
@@ -15,7 +15,8 @@
# along with this program. If not, see .
'''
-Core API client machinery; mostly sane/useful wrapping around `ib_insync`..
+Core API client machinery; mostly sane/useful wrapping around
+`ib_async`..
'''
from __future__ import annotations
@@ -57,7 +58,7 @@ from pendulum import (
Interval,
)
from eventkit import Event
-from ib_insync import (
+from ib_async import (
client as ib_client,
IB,
Contract,
@@ -143,7 +144,7 @@ _bar_sizes = {
_show_wap_in_history: bool = False
# overrides to sidestep pretty questionable design decisions in
-# ``ib_insync``:
+# ``ib_async``:
class NonShittyWrapper(Wrapper):
def tcpDataArrived(self):
"""Override time stamps to be floats for now.
@@ -183,7 +184,7 @@ class NonShittyIB(IB):
'''
def __init__(self):
- # override `ib_insync` internal loggers so we can see wtf
+ # override `ib_async` internal loggers so we can see wtf
# it's doing..
self._logger = get_logger(
name=__name__,
@@ -194,7 +195,7 @@ class NonShittyIB(IB):
self.wrapper = NonShittyWrapper(self)
self.client = ib_client.Client(self.wrapper)
self.client._logger = get_logger(
- name='ib_insync.client',
+ name='ib_async.client',
)
# self.errorEvent += self._onError
@@ -767,25 +768,48 @@ class Client:
expiry: str = '',
front: bool = False,
- ) -> Contract:
+ ) -> Contract|list[Contract]:
'''
Get an unqualifed contract for the current "continous"
future.
+ When input params result in a so-called "ambiguous contract"
+ situation, we return the list of all matches provided by,
+
+ `IB.qualifyContractsAsync(..., returnAll=True)`
+
'''
# it's the "front" contract returned here
if front:
- con = (await self.ib.qualifyContractsAsync(
- ContFuture(symbol, exchange=exchange)
- ))[0]
- else:
- con = (await self.ib.qualifyContractsAsync(
- Future(
- symbol,
- exchange=exchange,
- lastTradeDateOrContractMonth=expiry,
+ cons = (
+ await self.ib.qualifyContractsAsync(
+ ContFuture(symbol, exchange=exchange),
+ returnAll=True,
)
- ))[0]
+ )
+ else:
+ cons = (
+ await self.ib.qualifyContractsAsync(
+ Future(
+ symbol,
+ exchange=exchange,
+ lastTradeDateOrContractMonth=expiry,
+ ),
+ returnAll=True,
+ )
+ )
+
+ con = cons[0]
+ if isinstance(con, list):
+ log.warning(
+ f'{len(con)!r} futes cons matched for input params,\n'
+ f'symbol={symbol!r}\n'
+ f'exchange={exchange!r}\n'
+ f'expiry={expiry!r}\n'
+ f'\n'
+ f'cons:\n'
+ f'{con!r}\n'
+ )
return con
@@ -878,7 +902,7 @@ class Client:
currency='USD',
exchange='PAXOS',
)
- # XXX, on `ib_insync` when first tried this,
+ # XXX, on `ib_async` when first tried this,
# > Error 10299, reqId 141: Expected what to show is
# > AGGTRADES, please use that instead of TRADES.,
# > contract: Crypto(conId=479624278, symbol='BTC',
@@ -910,11 +934,17 @@ class Client:
)
exch = 'SMART' if not exch else exch
- contracts: list[Contract] = [con]
+ if isinstance(con, list):
+ contracts: list[Contract] = con
+ else:
+ contracts: list[Contract] = [con]
+
if qualify:
try:
contracts: list[Contract] = (
- await self.ib.qualifyContractsAsync(con)
+ await self.ib.qualifyContractsAsync(
+ *contracts
+ )
)
except RequestError as err:
msg = err.message
@@ -992,7 +1022,6 @@ class Client:
async def get_sym_details(
self,
fqme: str,
-
) -> tuple[
Contract,
ContractDetails,
@@ -1092,7 +1121,7 @@ class Client:
size: int,
account: str, # if blank the "default" tws account is used
- # XXX: by default 0 tells ``ib_insync`` methods that there is no
+ # XXX: by default 0 tells ``ib_async`` methods that there is no
# existing order so ask the client to create a new one (which it
# seems to do by allocating an int counter - collision prone..)
reqid: int = None,
@@ -1281,7 +1310,7 @@ async def load_aio_clients(
port: int = None,
client_id: int = 6116,
- # the API TCP in `ib_insync` connection can be flaky af so instead
+ # the API TCP in `ib_async` connection can be flaky af so instead
# retry a few times to get the client going..
connect_retries: int = 3,
connect_timeout: float = 30, # in case a remote-host
@@ -1289,7 +1318,7 @@ async def load_aio_clients(
) -> dict[str, Client]:
'''
- Return an ``ib_insync.IB`` instance wrapped in our client API.
+ Return an ``ib_async.IB`` instance wrapped in our client API.
Client instances are cached for later use.
@@ -1631,6 +1660,7 @@ async def open_aio_client_method_relay(
) -> None:
+ # with tractor.devx.maybe_open_crash_handler() as _bxerr:
# sync with `open_client_proxy()` caller
chan.started_nowait(client)
@@ -1640,7 +1670,11 @@ async def open_aio_client_method_relay(
# relay all method requests to ``asyncio``-side client and deliver
# back results
while not chan._to_trio._closed: # <- TODO, better check like `._web_bs`?
- msg: tuple[str, dict]|dict|None = await chan.get()
+ msg: (
+ None
+ |tuple[str, dict]
+ |dict
+ ) = await chan.get()
match msg:
case None: # termination sentinel
log.info('asyncio `Client` method-proxy SHUTDOWN!')
@@ -1742,7 +1776,7 @@ async def get_client(
) -> Client:
'''
- Init the ``ib_insync`` client in another actor and return
+ Init the ``ib_async`` client in another actor and return
a method proxy to it.
'''
diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py
index e8514958..0705fc21 100644
--- a/piker/brokers/ib/broker.py
+++ b/piker/brokers/ib/broker.py
@@ -35,14 +35,14 @@ from trio_typing import TaskStatus
import tractor
from tractor.to_asyncio import LinkedTaskChannel
from tractor import trionics
-from ib_insync.contract import (
+from ib_async.contract import (
Contract,
)
-from ib_insync.order import (
+from ib_async.order import (
Trade,
OrderStatus,
)
-from ib_insync.objects import (
+from ib_async.objects import (
Fill,
Execution,
CommissionReport,
@@ -181,7 +181,7 @@ async def handle_order_requests(
# validate
order = BrokerdOrder(**request_msg)
- # XXX: by default 0 tells ``ib_insync`` methods that
+ # XXX: by default 0 tells ``ib_async`` methods that
# there is no existing order so ask the client to create
# a new one (which it seems to do by allocating an int
# counter - collision prone..)
@@ -237,7 +237,7 @@ async def recv_trade_updates(
) -> None:
'''
Receive and relay order control and positioning related events
- from `ib_insync`, pack as tuples and push over mem-chan to our
+ from `ib_async`, pack as tuples and push over mem-chan to our
trio relay task for processing and relay to EMS.
'''
@@ -303,7 +303,7 @@ async def recv_trade_updates(
# much more then a few more pnl fields..
# 'updatePortfolioEvent',
- # XXX: these all seem to be weird ib_insync internal
+ # XXX: these all seem to be weird ib_async internal
# events that we probably don't care that much about
# given the internal design is wonky af..
# 'newOrderEvent',
@@ -499,7 +499,7 @@ async def open_trade_event_stream(
] = trio.TASK_STATUS_IGNORED,
):
'''
- Proxy wrapper for starting trade event stream from ib_insync
+ Proxy wrapper for starting trade event stream from ib_async
which spawns an asyncio task that registers an internal closure
(`push_tradies()`) which in turn relays trading events through
a `tractor.to_asyncio.LinkedTaskChannel` which the parent
@@ -991,6 +991,9 @@ _statuses: dict[str, str] = {
# TODO: see a current ``ib_insync`` issue around this:
# https://github.com/erdewit/ib_insync/issues/363
'Inactive': 'pending',
+
+ # XXX, uhh wut the heck is this?
+ 'ValidationError': 'error',
}
_action_map = {
@@ -1063,8 +1066,19 @@ async def deliver_trade_events(
# TODO: for some reason we can receive a ``None`` here when the
# ib-gw goes down? Not sure exactly how that's happening looking
# at the eventkit code above but we should probably handle it...
+ event_name: str
+ item: (
+ Trade
+ |tuple[Trade, Fill]
+ |CommissionReport
+ |IbPosition
+ |dict
+ )
async for event_name, item in trade_event_stream:
- log.info(f'Relaying `{event_name}`:\n{pformat(item)}')
+ log.info(
+ f'Relaying {event_name!r}:\n'
+ f'{pformat(item)}\n'
+ )
match event_name:
case 'orderStatusEvent':
@@ -1075,11 +1089,12 @@ async def deliver_trade_events(
trade: Trade = item
reqid: str = str(trade.order.orderId)
status: OrderStatus = trade.orderStatus
- status_str: str = _statuses[status.status]
+ status_str: str = _statuses.get(
+ status.status,
+ 'error',
+ )
remaining: float = status.remaining
- if (
- status_str == 'filled'
- ):
+ if status_str == 'filled':
fill: Fill = trade.fills[-1]
execu: Execution = fill.execution
@@ -1110,6 +1125,12 @@ async def deliver_trade_events(
# all units were cleared.
status_str = 'closed'
+ elif status_str == 'error':
+ log.error(
+ f'IB reported error status for order ??\n'
+ f'{status.status!r}\n'
+ )
+
# skip duplicate filled updates - we get the deats
# from the execution details event
msg = BrokerdStatus(
@@ -1270,13 +1291,23 @@ async def deliver_trade_events(
case 'error':
# NOTE: see impl deats in
# `Client.inline_errors()::push_err()`
- err: dict = item
+ err: dict|str = item
- # never relay errors for non-broker related issues
+ # std case, never relay errors for non-order-control
+ # related issues.
# https://interactivebrokers.github.io/tws-api/message_codes.html
- code: int = err['error_code']
- reason: str = err['reason']
- reqid: str = str(err['reqid'])
+ if isinstance(err, dict):
+ code: int = err['error_code']
+ reason: str = err['reason']
+ reqid: str = str(err['reqid'])
+
+ # XXX, sometimes you'll get just a `str` of the form,
+ # '[code 104] connection failed' or something..
+ elif isinstance(err, str):
+ code_part, _, reason = err.rpartition(']')
+ if code_part:
+ _, _, code = code_part.partition('[code')
+ reqid: str = ''
# "Warning:" msg codes,
# https://interactivebrokers.github.io/tws-api/message_codes.html#warning_codes
diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py
index 0576b9e5..fc273c07 100644
--- a/piker/brokers/ib/feed.py
+++ b/piker/brokers/ib/feed.py
@@ -36,7 +36,7 @@ from typing import (
)
from async_generator import aclosing
-import ib_insync as ibis
+import ib_async as ibis
import numpy as np
from pendulum import (
now,
@@ -100,7 +100,7 @@ tick_types = {
5: 'size',
8: 'volume',
- # ``ib_insync`` already packs these into
+ # `ib_async` already packs these into
# quotes under the following fields.
55: 'trades_per_min', # `'tradeRate'`
56: 'vlm_per_min', # `'volumeRate'`
@@ -201,6 +201,15 @@ async def open_history_client(
fqme,
timeframe,
end_dt=end_dt,
+
+ # XXX WARNING, we don't actually use this inside
+ # `Client.bars()` since it isn't really supported,
+ # the API instead supports a "duration" of time style
+ # from the `end_dt` (or at least that was the best
+ # way to get it working sanely)..
+ #
+ # SO, with that in mind be aware that any downstream
+ # logic based on this may be mostly futile Xp
start_dt=start_dt,
)
latency = time.time() - query_start
@@ -278,19 +287,27 @@ async def open_history_client(
trimmed_bars = bars_array[
bars_array['time'] >= start_dt.timestamp()
]
- if (
- trimmed_first_dt := from_timestamp(trimmed_bars['time'][0])
- !=
- start_dt
- ):
- # TODO! rm this once we're more confident it never hits!
- # breakpoint()
- raise RuntimeError(
- f'OHLC-bars array start is gt `start_dt` limit !!\n'
- f'start_dt: {start_dt}\n'
- f'first_dt: {first_dt}\n'
- f'trimmed_first_dt: {trimmed_first_dt}\n'
- )
+ # XXX, should NEVER get HERE!
+ if trimmed_bars.size:
+ trimmed_first_dt: datetime = from_timestamp(trimmed_bars['time'][0])
+ if (
+ trimmed_first_dt
+ >=
+ start_dt
+ ):
+ msg: str = (
+ f'OHLC-bars array start is gt `start_dt` limit !!\n'
+ f'start_dt: {start_dt}\n'
+ f'first_dt: {first_dt}\n'
+ f'trimmed_first_dt: {trimmed_first_dt}\n'
+ f'\n'
+ f'Delivering shorted frame of {trimmed_bars.size!r}\n'
+ )
+ log.warning(msg)
+ # TODO! rm this once we're more confident it
+ # never breaks anything (in the caller)!
+ # breakpoint()
+ # raise RuntimeError(msg)
# XXX, overwrite with start_dt-limited frame
bars_array = trimmed_bars
@@ -304,7 +321,7 @@ async def open_history_client(
# TODO: it seems like we can do async queries for ohlc
# but getting the order right still isn't working and I'm not
# quite sure why.. needs some tinkering and probably
- # a lookthrough of the `ib_insync` machinery, for eg. maybe
+ # a lookthrough of the `ib_async` machinery, for eg. maybe
# we have to do the batch queries on the `asyncio` side?
yield (
get_hist,
@@ -1051,6 +1068,21 @@ def normalize(
# ticker.rtTime.timestamp) / 1000.
data.pop('rtTime')
+ # XXX, `ib_async` seems to set a
+ # `'timezone': datetime.timezone.utc` in this `dict`
+ # which is NOT IPC serializeable sin codec!
+ #
+ # pretty sure we don't need any of this field for now anyway?
+ data.pop('defaults')
+
+ if lts := data.get('lastTimeStamp'):
+ lts.replace(tzinfo=None)
+ log.warning(
+ f'Stripping `.tzinfo` from datetime\n'
+ f'{lts}\n'
+ )
+ # breakpoint()
+
return data
@@ -1227,7 +1259,7 @@ async def stream_quotes(
):
# ?TODO? can we rm this - particularly for `ib_async`?
# ugh, clear ticks since we've consumed them
- # (ahem, ib_insync is stateful trash)
+ # (ahem, ib_async is stateful trash)
# first_ticker.ticks = []
# only on first entry at feed boot up
diff --git a/piker/brokers/ib/ledger.py b/piker/brokers/ib/ledger.py
index dc23748d..d8510f8b 100644
--- a/piker/brokers/ib/ledger.py
+++ b/piker/brokers/ib/ledger.py
@@ -36,7 +36,7 @@ from pendulum import (
parse,
from_timestamp,
)
-from ib_insync import (
+from ib_async import (
Contract,
Commodity,
Fill,
diff --git a/piker/brokers/ib/symbols.py b/piker/brokers/ib/symbols.py
index a9ca0594..d74f494d 100644
--- a/piker/brokers/ib/symbols.py
+++ b/piker/brokers/ib/symbols.py
@@ -23,6 +23,7 @@ from contextlib import (
nullcontext,
)
from decimal import Decimal
+from functools import partial
import time
from typing import (
Awaitable,
@@ -30,8 +31,9 @@ from typing import (
)
from rapidfuzz import process as fuzzy
-import ib_insync as ibis
+import ib_async as ibis
import tractor
+from tractor.devx.pformat import ppfmt
import trio
from piker.accounting import (
@@ -215,18 +217,19 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
f'{ib_client}\n'
)
- last = time.time()
+ last: float = time.time()
async for pattern in stream:
log.info(f'received {pattern}')
now: float = time.time()
+ # TODO? check this is no longer true?
# this causes tractor hang...
# assert 0
assert pattern, 'IB can not accept blank search pattern'
# throttle search requests to no faster then 1Hz
- diff = now - last
+ diff: float = now - last
if diff < 1.0:
log.debug('throttle sleeping')
await trio.sleep(diff)
@@ -237,11 +240,12 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
if (
not pattern
- or pattern.isspace()
-
+ or
+ pattern.isspace()
+ or
# XXX: not sure if this is a bad assumption but it
# seems to make search snappier?
- or len(pattern) < 1
+ len(pattern) < 1
):
log.warning('empty pattern received, skipping..')
@@ -254,36 +258,58 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
# XXX: this unblocks the far end search task which may
# hold up a multi-search nursery block
await stream.send({})
-
continue
- log.info(f'searching for {pattern}')
+ log.info(
+ f'Searching for FQME with,\n'
+ f'pattern: {pattern!r}\n'
+ )
- last = time.time()
+ last: float = time.time()
- # async batch search using api stocks endpoint and module
- # defined adhoc symbol set.
- stock_results = []
+ # async batch search using api stocks endpoint and
+ # module defined adhoc symbol set.
+ stock_results: list[dict] = []
async def extend_results(
- target: Awaitable[list]
+ # ?TODO, how to type async-fn!?
+ target: Awaitable[list],
+ pattern: str,
+ **kwargs,
) -> None:
try:
- results = await target
+ results = await target(
+ pattern=pattern,
+ **kwargs,
+ )
+ client_repr: str = proxy._aio_ns.ib.client.__class__.__name__
+ meth_repr: str = target.keywords["meth"]
+ log.info(
+ f'Search query,\n'
+ f'{client_repr}.{meth_repr}(\n'
+ f' pattern={pattern!r}\n'
+ f' **kwargs={kwargs!r},\n'
+ f') = {ppfmt(list(results))}'
+ # XXX ^ just the keys since that's what
+ # shows in UI results table.
+ )
except tractor.trionics.Lagged:
- print("IB SYM-SEARCH OVERRUN?!?")
+ log.exception(
+ 'IB SYM-SEARCH OVERRUN?!?\n'
+ )
return
stock_results.extend(results)
for _ in range(10):
with trio.move_on_after(3) as cs:
- async with trio.open_nursery() as sn:
- sn.start_soon(
- extend_results,
- proxy.search_symbols(
+ async with trio.open_nursery() as tn:
+ tn.start_soon(
+ partial(
+ extend_results,
pattern=pattern,
- upto=5,
+ target=proxy.search_symbols,
+ upto=10,
),
)
@@ -313,7 +339,9 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
# adhoc_match_results = {i[0]: {} for i in
# adhoc_matches}
- log.debug(f'fuzzy matching stocks {stock_results}')
+ log.debug(
+ f'fuzzy matching stocks {ppfmt(stock_results)}'
+ )
stock_matches = fuzzy.extract(
pattern,
stock_results,
@@ -327,7 +355,10 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
# TODO: we used to deliver contract details
# {item[2]: item[0] for item in stock_matches}
- log.debug(f"sending matches: {matches.keys()}")
+ log.debug(
+ f'Sending final matches\n'
+ f'{matches.keys()}'
+ )
await stream.send(matches)
@@ -522,7 +553,11 @@ async def get_mkt_info(
if atype == 'commodity':
venue: str = 'cmdty'
else:
- venue = con.primaryExchange or con.exchange
+ venue: str = (
+ con.primaryExchange
+ or
+ con.exchange
+ )
price_tick: Decimal = Decimal(str(details.minTick))
ib_min_tick_gt_2: Decimal = Decimal('0.01')
diff --git a/piker/brokers/ib/venues.py b/piker/brokers/ib/venues.py
index 7f73af77..a24635bd 100644
--- a/piker/brokers/ib/venues.py
+++ b/piker/brokers/ib/venues.py
@@ -41,8 +41,9 @@ from pendulum import (
)
if TYPE_CHECKING:
- from ib_insync import (
+ from ib_async import (
TradingSession,
+ Contract,
ContractDetails,
)
from exchange_calendars.exchange_calendars import (
@@ -82,8 +83,20 @@ def has_holiday(
'''
tz: str = con_deats.timeZoneId
- exch: str = con_deats.contract.primaryExchange
- cal: ExchangeCalendar = xcals.get_calendar(exch)
+ con: Contract = con_deats.contract
+ exch: str = (
+ con.primaryExchange
+ or
+ con.exchange
+ )
+
+ # XXX, ad-hoc handle any IB exchange which are non-std
+ # via lookup table..
+ std_exch: dict = {
+ 'ARCA': 'ARCX',
+ }.get(exch, exch)
+
+ cal: ExchangeCalendar = xcals.get_calendar(std_exch)
end: datetime = period.end
# _start: datetime = period.start
# ?TODO, can rm ya?
@@ -236,7 +249,7 @@ def is_venue_closure(
#
# NOTE, this was generated by @guille from a gpt5 prompt
# and was originally thot to be needed before learning about
-# `ib_insync.contract.ContractDetails._parseSessions()` and
+# `ib_async.contract.ContractDetails._parseSessions()` and
# it's downstream meths..
#
# This is still likely useful to keep for now to parse the
diff --git a/piker/tsp/_history.py b/piker/tsp/_history.py
index 99261342..0510593e 100644
--- a/piker/tsp/_history.py
+++ b/piker/tsp/_history.py
@@ -248,10 +248,20 @@ async def maybe_fill_null_segments(
end_dt=end_dt,
)
+ if array.size == 0:
+ log.warning(
+ f'Valid gap from backend ??\n'
+ f'{end_dt} -> {start_dt}\n'
+ )
+ # ?TODO? do we want to remove the nulls and push
+ # the close price here for the gap duration?
+ await tractor.pause()
+ break
+
if (
- frame_start_dt := (
- from_timestamp(array['time'][0])
- ) < backfill_until_dt
+ frame_start_dt := (from_timestamp(array['time'][0]))
+ <
+ backfill_until_dt
):
log.error(
f'Invalid frame_start !?\n'
@@ -613,10 +623,17 @@ async def start_backfill(
else:
log.warning(
- '0 BARS TO PUSH after diff!?\n'
+ f'0 BARS TO PUSH after diff!?\n'
f'{next_start_dt} -> {last_start_dt}'
+ f'\n'
+ f'This might mean we rxed a gap frame which starts BEFORE,\n'
+ f'backfill_until_dt: {backfill_until_dt}\n'
+ f'end_dt_param: {end_dt_param}\n'
+
)
- await tractor.pause()
+ # XXX, to debug it and be sure.
+ # await tractor.pause()
+ break
# Check if we're about to exceed buffer capacity BEFORE
# attempting the push
diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py
index 0f655749..7ce39b86 100644
--- a/piker/ui/order_mode.py
+++ b/piker/ui/order_mode.py
@@ -34,6 +34,7 @@ import uuid
from bidict import bidict
import tractor
+from tractor.devx.pformat import ppfmt
import trio
from piker import config
@@ -1207,11 +1208,10 @@ async def process_trade_msg(
f'\n'
f'=> CANCELLING ORDER DIALOG <=\n'
- # from tractor.devx.pformat import ppfmt
# !TODO LOL, wtf the msg is causing
# a recursion bug!
# -[ ] get this shit on msgspec stat!
- # f'{ppfmt(broker_msg)}'
+ f'{ppfmt(broker_msg)}'
)
# do all the things for a cancel:
# - drop order-msg dialog from client table
diff --git a/pyproject.toml b/pyproject.toml
index 31de030e..2d63059b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -52,7 +52,6 @@ dependencies = [
"bidict >=0.23.1",
"colorama >=0.4.6, <0.5.0",
"colorlog >=6.7.0, <7.0.0",
- "ib-insync >=0.9.86, <0.10.0",
"numpy>=2.0",
"polars >=0.20.6",
"polars-fuzzy-match>=0.1.5",
@@ -76,6 +75,8 @@ dependencies = [
"numba>=0.61.0",
"pyvnc",
"exchange-calendars>=4.13.1",
+ "ib-async>=2.1.0",
+ "aeventkit>=2.1.0", # XXX, imports as eventkit?
]
# ------ dependencies ------
# NOTE, by default we ship only a "headless" deps set bc
diff --git a/uv.lock b/uv.lock
index e0f08915..103915eb 100644
--- a/uv.lock
+++ b/uv.lock
@@ -10,6 +10,18 @@ resolution-markers = [
"python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'",
]
+[[package]]
+name = "aeventkit"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "numpy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/5c/8c/c08db1a1910f8d04ec6a524de522edd0bac181bdf94dbb01183f7685cd77/aeventkit-2.1.0.tar.gz", hash = "sha256:4e7d81bb0a67227121da50a23e19e5bbf13eded541a9f4857eeb6b7b857b738a", size = 24703, upload-time = "2025-06-22T15:54:03.961Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8d/8c/2a4b912b1afa201b25bdd0f5bccf96d5a8b5dccb6131316a8dd2d9cabcc1/aeventkit-2.1.0-py3-none-any.whl", hash = "sha256:962d43f79e731ac43527f2d0defeed118e6dbaa85f1487f5667540ebb8f00729", size = 26678, upload-time = "2025-06-22T15:54:02.141Z" },
+]
+
[[package]]
name = "aiodns"
version = "3.6.0"
@@ -396,18 +408,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/56/01/6f77d042b83260ef9ed73ea9647dfa0ef8414eba0a3fc57a509a088ad39b/elasticsearch-8.19.2-py3-none-any.whl", hash = "sha256:c16ba20c4c76cf6952e836dae7f4e724e00ba7bf31b94b79472b873683accdd4", size = 949706, upload-time = "2025-10-28T16:36:41.003Z" },
]
-[[package]]
-name = "eventkit"
-version = "1.0.3"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "numpy" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/16/1e/0fac4e45d71ace143a2673ec642701c3cd16f833a0e77a57fa6a40472696/eventkit-1.0.3.tar.gz", hash = "sha256:99497f6f3c638a50ff7616f2f8cd887b18bbff3765dc1bd8681554db1467c933", size = 28320, upload-time = "2023-12-11T11:41:35.339Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/93/d9/7497d650b69b420e1a913329a843e16c715dac883750679240ef00a921e2/eventkit-1.0.3-py3-none-any.whl", hash = "sha256:0e199527a89aff9d195b9671ad45d2cc9f79ecda0900de8ecfb4c864d67ad6a2", size = 31837, upload-time = "2023-12-11T11:41:33.358Z" },
-]
-
[[package]]
name = "exceptiongroup"
version = "1.3.1"
@@ -630,16 +630,17 @@ wheels = [
]
[[package]]
-name = "ib-insync"
-version = "0.9.86"
+name = "ib-async"
+version = "2.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
- { name = "eventkit" },
+ { name = "aeventkit" },
{ name = "nest-asyncio" },
+ { name = "tzdata" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/55/bb/733d5c81c8c2f54e90898afc7ff3a99f4d53619e6917c848833f9cc1ab56/ib_insync-0.9.86.tar.gz", hash = "sha256:73af602ca2463f260999970c5bd937b1c4325e383686eff301743a4de08d381e", size = 69859, upload-time = "2023-07-02T12:43:31.968Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/30/4d/dfc1da8224c3ffcdcd668da7283c4e5f14239a07f83ea66af99700296fc3/ib_async-2.1.0.tar.gz", hash = "sha256:6a03a87d6c06acacb0217a5bea60a8a168ecd5b5a7e86e1c73678d5b48cbc796", size = 87678, upload-time = "2025-12-08T01:42:32.004Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/8f/f3/28ea87be30570f4d6b8fd24380d12fa74e59467ee003755e76aeb29082b8/ib_insync-0.9.86-py3-none-any.whl", hash = "sha256:a61fbe56ff405d93d211dad8238d7300de76dd6399eafc04c320470edec9a4a4", size = 72980, upload-time = "2023-07-02T12:43:29.928Z" },
+ { url = "https://files.pythonhosted.org/packages/80/e7/8f33801788c66f15e9250957ff7f53a8000843f79af1a3ed7a96def0e96b/ib_async-2.1.0-py3-none-any.whl", hash = "sha256:f6d8b991bdbd6dd38e700c61b3dced06ebe0f14be4e5263e2ef10ba10b88d434", size = 88876, upload-time = "2025-12-08T01:42:30.883Z" },
]
[[package]]
@@ -1099,6 +1100,7 @@ name = "piker"
version = "0.1.0a0.dev0"
source = { editable = "." }
dependencies = [
+ { name = "aeventkit" },
{ name = "async-generator" },
{ name = "attrs" },
{ name = "bidict" },
@@ -1107,7 +1109,7 @@ dependencies = [
{ name = "cryptofeed" },
{ name = "exchange-calendars" },
{ name = "httpx" },
- { name = "ib-insync" },
+ { name = "ib-async" },
{ name = "msgspec" },
{ name = "numba" },
{ name = "numpy" },
@@ -1175,6 +1177,7 @@ uis = [
[package.metadata]
requires-dist = [
+ { name = "aeventkit", specifier = ">=2.1.0" },
{ name = "async-generator", specifier = ">=1.10,<2.0.0" },
{ name = "attrs", specifier = ">=23.1.0,<24.0.0" },
{ name = "bidict", specifier = ">=0.23.1" },
@@ -1183,7 +1186,7 @@ requires-dist = [
{ name = "cryptofeed", specifier = ">=2.4.0,<3.0.0" },
{ name = "exchange-calendars", specifier = ">=4.13.1" },
{ name = "httpx", specifier = ">=0.27.0,<0.28.0" },
- { name = "ib-insync", specifier = ">=0.9.86,<0.10.0" },
+ { name = "ib-async", specifier = ">=2.1.0" },
{ name = "msgspec", specifier = ">=0.19.0,<0.20" },
{ name = "numba", specifier = ">=0.61.0" },
{ name = "numpy", specifier = ">=2.0" },