Compare commits

4 commits: 176090b234 ... 9ebb977731
| Author | SHA1 | Date |
|---|---|---|
|  | 9ebb977731 |  |
|  | 56b69f97b3 |  |
|  | e2ff43f5c3 |  |
|  | 0d76323a90 |  |
@@ -102,12 +102,12 @@ class AggTrade(Struct, frozen=True):
     a: int # Aggregate trade ID
     p: float # Price
     q: float # Quantity with all the market trades
-    nq: float # Normal quantity without the trades involving RPI orders
     f: int # First trade ID
     l: int # noqa Last trade ID
     T: int # Trade time
     m: bool # Is the buyer the market maker?
     M: bool|None = None # Ignore
+    nq: float|None = None # Normal quantity without the trades involving RPI orders


 async def stream_messages(
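Note on the field reshuffle above: if `Struct` here is `msgspec.Struct` (the `frozen=True` base strongly suggests so, though the import isn't part of this hunk), fields with default values must follow all required fields, which is why making `nq` optional also moves it below `M`. A minimal sketch with a reduced field set and a made-up payload:

```python
# sketch only: a cut-down AggTrade-like struct, assuming msgspec.Struct semantics
import msgspec

class AggTradeSketch(msgspec.Struct, frozen=True):
    a: int    # aggregate trade ID
    p: float  # price
    q: float  # quantity
    nq: float | None = None  # optional: some venues omit the RPI-filtered qty

msg = b'{"a": 1, "p": 42.0, "q": 0.5}'  # payload without `nq`
trade = msgspec.json.decode(msg, type=AggTradeSketch)
assert trade.nq is None
```

Payloads that omit `nq` then decode cleanly instead of raising a missing-field error.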
@@ -1187,7 +1187,7 @@ async def load_aio_clients(
     # the API TCP in `ib_insync` connection can be flaky af so instead
     # retry a few times to get the client going..
     connect_retries: int = 3,
-    connect_timeout: float = 10,
+    connect_timeout: float = 30, # in case a remote-host
     disconnect_on_exit: bool = True,

 ) -> dict[str, Client]:
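The timeout bump reads as headroom for remote-host gateways. The surrounding retry loop isn't part of this hunk, but a generic `trio` sketch of "retry a few times, each attempt bounded by `connect_timeout`" could look like the following (all names are illustrative, not piker's actual API):

```python
# hedged sketch: per-attempt timeout + retry around a flaky async connect
import trio

async def connect_with_retries(
    connect,                      # async callable that opens/returns a client
    connect_retries: int = 3,
    connect_timeout: float = 30,  # generous enough for a remote host
):
    last_err: Exception | None = None
    for _ in range(connect_retries):
        try:
            with trio.fail_after(connect_timeout):
                return await connect()
        except (trio.TooSlowError, OSError) as err:
            last_err = err
    raise last_err  # all attempts failed
```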
@@ -242,6 +242,7 @@ def anal(
     trio.run(main)


+# TODO, move to `.tsp._annotate`
 async def markup_gaps(
     fqme: str,
     timeframe: float,
@@ -288,18 +289,38 @@ async def markup_gaps(
         )
         # XXX: probably a gap in the (newly sorted or de-duplicated)
         # dt-df, so we might need to re-index first..
+        dt: pl.Series = row['dt']
+        dt_prev: pl.Series = row['dt_prev']
         if prev_r.is_empty():
+            # XXX, filter out any special ignore cases,
+            # - UNIX-epoch stamped datums
+            # - first row
+            if (
+                dt_prev.dt.epoch()[0] == 0
+                or
+                dt.dt.epoch()[0] == 0
+            ):
+                log.warning('Skipping row with UNIX epoch timestamp ??')
+                continue
+
+            if wdts[0]['index'][0] == iend: # first row
+                log.warning('Skipping first-row (has no previous obvi) !!')
+                continue
+
+            # XXX, if the previous-row by shm-index is missing,
+            # meaning there is a missing sample (set), get the prior
+            # row by df index and attempt to use it?
+            i_wdts: pl.DataFrame = wdts.with_row_index(name='i')
+            i_row: int = i_wdts.filter(pl.col('index') == iend)['i'][0]
+            prev_row_by_i = wdts[i_row]
+            prev_r: pl.DataFrame = prev_row_by_i
+
+            # debug any missing pre-row
+            if tractor._state.is_debug_mode():
                 await tractor.pause()

         istart: int = prev_r['index'][0]
-        # dt_start_t: float = dt_prev.timestamp()
-        # start_t: float = prev_r['time']
-        # assert (
-        #     dt_start_t
-        #     ==
-        #     start_t
-        # )

         # TODO: implement px-col width measure
         # and ensure at least as many px-cols
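Two polars idioms carry the new skip/fallback logic above: `.dt.epoch()` to spot placeholder rows stamped at the UNIX epoch, and `with_row_index()` (recent polars; older releases spell it `with_row_count()`) to fall back to a positional lookup when a shm `index` value is missing. A self-contained sketch with made-up data, mirroring only the `index`/`dt` column names from the hunk:

```python
# illustrative data only; not piker's real gap frame
from datetime import datetime, timezone
import polars as pl

wdts = pl.DataFrame({
    'index': [100, 101, 103],  # note the hole at shm-index 102
    'dt': [
        datetime(1970, 1, 1, tzinfo=timezone.utc),       # epoch placeholder datum
        datetime(2024, 1, 1, tzinfo=timezone.utc),
        datetime(2024, 1, 1, 0, 1, tzinfo=timezone.utc),
    ],
})

# rows stamped at the UNIX epoch are almost certainly junk datums
junk = wdts.filter(pl.col('dt').dt.epoch(time_unit='s') == 0)
print(junk)

# positional fallback when a row can't be found by its shm 'index':
i_wdts = wdts.with_row_index(name='i')
i_row = i_wdts.filter(pl.col('index') == 103)['i'][0]
print(wdts[i_row])
```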
@@ -358,6 +379,7 @@ def ldshm(
     fqme: str,
     write_parquet: bool = True,
     reload_parquet_to_shm: bool = True,
+    pdb: bool = False, # --pdb passed?

 ) -> None:
     '''
@@ -377,7 +399,7 @@ def ldshm(
         open_piker_runtime(
             'polars_boi',
             enable_modules=['piker.data._sharedmem'],
-            debug_mode=True,
+            debug_mode=pdb,
         ),
         open_storage_client() as (
             mod,
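Passing `debug_mode=pdb` implies the new `pdb: bool` parameter is fed by a `--pdb` CLI flag. The actual CLI layer isn't shown in this diff; purely as an illustration (assuming a `typer`-style command, which may not match piker's real wiring):

```python
# hypothetical CLI wiring sketch, not existing piker code
import typer

app = typer.Typer()

def run_analysis(fqme: str, debug_mode: bool) -> None:
    # stand-in for the real runtime entrypoint
    print(f'analysing {fqme} (debug_mode={debug_mode})')

@app.command()
def ldshm(
    fqme: str,
    pdb: bool = typer.Option(False, '--pdb', help='enable debug/REPL mode'),
) -> None:
    # forward the flag instead of hard-coding debug_mode=True
    run_analysis(fqme, debug_mode=pdb)

if __name__ == '__main__':
    app()
```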
@@ -397,6 +419,9 @@ def ldshm(

         times: np.ndarray = shm.array['time']
         d1: float = float(times[-1] - times[-2])
+        d2: float = 0
+        # XXX, take a median sample rate if sufficient data
+        if times.size > 2:
             d2: float = float(times[-2] - times[-3])
             med: float = np.median(np.diff(times))
         if (
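For reference, the period estimate above restated as a standalone function; the `med = d1` fallback for arrays shorter than three samples is added here only so the sketch runs on its own and is not part of the original hunk:

```python
# standalone restatement of the "median sample period" estimate
import numpy as np

def estimate_period_s(times: np.ndarray) -> float:
    d1 = float(times[-1] - times[-2])
    d2 = 0.0
    med = d1  # fallback for very short arrays (sketch-only guard)
    if times.size > 2:
        d2 = float(times[-2] - times[-3])
        med = float(np.median(np.diff(times)))
    return float(max(d1, d2, med))

# a single mid-series gap barely moves the median-based estimate:
print(estimate_period_s(np.array([0., 60., 120., 240., 300., 360.])))  # -> 60.0
```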
@@ -407,7 +432,6 @@ def ldshm(
             raise ValueError(
                 f'Something is wrong with time period for {shm}:\n{times}'
             )

         period_s: float = float(max(d1, d2, med))

         null_segs: tuple = tsp.get_null_segs(
@@ -417,6 +441,8 @@ def ldshm(

         # TODO: call null-seg fixer somehow?
         if null_segs:
+
+            if tractor._state.is_debug_mode():
                 await tractor.pause()
             # async with (
             #     trio.open_nursery() as tn,
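Same pattern as in `markup_gaps()` above: the hard `await tractor.pause()` is now only reached when the runtime was started in debug mode, so headless runs don't block on a REPL prompt. As a tiny standalone helper (illustrative, not existing piker code):

```python
# sketch of the guarded-pause pattern used throughout this change-set
import tractor

async def maybe_pause() -> None:
    # only drop into the tractor debugger REPL when the actor runtime was
    # started with debug-mode enabled (e.g. via a --pdb flag); otherwise
    # keep going so batch/headless runs never hang on an interactive prompt
    if tractor._state.is_debug_mode():
        await tractor.pause()
```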
@@ -498,8 +524,11 @@ def ldshm(
         if (
             not venue_gaps.is_empty()
             or (
-                period_s < 60
-                and not step_gaps.is_empty()
+                not step_gaps.is_empty()
+                # XXX, i presume i put this bc i was guarding
+                # for ib venue gaps?
+                # and
+                # period_s < 60
             )
         ):
             # write repaired ts to parquet-file?
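The condition now triggers a parquet re-write whenever either venue gaps or step gaps were found, dropping the old `period_s < 60` guard (kept only as a comment). For context, a hypothetical sketch of what a "step gap" scan over a time column can look like in polars (column names made up, not piker's `step_gaps` implementation):

```python
# illustrative step-gap scan: flag rows whose delta to the previous
# timestamp exceeds the expected sample period
import polars as pl

def find_step_gaps(df: pl.DataFrame, period_s: float) -> pl.DataFrame:
    return (
        df
        .with_columns(
            (pl.col('time') - pl.col('time').shift(1)).alias('step')
        )
        .filter(pl.col('step') > period_s)
    )

df = pl.DataFrame({'time': [0., 60., 120., 300., 360.]})
print(find_step_gaps(df, period_s=60.0))  # flags the 300.0 row (180s step)
```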
@@ -237,8 +237,8 @@ class LevelLabel(YAxisLabel):
 class L1Label(LevelLabel):

     text_flags = (
-        QtCore.Qt.TextDontClip
-        | QtCore.Qt.AlignLeft
+        QtCore.Qt.TextFlag.TextDontClip
+        | QtCore.Qt.AlignmentFlag.AlignLeft
     )

     def set_label_str(
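This hunk is the Qt6-style enum migration: bare names such as `Qt.TextDontClip` and `Qt.AlignLeft` are replaced by their fully-qualified enum members. A minimal sketch assuming a PyQt6 binding (piker's actual Qt import shim isn't shown in this diff):

```python
# sketch: Qt6 bindings expose flags under their enum class instead of as
# bare attributes on `Qt`
from PyQt6.QtCore import Qt

# PyQt5-era spellings (no longer available in PyQt6):
#   Qt.TextDontClip, Qt.AlignLeft

# combining members of the same flag class works as before:
align = Qt.AlignmentFlag.AlignLeft | Qt.AlignmentFlag.AlignTop
print(align)

# the text-flag equivalent of the old bare name:
dont_clip = Qt.TextFlag.TextDontClip
print(dont_clip)
```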