Compare commits

...

10 Commits

34 changed files with 2815 additions and 3394 deletions

View File

@ -7,14 +7,57 @@ jobs:
name: Pytest Tests
runs-on: ubuntu-24.04
timeout-minutes: 10
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install system dependencies (Rust + Python)
run: |
sudo apt-get update
sudo apt-get install -y \
build-essential \
pkg-config \
libssl-dev \
clang \
lld \
protobuf-compiler \
make \
wget
- name: Install binaryen 120
run: |
wget https://github.com/WebAssembly/binaryen/releases/download/version_120/binaryen-version_120-x86_64-linux.tar.gz
tar xvf binaryen-version_120-x86_64-linux.tar.gz
echo "$(pwd)/binaryen-version_120/bin" >> $GITHUB_PATH
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v5
- name: Install Rust
run: |
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
rustup install stable
rustup component add rust-src --toolchain stable
rustup target add wasm32-wasip1
- name: Set up Python environment and dependencies
run: |
uv venv .venv --python=3.12
uv pip install -U rust-contracts-builder
echo "$(pwd)/.venv/bin" >> $GITHUB_PATH
- name: Apply required modifications for rust-contracts-builder
run: |
sed -i "s/wasm32-wasi /wasm32-wasip1 /g" .venv/lib/python3.12/site-packages/rust_contracts_builder/__init__.py
sed -i "s/wasm32-wasi\//wasm32-wasip1\//g" .venv/lib/python3.12/site-packages/rust_contracts_builder/__init__.py
- name: Build Rust project
run: rust-contract build
working-directory: tests/contracts/skygpu-contract
- uses: actions/cache@v3
name: Cache venv
with:

3
.gitmodules vendored 100644
View File

@ -0,0 +1,3 @@
[submodule "tests/contracts/skygpu-contract"]
path = tests/contracts/skygpu-contract
url = https://github.com/skygpu/skygpu-contract.git

View File

@ -77,7 +77,8 @@ explicit = true
torch = { index = "torch" }
triton = { index = "torch" }
torchvision = { index = "torch" }
py-leap = { git = "https://github.com/guilledk/py-leap.git", rev = "v0.1a35" }
py-leap = { git = "https://github.com/guilledk/py-leap.git", branch = "struct_unwrap" }
# py-leap = { path = "../py-leap", editable = true }
pytest-dockerctl = { git = "https://github.com/pikers/pytest-dockerctl.git", branch = "g_update" }
[build-system]

View File

@ -16,13 +16,14 @@ async def open_test_worker(
cleos, ipfs_node,
account: str = 'testworker',
permission: str = 'active',
key: str = '5KRPFxF4RJebqPXqRzwStmCaEWeRfp3pR7XUNoA3zCHt5fnPu3s',
hf_token: str = '',
**kwargs
):
config = override_dgpu_config(
account=account,
permission=permission,
key=cleos.private_keys[account],
key=key,
node_url=cleos.endpoint,
ipfs_url=ipfs_node[1].endpoint,
hf_token=hf_token,

View File

@ -165,7 +165,7 @@ def run(*args, **kwargs):
@run.command()
def db():
from .db import open_new_database
from skynet.frontend.chatbot.db import open_new_database
logging.basicConfig(level=logging.INFO)
with open_new_database(cleanup=False) as db_params:
@ -197,125 +197,52 @@ def dgpu(
@run.command()
@click.option('--loglevel', '-l', default='INFO', help='logging level')
@click.option(
'--db-host', '-h', default='localhost:5432')
@click.option(
'--db-user', '-u', default='skynet')
@click.option(
'--db-pass', '-u', default='password')
def telegram(
loglevel: str,
db_host: str,
db_user: str,
db_pass: str
):
import asyncio
from .frontend.telegram import SkynetTelegramFrontend
from skynet.frontend.chatbot.telegram import TelegramChatbot
from skynet.frontend.chatbot.db import FrontendUserDB
logging.basicConfig(level=loglevel)
config = load_skynet_toml()
tg_token = config.telegram.tg_token
key = config.telegram.key
account = config.telegram.account
permission = config.telegram.permission
node_url = config.telegram.node_url
hyperion_url = config.telegram.hyperion_url
ipfs_url = config.telegram.ipfs_url
try:
explorer_domain = config.telegram.explorer_domain
except ConfigParsingError:
explorer_domain = DEFAULT_EXPLORER_DOMAIN
try:
ipfs_domain = config.telegram.ipfs_domain
except ConfigParsingError:
ipfs_domain = DEFAULT_IPFS_DOMAIN
config = load_skynet_toml().telegram
async def _async_main():
frontend = SkynetTelegramFrontend(
tg_token,
account,
permission,
node_url,
hyperion_url,
db_host, db_user, db_pass,
ipfs_url,
key=key,
explorer_domain=explorer_domain,
ipfs_domain=ipfs_domain
)
async with frontend.open():
await frontend.bot.infinity_polling()
async with FrontendUserDB(
config.db_user,
config.db_pass,
config.db_host,
config.db_name
) as db:
bot = TelegramChatbot(config, db)
await bot.run()
asyncio.run(_async_main())
@run.command()
@click.option('--loglevel', '-l', default='INFO', help='logging level')
@click.option(
'--db-host', '-h', default='localhost:5432')
@click.option(
'--db-user', '-u', default='skynet')
@click.option(
'--db-pass', '-u', default='password')
def discord(
loglevel: str,
db_host: str,
db_user: str,
db_pass: str
):
import asyncio
from .frontend.discord import SkynetDiscordFrontend
from skynet.frontend.chatbot.discord import DiscordChatbot
from skynet.frontend.chatbot.db import FrontendUserDB
logging.basicConfig(level=loglevel)
config = load_skynet_toml()
dc_token = config.discord.dc_token
key = config.discord.key
account = config.discord.account
permission = config.discord.permission
node_url = config.discord.node_url
hyperion_url = config.discord.hyperion_url
ipfs_url = config.discord.ipfs_url
try:
explorer_domain = config.discord.explorer_domain
except ConfigParsingError:
explorer_domain = DEFAULT_EXPLORER_DOMAIN
try:
ipfs_domain = config.discord.ipfs_domain
except ConfigParsingError:
ipfs_domain = DEFAULT_IPFS_DOMAIN
config = load_skynet_toml().discord
async def _async_main():
frontend = SkynetDiscordFrontend(
# dc_token,
account,
permission,
node_url,
hyperion_url,
db_host, db_user, db_pass,
ipfs_url,
key=key,
explorer_domain=explorer_domain,
ipfs_domain=ipfs_domain
)
async with frontend.open():
await frontend.bot.start(dc_token)
async with FrontendUserDB(
config.db_user,
config.db_pass,
config.db_host,
config.db_name
) as db:
bot = DiscordChatbot(config, db)
await bot.run()
asyncio.run(_async_main())

View File

@ -26,16 +26,29 @@ class DgpuConfig(msgspec.Struct):
poll_time: float = 0.5 # wait time for polling updates from contract
log_level: str = 'info'
log_file: str = 'dgpu.log' # log file path (only used when tui = true)
proto_version: int = 0
class FrontendConfig(msgspec.Struct):
account: str
permission: str
key: str
node_url: str
hyperion_url: str
ipfs_url: str
token: str
db_host: str
db_user: str
db_pass: str
db_name: str = 'skynet'
node_url: str = 'https://testnet.telos.net'
hyperion_url: str = 'https://testnet.skygpu.net'
ipfs_domain: str = 'ipfs.skygpu.net'
explorer_domain: str = 'explorer.skygpu.net'
request_timeout: int = 60 * 3
proto_version: int = 0
reward: str = '20.0000 GPU'
receiver: str = 'gpu.scd'
result_max_width: int = 1280
result_max_height: int = 1280
class PinnerConfig(msgspec.Struct):

View File

@ -14,91 +14,91 @@ MODELS: dict[str, ModelDesc] = {
'runwayml/stable-diffusion-v1-5': ModelDesc(
short='stable',
mem=6,
attrs={'size': {'w': 512, 'h': 512}},
attrs={'size': {'w': 512, 'h': 512}, 'step': 28},
tags=['txt2img']
),
'stabilityai/stable-diffusion-2-1-base': ModelDesc(
short='stable2',
mem=6,
attrs={'size': {'w': 512, 'h': 512}},
attrs={'size': {'w': 512, 'h': 512}, 'step': 28},
tags=['txt2img']
),
'snowkidy/stable-diffusion-xl-base-0.9': ModelDesc(
short='stablexl0.9',
mem=8.3,
attrs={'size': {'w': 1024, 'h': 1024}},
attrs={'size': {'w': 1024, 'h': 1024}, 'step': 28},
tags=['txt2img']
),
'Linaqruf/anything-v3.0': ModelDesc(
short='hdanime',
mem=6,
attrs={'size': {'w': 512, 'h': 512}},
attrs={'size': {'w': 512, 'h': 512}, 'step': 28},
tags=['txt2img']
),
'hakurei/waifu-diffusion': ModelDesc(
short='waifu',
mem=6,
attrs={'size': {'w': 512, 'h': 512}},
attrs={'size': {'w': 512, 'h': 512}, 'step': 28},
tags=['txt2img']
),
'nitrosocke/Ghibli-Diffusion': ModelDesc(
short='ghibli',
mem=6,
attrs={'size': {'w': 512, 'h': 512}},
attrs={'size': {'w': 512, 'h': 512}, 'step': 28},
tags=['txt2img']
),
'dallinmackay/Van-Gogh-diffusion': ModelDesc(
short='van-gogh',
mem=6,
attrs={'size': {'w': 512, 'h': 512}},
attrs={'size': {'w': 512, 'h': 512}, 'step': 28},
tags=['txt2img']
),
'lambdalabs/sd-pokemon-diffusers': ModelDesc(
short='pokemon',
mem=6,
attrs={'size': {'w': 512, 'h': 512}},
attrs={'size': {'w': 512, 'h': 512}, 'step': 28},
tags=['txt2img']
),
'Envvi/Inkpunk-Diffusion': ModelDesc(
short='ink',
mem=6,
attrs={'size': {'w': 512, 'h': 512}},
attrs={'size': {'w': 512, 'h': 512}, 'step': 28},
tags=['txt2img']
),
'nousr/robo-diffusion': ModelDesc(
short='robot',
mem=6,
attrs={'size': {'w': 512, 'h': 512}},
attrs={'size': {'w': 512, 'h': 512}, 'step': 28},
tags=['txt2img']
),
'black-forest-labs/FLUX.1-schnell': ModelDesc(
short='flux',
mem=24,
attrs={'size': {'w': 1024, 'h': 1024}},
attrs={'size': {'w': 1024, 'h': 1024}, 'step': 4},
tags=['txt2img']
),
'black-forest-labs/FLUX.1-Fill-dev': ModelDesc(
short='flux-inpaint',
mem=24,
attrs={'size': {'w': 1024, 'h': 1024}},
attrs={'size': {'w': 1024, 'h': 1024}, 'step': 28},
tags=['inpaint']
),
'diffusers/stable-diffusion-xl-1.0-inpainting-0.1': ModelDesc(
short='stablexl-inpaint',
mem=8.3,
attrs={'size': {'w': 1024, 'h': 1024}},
attrs={'size': {'w': 1024, 'h': 1024}, 'step': 28},
tags=['inpaint']
),
'prompthero/openjourney': ModelDesc(
short='midj',
mem=6,
attrs={'size': {'w': 512, 'h': 512}},
attrs={'size': {'w': 512, 'h': 512}, 'step': 28},
tags=['txt2img', 'img2img']
),
'stabilityai/stable-diffusion-xl-base-1.0': ModelDesc(
short='stablexl',
mem=8.3,
attrs={'size': {'w': 1024, 'h': 1024}},
attrs={'size': {'w': 1024, 'h': 1024}, 'step': 28},
tags=['txt2img']
),
}
@ -225,8 +225,6 @@ Noise is added to the image you use as an init image for img2img, and then the\
HELP_UNKWNOWN_PARAM = 'don\'t have any info on that.'
GROUP_ID = -1001541979235
MP_ENABLED_ROLES = ['god']
MIN_STEP = 1
@ -273,221 +271,3 @@ TG_MAX_WIDTH = 1280
TG_MAX_HEIGHT = 1280
DEFAULT_SINGLE_CARD_MAP = 'cuda:0'
GPU_CONTRACT_ABI = {
"version": "eosio::abi/1.2",
"types": [],
"structs": [
{
"name": "account",
"base": "",
"fields": [
{"name": "user", "type": "name"},
{"name": "balance", "type": "asset"},
{"name": "nonce", "type": "uint64"}
]
},
{
"name": "card",
"base": "",
"fields": [
{"name": "id", "type": "uint64"},
{"name": "owner", "type": "name"},
{"name": "card_name", "type": "string"},
{"name": "version", "type": "string"},
{"name": "total_memory", "type": "uint64"},
{"name": "mp_count", "type": "uint32"},
{"name": "extra", "type": "string"}
]
},
{
"name": "clean",
"base": "",
"fields": []
},
{
"name": "config",
"base": "",
"fields": [
{"name": "token_contract", "type": "name"},
{"name": "token_symbol", "type": "symbol"}
]
},
{
"name": "dequeue",
"base": "",
"fields": [
{"name": "user", "type": "name"},
{"name": "request_id", "type": "uint64"}
]
},
{
"name": "enqueue",
"base": "",
"fields": [
{"name": "user", "type": "name"},
{"name": "request_body", "type": "string"},
{"name": "binary_data", "type": "string"},
{"name": "reward", "type": "asset"},
{"name": "min_verification", "type": "uint32"}
]
},
{
"name": "gcfgstruct",
"base": "",
"fields": [
{"name": "token_contract", "type": "name"},
{"name": "token_symbol", "type": "symbol"}
]
},
{
"name": "submit",
"base": "",
"fields": [
{"name": "worker", "type": "name"},
{"name": "request_id", "type": "uint64"},
{"name": "request_hash", "type": "checksum256"},
{"name": "result_hash", "type": "checksum256"},
{"name": "ipfs_hash", "type": "string"}
]
},
{
"name": "withdraw",
"base": "",
"fields": [
{"name": "user", "type": "name"},
{"name": "quantity", "type": "asset"}
]
},
{
"name": "work_request_struct",
"base": "",
"fields": [
{"name": "id", "type": "uint64"},
{"name": "user", "type": "name"},
{"name": "reward", "type": "asset"},
{"name": "min_verification", "type": "uint32"},
{"name": "nonce", "type": "uint64"},
{"name": "body", "type": "string"},
{"name": "binary_data", "type": "string"},
{"name": "timestamp", "type": "time_point_sec"}
]
},
{
"name": "work_result_struct",
"base": "",
"fields": [
{"name": "id", "type": "uint64"},
{"name": "request_id", "type": "uint64"},
{"name": "user", "type": "name"},
{"name": "worker", "type": "name"},
{"name": "result_hash", "type": "checksum256"},
{"name": "ipfs_hash", "type": "string"},
{"name": "submited", "type": "time_point_sec"}
]
},
{
"name": "workbegin",
"base": "",
"fields": [
{"name": "worker", "type": "name"},
{"name": "request_id", "type": "uint64"},
{"name": "max_workers", "type": "uint32"}
]
},
{
"name": "workcancel",
"base": "",
"fields": [
{"name": "worker", "type": "name"},
{"name": "request_id", "type": "uint64"},
{"name": "reason", "type": "string"}
]
},
{
"name": "worker",
"base": "",
"fields": [
{"name": "account", "type": "name"},
{"name": "joined", "type": "time_point_sec"},
{"name": "left", "type": "time_point_sec"},
{"name": "url", "type": "string"}
]
},
{
"name": "worker_status_struct",
"base": "",
"fields": [
{"name": "worker", "type": "name"},
{"name": "status", "type": "string"},
{"name": "started", "type": "time_point_sec"}
]
}
],
"actions": [
{"name": "clean", "type": "clean", "ricardian_contract": ""},
{"name": "config", "type": "config", "ricardian_contract": ""},
{"name": "dequeue", "type": "dequeue", "ricardian_contract": ""},
{"name": "enqueue", "type": "enqueue", "ricardian_contract": ""},
{"name": "submit", "type": "submit", "ricardian_contract": ""},
{"name": "withdraw", "type": "withdraw", "ricardian_contract": ""},
{"name": "workbegin", "type": "workbegin", "ricardian_contract": ""},
{"name": "workcancel", "type": "workcancel", "ricardian_contract": ""}
],
"tables": [
{
"name": "cards",
"index_type": "i64",
"key_names": [],
"key_types": [],
"type": "card"
},
{
"name": "gcfgstruct",
"index_type": "i64",
"key_names": [],
"key_types": [],
"type": "gcfgstruct"
},
{
"name": "queue",
"index_type": "i64",
"key_names": [],
"key_types": [],
"type": "work_request_struct"
},
{
"name": "results",
"index_type": "i64",
"key_names": [],
"key_types": [],
"type": "work_result_struct"
},
{
"name": "status",
"index_type": "i64",
"key_names": [],
"key_types": [],
"type": "worker_status_struct"
},
{
"name": "users",
"index_type": "i64",
"key_names": [],
"key_types": [],
"type": "account"
},
{
"name": "workers",
"index_type": "i64",
"key_names": [],
"key_types": [],
"type": "worker"
}
],
"ricardian_clauses": [],
"error_messages": [],
"abi_extensions": [],
"variants": [],
"action_results": []
}

312
skynet/contract.py 100644
View File

@ -0,0 +1,312 @@
import time
import msgspec
from leap import CLEOS
from leap.protocol import Name
from skynet.types import (
Config, ConfigV0, ConfigV1,
Account, AccountV0, AccountV1,
WorkerV0,
Request, RequestV0, RequestV1,
BodyV0,
WorkerStatusV0,
ResultV0
)
class ConfigNotFound(BaseException):
...
class AccountNotFound(BaseException):
...
class WorkerNotFound(BaseException):
...
class RequestNotFound(BaseException):
...
class WorkerStatusNotFound(BaseException):
...
class GPUContractAPI:
def __init__(self, cleos: CLEOS, proto_version: int = 0):
self.receiver = 'gpu.scd'
self._cleos = cleos
self.proto_version = proto_version
# views into data
async def get_config(self) -> Config:
rows = await self._cleos.aget_table(
self.receiver, self.receiver, 'config',
resp_cls=ConfigV1 if self.proto_version > 1 else ConfigV0
)
if len(rows) == 0:
raise ConfigNotFound()
return rows[0]
async def get_user(self, user: str) -> Account:
rows = await self._cleos.aget_table(
self.receiver, self.receiver, 'users',
key_type='name',
lower_bound=user,
upper_bound=user,
resp_cls=AccountV1 if self.proto_version > 1 else AccountV0
)
if len(rows) == 0:
raise AccountNotFound(user)
return rows[0]
async def get_users(self) -> list[Account]:
return await self._cleos.aget_table(
self.receiver, self.receiver, 'users', resp_cls=AccountV1 if self.proto_version > 0 else AccountV0)
async def get_worker(self, worker: str) -> WorkerV0:
rows = await self._cleos.aget_table(
self.receiver, self.receiver, 'workers',
key_type='name',
lower_bound=worker,
upper_bound=worker,
resp_cls=WorkerV0
)
if len(rows) == 0:
raise WorkerNotFound(worker)
return rows[0]
async def get_workers(self) -> list[WorkerV0]:
return await self._cleos.aget_table(self.receiver, self.receiver, 'workers', resp_cls=WorkerV0)
async def get_queue(self) -> Request:
return await self._cleos.aget_table(
self.receiver, self.receiver, 'queue', resp_cls=RequestV1 if self.proto_version > 0 else RequestV0)
async def get_request(self, request_id: int) -> Request:
rows = await self._cleos.aget_table(
self.receiver, self.receiver, 'queue',
lower_bound=request_id,
upper_bound=request_id,
resp_cls=RequestV1 if self.proto_version > 0 else RequestV0
)
if len(rows) == 0:
raise RequestNotFound(request_id)
return rows[0]
async def get_requests_since(self, seconds: int) -> list[Request]:
return await self._cleos.aget_table(
self.receiver, self.receiver, 'queue',
index_position=2,
key_type='i64',
lower_bound=int(time.time()) - seconds,
resp_cls=RequestV1 if self.proto_version > 0 else RequestV0
)
async def get_statuses_for_request(self, request_id: int) -> list[WorkerStatusV0]:
return await self._cleos.aget_table(
self.receiver, str(Name.from_int(request_id)), 'status',
resp_cls=WorkerStatusV0
)
async def get_worker_status_for_request(self, request_id: int, worker: str) -> WorkerStatusV0:
rows = await self._cleos.aget_table(
self.receiver, str(Name.from_int(request_id)), 'status',
key_type='name',
lower_bound=worker,
upper_bound=worker,
resp_cls=WorkerStatusV0
)
if len(rows) == 0:
raise WorkerStatusNotFound(request_id)
return rows[0]
async def get_results(self, request_id: int) -> list[ResultV0]:
return await self._cleos.aget_table(
self.receiver, self.receiver, 'results',
index_position=2,
key_type='i64',
lower_bound=request_id,
upper_bound=request_id,
resp_cls=ResultV0
)
async def get_worker_results(self, worker: str) -> list[ResultV0]:
return await self._cleos.aget_table(
self.receiver, self.receiver, 'results',
index_position=4,
key_type='name',
lower_bound=worker,
upper_bound=worker,
resp_cls=ResultV0
)
# system actions
async def init_config(self, token_account: str, token_symbol: str):
return await self._cleos.a_push_action(
self.receiver,
'config',
[token_account, token_symbol],
self.receiver
)
async def clean_tables(self, nuke: bool = False):
return await self._cleos.a_push_action(
self.receiver,
'clean',
[nuke],
self.receiver
)
# balance actions
async def deposit(self, user: str, quantity: str):
return await self._cleos.a_push_action(
'eosio.token',
'transfer',
[user, self.receiver, quantity, 'testing gpu deposit'],
user,
key=self._cleos.private_keys[user]
)
async def withdraw(self, user: str, quantity: str):
return await self._cleos.a_push_action(
self.receiver,
'withdraw',
[user, quantity],
user,
key=self._cleos.private_keys[user]
)
# worker actions
async def register_worker(
self,
worker: str,
url: str
):
return await self._cleos.a_push_action(
self.receiver,
'regworker',
[worker, url],
worker,
key=self._cleos.private_keys[worker]
)
async def unregister_worker(
self,
worker: str,
reason: str
):
return await self._cleos.a_push_action(
self.receiver,
'unregworker',
[worker, reason],
worker,
key=self._cleos.private_keys[worker]
)
async def accept_work(
self,
worker: str,
request_id: int,
max_workers: int = 10
):
return await self._cleos.a_push_action(
self.receiver,
'workbegin',
[worker, request_id, max_workers],
worker,
key=self._cleos.private_keys[worker]
)
async def cancel_work(
self,
worker: str,
request_id: int,
reason: str
):
return await self._cleos.a_push_action(
self.receiver,
'workcancel',
[worker, request_id, reason],
worker,
key=self._cleos.private_keys[worker]
)
async def submit_work(
self,
worker: str,
request_id: int,
result_hash: str,
ipfs_hash: str,
request_hash: str | None = None
):
args = [worker, request_id, result_hash, ipfs_hash]
if request_hash:
args.insert(2, request_hash)
return await self._cleos.a_push_action(
self.receiver,
'submit',
args,
worker,
key=self._cleos.private_keys[worker]
)
# user actions
async def enqueue(
self,
account: str,
body: BodyV0,
binary_data: str = '',
reward: str = '1.0000 TLOS',
min_verification: int = 1
) -> int:
body = msgspec.json.encode(body).decode('utf-8')
result = await self._cleos.a_push_action(
self.receiver,
'enqueue',
[
account,
body,
binary_data,
reward,
min_verification
],
account,
key=self._cleos.private_keys[account]
)
console = result['processed']['action_traces'][0]['console']
nonce_index = -1
timestamp_index = -2
lines = console.rstrip().split('\n')
nonce = int(lines[nonce_index])
timestamp = lines[timestamp_index]
return RequestV1(
id=int(nonce),
user=account,
reward=reward,
min_verification=min_verification,
body=body,
binary_data=binary_data,
timestamp=timestamp
)
async def dequeue(self, user: str, request_id: int):
return await self._cleos.a_push_action(
self.receiver,
'dequeue',
[user, request_id],
user,
key=self._cleos.private_keys[user]
)

View File

@ -4,10 +4,14 @@ from contextlib import asynccontextmanager as acm
import trio
import urwid
from leap import CLEOS
from skynet.config import Config
from skynet.ipfs import AsyncIPFSHTTP
from skynet.contract import GPUContractAPI
from skynet.dgpu.tui import init_tui, WorkerMonitor
from skynet.dgpu.daemon import dgpu_serve_forever
from skynet.dgpu.network import NetConnector, maybe_open_contract_state_mngr
from skynet.dgpu.network import maybe_open_contract_state_mngr
@acm
@ -19,17 +23,24 @@ async def open_worker(config: Config):
if config.tui:
tui = init_tui(config)
conn = NetConnector(config)
cleos = CLEOS(endpoint=config.node_url)
cleos.import_key(config.account, config.key)
abi = cleos.get_abi('gpu.scd')
cleos.load_abi('gpu.scd', abi)
ipfs_api = AsyncIPFSHTTP(config.ipfs_url)
contract = GPUContractAPI(cleos)
try:
async with maybe_open_contract_state_mngr(conn) as state_mngr:
async with maybe_open_contract_state_mngr(contract) as state_mngr:
n: trio.Nursery
async with trio.open_nursery() as n:
if tui:
n.start_soon(tui.run)
n.start_soon(dgpu_serve_forever, config, conn, state_mngr)
n.start_soon(dgpu_serve_forever, config, contract, ipfs_api, state_mngr)
yield conn, state_mngr
yield contract, ipfs_api, state_mngr
n.cancel_scope.cancel()

View File

@ -1,6 +1,7 @@
import os
import logging
import contextlib
from functools import partial
from hashlib import sha256
import trio
import msgspec
@ -9,20 +10,21 @@ from skynet.config import DgpuConfig as Config
from skynet.types import (
BodyV0
)
from skynet.contract import GPUContractAPI
from skynet.constants import MODELS
from skynet.ipfs import AsyncIPFSHTTP, get_ipfs_img
from skynet.dgpu.errors import DGPUComputeError
from skynet.dgpu.tui import maybe_update_tui, maybe_update_tui_async
from skynet.dgpu.compute import maybe_load_model, compute_one
from skynet.dgpu.network import (
NetConnector,
ContractState,
)
async def maybe_update_tui_balance(conn: NetConnector):
async def maybe_update_tui_balance(contract: GPUContractAPI):
async def _fn(tui):
# update balance
balance = await conn.get_worker_balance()
balance = (await contract.get_user(tui.config.account)).balance
tui.set_header_text(new_balance=f'balance: {balance}')
await maybe_update_tui_async(_fn)
@ -30,7 +32,8 @@ async def maybe_update_tui_balance(conn: NetConnector):
async def maybe_serve_one(
config: Config,
conn: NetConnector,
contract: GPUContractAPI,
ipfs_api: AsyncIPFSHTTP,
state_mngr: ContractState,
):
logging.info(f'maybe serve request pi: {state_mngr.poll_index}')
@ -64,7 +67,7 @@ async def maybe_serve_one(
and
model in config.model_blacklist
):
logging.warning('model not blacklisted!, skip...')
logging.warning('model is blacklisted!, skip...')
return
# if worker already produced a result for this request
@ -73,7 +76,7 @@ async def maybe_serve_one(
return
# skip if workers in non_compete already on it
if state_mngr.should_compete_for_id(req.id):
if not state_mngr.should_compete_for_id(req.id):
logging.info('worker in configured non_compete list already working on request, skip...')
return
@ -91,7 +94,7 @@ async def maybe_serve_one(
# user `GPUConnector` to IO with
# storage layer to seed the compute
# task.
img = await conn.get_input_data(_input)
img = await get_ipfs_img(f'https://{config.ipfs_domain}/ipfs/{_input}')
inputs.append(img)
logging.info(f'retrieved {_input}!')
break
@ -101,24 +104,12 @@ async def maybe_serve_one(
f'IPFS fetch input error !?! retries left {retry - r - 1}\n'
)
# compute unique request hash used on submit
hash_str = (
str(req.nonce)
+
req.body
+
req.binary_data
)
logging.debug(f'hashing: {hash_str}')
request_hash = sha256(hash_str.encode('utf-8')).hexdigest()
logging.info(f'calculated request hash: {request_hash}')
total_step = body.params.step
mode = body.method
# TODO: validate request
resp = await conn.begin_work(req.id)
resp = await contract.accept_work(config.account, req.id)
if not resp or 'code' in resp:
logging.info('begin_work error, probably being worked on already... skip.')
return
@ -137,16 +128,18 @@ async def maybe_serve_one(
used by torch each step of the inference, it will use a
trio.from_thread to unblock the main thread and pump the event loop
'''
output_hash, output = await trio.to_thread.run_sync(
partial(
compute_one,
model,
req.id,
mode, body.params,
inputs=inputs,
should_cancel=state_mngr.should_cancel_work,
)
)
with open(os.devnull, 'w') as devnull:
with contextlib.redirect_stdout(devnull):
output_hash, output = await trio.to_thread.run_sync(
partial(
compute_one,
model,
req.id,
mode, body.params,
inputs=inputs,
should_cancel=state_mngr.should_cancel_work,
)
)
case _:
raise DGPUComputeError(
@ -155,27 +148,40 @@ async def maybe_serve_one(
maybe_update_tui(lambda tui: tui.set_progress(total_step))
ipfs_hash = await conn.publish_on_ipfs(output, typ=output_type)
ipfs_hash = await ipfs_api.publish(output, type=output_type)
await conn.submit_work(req.id, request_hash, output_hash, ipfs_hash)
maybe_request_hash = None
if config.proto_version == 0:
maybe_request_hash = req.hash_v0()
await maybe_update_tui_balance(conn)
await contract.submit_work(
config.account, req.id, output_hash, ipfs_hash, request_hash=maybe_request_hash)
await state_mngr.update_state()
await maybe_update_tui_balance(contract)
await state_mngr.update_state()
except BaseException as err:
if 'network cancel' not in str(err):
logging.exception('Failed to serve model request !?\n')
await state_mngr.update_state()
if state_mngr.is_request_in_progress(req.id):
await conn.cancel_work(req.id, 'reason not provided')
await contract.cancel_work(config.account, req.id, 'reason not provided')
async def dgpu_serve_forever(
config: Config,
conn: NetConnector,
contract: GPUContractAPI,
ipfs_api: AsyncIPFSHTTP,
state_mngr: ContractState
):
await maybe_update_tui_balance(conn)
await maybe_update_tui_balance(contract)
maybe_update_tui(lambda tui: tui.set_header_text(new_worker_name=config.account))
last_poll_idx = -1
try:
@ -187,7 +193,7 @@ async def dgpu_serve_forever(
last_poll_idx = state_mngr.poll_index
await maybe_serve_one(config, conn, state_mngr)
await maybe_serve_one(config, contract, ipfs_api, state_mngr)
except KeyboardInterrupt:
...

View File

@ -1,9 +1,7 @@
import io
import json
import time
import random
import logging
from pathlib import Path
from contextlib import asynccontextmanager as acm
from functools import partial
@ -13,25 +11,15 @@ import anyio
import httpx
import outcome
import msgspec
from PIL import Image
from leap.cleos import CLEOS
from leap.protocol import Asset
from skynet.dgpu.tui import maybe_update_tui
from skynet.config import DgpuConfig as Config, load_skynet_toml
from skynet.config import load_skynet_toml
from skynet.contract import GPUContractAPI
from skynet.types import (
ConfigV0,
AccountV0,
BodyV0,
RequestV0,
Request,
WorkerStatusV0,
ResultV0
)
from skynet.constants import GPU_CONTRACT_ABI
from skynet.ipfs import (
AsyncIPFSHTTP,
get_ipfs_file,
)
REQUEST_UPDATE_TIME: int = 3
@ -56,239 +44,6 @@ async def failable(fn: partial, ret_fail=None):
return o.unwrap()
class NetConnector:
'''
An API for connecting to and conducting various "high level"
network-service operations in the skynet.
- skynet user account creds
- hyperion API
- IPFS client
- CLEOS client
'''
def __init__(self, config: Config):
self.config = config
self.cleos = CLEOS(endpoint=config.node_url)
self.cleos.load_abi('gpu.scd', GPU_CONTRACT_ABI)
self.ipfs_client = AsyncIPFSHTTP(config.ipfs_url)
maybe_update_tui(lambda tui: tui.set_header_text(new_worker_name=self.config.account))
# blockchain helpers
async def get_work_requests_last_hour(self) -> list[RequestV0]:
logging.info('get_work_requests_last_hour')
rows = await failable(
partial(
self.cleos.aget_table,
'gpu.scd', 'gpu.scd', 'queue',
index_position=2,
key_type='i64',
lower_bound=int(time.time()) - 3600,
resp_cls=RequestV0
), ret_fail=[])
logging.info(f'found {len(rows)} requests on queue')
return rows
async def get_status_by_request_id(self, request_id: int) -> list[WorkerStatusV0]:
logging.info('get_status_by_request_id')
rows = await failable(
partial(
self.cleos.aget_table,
'gpu.scd', request_id, 'status', resp_cls=WorkerStatusV0), ret_fail=[])
logging.info(f'found status for workers: {[r.worker for r in rows]}')
return rows
async def get_global_config(self) -> ConfigV0:
logging.info('get_global_config')
rows = await failable(
partial(
self.cleos.aget_table,
'gpu.scd', 'gpu.scd', 'config',
resp_cls=ConfigV0))
if rows:
cfg = rows[0]
logging.info(f'config found: {cfg}')
return cfg
else:
logging.error('global config not found, is the contract initialized?')
return None
async def get_worker_balance(self) -> str:
logging.info('get_worker_balance')
rows = await failable(
partial(
self.cleos.aget_table,
'gpu.scd', 'gpu.scd', 'users',
index_position=1,
key_type='name',
lower_bound=self.config.account,
upper_bound=self.config.account,
resp_cls=AccountV0
))
if rows:
b = rows[0].balance
logging.info(f'balance: {b}')
return b
else:
logging.info('no balance info found')
return None
async def begin_work(self, request_id: int):
'''
Publish to the bc that the worker is beginning a model-computation
step.
'''
logging.info(f'begin_work on #{request_id}')
return await failable(
partial(
self.cleos.a_push_action,
'gpu.scd',
'workbegin',
list({
'worker': self.config.account,
'request_id': request_id,
'max_workers': 2
}.values()),
self.config.account, self.config.key,
permission=self.config.permission
)
)
async def cancel_work(self, request_id: int, reason: str):
logging.info(f'cancel_work on #{request_id}')
return await failable(
partial(
self.cleos.a_push_action,
'gpu.scd',
'workcancel',
list({
'worker': self.config.account,
'request_id': request_id,
'reason': reason
}.values()),
self.config.account, self.config.key,
permission=self.config.permission
)
)
async def maybe_withdraw_all(self):
logging.info('maybe_withdraw_all')
balance = await self.get_worker_balance()
if not balance:
return
balance_amount = float(balance.split(' ')[0])
if balance_amount > 0:
await failable(
partial(
self.cleos.a_push_action,
'gpu.scd',
'withdraw',
list({
'user': self.config.account,
'quantity': Asset.from_str(balance)
}.values()),
self.config.account, self.config.key,
permission=self.config.permission
)
)
async def find_results(self) -> list[ResultV0]:
logging.info('find_results')
rows = await failable(
partial(
self.cleos.aget_table,
'gpu.scd', 'gpu.scd', 'results',
index_position=4,
key_type='name',
lower_bound=self.config.account,
upper_bound=self.config.account,
resp_cls=ResultV0
)
)
return rows
async def submit_work(
self,
request_id: int,
request_hash: str,
result_hash: str,
ipfs_hash: str
):
logging.info(f'submit_work #{request_id}')
return await failable(
partial(
self.cleos.a_push_action,
'gpu.scd',
'submit',
list({
'worker': self.config.account,
'request_id': request_id,
'request_hash': request_hash,
'result_hash': result_hash,
'ipfs_hash': ipfs_hash
}.values()),
self.config.account, self.config.key,
permission=self.config.permission
)
)
# IPFS helpers
async def publish_on_ipfs(self, raw, typ: str = 'png'):
    '''
    Stage `raw` on disk, add it to IPFS and pin it.

    Only `png` payloads (PIL Images) are supported; anything else
    raises ValueError. Returns the resulting CID string.
    '''
    Path('ipfs-staging').mkdir(exist_ok=True)
    logging.info('publish_on_ipfs')

    if typ == 'png':
        # raw is expected to be a PIL Image here
        target_file = 'ipfs-staging/image.png'
        raw.save(target_file)
    else:
        raise ValueError(f'Unsupported output type: {typ}')

    file_info = await self.ipfs_client.add(Path(target_file))
    file_cid = file_info['Hash']
    logging.info(f'added file to ipfs, CID: {file_cid}')

    await self.ipfs_client.pin(file_cid)
    logging.info(f'pinned {file_cid}')

    return file_cid
async def get_input_data(self, ipfs_hash: str) -> Image:
    '''
    Retrieve an input image from the IPFS layer.

    Normally used to retrieve seed (visual) content previously
    generated/validated by the network to be fed to some
    consuming AI model.

    Returns the decoded PIL Image, or None when the file could not
    be fetched.
    '''
    link = f'https://{self.config.ipfs_domain}/ipfs/{ipfs_hash}'

    res = await get_ipfs_file(link, timeout=1)
    if not res or res.status_code != 200:
        # bug fix: previously fell through after this warning and
        # called res.read() on a failed (possibly None) response,
        # raising AttributeError instead of reporting the failure
        logging.warning(f'couldn\'t get ipfs binary data at {link}!')
        return None

    # attempt to decode as image
    input_data = Image.open(io.BytesIO(res.read()))
    logging.info('decoded as image successfully')

    return input_data
def convert_reward_to_int(reward_str):
int_part, decimal_part = (
reward_str.split('.')[0],
@ -299,12 +54,16 @@ def convert_reward_to_int(reward_str):
class ContractState:
def __init__(self, conn: NetConnector):
self._conn = conn
def __init__(
self,
contract: GPUContractAPI
):
self.contract = contract
self._config = load_skynet_toml().dgpu
self._poll_index = 0
self._queue: list[RequestV0] = []
self._queue: list[Request] = []
self._status_by_rid: dict[int, list[WorkerStatusV0]] = {}
self._results: list[ResultV0] = []
@ -315,10 +74,10 @@ class ContractState:
return self._poll_index
async def _fetch_results(self):
self._results = await self._conn.find_results()
self._results = await self.contract.get_worker_results(self._config.account)
async def _fetch_statuses_for_id(self, rid: int):
self._status_by_rid[rid] = await self._conn.get_status_by_request_id(rid)
self._status_by_rid[rid] = await self.contract.get_statuses_for_request(rid)
async def update_state(self):
'''
@ -326,7 +85,7 @@ class ContractState:
'''
# raw queue from chain
_queue = await self._conn.get_work_requests_last_hour()
_queue = await self.contract.get_requests_since(3600)
# filter out invalids
self._queue = []
@ -380,7 +139,7 @@ class ContractState:
return len(self._queue)
@property
def first(self) -> RequestV0 | None:
def first(self) -> Request | None:
if len(self._queue) > 0:
return self._queue[0]
@ -391,7 +150,7 @@ class ContractState:
return set((
status.worker
for status in self._status_by_rid[request_id]
if status.worker != self._conn.config.account
if status.worker != self._config.account
))
# predicates
@ -405,7 +164,7 @@ class ContractState:
return request_id in self._status_by_rid
def should_compete_for_id(self, request_id: int) -> bool:
return bool(
return not bool(
self._conn.config.non_compete &
self.competitors_for_id(request_id)
)
@ -416,7 +175,7 @@ class ContractState:
logging.info(f'request #{request_id} no longer in queue, likely its been filled by another worker, cancelling work...')
return True
should_cancel = self.should_compete_for_id(request_id)
should_cancel = not self.should_compete_for_id(request_id)
logging.info(f'cancel: {should_cancel}')
return should_cancel
@ -425,7 +184,7 @@ class ContractState:
__state_mngr = None
@acm
async def maybe_open_contract_state_mngr(conn: NetConnector):
async def maybe_open_contract_state_mngr(contract: GPUContractAPI):
global __state_mngr
if __state_mngr:
@ -434,7 +193,7 @@ async def maybe_open_contract_state_mngr(conn: NetConnector):
config = load_skynet_toml().dgpu
mngr = ContractState(conn)
mngr = ContractState(contract)
async with trio.open_nursery() as n:
await mngr.update_state()
n.start_soon(mngr._state_update_task, config.poll_time)

View File

@ -8,7 +8,7 @@ from skynet.config import DgpuConfig as Config
class WorkerMonitor:
def __init__(self):
def __init__(self, config: Config):
self.requests = []
self.header_info = {}
@ -63,6 +63,7 @@ class WorkerMonitor:
event_loop=self.event_loop,
unhandled_input=self._exit_on_q
)
self.config = config
def _create_listbox_body(self, requests):
"""
@ -197,7 +198,7 @@ def init_tui(config: Config):
global _tui
assert not _tui
setup_logging_for_tui(config)
_tui = WorkerMonitor()
_tui = WorkerMonitor(config)
return _tui

View File

@ -1,6 +1,10 @@
import random
from ..constants import *
from ..constants import (
MODELS,
get_model_by_shortname,
MAX_STEP, MIN_STEP, MAX_WIDTH, MAX_HEIGHT, MAX_GUIDANCE
)
class ConfigRequestFormatError(BaseException):
@ -26,17 +30,17 @@ class ConfigSizeDivisionByEight(BaseException):
def validate_user_config_request(req: str):
params = req.split(' ')
if len(params) < 3:
if len(params) < 2:
raise ConfigRequestFormatError('config request format incorrect')
else:
try:
attr = params[1]
attr = params[0]
match attr:
case 'model' | 'algo':
attr = 'model'
val = params[2]
val = params[1]
shorts = [model_info.short for model_info in MODELS.values()]
if val not in shorts:
raise ConfigUnknownAlgorithm(f'no model named {val}')
@ -44,38 +48,38 @@ def validate_user_config_request(req: str):
val = get_model_by_shortname(val)
case 'step':
val = int(params[2])
val = int(params[1])
val = max(min(val, MAX_STEP), MIN_STEP)
case 'width':
val = max(min(int(params[2]), MAX_WIDTH), 16)
val = max(min(int(params[1]), MAX_WIDTH), 16)
if val % 8 != 0:
raise ConfigSizeDivisionByEight(
'size must be divisible by 8!')
case 'height':
val = max(min(int(params[2]), MAX_HEIGHT), 16)
val = max(min(int(params[1]), MAX_HEIGHT), 16)
if val % 8 != 0:
raise ConfigSizeDivisionByEight(
'size must be divisible by 8!')
case 'seed':
val = params[2]
val = params[1]
if val == 'auto':
val = None
else:
val = int(params[2])
val = int(params[1])
case 'guidance':
val = float(params[2])
val = float(params[1])
val = max(min(val, MAX_GUIDANCE), 0)
case 'strength':
val = float(params[2])
val = float(params[1])
val = max(min(val, 0.99), 0.01)
case 'upscaler':
val = params[2]
val = params[1]
if val == 'off':
val = None
elif val != 'x4':
@ -83,7 +87,7 @@ def validate_user_config_request(req: str):
f'\"{val}\" is not a valid upscaler')
case 'autoconf':
val = params[2]
val = params[1]
if val == 'on':
val = True

View File

@ -0,0 +1,484 @@
import io
import json
import asyncio
import logging
from abc import ABC, abstractmethod, abstractproperty
from PIL import Image, UnidentifiedImageError
from random import randint
from decimal import Decimal
from hashlib import sha256
from datetime import datetime, timedelta
import msgspec
from leap import CLEOS
from leap.hyperion import HyperionAPI
from skynet.ipfs import AsyncIPFSHTTP, get_ipfs_file
from skynet.types import BodyV0, BodyV0Params
from skynet.config import FrontendConfig
from skynet.constants import (
MODELS, GPU_CONTRACT_ABI,
HELP_TEXT,
HELP_TOPICS,
HELP_UNKWNOWN_PARAM,
COOL_WORDS,
DONATION_INFO,
UNKNOWN_CMD_TEXT
)
from skynet.frontend import validate_user_config_request
from skynet.frontend.chatbot.db import FrontendUserDB
from skynet.frontend.chatbot.types import (
BaseUser,
BaseChatRoom,
BaseCommands,
BaseFileInput,
BaseMessage
)
def perform_auto_conf(config: dict) -> dict:
    '''
    Overwrite step/width/height in `config` with the model's
    recommended values (from MODELS[...].attrs) when the model
    declares them. Mutates `config` in place and returns it.
    '''
    model = MODELS[config['model']]

    maybe_step = model.attrs.get('step', None)
    if maybe_step:
        config['step'] = maybe_step

    maybe_width = model.attrs.get('width', None)
    if maybe_width:
        # bug fix: was assigning maybe_step to width
        config['width'] = maybe_width

    maybe_height = model.attrs.get('height', None)
    if maybe_height:
        # bug fix: was assigning maybe_step to height
        config['height'] = maybe_height

    return config
def sanitize_params(params: dict) -> dict:
    '''
    Return a copy of `params` safe for serialization: guarantees a
    numeric seed is present (randomizing one if missing/None) and
    stringifies any Decimal values.
    '''
    if params.get('seed') is None:
        params['seed'] = randint(0, 0xffffffff)

    return {
        key: str(val) if isinstance(val, Decimal) else val
        for key, val in params.items()
    }
# Raised by BaseChatbot.handle_request when no matching submit action is
# found on chain within config.request_timeout polling iterations.
class RequestTimeoutError(BaseException):
    ...
class BaseChatbot(ABC):
def __init__(
    self,
    config: FrontendConfig,
    db: FrontendUserDB
):
    # frontend config and per-user db handle
    self.db = db
    self.config = config

    # IPFS node used to publish request inputs and fetch results
    self.ipfs = AsyncIPFSHTTP(config.ipfs_url)

    # chain client: load the GPU contract ABI and this frontend's key
    self.cleos = CLEOS(endpoint=config.node_url)
    self.cleos.load_abi(config.receiver, GPU_CONTRACT_ABI)
    self.cleos.import_key(config.account, config.key)

    # history API used to poll for matching `submit` actions
    self.hyperion = HyperionAPI(config.hyperion_url)
async def init(self):
...
@abstractmethod
async def run(self):
...
@abstractproperty
def main_group(self) -> BaseChatRoom:
...
@abstractmethod
async def new_msg(self, chat: BaseChatRoom, text: str, **kwargs) -> BaseMessage:
'''
Send text to a chat/channel.
'''
...
@abstractmethod
async def reply_to(self, msg: BaseMessage, text: str, **kwargs) -> BaseMessage:
'''
Reply to existing message by sending new message.
'''
...
@abstractmethod
async def edit_msg(self, msg: BaseMessage, text: str, **kwargs):
'''
Edit an existing message.
'''
...
async def create_status_msg(
    self,
    msg: BaseMessage,
    init_text: str,
    force_user: BaseUser | None = None
) -> tuple[BaseUser, BaseMessage, dict]:
    '''
    Reply to `msg` with an initial status message and start tracking
    the request in the db.

    When `force_user` is given it is used instead of the message
    author (e.g. for admin-triggered requests).

    Returns (user, status_msg, user_row) where user_row is the user's
    config row from the db.
    '''
    # maybe init user
    user = msg.author
    if force_user:
        user = force_user

    user_row = await self.db.get_or_create_user(user.id)

    # create status msg
    status_msg = await self.reply_to(msg, init_text)

    # start tracking of request in db
    await self.db.new_user_request(
        user.id, msg.id, status_msg.id, status=init_text)

    # bug fix: return a tuple as the annotation promises (was a list);
    # callers that unpack are unaffected
    return user, status_msg, user_row
async def update_status_msg(self, msg: BaseMessage, text: str):
    '''
    Update an existing status message, also mirrors changes on db
    '''
    # persist first so the db row never lags behind the visible message
    await self.db.update_user_request_by_sid(msg.id, text)
    await self.edit_msg(msg, text)
async def append_status_msg(self, msg: BaseMessage, text: str):
    '''
    Append text to an existing status message
    '''
    # read the current status text from db, then rewrite msg + db row
    request = await self.db.get_user_request_by_sid(msg.id)
    await self.update_status_msg(msg, request['status'] + text)
@abstractmethod
async def update_request_status_timeout(self, status_msg: BaseMessage):
'''
Notify users when we timedout trying to find a matching submit
'''
...
@abstractmethod
async def update_request_status_step_0(self, status_msg: BaseMessage, user_msg: BaseMessage):
'''
First step in request status message lifecycle, should notify which user sent the request
and that we are about to broadcast the request to chain
'''
...
@abstractmethod
async def update_request_status_step_1(self, status_msg: BaseMessage, tx_result: dict):
'''
Second step in request status message lifecycle, should notify enqueue transaction
was processed by chain, and provide a link to the tx in the chain explorer
'''
...
@abstractmethod
async def update_request_status_step_2(self, status_msg: BaseMessage, submit_tx_hash: str):
'''
Third step in request status message lifecycle, should notify matching submit transaction
was found, and provide a link to the tx in the chain explorer
'''
...
@abstractmethod
async def update_request_status_final(
self,
og_msg: BaseMessage,
status_msg: BaseMessage,
user: BaseUser,
params: BodyV0Params,
inputs: list[BaseFileInput],
submit_tx_hash: str,
worker: str,
result_url: str,
result_img: bytes | None
):
'''
Last step in request status message lifecycle, should delete status message and send a
new message replying to the original user's message, generate the appropiate
reply caption and if provided also sent the found result img
'''
...
async def handle_request(
    self,
    msg: BaseMessage,
    force_user: BaseUser | None = None
):
    '''
    Full lifecycle of a generation request coming from a chat message:

      1. validate the message and create a tracked status message
      2. resolve prompt/inputs (directly or from db for /redo)
      3. publish inputs to IPFS and broadcast an `enqueue` action
      4. poll hyperion for the matching `submit` action
      5. fetch the result image from IPFS and post the final reply

    Raises RequestTimeoutError when no submit is found in time.
    '''
    # requests are group-only; ignore DMs
    if msg.chat.is_private:
        return

    if (
        len(msg.text) == 0
        and
        msg.command != BaseCommands.REDO
    ):
        await self.reply_to(msg, 'empty prompt ignored.')
        return

    # maybe initialize user db row and send a new msg thats gonna
    # be updated throughout the request lifecycle
    user, status_msg, user_row = await self.create_status_msg(
        msg, f'started processing a {msg.command} request...', force_user=force_user)

    # if this is a redo msg, we attempt to get the input params from db
    # else use msg properties
    match msg.command:
        case BaseCommands.TXT2IMG | BaseCommands.IMG2IMG:
            prompt = msg.text
            command = msg.command
            inputs = msg.inputs

        case BaseCommands.REDO:
            prompt = await self.db.get_last_prompt_of(user.id)
            command = await self.db.get_last_method_of(user.id)
            inputs = await self.db.get_last_inputs_of(user.id)
            if not prompt:
                await self.reply_to(msg, 'no last prompt found, try doing a non-redo request first')
                return

        case _:
            await self.reply_to(msg, f'unknown request of type {msg.command}')
            return

    # img2img without an attached image cannot proceed
    if (
        msg.command == BaseCommands.IMG2IMG
        and
        len(inputs) == 0
    ):
        await self.edit_msg(status_msg, 'seems you tried to do an img2img command without sending image')
        return

    # maybe apply recomended settings to this request
    # (drop the db primary key so user_row can be splatted into params)
    del user_row['id']
    if user_row['autoconf']:
        user_row = perform_auto_conf(user_row)

    user_row = sanitize_params(user_row)

    body = BodyV0(
        method=command,
        params=BodyV0Params(
            prompt=prompt,
            **user_row
        )
    )

    # publish inputs to ipfs
    input_cids = []
    for i in inputs:
        await i.publish(self.ipfs, user_row)
        input_cids.append(i.cid)

    inputs_str = ','.join((i for i in input_cids))

    # unless its a redo request, update db user data
    if command != BaseCommands.REDO:
        await self.db.update_user_stats(
            user.id,
            command,
            last_prompt=prompt,
            last_inputs=inputs
        )

    await self.update_request_status_step_0(status_msg, msg)

    # prepare and send enqueue request
    # request_time is used as the lower time bound for the submit search
    request_time = datetime.now().isoformat()
    str_body = msgspec.json.encode(body).decode('utf-8')
    enqueue_receipt = await self.cleos.a_push_action(
        self.config.receiver,
        'enqueue',
        [
            self.config.account,
            str_body,
            inputs_str,
            self.config.reward,
            1
        ],
        self.config.account,
        key=self.cleos.private_keys[self.config.account],
        permission=self.config.permission
    )

    await self.update_request_status_step_1(status_msg, enqueue_receipt)

    # wait and search submit request using hyperion endpoint
    console = enqueue_receipt['processed']['action_traces'][0]['console']
    console_lines = console.split('\n')
    request_id = None
    request_hash = None
    if self.config.proto_version == 0:
        '''
        v0 has req_id:nonce printed in enqueue console output
        to search for a result request_hash arg on submit has
        to match the sha256 of nonce + body + input_str
        '''
        request_id, nonce = console_lines[-1].rstrip().split(':')
        request_hash = sha256(
            (nonce + str_body + inputs_str).encode('utf-8')).hexdigest().upper()
        request_id = int(request_id)

    elif self.config.proto_version == 1:
        '''
        v1 uses a global unique nonce and prints it on enqueue
        console output to search for a result request_id arg
        on submit has to match the printed req_id
        '''
        request_id = int(console_lines[-1].rstrip())

    else:
        raise NotImplementedError

    # poll hyperion once per second until timeout for the submit action
    worker = None
    submit_tx_hash = None
    result_cid = None
    for i in range(1, self.config.request_timeout + 1):
        try:
            submits = await self.hyperion.aget_actions(
                account=self.config.account,
                filter=f'{self.config.receiver}:submit',
                sort='desc',
                after=request_time
            )
            # match by hash (v0) or by id (v1)
            if self.config.proto_version == 0:
                actions = [
                    action
                    for action in submits['actions']
                    if action['act']['data']['request_hash'] == request_hash
                ]

            elif self.config.proto_version == 1:
                actions = [
                    action
                    for action in submits['actions']
                    if action['act']['data']['request_id'] == request_id
                ]

            else:
                raise NotImplementedError

            if len(actions) > 0:
                action = actions[0]
                submit_tx_hash = action['trx_id']
                data = action['act']['data']
                result_cid = data['ipfs_hash']
                worker = data['worker']
                logging.info(f'found matching submit! tx: {submit_tx_hash} cid: {result_cid}')
                break

        except json.JSONDecodeError:
            # hyperion returned garbage; log and fall through to retry
            if i < self.config.request_timeout:
                logging.error('network error while searching for submit, retry...')

        # NOTE(review): original indentation was lost in extraction --
        # sleep placed at loop level so request_timeout ~= seconds; confirm
        await asyncio.sleep(1)

    # if we found matching submit submit_tx_hash, worker, and result_cid will not be None
    if not result_cid:
        await self.update_request_status_timeout(status_msg)
        raise RequestTimeoutError

    await self.update_request_status_step_2(status_msg, submit_tx_hash)

    # attempt to get the image and send it
    result_link = f'https://{self.config.ipfs_domain}/ipfs/{result_cid}'
    get_img_response = await get_ipfs_file(result_link)
    result_img = None
    if get_img_response and get_img_response.status_code == 200:
        try:
            with Image.open(io.BytesIO(get_img_response.read())) as img:
                w, h = img.size
                # downscale oversized results before re-encoding to PNG
                if (
                    w > self.config.result_max_width
                    or
                    h > self.config.result_max_height
                ):
                    max_size = (self.config.result_max_width, self.config.result_max_height)
                    logging.warning(
                        f'raw result is of size {img.size}, resizing to {max_size}')
                    img.thumbnail(max_size)

                tmp_buf = io.BytesIO()
                img.save(tmp_buf, format='PNG')
                result_img = tmp_buf.getvalue()

        except UnidentifiedImageError:
            # result_img stays None; final reply links to the raw file
            logging.warning(f'couldn\'t get ipfs result at {result_link}!')

    await self.update_request_status_final(
        msg, status_msg, user, body.params, inputs, submit_tx_hash, worker, result_link, result_img)

    await self.db.increment_generated(user.id)
async def send_help(self, msg: BaseMessage):
    '''Reply with general help, or a specific help topic if requested.'''
    topic = msg.text

    if len(topic) == 0:
        await self.reply_to(msg, HELP_TEXT)
        return

    if topic in HELP_TOPICS:
        await self.reply_to(msg, HELP_TOPICS[topic])
    else:
        await self.reply_to(msg, HELP_UNKWNOWN_PARAM)
async def send_cool_words(self, msg: BaseMessage):
    '''Reply with the COOL_WORDS list, one word per line.'''
    await self.reply_to(msg, '\n'.join(COOL_WORDS))
async def get_queue(self, msg: BaseMessage):
    '''Reply with the number of requests enqueued in the last hour.'''
    an_hour_ago = datetime.now() - timedelta(hours=1)
    # queue table secondary index 2 is keyed by timestamp (i64)
    queue = await self.cleos.aget_table(
        self.config.receiver, self.config.receiver, 'queue',
        index_position=2,
        key_type='i64',
        sort='desc',
        lower_bound=int(an_hour_ago.timestamp())
    )

    await self.reply_to(
        msg, f'Requests on skynet queue: {len(queue)}')
async def set_config(self, msg: BaseMessage):
    '''
    Handle a user config request: validate "<attr> <value>" from the
    message text and persist the new value on the user's config row.
    The validator's reply text (or the error text) is sent back.
    '''
    try:
        attr, val, reply_txt = validate_user_config_request(msg.text)
        await self.db.update_user_config(msg.author.id, attr, val)

    # NOTE(review): validation errors subclass BaseException directly
    # (see skynet.frontend), so catching BaseException here is load
    # bearing -- the error text becomes the user-facing reply
    except BaseException as e:
        reply_txt = str(e)

    finally:
        await self.reply_to(msg, reply_txt)
async def user_stats(self, msg: BaseMessage):
    '''Reply with the author's generation count, join date and role.'''
    uid = msg.author.id

    # make sure the user row exists before querying stats
    await self.db.get_or_create_user(uid)
    generated, joined, role = await self.db.get_user_stats(uid)

    stats_str = (
        f'generated: {generated}\n'
        f'joined: {joined}\n'
        f'role: {role}\n'
    )
    await self.reply_to(msg, stats_str)
async def donation_info(self, msg: BaseMessage):
    '''Reply with the static donation info blurb.'''
    await self.reply_to(msg, DONATION_INFO)
async def say(self, msg: BaseMessage):
    '''Admin-only: relay the message text into the main group chat.'''
    # only non-private messages from an admin get relayed
    allowed = (not msg.chat.is_private) and msg.author.is_admin
    if not allowed:
        return

    await self.new_msg(self.main_group, msg.text)
async def echo_unknown(self, msg: BaseMessage):
    '''Fallback handler: reply with the unknown-command text.'''
    await self.reply_to(msg, UNKNOWN_CMD_TEXT)

View File

@ -0,0 +1,424 @@
import logging
import random
import string
import time
from datetime import datetime
import docker
import psycopg2
import asyncpg
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from contextlib import contextmanager as cm
from skynet.constants import (
DEFAULT_ROLE, DEFAULT_MODEL, DEFAULT_STEP,
DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_GUIDANCE,
DEFAULT_STRENGTH, DEFAULT_UPSCALER
)
from skynet.frontend.chatbot.types import BaseFileInput
DB_INIT_SQL = """
CREATE SCHEMA IF NOT EXISTS skynet;
CREATE TABLE IF NOT EXISTS skynet.user(
id BIGSERIAL PRIMARY KEY NOT NULL,
generated INT NOT NULL,
joined TIMESTAMP NOT NULL,
last_method TEXT,
last_prompt TEXT,
last_inputs TEXT,
role VARCHAR(128) NOT NULL
);
CREATE TABLE IF NOT EXISTS skynet.user_config(
id BIGSERIAL NOT NULL,
model VARCHAR(512) NOT NULL,
step INT NOT NULL,
width INT NOT NULL,
height INT NOT NULL,
seed NUMERIC,
guidance DECIMAL NOT NULL,
strength DECIMAL NOT NULL,
upscaler VARCHAR(128),
autoconf BOOLEAN DEFAULT TRUE,
CONSTRAINT fk_config
FOREIGN KEY(id)
REFERENCES skynet.user(id)
);
CREATE TABLE IF NOT EXISTS skynet.user_requests(
id BIGSERIAL NOT NULL,
user_id BIGSERIAL NOT NULL,
sent TIMESTAMP NOT NULL,
status TEXT NOT NULL,
status_msg BIGSERIAL PRIMARY KEY NOT NULL,
CONSTRAINT fk_user_req
FOREIGN KEY(user_id)
REFERENCES skynet.user(id)
);
"""
@cm
def open_new_database(cleanup: bool = True):
    """
    Context manager that spins up a temporary Postgres Docker container,
    creates a 'skynet' user and database, and yields (container, password, host).
    Stops the container on exit if 'cleanup' is True.
    """
    root_password = "".join(random.choice(string.ascii_lowercase) for _ in range(12))
    skynet_password = "".join(random.choice(string.ascii_lowercase) for _ in range(12))

    dclient = docker.from_env()

    # NOTE(review): fixed container name means two concurrent test runs
    # on one host will collide -- confirm this is single-run only
    container = dclient.containers.run(
        "postgres",
        name="skynet-test-postgres",
        ports={"5432/tcp": None},  # None => random host port
        environment={"POSTGRES_PASSWORD": root_password},
        detach=True,
    )
    try:
        # Wait for Postgres to be ready
        for log_line in container.logs(stream=True):
            line = log_line.decode().rstrip()
            logging.info(line)
            if (
                "database system is ready to accept connections" in line
                or "database system is shut down" in line
            ):
                break

        # reload to learn the randomly assigned host port
        container.reload()
        port_info = container.ports["5432/tcp"][0]
        port = port_info["HostPort"]
        db_host = f"localhost:{port}"

        # Let PostgreSQL settle
        time.sleep(1)
        logging.info("Creating 'skynet' database...")

        conn = psycopg2.connect(
            user="postgres", password=root_password, host="localhost", port=port
        )
        conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        conn.autocommit = True
        cursor = conn.cursor()
        cursor.execute(f"CREATE USER skynet WITH PASSWORD '{skynet_password}'")
        cursor.execute("CREATE DATABASE skynet")
        cursor.execute("GRANT ALL PRIVILEGES ON DATABASE skynet TO skynet")
        cursor.close()
        conn.close()

        logging.info("Database setup complete.")
        yield container, skynet_password, db_host

    finally:
        if container and cleanup:
            container.stop()
class FrontendUserDB:
"""
A class that manages the connection pool for the 'skynet' database,
initializes the schema if needed, and provides high-level methods
for interacting with the 'skynet' tables.
"""
def __init__(
    self,
    db_user: str,
    db_pass: str,
    db_host: str,
    db_name: str
):
    # connection parameters; the pool is created lazily in __aenter__
    self.db_user = db_user
    self.db_pass = db_pass
    self.db_host = db_host
    self.db_name = db_name
    self.pool: asyncpg.Pool | None = None
async def __aenter__(self) -> "FrontendUserDB":
    # open the asyncpg pool and make sure schema/tables exist
    dsn = f"postgres://{self.db_user}:{self.db_pass}@{self.db_host}/{self.db_name}"
    self.pool = await asyncpg.create_pool(dsn=dsn)
    await self._init_db()
    return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
    # close the pool; exceptions are not suppressed
    if self.pool:
        await self.pool.close()
async def _init_db(self):
    """
    Ensures the 'skynet' schema and tables exist. Also checks for
    missing columns and adds them if necessary.
    """
    async with self.pool.acquire() as conn:
        # Check if schema is already initialized
        result = await conn.fetch("""
            SELECT DISTINCT table_schema
            FROM information_schema.tables
            WHERE table_schema = 'skynet'
        """)
        if not result:
            await conn.execute(DB_INIT_SQL)

        # Check if 'autoconf' column exists in user_config
        # (migration for rows created before the column was added)
        col_check = await conn.fetch("""
            SELECT column_name
            FROM information_schema.columns
            WHERE table_name = 'user_config' AND column_name = 'autoconf'
        """)
        if not col_check:
            await conn.execute(
                "ALTER TABLE skynet.user_config ADD COLUMN autoconf BOOLEAN DEFAULT TRUE;"
            )
# -------------
# USER METHODS
# -------------
async def get_user_config(self, user_id: int):
    """
    Fetch the user_config row for `user_id` as a dict, or None when
    no (single) matching row exists.
    """
    async with self.pool.acquire() as conn:
        rows = await conn.fetch(
            "SELECT * FROM skynet.user_config WHERE id = $1", user_id
        )

    if len(rows) != 1:
        return None

    return dict(rows[0])
async def get_user(self, user_id: int):
    """Alias for get_user_config (same data returned)."""
    return await self.get_user_config(user_id)
async def new_user(self, user_id: int):
    """
    Inserts a new user in skynet.user and its corresponding user_config record.
    Raises ValueError if the user already exists.
    """
    existing = await self.get_user(user_id)
    if existing:
        raise ValueError("User already present in DB")

    logging.info(f"New user! {user_id}")
    now = datetime.utcnow()

    # both inserts happen in one transaction so a failure leaves no
    # half-created user
    async with self.pool.acquire() as conn:
        async with conn.transaction():
            await conn.execute(
                """
                INSERT INTO skynet.user(
                    id, generated, joined,
                    last_method, last_prompt, last_inputs, role
                )
                VALUES($1, 0, $2, 'txt2img', NULL, NULL, $3)
                """,
                user_id,
                now,
                DEFAULT_ROLE,
            )
            await conn.execute(
                """
                INSERT INTO skynet.user_config(
                    id, model, step, width,
                    height, guidance, strength, upscaler
                )
                VALUES($1, $2, $3, $4, $5, $6, $7, $8)
                """,
                user_id,
                DEFAULT_MODEL,
                DEFAULT_STEP,
                DEFAULT_WIDTH,
                DEFAULT_HEIGHT,
                DEFAULT_GUIDANCE,
                DEFAULT_STRENGTH,
                DEFAULT_UPSCALER,
            )
async def get_or_create_user(self, user_id: int):
    """
    Fetch the user_config row for `user_id`, creating the user first
    if it does not exist yet.
    """
    user_cfg = await self.get_user(user_id)
    if user_cfg:
        return user_cfg

    await self.new_user(user_id)
    return await self.get_user(user_id)
async def update_user(self, user_id: int, attr: str, val):
    """
    Generic function to update a single field in skynet.user for a given user_id.
    """
    # NOTE(review): `attr` is interpolated into the SQL string -- it must
    # only ever come from trusted, validated column names, never raw
    # user input (the value itself is parameterized and safe)
    async with self.pool.acquire() as conn:
        await conn.execute(
            f"UPDATE skynet.user SET {attr} = $2 WHERE id = $1", user_id, val
        )
async def update_user_config(self, user_id: int, attr: str, val):
    """
    Generic function to update a single field in skynet.user_config for a given user_id.
    """
    # NOTE(review): `attr` is interpolated into the SQL string -- callers
    # must pass only validated column names (see
    # validate_user_config_request); the value is parameterized and safe
    async with self.pool.acquire() as conn:
        await conn.execute(
            f"UPDATE skynet.user_config SET {attr} = $2 WHERE id = $1", user_id, val
        )
async def get_user_stats(self, user_id: int):
    """
    Returns (generated, joined, role) for the given user_id,
    or None when the user does not exist.
    """
    async with self.pool.acquire() as conn:
        records = await conn.fetch(
            """
            SELECT generated, joined, role
            FROM skynet.user
            WHERE id = $1
            """,
            user_id,
        )
        return records[0] if records else None
async def increment_generated(self, user_id: int):
    """
    Increments the 'generated' count for a given user by 1.
    """
    async with self.pool.acquire() as conn:
        await conn.execute(
            """
            UPDATE skynet.user
            SET generated = generated + 1
            WHERE id = $1
            """,
            user_id,
        )
async def update_user_stats(
    self,
    user_id: int,
    method: str,
    last_prompt: str | None = None,
    last_inputs: list | None = None
):
    """
    Updates various 'last_*' fields in skynet.user.

    `last_inputs` is serialized as a comma-separated "id:cid" string
    (the format get_last_inputs_of expects to decode).
    """
    await self.update_user(user_id, "last_method", method)
    if last_prompt is not None:
        await self.update_user(user_id, "last_prompt", last_prompt)

    last_inputs_str = None
    if isinstance(last_inputs, list):
        last_inputs_str = ','.join((f'{f.id}:{f.cid}' for f in last_inputs))
        await self.update_user(user_id, "last_inputs", last_inputs_str)

    logging.info("Updated user stats: %s", (method, last_prompt, last_inputs_str))
# ----------------------
# USER REQUESTS METHODS
# ----------------------
async def get_user_request(self, request_id: int):
    """
    Fetches all matching rows for a given request_id.
    """
    async with self.pool.acquire() as conn:
        return await conn.fetch(
            "SELECT * FROM skynet.user_requests WHERE id = $1", request_id
        )
async def get_user_request_by_sid(self, status_msg_id: int):
    """
    Fetches exactly one row (first row) by status_msg primary key.
    Returns None when no row matches.
    """
    async with self.pool.acquire() as conn:
        records = await conn.fetch(
            "SELECT * FROM skynet.user_requests WHERE status_msg = $1", status_msg_id
        )
        return records[0] if records else None
async def new_user_request(
    self,
    user_id: int,
    request_id: int,
    status_msg_id: int,
    status: str = "started processing request..."
):
    """
    Inserts a new row in skynet.user_requests.

    `status_msg_id` (the chat status message id) is the primary key
    used by the *_by_sid helpers.
    """
    now = datetime.utcnow()
    async with self.pool.acquire() as conn:
        async with conn.transaction():
            await conn.execute(
                """
                INSERT INTO skynet.user_requests(
                    id, user_id, sent, status, status_msg
                )
                VALUES($1, $2, $3, $4, $5)
                """,
                request_id, user_id, now, status, status_msg_id
            )
async def update_user_request(self, request_id: int, status: str):
    """
    Updates the 'status' for a user request identified by 'request_id'.
    """
    async with self.pool.acquire() as conn:
        await conn.execute(
            """
            UPDATE skynet.user_requests
            SET status = $2
            WHERE id = $1
            """,
            request_id, status
        )
async def update_user_request_by_sid(self, sid: int, status: str):
    """
    Updates the 'status' for a user request identified by 'status_msg'.
    """
    async with self.pool.acquire() as conn:
        await conn.execute(
            """
            UPDATE skynet.user_requests
            SET status = $2
            WHERE status_msg = $1
            """,
            sid, status
        )
# ----------------------------
# Convenience "Get Last" Helpers
# ----------------------------
async def get_last_method_of(self, user_id: int) -> str | None:
    # last command method ('txt2img', 'img2img', ...) used by the user
    async with self.pool.acquire() as conn:
        return await conn.fetchval(
            "SELECT last_method FROM skynet.user WHERE id = $1", user_id
        )
async def get_last_prompt_of(self, user_id: int) -> str | None:
    # last prompt text the user submitted (None when never set)
    async with self.pool.acquire() as conn:
        return await conn.fetchval(
            "SELECT last_prompt FROM skynet.user WHERE id = $1", user_id
        )
async def get_last_inputs_of(self, user_id: int) -> list[BaseFileInput] | None:
    """
    Decode the user's serialized `last_inputs` column (comma-separated
    "id:cid" pairs, as written by update_user_stats) back into file
    input objects. Returns [] when the column is empty/NULL.
    """
    async with self.pool.acquire() as conn:
        last_inputs_str = await conn.fetchval(
            "SELECT last_inputs FROM skynet.user WHERE id = $1", user_id
        )

    if not last_inputs_str:
        return []

    last_inputs = []
    for entry in last_inputs_str.split(','):
        id, cid = entry.split(':')
        # bug fix: previously called `.from_values` on the list object
        # itself (no such attribute) and never appended, raising
        # AttributeError for any non-empty value
        last_inputs.append(BaseFileInput.from_values(id, cid))

    return last_inputs

View File

@ -0,0 +1,406 @@
import logging
from typing import Self, Awaitable
from datetime import datetime, timezone
import discord
from discord import (
User as DCUser,
Member,
Message as DCMessage,
Attachment,
DMChannel
)
from discord.abc import Messageable
from discord.ext import commands
from skynet.config import FrontendConfig
from skynet.types import BodyV0Params
from skynet.constants import VERSION
from skynet.frontend.chatbot import BaseChatbot
from skynet.frontend.chatbot.db import FrontendUserDB
from skynet.frontend.chatbot.types import (
BaseUser,
BaseChatRoom,
BaseFileInput,
BaseCommands,
BaseMessage
)
GROUP_ID = -1
ADMIN_USER_ID = -1
def timestamp_pretty():
    '''Current UTC wall-clock time formatted as HH:MM:SS.'''
    now = datetime.now(timezone.utc)
    return now.strftime('%H:%M:%S')
class DiscordUser(BaseUser):
    '''Adapter exposing a discord.py User/Member through the BaseUser interface.'''

    def __init__(self, user: DCUser | Member):
        self._user = user

    @property
    def id(self) -> int:
        # discord snowflake id
        return self._user.id

    @property
    def name(self) -> str:
        return self._user.name

    @property
    def is_admin(self) -> bool:
        # compared against the module-level ADMIN_USER_ID constant
        return self.id == ADMIN_USER_ID
class DiscordChatRoom(BaseChatRoom):
    '''Adapter exposing a discord.py channel through the BaseChatRoom interface.'''

    def __init__(self, channel: Messageable):
        self._channel = channel

    @property
    def id(self) -> int:
        return self._channel.id

    @property
    def is_private(self) -> bool:
        # DMs map to discord DMChannel; everything else is a group channel
        return isinstance(self._channel, DMChannel)
class DiscordFileInput(BaseFileInput):
    '''
    Adapter for a discord attachment (or a previously stored id/cid
    pair) implementing the BaseFileInput interface.
    '''

    def __init__(
        self,
        attachment: Attachment | None = None,
        id: int | None = None,
        cid: str | None = None  # annotation fix: cid is a CID string, not int
    ):
        self._attachment = attachment
        self._id = id
        self._cid = cid
        self._raw = None

    @staticmethod
    def from_values(id: int, cid: str) -> 'DiscordFileInput':
        # bug fix: was an undecorated instance-style method used as a
        # factory; any instance-level call bound `self` to `id`.
        # @staticmethod makes both class- and instance-level calls safe.
        return DiscordFileInput(id=id, cid=cid)

    @property
    def id(self) -> int:
        # prefer the stored id (redo path); fall back to the attachment
        if self._id:
            return self._id
        return self._attachment.id

    @property
    def cid(self) -> str:
        # cid only exists after set_cid()/from_values(); raise otherwise
        if self._cid:
            return self._cid
        raise ValueError

    def set_cid(self, cid: str):
        self._cid = cid

    async def download(self) -> bytes:
        # pull the raw attachment bytes from discord and cache them
        self._raw = await self._attachment.read()
        return self._raw
class DiscordMessage(BaseMessage):
    '''Adapter exposing a discord.py Message through the BaseMessage interface.'''

    def __init__(self, cmd: BaseCommands | None, msg: DCMessage):
        self._msg = msg
        self._cmd = cmd
        self._chat = DiscordChatRoom(msg.channel)
        self._inputs: list[DiscordFileInput] | None = None
        self._author = None

    @property
    def id(self) -> int:
        return self._msg.id

    @property
    def chat(self) -> DiscordChatRoom:
        return self._chat

    @property
    def text(self) -> str:
        # remove command name, slash and first space
        # bug fix: discord.py Message exposes `content`, not `contents`
        # (the old attribute access raised AttributeError)
        return self._msg.content[len(self._cmd) + 2:]

    @property
    def author(self) -> DiscordUser:
        if self._author:
            return self._author
        return DiscordUser(self._msg.author)

    @property
    def command(self) -> str | None:
        return self._cmd

    @property
    def inputs(self) -> list[DiscordFileInput]:
        # lazily wrap attachments once and cache the result
        if self._inputs is None:
            self._inputs = []
            if self._msg.attachments:
                self._inputs = [
                    DiscordFileInput(attachment=a)
                    for a in self._msg.attachments
                ]
        return self._inputs
def generate_reply_embed(
    config: FrontendConfig,
    user: DiscordUser,
    params: BodyV0Params,
    tx_hash: str,
    worker: str,
) -> discord.Embed:
    '''
    Build the final-result Discord embed: explorer link as title,
    general info, the (truncated) prompt, and generation parameters.
    '''
    embed = discord.Embed(
        title='[SKYNET Transaction Explorer]',
        url=f'https://{config.explorer_domain}/v2/explore/transaction/{tx_hash}',
        color=discord.Color.blue())

    # discord embed field values are length-limited; truncate the prompt
    prompt = params.prompt
    if len(prompt) > 256:
        prompt = prompt[:256]

    gen_str = f'generated by {user.name}\n'
    gen_str += f'performed by {worker}\n'
    gen_str += f'reward: {config.reward}\n'

    embed.add_field(
        name='General Info', value=f'```{gen_str}```', inline=False)
    # meta_str = f'__by {user.name}__\n'
    # meta_str += f'*performed by {worker}*\n'
    # meta_str += f'__**reward: {reward}**__\n'
    embed.add_field(name='Prompt', value=f'```{prompt}\n```', inline=False)

    # meta_str = f'`prompt:` {prompt}\n'

    # optional params are only listed when set
    meta_str = f'seed: {params.seed}\n'
    meta_str += f'step: {params.step}\n'

    if params.guidance:
        meta_str += f'guidance: {params.guidance}\n'

    if params.strength:
        meta_str += f'strength: {params.strength}\n'

    meta_str += f'algo: {params.model}\n'
    if params.upscaler:
        meta_str += f'upscaler: {params.upscaler}\n'

    embed.add_field(name='Parameters', value=f'```{meta_str}```', inline=False)

    foot_str = f'Made with Skynet v{VERSION}\n'
    foot_str += 'JOIN THE SWARM: https://discord.gg/PAabjJtZAF'

    embed.set_footer(text=foot_str)

    return embed
def append_command_handler(client: discord.Client, command: str, help_txt: str, fn: Awaitable):
    """Register coroutine ``fn`` as the handler for ``/command``.

    Wraps the raw discord context into a DiscordMessage and downloads all
    attached files before handing off to ``fn``.
    """
    @client.command(name=command, help=help_txt)
    async def wrap_msg_and_handle(ctx: commands.Context):
        wrapped = DiscordMessage(cmd=command, msg=ctx.message)
        for attachment in wrapped.inputs:
            await attachment.download()
        await fn(wrapped)
class DiscordChatbot(BaseChatbot):
def __init__(
self,
config: FrontendConfig,
db: FrontendUserDB,
):
super().__init__(config, db)
intents = discord.Intents(
messages=True,
guilds=True,
typing=True,
members=True,
presences=True,
reactions=True,
message_content=True,
voice_states=True
)
client = discord.Client(
command_prefix='/',
intents=intents
)
@client.event
async def on_ready():
print(f'{client.user.name} has connected to Discord!')
for guild in client.guilds:
for channel in guild.channels:
if channel.name == "skynet":
await channel.send('Skynet bot online') # view=SkynetView(self.bot))
await channel.send(
'Welcome to Skynet\'s Discord Bot,\n\n'
'Skynet operates as a decentralized compute layer, offering a wide array of '
'support for diverse AI paradigms through the use of blockchain technology. '
'Our present focus is image generation, powered by 11 distinct models.\n\n'
'To begin exploring, use the \'/help\' command or directly interact with the'
'provided buttons. Here is an example command to generate an image:\n\n'
'\'/txt2img a big red tractor in a giant field of corn\''
)
print("\n==============")
print("Logged in as")
print(client.user.name)
print(client.user.id)
print("==============")
@client.event
async def on_message(message: DCMessage):
if message.author == client.user:
return
await self.process_commands(message)
@client.event
async def on_command_error(ctx, error):
if isinstance(error, commands.MissingRequiredArgument):
await ctx.send('You missed a required argument, please try again.')
append_command_handler(client, BaseCommands.HELP, 'Responds with help text', self.send_help)
append_command_handler(client, BaseCommands.COOL, 'Display a list of cool prompt words', self.send_cool_words)
append_command_handler(client, BaseCommands.QUEUE, 'Get information on current skynet queue', self.get_queue)
append_command_handler(client, BaseCommands.CONFIG, 'Allows user to configure inference params', self.set_config)
append_command_handler(client, BaseCommands.STATS, 'See user statistics', self.user_stats)
append_command_handler(client, BaseCommands.DONATE, 'See donation information', self.donation_info)
append_command_handler(client, BaseCommands.SAY, 'Admin command to make bot speak', self.say)
append_command_handler(client, BaseCommands.TXT2IMG, 'Generate an image from a prompt', self.handle_request)
append_command_handler(client, BaseCommands.REDO, 'Re-generate image using last prompt', self.handle_request)
self.client = client
self._main_room: DiscordChatRoom | None = None
async def init(self):
dc_channel = await self.client.get_channel(GROUP_ID)
self._main_room = DiscordChatRoom(channel=dc_channel)
logging.info('initialized')
async def run(self):
await self.init()
await self.client.run(self.config.token)
@property
def main_group(self) -> DiscordChatRoom:
return self._main_room
async def new_msg(self, chat: DiscordChatRoom, text: str, **kwargs) -> DiscordMessage:
dc_msg = await chat._channel.send(text, **kwargs)
return DiscordMessage(cmd=None, msg=dc_msg)
async def reply_to(self, msg: DiscordMessage, text: str, **kwargs) -> DiscordMessage:
dc_msg = await msg._msg.reply(content=text, **kwargs)
return DiscordMessage(cmd=None, msg=dc_msg)
async def edit_msg(self, msg: DiscordMessage, text: str, **kwargs):
await msg._msg.edit(content=text, **kwargs)
async def create_status_msg(self, msg: DiscordMessage, init_text: str, force_user: DiscordUser | None = None) -> tuple[BaseUser, BaseMessage, dict]:
# maybe init user
user = msg.author
if force_user:
user = force_user
user_row = await self.db.get_or_create_user(user.id)
# create status msg
embed = discord.Embed(
title='live updates',
description=init_text,
color=discord.Color.blue()
)
status_msg = await self.new_msg(msg.chat, None, embed=embed)
# start tracking of request in db
await self.db.new_user_request(user.id, msg.id, status_msg.id, status=init_text)
return [user, status_msg, user_row]
async def update_status_msg(self, msg: DiscordMessage, text: str):
await self.db.update_user_request_by_sid(msg.id, text)
embed = discord.Embed(
title='live updates',
description=text,
color=discord.Color.blue()
)
await self.edit_msg(msg, None, embed=embed)
async def append_status_msg(self, msg: DiscordMessage, text: str):
request = await self.db.get_user_request_by_sid(msg.id)
await self.update_status_msg(msg, request['status'] + text)
async def update_request_status_timeout(self, status_msg: DiscordMessage):
await self.append_status_msg(
status_msg,
f'\n[{timestamp_pretty()}] **timeout processing request**',
)
async def update_request_status_step_0(self, status_msg: DiscordMessage, user_msg: DiscordMessage):
await self.update_status_msg(
status_msg,
f'processing a \'{status_msg.cmd}\' request by {status_msg.author.name}\n'
f'[{timestamp_pretty()}] *broadcasting transaction to chain...* '
)
async def update_request_status_step_1(self, status_msg: DiscordMessage, tx_result: dict):
enqueue_tx_id = tx_result['transaction_id']
enqueue_tx_link = f'[**Your request on Skynet Explorer**](https://{self.config.explorer_domain}/v2/explore/transaction/{enqueue_tx_id})'
await self.append_status_msg(
status_msg,
'**broadcasted!** \n'
f'{enqueue_tx_link}\n'
f'[{timestamp_pretty()}] *workers are processing request...* '
)
async def update_request_status_step_2(self, status_msg: DiscordMessage, submit_tx_hash: str):
tx_link = f'[**Your result on Skynet Explorer**](https://{self.config.explorer_domain}/v2/explore/transaction/{submit_tx_hash})'
await self.append_status_msg(
status_msg,
'**request processed!**\n'
f'{tx_link}\n'
f'[{timestamp_pretty()}] *trying to download image...*\n '
)
async def update_request_status_final(
self,
og_msg: DiscordMessage,
status_msg: DiscordMessage,
user: DiscordUser,
params: BodyV0Params,
inputs: list[DiscordFileInput],
submit_tx_hash: str,
worker: str,
result_url: str,
result_img: bytes | None
):
embed = generate_reply_embed(
self.config, user, params, submit_tx_hash, worker)
if not result_img:
# result found on chain but failed to fetch img from ipfs
await self.append_status_msg(status_msg, f'[{timestamp_pretty()}] *Couldn\'t get IPFS hosted img [**here**]({result_url})!*')
return
await status_msg._msg.delete()
embed.set_image(url=result_url)
match len(inputs):
case 0:
await self.new_msg(og_msg.chat, None, embed=embed)
case _:
_input = inputs[-1]
dc_file = discord.File(_input._raw, filename=f'image-{og_msg.id}.png')
embed.set_thumbnail(url=f'attachment://image-{og_msg.id}.png')
await self.new_msg(og_msg.chat, None, embed=embed, file=dc_file)

View File

@ -0,0 +1,425 @@
import json
import logging
import traceback
from typing import Self, Awaitable
from datetime import datetime, timezone
from telebot.types import (
User as TGUser,
Chat as TGChat,
PhotoSize as TGPhotoSize,
Message as TGMessage,
CallbackQuery,
InputMediaPhoto,
InlineKeyboardButton,
InlineKeyboardMarkup
)
from telebot.async_telebot import AsyncTeleBot, ExceptionHandler
from telebot.formatting import hlink
from skynet.types import BodyV0Params
from skynet.config import FrontendConfig
from skynet.constants import VERSION
from skynet.frontend.chatbot import BaseChatbot
from skynet.frontend.chatbot.db import FrontendUserDB
from skynet.frontend.chatbot.types import (
BaseUser,
BaseChatRoom,
BaseFileInput,
BaseCommands,
BaseMessage
)
GROUP_ID = -1001541979235
TEST_GROUP_ID = -4099622703
ADMIN_USER_ID = 383385940
# Chatbot types impls
class TelegramUser(BaseUser):
    """BaseUser implementation backed by a telebot ``User`` object."""

    def __init__(self, user: TGUser):
        self._user = user

    @property
    def id(self) -> int:
        """Telegram numeric user id."""
        return self._user.id

    @property
    def name(self) -> str:
        """Display handle: @username when set, else first name plus id."""
        username = self._user.username
        if username:
            return f'@{username}'
        return f'{self._user.first_name} id: {self.id}'

    @property
    def is_admin(self) -> bool:
        """True only for the hard-coded ADMIN_USER_ID."""
        return self.id == ADMIN_USER_ID
class TelegramChatRoom(BaseChatRoom):
    """BaseChatRoom implementation backed by a telebot ``Chat`` object."""

    def __init__(self, chat: TGChat):
        self._chat = chat

    @property
    def id(self) -> int:
        """Telegram chat id (negative for groups/channels)."""
        return self._chat.id

    @property
    def is_private(self) -> bool:
        """True for one-on-one chats (telebot chat.type == 'private')."""
        return self._chat.type == 'private'
class TelegramFileInput(BaseFileInput):
    """File input wrapping either a live telegram photo or stored (id, cid)
    values recovered from the db."""

    def __init__(
        self,
        photo: TGPhotoSize | None = None,
        id: int | None = None,
        cid: str | None = None
    ):
        self._photo = photo
        self._id = id
        self._cid = cid
        self._raw = None  # downloaded bytes, populated by download()

    # BUG FIX: BaseFileInput declares from_values as @staticmethod
    # @abstractmethod; without the decorator an instance call would shift
    # the arguments into ``id``.
    @staticmethod
    def from_values(id: int, cid: str) -> Self:
        return TelegramFileInput(id=id, cid=cid)

    @property
    def id(self) -> int:
        # prefer the explicit (stored) id over the live photo's file_id
        if self._id:
            return self._id
        return self._photo.file_id

    @property
    def cid(self) -> str:
        # only meaningful after set_cid() / publish()
        if self._cid:
            return self._cid
        raise ValueError

    def set_cid(self, cid: str):
        self._cid = cid

    async def download(self, bot: AsyncTeleBot) -> bytes:
        """Fetch the file bytes from telegram and cache them on self._raw."""
        file_path = (await bot.get_file(self.id)).file_path
        self._raw = await bot.download_file(file_path)
        return self._raw
class TelegramMessage(BaseMessage):
    """BaseMessage implementation wrapping a telebot ``Message``."""

    def __init__(self, cmd: BaseCommands | None, msg: TGMessage):
        self._msg = msg
        self._cmd = cmd
        self._chat = TelegramChatRoom(msg.chat)
        self._inputs: list[TelegramFileInput] | None = None
        self._author = None

    @property
    def id(self) -> int:
        """Telegram message id."""
        return self._msg.message_id

    @property
    def chat(self) -> TelegramChatRoom:
        """Room this message was posted in."""
        return self._chat

    @property
    def text(self) -> str:
        """Body with the '/<command> ' prefix stripped (slash + name + space).

        Falls back to the caption for media messages with no text.
        """
        prefix_len = len(self._cmd) + 2
        source = self._msg.text if self._msg.text else self._msg.caption
        return source[prefix_len:]

    @property
    def author(self) -> TelegramUser:
        """Sender of the message; a cached wrapper wins if one was set."""
        return self._author if self._author else TelegramUser(self._msg.from_user)

    @property
    def command(self) -> str | None:
        """Command name this message was dispatched under, if any."""
        return self._cmd

    @property
    def inputs(self) -> list[TelegramFileInput]:
        """Lazily-built list of file inputs from the message's photos."""
        if self._inputs is None:
            photos = self._msg.photo
            self._inputs = (
                [TelegramFileInput(photo=p) for p in photos]
                if photos else []
            )
        return self._inputs
# generic tg utils
def timestamp_pretty():
    """Current UTC wall-clock time formatted as HH:MM:SS."""
    now = datetime.now(timezone.utc)
    return now.strftime('%H:%M:%S')
class TGExceptionHandler(ExceptionHandler):
    # NOTE(review): ``handle`` deliberately has no ``self`` — the bot is
    # constructed with ``exception_handler=TGExceptionHandler`` (the class,
    # not an instance), so telebot invokes ``TGExceptionHandler.handle(exc)``
    # and the exception lands in the first parameter. Adding ``self`` would
    # break that call path — confirm before "fixing".
    def handle(exception):
        # log the full traceback of any polling/handler error
        traceback.print_exc()
def build_redo_menu():
    """Inline keyboard with a single 'Redo' button whose callback payload
    is the JSON {"method": "redo"}."""
    keyboard = InlineKeyboardMarkup()
    keyboard.add(
        InlineKeyboardButton("Redo", callback_data='{\"method\": \"redo\"}'))
    return keyboard
def prepare_metainfo_caption(user: TelegramUser, worker: str, reward: str, params: BodyV0Params) -> str:
    """HTML metadata block for a finished request: requester, worker,
    reward, prompt (capped at 256 chars) and the inference parameters."""
    prompt = params.prompt if len(params.prompt) <= 256 else params.prompt[:256]

    lines = [
        f'<u>by {user.name}</u>',
        f'<i>performed by {worker}</i>',
        f'<b><u>reward: {reward}</u></b>',
        f'<code>prompt:</code> {prompt}',
        f'<code>seed: {params.seed}</code>',
        f'<code>step: {params.step}</code>',
    ]
    if params.guidance:
        lines.append(f'<code>guidance: {params.guidance}</code>')
    if params.strength:
        lines.append(f'<code>strength: {params.strength}</code>')
    lines.append(f'<code>algo: {params.model}</code>')
    lines.append(f'<b><u>Made with Skynet v{VERSION}</u></b>')
    lines.append('<b>JOIN THE SWARM: @skynetgpu</b>')
    return '\n'.join(lines)
def generate_reply_caption(
    config: FrontendConfig,
    user: TelegramUser,
    params: BodyV0Params,
    tx_hash: str,
    worker: str,
):
    """Final HTML reply caption: bold/italic explorer link followed by the
    metadata block from prepare_metainfo_caption.

    BUG FIX: a first assignment of ``final_msg`` (prefixed with 'Worker
    finished your task!') was dead code — it was immediately overwritten by
    the second assignment — and has been removed.
    """
    explorer_link = hlink(
        'SKYNET Transaction Explorer',
        f'https://{config.explorer_domain}/v2/explore/transaction/{tx_hash}'
    )
    meta_info = prepare_metainfo_caption(user, worker, config.reward, params)

    final_msg = '\n'.join([
        f'<b><i>{explorer_link}</i></b>',
        f'{meta_info}'
    ])
    return final_msg
def append_handler(bot: AsyncTeleBot, command: str, fn: Awaitable):
    """Bind coroutine ``fn`` as the telegram handler for '/command',
    wrapping the raw telebot message in a TelegramMessage."""
    @bot.message_handler(commands=[command])
    async def wrap_msg_and_handle(tg_msg: TGMessage):
        wrapped = TelegramMessage(cmd=command, msg=tg_msg)
        await fn(wrapped)
class TelegramChatbot(BaseChatbot):
def __init__(
self,
config: FrontendConfig,
db: FrontendUserDB,
):
super().__init__(config, db)
bot = AsyncTeleBot(config.token, exception_handler=TGExceptionHandler)
append_handler(bot, BaseCommands.HELP, self.send_help)
append_handler(bot, BaseCommands.COOL, self.send_cool_words)
append_handler(bot, BaseCommands.QUEUE, self.get_queue)
append_handler(bot, BaseCommands.CONFIG, self.set_config)
append_handler(bot, BaseCommands.STATS, self.user_stats)
append_handler(bot, BaseCommands.DONATE, self.donation_info)
append_handler(bot, BaseCommands.SAY, self.say)
append_handler(bot, BaseCommands.TXT2IMG, self.handle_request)
append_handler(bot, BaseCommands.IMG2IMG, self.handle_request)
@bot.message_handler(func=lambda _: True, content_types=['photo', 'document'])
async def handle_img2img(tg_msg: TGMessage):
msg = TelegramMessage(cmd='img2img', msg=tg_msg)
for file in msg.inputs:
await file.download(bot)
await self.handle_request(msg)
append_handler(bot, BaseCommands.REDO, self.handle_request)
@bot.message_handler(func=lambda _: True)
async def unknown_cmd(tg_msg: TGMessage):
if tg_msg.text[0] == '/':
msg = TelegramMessage(cmd='unknown', msg=tg_msg)
await self.echo_unknown(msg)
@bot.callback_query_handler(func=lambda _: True)
async def callback_query(call: CallbackQuery):
call_json = json.loads(call.data)
method = call_json.get('method')
match method:
case 'redo':
msg = await self.new_msg(self.main_group, 'processing a redo request...')
msg._cmd = 'redo'
await self.handle_request(msg, force_user=TelegramUser(user=call.from_user))
await bot.delete_message(chat_id=self.main_group.id, message_id=msg.id)
self.bot = bot
self._main_room: TelegramChatRoom | None = None
async def init(self):
tg_group = await self.bot.get_chat(TEST_GROUP_ID)
self._main_room = TelegramChatRoom(chat=tg_group)
logging.info('initialized')
async def run(self):
await self.init()
await self.bot.infinity_polling()
@property
def main_group(self) -> TelegramChatRoom:
return self._main_room
async def new_msg(self, chat: TelegramChatRoom, text: str) -> TelegramMessage:
msg = await self.bot.send_message(chat.id, text, parse_mode='HTML')
return TelegramMessage(cmd=None, msg=msg)
async def reply_to(self, msg: TelegramMessage, text: str) -> TelegramMessage:
msg = await self.bot.reply_to(msg._msg, text, parse_mode='HTML')
return TelegramMessage(cmd=None, msg=msg)
async def edit_msg(self, msg: TelegramMessage, text: str):
await self.bot.edit_message_text(
text,
chat_id=msg.chat.id,
message_id=msg.id,
parse_mode='HTML'
)
async def update_request_status_timeout(self, status_msg: TelegramMessage):
'''
Notify users when we timedout trying to find a matching submit
'''
await self.append_status_msg(
status_msg,
f'\n[{timestamp_pretty()}] <b>timeout processing request</b>',
)
async def update_request_status_step_0(self, status_msg: TelegramMessage, user_msg: TelegramMessage):
'''
First step in request status message lifecycle, should notify which user sent the request
and that we are about to broadcast the request to chain
'''
await self.update_status_msg(
status_msg,
f'processing a \'{user_msg.command}\' request by {user_msg.author.name}\n'
f'[{timestamp_pretty()}] <i>broadcasting transaction to chain...</i>'
)
async def update_request_status_step_1(self, status_msg: TelegramMessage, tx_result: dict):
'''
Second step in request status message lifecycle, should notify enqueue transaction
was processed by chain, and provide a link to the tx in the chain explorer
'''
enqueue_tx_id = tx_result['transaction_id']
enqueue_tx_link = hlink(
'Your request on Skynet Explorer',
f'https://{self.config.explorer_domain}/v2/explore/transaction/{enqueue_tx_id}'
)
await self.append_status_msg(
status_msg,
f' <b>broadcasted!</b>\n'
f'<b>{enqueue_tx_link}</b>\n'
f'[{timestamp_pretty()}] <i>workers are processing request...</i>',
)
async def update_request_status_step_2(self, status_msg: TelegramMessage, submit_tx_hash: str):
'''
Third step in request status message lifecycle, should notify matching submit transaction
was found, and provide a link to the tx in the chain explorer
'''
tx_link = hlink(
'Your result on Skynet Explorer',
f'https://{self.config.explorer_domain}/v2/explore/transaction/{submit_tx_hash}'
)
await self.append_status_msg(
status_msg,
f' <b>request processed!</b>\n'
f'<b>{tx_link}</b>\n'
f'[{timestamp_pretty()}] <i>trying to download image...</i>\n',
)
async def update_request_status_final(
self,
og_msg: TelegramMessage,
status_msg: TelegramMessage,
user: TelegramUser,
params: BodyV0Params,
inputs: list[TelegramFileInput],
submit_tx_hash: str,
worker: str,
result_url: str,
result_img: bytes | None
):
'''
Last step in request status message lifecycle, should delete status message and send a
new message replying to the original user's message, generate the appropiate
reply caption and if provided also sent the found result img
'''
caption = generate_reply_caption(
self.config, user, params, submit_tx_hash, worker)
await self.bot.delete_message(
chat_id=status_msg.chat.id,
message_id=status_msg.id
)
if not result_img:
# result found on chain but failed to fetch img from ipfs
await self.reply_to(og_msg, caption, reply_markup=build_redo_menu())
return
match len(inputs):
case 0:
await self.bot.send_photo(
status_msg.chat.id,
caption=caption,
photo=result_img,
reply_markup=build_redo_menu(),
parse_mode='HTML'
)
case _:
_input = inputs[-1]
await self.bot.send_media_group(
status_msg.chat.id,
media=[
InputMediaPhoto(_input.id),
InputMediaPhoto(result_img, caption=caption, parse_mode='HTML')
]
)

View File

@ -0,0 +1,116 @@
import io
from abc import ABC, abstractproperty, abstractmethod
from enum import StrEnum
from typing import Self
from pathlib import Path
from PIL import Image
from skynet.ipfs import AsyncIPFSHTTP
class BaseUser(ABC):
    """Abstract chat-platform user."""

    # MODERNIZED: abc.abstractproperty is deprecated since Python 3.3;
    # the equivalent spelling is @property stacked over @abstractmethod.
    @property
    @abstractmethod
    def id(self) -> int:
        """Platform-specific numeric user id."""
        ...

    @property
    @abstractmethod
    def name(self) -> str:
        """Human-readable display name."""
        ...

    @property
    @abstractmethod
    def is_admin(self) -> bool:
        """Whether this user may run admin-only commands."""
        ...
class BaseChatRoom(ABC):
    """Abstract chat room / channel."""

    # MODERNIZED: abc.abstractproperty is deprecated since Python 3.3;
    # use @property over @abstractmethod.
    @property
    @abstractmethod
    def id(self) -> int:
        """Platform-specific room id."""
        ...

    @property
    @abstractmethod
    def is_private(self) -> bool:
        """True for one-on-one conversations."""
        ...
class BaseFileInput(ABC):
    """Abstract file attachment: platform id + ipfs cid + raw bytes."""

    @staticmethod
    @abstractmethod
    def from_values(id: int, cid: str) -> Self:
        """Rebuild an input from stored (id, cid) values."""
        ...

    # MODERNIZED: abc.abstractproperty is deprecated since Python 3.3;
    # use @property over @abstractmethod.
    @property
    @abstractmethod
    def id(self) -> int:
        """Platform-specific file id."""
        ...

    @property
    @abstractmethod
    def cid(self) -> str:
        """IPFS cid; implementations may raise if not yet published."""
        ...

    @abstractmethod
    async def download(self, *args) -> bytes:
        """Fetch the file bytes and cache them on the instance."""
        ...

    @abstractmethod
    def set_cid(self, cid: str):
        ...

    async def publish(self, ipfs_api: AsyncIPFSHTTP, user_row: dict):
        """Resize the downloaded image to the user's configured bounds,
        stage it as PNG, add + pin it on ipfs and record the cid.

        Assumes download() already populated ``self._raw``.
        """
        with Image.open(io.BytesIO(self._raw)) as img:
            w, h = img.size
            if (
                w > user_row['width']
                or
                h > user_row['height']
            ):
                # thumbnail() preserves aspect ratio within the bounds
                img.thumbnail((user_row['width'], user_row['height']))

            img_path = Path('/tmp/ipfs-staging/img.png')
            img.save(img_path, format='PNG')

            ipfs_info = await ipfs_api.add(img_path)
            ipfs_hash = ipfs_info['Hash']
            self.set_cid(ipfs_hash)

            await ipfs_api.pin(ipfs_hash)
class BaseCommands(StrEnum):
    # Canonical command names shared by every chat front-end; StrEnum
    # members compare equal to their string values, so they can be passed
    # straight to the platform command registries.
    TXT2IMG = 'txt2img'   # generate an image from a text prompt
    IMG2IMG = 'img2img'   # generate an image from an input image + prompt
    REDO = 'redo'         # repeat the user's last request
    HELP = 'help'         # show help text
    COOL = 'cool'         # list "cool" prompt words
    QUEUE = 'queue'       # show current work queue info
    CONFIG = 'config'     # per-user inference settings
    STATS = 'stats'       # per-user statistics
    DONATE = 'donate'     # donation info
    SAY = 'say'           # admin-only: make the bot speak
class BaseMessage(ABC):
    """Abstract chat message."""

    # MODERNIZED: abc.abstractproperty is deprecated since Python 3.3;
    # use @property over @abstractmethod.
    @property
    @abstractmethod
    def id(self) -> int:
        """Platform-specific message id."""
        ...

    @property
    @abstractmethod
    def chat(self) -> BaseChatRoom:
        """Room the message was posted in."""
        ...

    @property
    @abstractmethod
    def text(self) -> str:
        """Message body with any command prefix stripped."""
        ...

    @property
    @abstractmethod
    def author(self) -> BaseUser:
        """User that sent the message."""
        ...

    @property
    @abstractmethod
    def command(self) -> str | None:
        """Command name the message was dispatched under, if any."""
        ...

    @property
    @abstractmethod
    def inputs(self) -> list[BaseFileInput]:
        """File inputs attached to the message."""
        ...

View File

@ -1,322 +0,0 @@
from json import JSONDecodeError
import random
import logging
import asyncio
from decimal import Decimal
from hashlib import sha256
from datetime import datetime
from contextlib import (
ExitStack,
AsyncExitStack,
)
from contextlib import asynccontextmanager as acm
from leap.cleos import CLEOS
from leap.sugar import (
Name,
asset_from_str,
collect_stdout,
)
from leap.hyperion import HyperionAPI
# from telebot.types import InputMediaPhoto
import discord
import requests
import io
from PIL import Image, UnidentifiedImageError
from skynet.db import open_database_connection
from skynet.ipfs import get_ipfs_file, AsyncIPFSHTTP
from skynet.constants import *
from . import *
from .bot import DiscordBot
from .utils import *
from .handlers import create_handler_context
from .ui import SkynetView
class SkynetDiscordFrontend:
    # Legacy discord front-end: wires cleos/hyperion/ipfs/db clients
    # together and drives the full enqueue -> await-submit -> fetch-image
    # request flow.

    def __init__(
        self,
        # token: str,
        account: str,
        permission: str,
        node_url: str,
        hyperion_url: str,
        db_host: str,
        db_user: str,
        db_pass: str,
        ipfs_url: str,
        remote_ipfs_node: str,
        key: str,
        explorer_domain: str,
        ipfs_domain: str
    ):
        # self.token = token
        self.account = account
        self.permission = permission
        self.node_url = node_url
        self.hyperion_url = hyperion_url
        self.db_host = db_host
        self.db_user = db_user
        self.db_pass = db_pass
        self.ipfs_url = ipfs_url
        self.remote_ipfs_node = remote_ipfs_node
        self.key = key
        self.explorer_domain = explorer_domain
        self.ipfs_domain = ipfs_domain

        # client objects: discord bot, chain rpc, chain history, ipfs
        self.bot = DiscordBot(self)
        self.cleos = CLEOS(None, None, url=node_url, remote=node_url)
        self.hyperion = HyperionAPI(hyperion_url)
        self.ipfs_node = AsyncIPFSHTTP(ipfs_url)

        self._exit_stack = ExitStack()
        self._async_exit_stack = AsyncExitStack()

    async def start(self):
        # optionally peer with a remote ipfs node before serving
        if self.remote_ipfs_node:
            await self.ipfs_node.connect(self.remote_ipfs_node)

        # db_call proxies named queries to the database connection
        self.db_call = await self._async_exit_stack.enter_async_context(
            open_database_connection(
                self.db_user, self.db_pass, self.db_host))

        create_handler_context(self)

    async def stop(self):
        await self._async_exit_stack.aclose()
        self._exit_stack.close()

    @acm
    async def open(self):
        # async context manager wrapping start/stop
        await self.start()
        yield self
        await self.stop()

    # maybe do this?
    # async def update_status_message(
    #     self, status_msg, new_text: str, **kwargs
    # ):
    #     await self.db_call(
    #         'update_user_request_by_sid', status_msg.id, new_text)
    #     return await self.bot.edit_message_text(
    #         new_text,
    #         chat_id=status_msg.chat.id,
    #         message_id=status_msg.id,
    #         **kwargs
    #     )

    # async def append_status_message(
    #     self, status_msg, add_text: str, **kwargs
    # ):
    #     request = await self.db_call('get_user_request_by_sid', status_msg.id)
    #     await self.update_status_message(
    #         status_msg,
    #         request['status'] + add_text,
    #         **kwargs
    #     )

    async def work_request(
        self,
        user,
        status_msg,
        method: str,
        params: dict,
        ctx: discord.ext.commands.context.Context | discord.Message,
        file_id: str | None = None,
        binary_data: str = ''
    ) -> bool:
        # Run one full request: enqueue on chain, poll hyperion for a
        # matching worker submit, download the result from ipfs and post it
        # back to the channel.  Returns True when a terminal user-visible
        # state was reached, False on broadcast error or submit timeout.
        send = ctx.channel.send

        if params['seed'] == None:
            # no seed requested: roll a random 32-bit one
            params['seed'] = random.randint(0, 0xFFFFFFFF)

        # Decimal is not json-serializable; stringify those values
        sanitized_params = {}
        for key, val in params.items():
            if isinstance(val, Decimal):
                val = str(val)
            sanitized_params[key] = val

        # NOTE(review): this module only does ``from json import
        # JSONDecodeError`` — the bare ``json.dumps`` below looks like a
        # NameError unless ``json`` leaks in via a star import; confirm.
        body = json.dumps({
            'method': 'diffuse',
            'params': sanitized_params
        })
        request_time = datetime.now().isoformat()

        await status_msg.delete()
        msg_text = f'processing a \'{method}\' request by {user.name}\n[{timestamp_pretty()}] *broadcasting transaction to chain...* '
        embed = discord.Embed(
            title='live updates',
            description=msg_text,
            color=discord.Color.blue())
        message = await send(embed=embed)

        reward = '20.0000 GPU'
        # broadcast the enqueue action to the gpu.scd contract
        res = await self.cleos.a_push_action(
            'gpu.scd',
            'enqueue',
            {
                'user': Name(self.account),
                'request_body': body,
                'binary_data': binary_data,
                'reward': asset_from_str(reward),
                'min_verification': 1
            },
            self.account, self.key, permission=self.permission
        )

        if 'code' in res or 'statusCode' in res:
            logging.error(json.dumps(res, indent=4))
            # NOTE(review): DiscordBot exposes no ``channel`` attribute and
            # ``send`` normally takes a single content arg — this error path
            # likely raises; confirm against DiscordBot.
            await self.bot.channel.send(
                status_msg,
                'skynet has suffered an internal error trying to fill this request')
            return False

        enqueue_tx_id = res['transaction_id']
        enqueue_tx_link = f'[**Your request on Skynet Explorer**](https://{self.explorer_domain}/v2/explore/transaction/{enqueue_tx_id})'

        msg_text += f'**broadcasted!** \n{enqueue_tx_link}\n[{timestamp_pretty()}] *workers are processing request...* '
        embed = discord.Embed(
            title='live updates',
            description=msg_text,
            color=discord.Color.blue())
        await message.edit(embed=embed)

        # the contract prints '<request_id>:<nonce>'; the request hash is
        # what workers echo back in their submit action
        out = collect_stdout(res)
        request_id, nonce = out.split(':')
        request_hash = sha256(
            (nonce + body + binary_data).encode('utf-8')).hexdigest().upper()
        request_id = int(request_id)
        logging.info(f'{request_id} enqueued.')

        tx_hash = None
        ipfs_hash = None
        # poll hyperion for up to ~60s looking for a submit matching our hash
        for i in range(60):
            try:
                submits = await self.hyperion.aget_actions(
                    account=self.account,
                    filter='gpu.scd:submit',
                    sort='desc',
                    after=request_time
                )
                actions = [
                    action
                    for action in submits['actions']
                    if action[
                        'act']['data']['request_hash'] == request_hash
                ]
                if len(actions) > 0:
                    tx_hash = actions[0]['trx_id']
                    data = actions[0]['act']['data']
                    ipfs_hash = data['ipfs_hash']
                    worker = data['worker']
                    logging.info('Found matching submit!')
                    break

            except JSONDecodeError:
                logging.error(f'network error while getting actions, retry..')

            await asyncio.sleep(1)

        if not ipfs_hash:
            # timed out waiting for a worker submit
            timeout_text = f'\n[{timestamp_pretty()}] **timeout processing request**'
            embed = discord.Embed(
                title='live updates',
                description=timeout_text,
                color=discord.Color.blue())
            await message.edit(embed=embed)
            return False

        tx_link = f'[**Your result on Skynet Explorer**](https://{self.explorer_domain}/v2/explore/transaction/{tx_hash})'

        msg_text += f'**request processed!**\n{tx_link}\n[{timestamp_pretty()}] *trying to download image...*\n '
        embed = discord.Embed(
            title='live updates',
            description=msg_text,
            color=discord.Color.blue())
        await message.edit(embed=embed)

        # attempt to get the image and send it
        results = {}
        ipfs_link = f'https://{self.ipfs_domain}/ipfs/{ipfs_hash}'
        ipfs_link_legacy = ipfs_link + '/image.png'

        async def get_and_set_results(link: str):
            # fetch one candidate url and normalize the payload to PNG bytes
            res = await get_ipfs_file(link)
            logging.info(f'got response from {link}')
            if not res or res.status_code != 200:
                logging.warning(f'couldn\'t get ipfs binary data at {link}!')

            else:
                try:
                    with Image.open(io.BytesIO(res.raw)) as image:
                        tmp_buf = io.BytesIO()
                        image.save(tmp_buf, format='PNG')
                        png_img = tmp_buf.getvalue()

                    results[link] = png_img

                except UnidentifiedImageError:
                    logging.warning(
                        f'couldn\'t get ipfs binary data at {link}!')

        # try both the modern and legacy ipfs layouts concurrently
        tasks = [
            get_and_set_results(ipfs_link),
            get_and_set_results(ipfs_link_legacy)
        ]
        await asyncio.gather(*tasks)

        png_img = None
        # modern link result (checked last) wins over the legacy one
        if ipfs_link_legacy in results:
            png_img = results[ipfs_link_legacy]

        if ipfs_link in results:
            png_img = results[ipfs_link]

        if not png_img:
            # still user-visible success: error field added to the embed
            logging.error(f'couldn\'t get ipfs hosted image at {ipfs_link}!')
            embed.add_field(
                name='Error', value=f'couldn\'t get ipfs hosted image [**here**]({ipfs_link})!')
            await message.edit(embed=embed, view=SkynetView(self))
            return True

        # reword this function, may not need caption
        caption, embed = generate_reply_caption(
            user, params, tx_hash, worker, reward, self.explorer_domain)

        logging.info(f'success! sending generated image')
        await message.delete()
        if file_id:  # img2img
            embed.set_image(url=ipfs_link)
            # fetch the original input image to attach as thumbnail
            orig_url = f'https://{self.ipfs_domain}/ipfs/' + binary_data
            res = requests.get(orig_url, stream=True)

            if res.status_code == 200:
                with io.BytesIO(res.content) as img:
                    file = discord.File(img, filename='image.png')
                    embed.set_thumbnail(url='attachment://image.png')
                    await send(embed=embed, view=SkynetView(self), file=file)
                # orig_url = f'https://{self.ipfs_domain}/ipfs/' \
                #     + binary_data + '/image.png'
                # embed.set_thumbnail(
                #     url=orig_url)
            else:
                await send(embed=embed, view=SkynetView(self))

        else:  # txt2img
            embed.set_image(url=ipfs_link)
            await send(embed=embed, view=SkynetView(self))

        return True

View File

@ -1,89 +0,0 @@
# import os
import discord
import asyncio
# from dotenv import load_dotenv
# from pathlib import Path
from discord.ext import commands
from .ui import SkynetView
# # Auth
# current_dir = Path(__file__).resolve().parent
# # parent_dir = current_dir.parent
# env_file_path = current_dir / ".env"
# load_dotenv(dotenv_path=env_file_path)
#
# discordToken = os.getenv("DISCORD_TOKEN")
# Actual Discord bot.
class DiscordBot(commands.Bot):
    # commands.Bot subclass used by the legacy SkynetDiscordFrontend; keeps
    # a back-reference to the frontend so UI callbacks can reach chain/db
    # state.

    def __init__(self, bot, *args, **kwargs):
        # NOTE: ``bot`` here is the SkynetDiscordFrontend instance, not a
        # discord object — stored as self.bot for SkynetView callbacks.
        self.bot = bot
        intents = discord.Intents(
            messages=True,
            guilds=True,
            typing=True,
            members=True,
            presences=True,
            reactions=True,
            message_content=True,
            voice_states=True
        )
        super().__init__(command_prefix='/', intents=intents, *args, **kwargs)

    # async def setup_hook(self):
    #     db.poll_db.start()

    async def on_ready(self):
        # announce presence in every guild channel literally named "skynet"
        print(f'{self.user.name} has connected to Discord!')
        for guild in self.guilds:
            for channel in guild.channels:
                if channel.name == "skynet":
                    await channel.send('Skynet bot online', view=SkynetView(self.bot))
                    # intro_msg = await channel.send('Welcome to the Skynet discord bot.\nSkynet is a decentralized compute layer, focused on supporting AI paradigms. Skynet leverages blockchain technology to manage work requests and fills. We are currently featuring image generation and support 11 different models. Get started with the /help command, or just click on some buttons. Here is an example command to generate an image:\n/txt2img a big red tractor in a giant field of corn')
                    intro_msg = await channel.send("Welcome to Skynet's Discord Bot,\n\nSkynet operates as a decentralized compute layer, offering a wide array of support for diverse AI paradigms through the use of blockchain technology. Our present focus is image generation, powered by 11 distinct models.\n\nTo begin exploring, use the '/help' command or directly interact with the provided buttons. Here is an example command to generate an image:\n\n'/txt2img a big red tractor in a giant field of corn'")
                    # await intro_msg.pin()

        print("\n==============")
        print("Logged in as")
        print(self.user.name)
        print(self.user.id)
        print("==============")

    async def on_message(self, message):
        # only process commands from guild channels named 'skynet', and
        # ignore the bot's own messages
        if isinstance(message.channel, discord.DMChannel):
            return
        elif message.channel.name != 'skynet':
            return
        elif message.author == self.user:
            return
        await self.process_commands(message)
        # await asyncio.sleep(3)
        # await message.channel.send('', view=SkynetView(self.bot))

    async def on_command_error(self, ctx, error):
        # friendlier feedback for the most common user error
        if isinstance(error, commands.MissingRequiredArgument):
            await ctx.send('You missed a required argument, please try again.')
# async def on_message(self, message):
# print(f"message from {message.author} what he said {message.content}")
# await message.channel.send(message.content)
# bot=DiscordBot()
# @bot.command(name='config', help='Responds with the configuration')
# async def config(ctx):
# response = "This is the bot configuration" # Put your bot configuration here
# await ctx.send(response)
#
# @bot.command(name='helper', help='Responds with a help')
# async def helper(ctx):
# response = "This is help information" # Put your help response here
# await ctx.send(response)
#
# @bot.command(name='txt2img', help='Responds with an image')
# async def txt2img(ctx, *, arg):
# response = f"This is your prompt: {arg}"
# await ctx.send(response)
# bot.run(discordToken)

View File

@ -1,601 +0,0 @@
import io
import json
import logging
from datetime import datetime, timedelta
from PIL import Image
# from telebot.types import CallbackQuery, Message
from skynet.frontend import validate_user_config_request
from skynet.constants import *
from .ui import SkynetView
def create_handler_context(frontend: 'SkynetDiscordFrontend'):
bot = frontend.bot
cleos = frontend.cleos
db_call = frontend.db_call
work_request = frontend.work_request
ipfs_node = frontend.ipfs_node
@bot.command(name='config', help='Responds with the configuration')
async def set_config(ctx):
user = ctx.author
try:
attr, val, reply_txt = validate_user_config_request(
ctx.message.content)
logging.info(f'user config update: {attr} to {val}')
await db_call('update_user_config', user.id, attr, val)
logging.info('done')
except BaseException as e:
reply_txt = str(e)
finally:
await ctx.reply(content=reply_txt, view=SkynetView(frontend))
bot.remove_command('help')
@bot.command(name='help', help='Responds with a help')
async def help(ctx):
splt_msg = ctx.message.content.split(' ')
if len(splt_msg) == 1:
await ctx.send(content=f'```{HELP_TEXT}```', view=SkynetView(frontend))
else:
param = splt_msg[1]
if param in HELP_TOPICS:
await ctx.send(content=f'```{HELP_TOPICS[param]}```', view=SkynetView(frontend))
else:
await ctx.send(content=f'```{HELP_UNKWNOWN_PARAM}```', view=SkynetView(frontend))
@bot.command(name='cool', help='Display a list of cool prompt words')
async def send_cool_words(ctx):
clean_cool_word = '\n'.join(CLEAN_COOL_WORDS)
await ctx.send(content=f'```{clean_cool_word}```', view=SkynetView(frontend))
# /stats: show the caller's generation count, join date and role.
@bot.command(name='stats', help='See user statistics')
async def user_stats(ctx):
user = ctx.author
# ensure a db row exists before querying stats
await db_call('get_or_create_user', user.id)
generated, joined, role = await db_call('get_user_stats', user.id)
stats_str = f'```generated: {generated}\n'
stats_str += f'joined: {joined}\n'
stats_str += f'role: {role}\n```'
await ctx.reply(stats_str, view=SkynetView(frontend))
# /donate: static donation info from skynet.constants.
@bot.command(name='donate', help='See donate info')
async def donation_info(ctx):
await ctx.reply(
f'```\n{DONATION_INFO}```', view=SkynetView(frontend))
# /txt2img <prompt>: record the request, enqueue it on-chain via
# frontend.work_request, and bump the user's generation count on success.
@bot.command(name='txt2img', help='Responds with an image')
async def send_txt2img(ctx):
# grab user from ctx
user = ctx.author
user_row = await db_call('get_or_create_user', user.id)
# init new msg
init_msg = 'started processing txt2img request...'
status_msg = await ctx.send(init_msg)
await db_call(
'new_user_request', user.id, ctx.message.id, status_msg.id, status=init_msg)
# prompt is everything after the command token
prompt = ' '.join(ctx.message.content.split(' ')[1:])
if len(prompt) == 0:
await status_msg.edit(content='Empty text prompt ignored.'
)
await db_call('update_user_request', status_msg.id, 'Empty text prompt ignored.')
return
logging.info(f'mid: {ctx.message.id}')
# copy the user's stored config, dropping the db primary key
user_config = {**user_row}
del user_config['id']
params = {
'prompt': prompt,
**user_config
}
await db_call(
'update_user_stats', user.id, 'txt2img', last_prompt=prompt)
success = await work_request(user, status_msg, 'txt2img', params, ctx)
if success:
await db_call('increment_generated', user.id)
# /redo: replay the user's last request; for img2img also re-attach the last
# input file/binary hash stored in the db.
@bot.command(name='redo', help='Redo last request')
async def redo(ctx):
init_msg = 'started processing redo request...'
status_msg = await ctx.send(init_msg)
user = ctx.author
method = await db_call('get_last_method_of', user.id)
prompt = await db_call('get_last_prompt_of', user.id)
file_id = None
binary = ''
if method == 'img2img':
file_id = await db_call('get_last_file_of', user.id)
binary = await db_call('get_last_binary_of', user.id)
if not prompt:
await status_msg.edit(
content='no last prompt found, do a txt2img cmd first!',
view=SkynetView(frontend)
)
return
user_row = await db_call('get_or_create_user', user.id)
await db_call(
'new_user_request', user.id, ctx.message.id, status_msg.id, status=init_msg)
# copy stored config, dropping the db primary key
user_config = {**user_row}
del user_config['id']
params = {
'prompt': prompt,
**user_config
}
success = await work_request(
user, status_msg, 'redo', params, ctx,
file_id=file_id,
binary_data=binary
)
if success:
await db_call('increment_generated', user.id)
# /img2img <prompt> + attachment: downscale the input image to the user's
# configured bounds, publish it to IPFS, then enqueue the request with the
# IPFS hash as binary input.
@bot.command(name='img2img', help='Responds with an image')
async def send_img2img(ctx):
# if isinstance(message_or_query, CallbackQuery):
# query = message_or_query
# message = query.message
# user = query.from_user
# chat = query.message.chat
#
# else:
# message = message_or_query
# user = message.from_user
# chat = message.chat
# reply_id = None
# if chat.type == 'group' and chat.id == GROUP_ID:
# reply_id = message.message_id
#
user = ctx.author
user_row = await db_call('get_or_create_user', user.id)
# init new msg
init_msg = 'started processing img2img request...'
status_msg = await ctx.send(init_msg)
await db_call(
'new_user_request', user.id, ctx.message.id, status_msg.id, status=init_msg)
if not ctx.message.content.startswith('/img2img'):
await ctx.reply(
'For image to image you need to add /img2img to the beggining of your caption'
)
return
prompt = ' '.join(ctx.message.content.split(' ')[1:])
if len(prompt) == 0:
await ctx.reply('Empty text prompt ignored.')
return
# file_id = message.photo[-1].file_id
# file_path = (await bot.get_file(file_id)).file_path
# image_raw = await bot.download_file(file_path)
#
# last attachment on the message is taken as the input image
file = ctx.message.attachments[-1]
file_id = str(file.id)
# file bytes
image_raw = await file.read()
user_config = {**user_row}
del user_config['id']
with Image.open(io.BytesIO(image_raw)) as image:
w, h = image.size
# shrink oversized inputs to the user's configured width/height
if w > user_config['width'] or h > user_config['height']:
logging.warning(f'user sent img of size {image.size}')
image.thumbnail(
(user_config['width'], user_config['height']))
logging.warning(f'resized it to {image.size}')
# if w > 512 or h > 512:
# logging.warning(f'user sent img of size {image.size}')
# image.thumbnail((512, 512))
# logging.warning(f'resized it to {image.size}')
# image.save(f'ipfs-docker-staging/image.png', format='PNG')
image_loc = 'ipfs-staging/image.png'
image.save(image_loc, format='PNG')
# publish + pin the staged input image on ipfs
ipfs_info = await ipfs_node.add(image_loc)
ipfs_hash = ipfs_info['Hash']
await ipfs_node.pin(ipfs_hash)
logging.info(f'published input image {ipfs_hash} on ipfs')
logging.info(f'mid: {ctx.message.id}')
params = {
'prompt': prompt,
**user_config
}
await db_call(
'update_user_stats',
user.id,
'img2img',
last_prompt=prompt,
last_file=file_id,
last_binary=ipfs_hash
)
success = await work_request(
user, status_msg, 'img2img', params, ctx,
file_id=file_id,
binary_data=ipfs_hash
)
if success:
await db_call('increment_generated', user.id)
# TODO: DELETE BELOW
# user = 'testworker3'
# status_msg = 'status'
# params = {
# 'prompt': arg,
# 'seed': None,
# 'step': 35,
# 'guidance': 7.5,
# 'strength': 0.5,
# 'width': 512,
# 'height': 512,
# 'upscaler': None,
# 'model': 'prompthero/openjourney',
# }
#
# ec = await work_request(user, status_msg, 'txt2img', params, ctx)
# print(ec)
# if ec == 0:
# await db_call('increment_generated', user.id)
# response = f"This is your prompt: {arg}"
# await ctx.send(response)
# generic / simple handlers
# @bot.message_handler(commands=['help'])
# async def send_help(message):
# splt_msg = message.text.split(' ')
#
# if len(splt_msg) == 1:
# await bot.reply_to(message, HELP_TEXT)
#
# else:
# param = splt_msg[1]
# if param in HELP_TOPICS:
# await bot.reply_to(message, HELP_TOPICS[param])
#
# else:
# await bot.reply_to(message, HELP_UNKWNOWN_PARAM)
#
# @bot.message_handler(commands=['cool'])
# async def send_cool_words(message):
# await bot.reply_to(message, '\n'.join(COOL_WORDS))
#
# @bot.message_handler(commands=['queue'])
# async def queue(message):
# an_hour_ago = datetime.now() - timedelta(hours=1)
# queue = await cleos.aget_table(
# 'gpu.scd', 'gpu.scd', 'queue',
# index_position=2,
# key_type='i64',
# sort='desc',
# lower_bound=int(an_hour_ago.timestamp())
# )
# await bot.reply_to(
# message, f'Total requests on skynet queue: {len(queue)}')
# @bot.message_handler(commands=['config'])
# async def set_config(message):
# user = message.from_user.id
# try:
# attr, val, reply_txt = validate_user_config_request(
# message.text)
#
# logging.info(f'user config update: {attr} to {val}')
# await db_call('update_user_config', user, attr, val)
# logging.info('done')
#
# except BaseException as e:
# reply_txt = str(e)
#
# finally:
# await bot.reply_to(message, reply_txt)
#
# @bot.message_handler(commands=['stats'])
# async def user_stats(message):
# user = message.from_user.id
#
# await db_call('get_or_create_user', user)
# generated, joined, role = await db_call('get_user_stats', user)
#
# stats_str = f'generated: {generated}\n'
# stats_str += f'joined: {joined}\n'
# stats_str += f'role: {role}\n'
#
# await bot.reply_to(
# message, stats_str)
#
# @bot.message_handler(commands=['donate'])
# async def donation_info(message):
# await bot.reply_to(
# message, DONATION_INFO)
#
# @bot.message_handler(commands=['say'])
# async def say(message):
# chat = message.chat
# user = message.from_user
#
# if (chat.type == 'group') or (user.id != 383385940):
# return
#
# await bot.send_message(GROUP_ID, message.text[4:])
# generic txt2img handler
# async def _generic_txt2img(message_or_query):
# if isinstance(message_or_query, CallbackQuery):
# query = message_or_query
# message = query.message
# user = query.from_user
# chat = query.message.chat
#
# else:
# message = message_or_query
# user = message.from_user
# chat = message.chat
#
# reply_id = None
# if chat.type == 'group' and chat.id == GROUP_ID:
# reply_id = message.message_id
#
# user_row = await db_call('get_or_create_user', user.id)
#
# # init new msg
# init_msg = 'started processing txt2img request...'
# status_msg = await bot.reply_to(message, init_msg)
# await db_call(
# 'new_user_request', user.id, message.id, status_msg.id, status=init_msg)
#
# prompt = ' '.join(message.text.split(' ')[1:])
#
# if len(prompt) == 0:
# await bot.edit_message_text(
# 'Empty text prompt ignored.',
# chat_id=status_msg.chat.id,
# message_id=status_msg.id
# )
# await db_call('update_user_request', status_msg.id, 'Empty text prompt ignored.')
# return
#
# logging.info(f'mid: {message.id}')
#
# user_config = {**user_row}
# del user_config['id']
#
# params = {
# 'prompt': prompt,
# **user_config
# }
#
# await db_call(
# 'update_user_stats', user.id, 'txt2img', last_prompt=prompt)
#
# ec = await work_request(user, status_msg, 'txt2img', params)
# if ec == 0:
# await db_call('increment_generated', user.id)
#
#
# # generic img2img handler
#
# async def _generic_img2img(message_or_query):
# if isinstance(message_or_query, CallbackQuery):
# query = message_or_query
# message = query.message
# user = query.from_user
# chat = query.message.chat
#
# else:
# message = message_or_query
# user = message.from_user
# chat = message.chat
#
# reply_id = None
# if chat.type == 'group' and chat.id == GROUP_ID:
# reply_id = message.message_id
#
# user_row = await db_call('get_or_create_user', user.id)
#
# # init new msg
# init_msg = 'started processing txt2img request...'
# status_msg = await bot.reply_to(message, init_msg)
# await db_call(
# 'new_user_request', user.id, message.id, status_msg.id, status=init_msg)
#
# if not message.caption.startswith('/img2img'):
# await bot.reply_to(
# message,
# 'For image to image you need to add /img2img to the beggining of your caption'
# )
# return
#
# prompt = ' '.join(message.caption.split(' ')[1:])
#
# if len(prompt) == 0:
# await bot.reply_to(message, 'Empty text prompt ignored.')
# return
#
# file_id = message.photo[-1].file_id
# file_path = (await bot.get_file(file_id)).file_path
# image_raw = await bot.download_file(file_path)
# with Image.open(io.BytesIO(image_raw)) as image:
# w, h = image.size
#
# if w > 512 or h > 512:
# logging.warning(f'user sent img of size {image.size}')
# image.thumbnail((512, 512))
# logging.warning(f'resized it to {image.size}')
#
# image.save(f'ipfs-docker-staging/image.png', format='PNG')
#
# ipfs_hash = ipfs_node.add('image.png')
# ipfs_node.pin(ipfs_hash)
#
# logging.info(f'published input image {ipfs_hash} on ipfs')
#
# logging.info(f'mid: {message.id}')
#
# user_config = {**user_row}
# del user_config['id']
#
# params = {
# 'prompt': prompt,
# **user_config
# }
#
# await db_call(
# 'update_user_stats',
# user.id,
# 'img2img',
# last_file=file_id,
# last_prompt=prompt,
# last_binary=ipfs_hash
# )
#
# ec = await work_request(
# user, status_msg, 'img2img', params,
# file_id=file_id,
# binary_data=ipfs_hash
# )
#
# if ec == 0:
# await db_call('increment_generated', user.id)
#
# generic redo handler
# async def _redo(message_or_query):
# is_query = False
# if isinstance(message_or_query, CallbackQuery):
# is_query = True
# query = message_or_query
# message = query.message
# user = query.from_user
# chat = query.message.chat
#
# elif isinstance(message_or_query, Message):
# message = message_or_query
# user = message.from_user
# chat = message.chat
#
# init_msg = 'started processing redo request...'
# if is_query:
# status_msg = await bot.send_message(chat.id, init_msg)
#
# else:
# status_msg = await bot.reply_to(message, init_msg)
#
# method = await db_call('get_last_method_of', user.id)
# prompt = await db_call('get_last_prompt_of', user.id)
#
# file_id = None
# binary = ''
# if method == 'img2img':
# file_id = await db_call('get_last_file_of', user.id)
# binary = await db_call('get_last_binary_of', user.id)
#
# if not prompt:
# await bot.reply_to(
# message,
# 'no last prompt found, do a txt2img cmd first!'
# )
# return
#
#
# user_row = await db_call('get_or_create_user', user.id)
# await db_call(
# 'new_user_request', user.id, message.id, status_msg.id, status=init_msg)
# user_config = {**user_row}
# del user_config['id']
#
# params = {
# 'prompt': prompt,
# **user_config
# }
#
# await work_request(
# user, status_msg, 'redo', params,
# file_id=file_id,
# binary_data=binary
# )
# "proxy" handlers just request routers
# @bot.message_handler(commands=['txt2img'])
# async def send_txt2img(message):
# await _generic_txt2img(message)
#
# @bot.message_handler(func=lambda message: True, content_types=[
# 'photo', 'document'])
# async def send_img2img(message):
# await _generic_img2img(message)
#
# @bot.message_handler(commands=['img2img'])
# async def img2img_missing_image(message):
# await bot.reply_to(
# message,
# 'seems you tried to do an img2img command without sending image'
# )
#
# @bot.message_handler(commands=['redo'])
# async def redo(message):
# await _redo(message)
#
# @bot.callback_query_handler(func=lambda call: True)
# async def callback_query(call):
# msg = json.loads(call.data)
# logging.info(call.data)
# method = msg.get('method')
# match method:
# case 'redo':
# await _redo(call)
# catch all handler for things we dont support
# @bot.message_handler(func=lambda message: True)
# async def echo_message(message):
# if message.text[0] == '/':
# await bot.reply_to(message, UNKNOWN_CMD_TEXT)

View File

@ -1,325 +0,0 @@
import io
import discord
from PIL import Image
import logging
from skynet.constants import *
from skynet.frontend import validate_user_config_request
# Persistent button panel attached to most bot replies; each button proxies
# one of the chat-command flows (redo/txt2img/img2img/stats/...).
class SkynetView(discord.ui.View):
def __init__(self, bot):
self.bot = bot
# timeout=None keeps the view and its buttons active indefinitely
super().__init__(timeout=None)
self.add_item(RedoButton(
'redo', discord.ButtonStyle.primary, self.bot))
self.add_item(Txt2ImgButton(
'txt2img', discord.ButtonStyle.primary, self.bot))
self.add_item(Img2ImgButton(
'img2img', discord.ButtonStyle.primary, self.bot))
self.add_item(StatsButton(
'stats', discord.ButtonStyle.secondary, self.bot))
self.add_item(DonateButton(
'donate', discord.ButtonStyle.secondary, self.bot))
self.add_item(ConfigButton(
'config', discord.ButtonStyle.secondary, self.bot))
self.add_item(HelpButton(
'help', discord.ButtonStyle.secondary, self.bot))
self.add_item(CoolButton(
'cool', discord.ButtonStyle.secondary, self.bot))
# Button flow mirroring /txt2img: prompt the user via grab(), then enqueue.
class Txt2ImgButton(discord.ui.Button):
def __init__(self, label: str, style: discord.ButtonStyle, bot):
self.bot = bot
super().__init__(label=label, style=style)
async def callback(self, interaction):
db_call = self.bot.db_call
work_request = self.bot.work_request
msg = await grab('Enter your prompt:', interaction)
# grab user from msg
user = msg.author
user_row = await db_call('get_or_create_user', user.id)
# init new msg
init_msg = 'started processing txt2img request...'
status_msg = await msg.channel.send(init_msg)
await db_call(
'new_user_request', user.id, msg.id, status_msg.id, status=init_msg)
prompt = msg.content
if len(prompt) == 0:
await status_msg.edit(content='Empty text prompt ignored.'
)
await db_call('update_user_request', status_msg.id, 'Empty text prompt ignored.')
return
logging.info(f'mid: {msg.id}')
# copy stored config, dropping the db primary key
user_config = {**user_row}
del user_config['id']
params = {
'prompt': prompt,
**user_config
}
await db_call(
'update_user_stats', user.id, 'txt2img', last_prompt=prompt)
success = await work_request(user, status_msg, 'txt2img', params, msg)
if success:
await db_call('increment_generated', user.id)
# Button flow mirroring /img2img: prompt for an image + text, stage the image
# on IPFS, then enqueue with the IPFS hash as binary input.
class Img2ImgButton(discord.ui.Button):
def __init__(self, label: str, style: discord.ButtonStyle, bot):
self.bot = bot
super().__init__(label=label, style=style)
async def callback(self, interaction):
db_call = self.bot.db_call
work_request = self.bot.work_request
ipfs_node = self.bot.ipfs_node
msg = await grab('Attach an Image. Enter your prompt:', interaction)
user = msg.author
user_row = await db_call('get_or_create_user', user.id)
# init new msg
init_msg = 'started processing img2img request...'
status_msg = await msg.channel.send(init_msg)
await db_call(
'new_user_request', user.id, msg.id, status_msg.id, status=init_msg)
# if not msg.content.startswith('/img2img'):
# await msg.reply(
# 'For image to image you need to add /img2img to the beggining of your caption'
# )
# return
prompt = msg.content
if len(prompt) == 0:
await msg.reply('Empty text prompt ignored.')
return
# file_id = message.photo[-1].file_id
# file_path = (await bot.get_file(file_id)).file_path
# image_raw = await bot.download_file(file_path)
#
# last attachment on the reply message is the input image
file = msg.attachments[-1]
file_id = str(file.id)
# file bytes
image_raw = await file.read()
user_config = {**user_row}
del user_config['id']
with Image.open(io.BytesIO(image_raw)) as image:
w, h = image.size
# shrink oversized inputs to the user's configured bounds
if w > user_config['width'] or h > user_config['height']:
logging.warning(f'user sent img of size {image.size}')
image.thumbnail(
(user_config['width'], user_config['height']))
logging.warning(f'resized it to {image.size}')
# if w > 512 or h > 512:
# logging.warning(f'user sent img of size {image.size}')
# image.thumbnail((512, 512))
# logging.warning(f'resized it to {image.size}')
# image.save(f'ipfs-docker-staging/image.png', format='PNG')
image_loc = 'ipfs-staging/image.png'
image.save(image_loc, format='PNG')
ipfs_info = await ipfs_node.add(image_loc)
ipfs_hash = ipfs_info['Hash']
await ipfs_node.pin(ipfs_hash)
logging.info(f'published input image {ipfs_hash} on ipfs')
logging.info(f'mid: {msg.id}')
params = {
'prompt': prompt,
**user_config
}
await db_call(
'update_user_stats',
user.id,
'img2img',
last_prompt=prompt,
last_file=file_id,
last_binary=ipfs_hash
)
success = await work_request(
user, status_msg, 'img2img', params, msg,
file_id=file_id,
binary_data=ipfs_hash
)
if success:
await db_call('increment_generated', user.id)
# Button flow mirroring /redo: replay the user's last request from the db.
class RedoButton(discord.ui.Button):
def __init__(self, label: str, style: discord.ButtonStyle, bot):
self.bot = bot
super().__init__(label=label, style=style)
async def callback(self, interaction):
db_call = self.bot.db_call
work_request = self.bot.work_request
init_msg = 'started processing redo request...'
await interaction.response.send_message(init_msg)
status_msg = await interaction.original_response()
user = interaction.user
method = await db_call('get_last_method_of', user.id)
prompt = await db_call('get_last_prompt_of', user.id)
file_id = None
binary = ''
# img2img redos also need the previous input file/binary hash
if method == 'img2img':
file_id = await db_call('get_last_file_of', user.id)
binary = await db_call('get_last_binary_of', user.id)
if not prompt:
await status_msg.edit(
content='no last prompt found, do a txt2img cmd first!',
view=SkynetView(self.bot)
)
return
user_row = await db_call('get_or_create_user', user.id)
await db_call(
'new_user_request', user.id, interaction.id, status_msg.id, status=init_msg)
user_config = {**user_row}
del user_config['id']
params = {
'prompt': prompt,
**user_config
}
success = await work_request(
user, status_msg, 'redo', params, interaction,
file_id=file_id,
binary_data=binary
)
if success:
await db_call('increment_generated', user.id)
# Button flow mirroring /config: ask for '<param> <value>' and persist it.
class ConfigButton(discord.ui.Button):
def __init__(self, label: str, style: discord.ButtonStyle, bot):
self.bot = bot
super().__init__(label=label, style=style)
async def callback(self, interaction):
db_call = self.bot.db_call
msg = await grab('What params do you want to change? (format: <param> <value>)', interaction)
user = interaction.user
try:
# reuse the '/config ...' validator by prefixing the command token
attr, val, reply_txt = validate_user_config_request(
'/config ' + msg.content)
logging.info(f'user config update: {attr} to {val}')
await db_call('update_user_config', user.id, attr, val)
logging.info('done')
except BaseException as e:
# validation/db failure: reply with the error text instead
reply_txt = str(e)
finally:
await msg.reply(content=reply_txt, view=SkynetView(self.bot))
# Button flow mirroring /stats: show generation count, join date and role.
class StatsButton(discord.ui.Button):
def __init__(self, label: str, style: discord.ButtonStyle, bot):
self.bot = bot
super().__init__(label=label, style=style)
async def callback(self, interaction):
db_call = self.bot.db_call
user = interaction.user
# ensure a db row exists before querying stats
await db_call('get_or_create_user', user.id)
generated, joined, role = await db_call('get_user_stats', user.id)
stats_str = f'```generated: {generated}\n'
stats_str += f'joined: {joined}\n'
stats_str += f'role: {role}\n```'
await interaction.response.send_message(
content=stats_str, view=SkynetView(self.bot))
# Button flow mirroring /donate: static donation info.
class DonateButton(discord.ui.Button):
def __init__(self, label: str, style: discord.ButtonStyle, bot):
self.bot = bot
super().__init__(label=label, style=style)
async def callback(self, interaction):
await interaction.response.send_message(
content=f'```\n{DONATION_INFO}```',
view=SkynetView(self.bot))
# Button flow mirroring /cool: list suggested prompt words.
class CoolButton(discord.ui.Button):
def __init__(self, label: str, style: discord.ButtonStyle, bot):
self.bot = bot
super().__init__(label=label, style=style)
async def callback(self, interaction):
clean_cool_word = '\n'.join(CLEAN_COOL_WORDS)
await interaction.response.send_message(
content=f'```{clean_cool_word}```',
view=SkynetView(self.bot))
# Button flow mirroring /help: ask for a topic ('a' means all topics).
class HelpButton(discord.ui.Button):
def __init__(self, label: str, style: discord.ButtonStyle, bot):
self.bot = bot
super().__init__(label=label, style=style)
async def callback(self, interaction):
msg = await grab('What would you like help with? (a for all)', interaction)
param = msg.content
if param == 'a':
await msg.reply(content=f'```{HELP_TEXT}```', view=SkynetView(self.bot))
else:
if param in HELP_TOPICS:
await msg.reply(content=f'```{HELP_TOPICS[param]}```', view=SkynetView(self.bot))
else:
await msg.reply(content=f'```{HELP_UNKWNOWN_PARAM}```', view=SkynetView(self.bot))
async def grab(prompt, interaction):
    """Send `prompt` as an ephemeral reply, then wait for and return the
    next message posted by the same user in the same channel."""
    def vet(candidate):
        from_same_user = candidate.author == interaction.user
        in_same_channel = candidate.channel == interaction.channel
        return from_same_user and in_same_channel

    await interaction.response.send_message(prompt, ephemeral=True)
    return await interaction.client.wait_for('message', check=vet)

View File

@ -1,123 +0,0 @@
import json
import logging
import traceback
from datetime import datetime, timezone
from telebot.types import InlineKeyboardButton, InlineKeyboardMarkup
from telebot.async_telebot import ExceptionHandler
from telebot.formatting import hlink
import discord
from skynet.constants import *
def timestamp_pretty():
    """Current UTC wall-clock time rendered as 'HH:MM:SS'."""
    now_utc = datetime.now(timezone.utc)
    return now_utc.strftime('%H:%M:%S')
def tg_user_pretty(tguser):
    """Short human-readable label for a Telegram user: '@username' when one
    is set, otherwise 'first_name id: <id>'."""
    if tguser.username:
        return f'@{tguser.username}'
    return f'{tguser.first_name} id: {tguser.id}'
# Prints tracebacks for exceptions raised inside telebot handlers.
# NOTE(review): `handle` is declared without `self`. The bot is constructed
# with `exception_handler=SKYExceptionHandler` (the class object, not an
# instance — see the frontend __init__), so telebot invokes this as an
# unbound function and `exception` receives the raised exception. Confirm
# before ever passing an *instance* of this class instead.
class SKYExceptionHandler(ExceptionHandler):
def handle(exception):
traceback.print_exc()
def build_redo_menu():
    """Inline keyboard containing a single 'Redo' button whose callback
    payload is the JSON string {"method": "redo"}."""
    markup = InlineKeyboardMarkup()
    redo_button = InlineKeyboardButton(
        "Redo", callback_data=json.dumps({'method': 'redo'}))
    markup.add(redo_button)
    return markup
def prepare_metainfo_caption(user, worker: str, reward: str, meta: dict, embed) -> str:
    """Populate `embed` with General Info / Prompt / Parameters fields plus a
    footer, and return the parameters text (also used by the caller)."""
    # prompts longer than 256 chars are truncated for display
    prompt = meta["prompt"][:256]

    general_lines = [
        f'generated by {user.name}',
        f'performed by {worker}',
        f'reward: {reward}',
    ]
    gen_text = '\n'.join(general_lines) + '\n'
    embed.add_field(
        name='General Info', value=f'```{gen_text}```', inline=False)

    embed.add_field(name='Prompt', value=f'```{prompt}\n```', inline=False)

    param_lines = [
        f'seed: {meta["seed"]}',
        f'step: {meta["step"]}',
        f'guidance: {meta["guidance"]}',
    ]
    # strength/upscaler are optional: only shown when truthy
    if meta['strength']:
        param_lines.append(f'strength: {meta["strength"]}')
    param_lines.append(f'algo: {meta["model"]}')
    if meta['upscaler']:
        param_lines.append(f'upscaler: {meta["upscaler"]}')
    meta_str = '\n'.join(param_lines) + '\n'
    embed.add_field(name='Parameters', value=f'```{meta_str}```', inline=False)

    footer = (
        f'Made with Skynet v{VERSION}\n'
        'JOIN THE SWARM: https://discord.gg/PAabjJtZAF'
    )
    embed.set_footer(text=footer)
    return meta_str
def generate_reply_caption(
    user,  # discord user
    params: dict,
    tx_hash: str,
    worker: str,
    reward: str,
    explorer_domain: str
):
    """Build the final reply text and an explorer-link embed filled with the
    request's metadata. Returns (final_msg, embed)."""
    explorer_link = discord.Embed(
        title='[SKYNET Transaction Explorer]',
        url=f'https://{explorer_domain}/v2/explore/transaction/{tx_hash}',
        color=discord.Color.blue())

    # side effect: fills the embed's fields/footer as well
    meta_info = prepare_metainfo_caption(
        user, worker, reward, params, explorer_link)

    final_msg = 'Worker finished your task!' + '\n' + f'PARAMETER INFO:\n{meta_info}'
    logging.info(final_msg)
    return final_msg, explorer_link
async def get_global_config(cleos):
    """Fetch the singleton config row of the gpu.scd contract's 'config' table."""
    rows = await cleos.aget_table('gpu.scd', 'gpu.scd', 'config')
    return rows[0]
async def get_user_nonce(cleos, user: str):
    """Look up `user`'s current nonce in the gpu.scd 'users' table (exact-match
    bounds on the account name)."""
    rows = await cleos.aget_table(
        'gpu.scd', 'gpu.scd', 'users',
        index_position=1,
        key_type='name',
        lower_bound=user,
        upper_bound=user,
    )
    return rows[0]['nonce']

View File

@ -1,295 +0,0 @@
import io
import random
import logging
import asyncio
from PIL import Image, UnidentifiedImageError
from json import JSONDecodeError
from decimal import Decimal
from hashlib import sha256
from datetime import datetime
from contextlib import AsyncExitStack
from contextlib import asynccontextmanager as acm
from leap.cleos import CLEOS
from leap.protocol import Name, Asset
from leap.hyperion import HyperionAPI
from telebot.types import InputMediaPhoto
from telebot.async_telebot import AsyncTeleBot
from skynet.db import open_database_connection
from skynet.ipfs import get_ipfs_file, AsyncIPFSHTTP
from skynet.constants import *
from . import *
from .utils import *
from .handlers import create_handler_context
# Telegram frontend: wires the async telebot to the skynet gpu.scd contract,
# the hyperion history API, an ipfs node and the request database.
class SkynetTelegramFrontend:
def __init__(
self,
token: str,
account: str,
permission: str,
node_url: str,
hyperion_url: str,
db_host: str,
db_user: str,
db_pass: str,
ipfs_node: str,
key: str,
explorer_domain: str,
ipfs_domain: str
):
self.token = token
self.account = account
self.permission = permission
self.node_url = node_url
self.hyperion_url = hyperion_url
self.db_host = db_host
self.db_user = db_user
self.db_pass = db_pass
self.key = key
self.explorer_domain = explorer_domain
self.ipfs_domain = ipfs_domain
# exception_handler is given the CLASS (not an instance); see
# SKYExceptionHandler for why its handle() takes no self
self.bot = AsyncTeleBot(token, exception_handler=SKYExceptionHandler)
self.cleos = CLEOS(endpoint=node_url)
self.cleos.load_abi('gpu.scd', GPU_CONTRACT_ABI)
self.hyperion = HyperionAPI(hyperion_url)
self.ipfs_node = AsyncIPFSHTTP(ipfs_node)
# owns async resources (db connection) opened in start()
self._async_exit_stack = AsyncExitStack()
# Open the db connection (kept on the exit stack) and register bot handlers.
async def start(self):
self.db_call = await self._async_exit_stack.enter_async_context(
open_database_connection(
self.db_user, self.db_pass, self.db_host))
create_handler_context(self)
# Release everything registered on the exit stack (the db connection).
async def stop(self):
await self._async_exit_stack.aclose()
# async context manager wrapping start()/stop().
@acm
async def open(self):
await self.start()
yield self
await self.stop()
# Replace the status message text both in the db and on Telegram.
async def update_status_message(
self, status_msg, new_text: str, **kwargs
):
await self.db_call(
'update_user_request_by_sid', status_msg.id, new_text)
return await self.bot.edit_message_text(
new_text,
chat_id=status_msg.chat.id,
message_id=status_msg.id,
**kwargs
)
# Append text to the stored status (read from db) and push the update.
async def append_status_message(
self, status_msg, add_text: str, **kwargs
):
request = await self.db_call('get_user_request_by_sid', status_msg.id)
await self.update_status_message(
status_msg,
request['status'] + add_text,
**kwargs
)
async def work_request(
self,
user,
status_msg,
method: str,
params: dict,
file_id: str | None = None,
inputs: list[str] = []
) -> bool:
if params['seed'] == None:
params['seed'] = random.randint(0, 0xFFFFFFFF)
sanitized_params = {}
for key, val in params.items():
if isinstance(val, Decimal):
val = str(val)
sanitized_params[key] = val
body = json.dumps({
'method': 'diffuse',
'params': sanitized_params
})
request_time = datetime.now().isoformat()
await self.update_status_message(
status_msg,
f'processing a \'{method}\' request by {tg_user_pretty(user)}\n'
f'[{timestamp_pretty()}] <i>broadcasting transaction to chain...</i>',
parse_mode='HTML'
)
reward = '20.0000 GPU'
res = await self.cleos.a_push_action(
'gpu.scd',
'enqueue',
list({
'user': Name(self.account),
'request_body': body,
'binary_data': ','.join(inputs),
'reward': Asset.from_str(reward),
'min_verification': 1
}.values()),
self.account, self.key, permission=self.permission
)
if 'code' in res or 'statusCode' in res:
logging.error(json.dumps(res, indent=4))
await self.update_status_message(
status_msg,
'skynet has suffered an internal error trying to fill this request')
return False
enqueue_tx_id = res['transaction_id']
enqueue_tx_link = hlink(
'Your request on Skynet Explorer',
f'https://{self.explorer_domain}/v2/explore/transaction/{enqueue_tx_id}'
)
await self.append_status_message(
status_msg,
f' <b>broadcasted!</b>\n'
f'<b>{enqueue_tx_link}</b>\n'
f'[{timestamp_pretty()}] <i>workers are processing request...</i>',
parse_mode='HTML'
)
out = res['processed']['action_traces'][0]['console']
request_id, nonce = out.split(':')
request_hash = sha256(
(nonce + body + ','.join(inputs)).encode('utf-8')).hexdigest().upper()
request_id = int(request_id)
logging.info(f'{request_id} enqueued.')
tx_hash = None
ipfs_hash = None
for i in range(60 * 3):
try:
submits = await self.hyperion.aget_actions(
account=self.account,
filter='gpu.scd:submit',
sort='desc',
after=request_time
)
actions = [
action
for action in submits['actions']
if action[
'act']['data']['request_hash'] == request_hash
]
if len(actions) > 0:
tx_hash = actions[0]['trx_id']
data = actions[0]['act']['data']
ipfs_hash = data['ipfs_hash']
worker = data['worker']
logging.info('Found matching submit!')
break
except JSONDecodeError:
logging.error(f'network error while getting actions, retry..')
await asyncio.sleep(1)
if not ipfs_hash:
await self.update_status_message(
status_msg,
f'\n[{timestamp_pretty()}] <b>timeout processing request</b>',
parse_mode='HTML'
)
return False
tx_link = hlink(
'Your result on Skynet Explorer',
f'https://{self.explorer_domain}/v2/explore/transaction/{tx_hash}'
)
await self.append_status_message(
status_msg,
f' <b>request processed!</b>\n'
f'<b>{tx_link}</b>\n'
f'[{timestamp_pretty()}] <i>trying to download image...</i>\n',
parse_mode='HTML'
)
caption = generate_reply_caption(
user, params, tx_hash, worker, reward, self.explorer_domain)
# attempt to get the image and send it
ipfs_link = f'https://{self.ipfs_domain}/ipfs/{ipfs_hash}'
res = await get_ipfs_file(ipfs_link)
logging.info(f'got response from {ipfs_link}')
if not res or res.status_code != 200:
logging.warning(f'couldn\'t get ipfs binary data at {ipfs_link}!')
else:
try:
with Image.open(io.BytesIO(res.raw)) as image:
w, h = image.size
if w > TG_MAX_WIDTH or h > TG_MAX_HEIGHT:
logging.warning(f'result is of size {image.size}')
image.thumbnail((TG_MAX_WIDTH, TG_MAX_HEIGHT))
tmp_buf = io.BytesIO()
image.save(tmp_buf, format='PNG')
png_img = tmp_buf.getvalue()
except UnidentifiedImageError:
logging.warning(f'couldn\'t get ipfs binary data at {ipfs_link}!')
if not png_img:
await self.update_status_message(
status_msg,
caption,
reply_markup=build_redo_menu(),
parse_mode='HTML'
)
return True
logging.info(f'success! sending generated image')
await self.bot.delete_message(
chat_id=status_msg.chat.id, message_id=status_msg.id)
if file_id: # img2img
await self.bot.send_media_group(
status_msg.chat.id,
media=[
InputMediaPhoto(file_id),
InputMediaPhoto(
png_img,
caption=caption,
parse_mode='HTML'
)
],
)
else: # txt2img
await self.bot.send_photo(
status_msg.chat.id,
caption=caption,
photo=png_img,
reply_markup=build_redo_menu(),
parse_mode='HTML'
)
return True

View File

@ -1,365 +0,0 @@
import io
import json
import logging
from datetime import datetime, timedelta
from PIL import Image
from telebot.types import CallbackQuery, Message
from skynet.frontend import validate_user_config_request, perform_auto_conf
from skynet.constants import *
def create_handler_context(frontend: 'SkynetTelegramFrontend'):
bot = frontend.bot
cleos = frontend.cleos
db_call = frontend.db_call
work_request = frontend.work_request
ipfs_node = frontend.ipfs_node
# generic / simple handlers
@bot.message_handler(commands=['help'])
async def send_help(message):
splt_msg = message.text.split(' ')
if len(splt_msg) == 1:
await bot.reply_to(message, HELP_TEXT)
else:
param = splt_msg[1]
if param in HELP_TOPICS:
await bot.reply_to(message, HELP_TOPICS[param])
else:
await bot.reply_to(message, HELP_UNKWNOWN_PARAM)
@bot.message_handler(commands=['cool'])
async def send_cool_words(message):
await bot.reply_to(message, '\n'.join(COOL_WORDS))
@bot.message_handler(commands=['queue'])
async def queue(message):
an_hour_ago = datetime.now() - timedelta(hours=1)
queue = await cleos.aget_table(
'gpu.scd', 'gpu.scd', 'queue',
index_position=2,
key_type='i64',
sort='desc',
lower_bound=int(an_hour_ago.timestamp())
)
await bot.reply_to(
message, f'Total requests on skynet queue: {len(queue)}')
@bot.message_handler(commands=['config'])
async def set_config(message):
user = message.from_user.id
try:
attr, val, reply_txt = validate_user_config_request(
message.text)
logging.info(f'user config update: {attr} to {val}')
await db_call('update_user_config', user, attr, val)
logging.info('done')
except BaseException as e:
reply_txt = str(e)
finally:
await bot.reply_to(message, reply_txt)
@bot.message_handler(commands=['stats'])
async def user_stats(message):
user = message.from_user.id
await db_call('get_or_create_user', user)
generated, joined, role = await db_call('get_user_stats', user)
stats_str = f'generated: {generated}\n'
stats_str += f'joined: {joined}\n'
stats_str += f'role: {role}\n'
await bot.reply_to(
message, stats_str)
@bot.message_handler(commands=['donate'])
async def donation_info(message):
await bot.reply_to(
message, DONATION_INFO)
@bot.message_handler(commands=['say'])
async def say(message):
chat = message.chat
user = message.from_user
if (chat.type == 'group') or (user.id != 383385940):
return
await bot.send_message(GROUP_ID, message.text[4:])
# generic txt2img handler
async def _generic_txt2img(message_or_query):
if isinstance(message_or_query, CallbackQuery):
query = message_or_query
message = query.message
user = query.from_user
chat = query.message.chat
else:
message = message_or_query
user = message.from_user
chat = message.chat
if chat.type == 'private':
return
reply_id = None
if chat.type == 'group' and chat.id == GROUP_ID:
reply_id = message.message_id
user_row = await db_call('get_or_create_user', user.id)
# init new msg
init_msg = 'started processing txt2img request...'
status_msg = await bot.reply_to(message, init_msg)
await db_call(
'new_user_request', user.id, message.id, status_msg.id, status=init_msg)
prompt = ' '.join(message.text.split(' ')[1:])
if len(prompt) == 0:
await bot.edit_message_text(
'Empty text prompt ignored.',
chat_id=status_msg.chat.id,
message_id=status_msg.id
)
await db_call('update_user_request', status_msg.id, 'Empty text prompt ignored.')
return
logging.info(f'mid: {message.id}')
user_config = {**user_row}
del user_config['id']
if user_config['autoconf']:
user_config = perform_auto_conf(user_config)
params = {
'prompt': prompt,
**user_config
}
await db_call(
'update_user_stats', user.id, 'txt2img', last_prompt=prompt)
success = await work_request(user, status_msg, 'txt2img', params)
if success:
await db_call('increment_generated', user.id)
# generic img2img handler
async def _generic_img2img(message_or_query):
if isinstance(message_or_query, CallbackQuery):
query = message_or_query
message = query.message
user = query.from_user
chat = query.message.chat
else:
message = message_or_query
user = message.from_user
chat = message.chat
if chat.type == 'private':
return
reply_id = None
if chat.type == 'group' and chat.id == GROUP_ID:
reply_id = message.message_id
user_row = await db_call('get_or_create_user', user.id)
# init new msg
init_msg = 'started processing txt2img request...'
status_msg = await bot.reply_to(message, init_msg)
await db_call(
'new_user_request', user.id, message.id, status_msg.id, status=init_msg)
if not message.caption.startswith('/img2img'):
await bot.reply_to(
message,
'For image to image you need to add /img2img to the beggining of your caption'
)
return
prompt = ' '.join(message.caption.split(' ')[1:])
if len(prompt) == 0:
await bot.reply_to(message, 'Empty text prompt ignored.')
return
file_id = message.photo[-1].file_id
file_path = (await bot.get_file(file_id)).file_path
image_raw = await bot.download_file(file_path)
user_config = {**user_row}
del user_config['id']
if user_config['autoconf']:
user_config = perform_auto_conf(user_config)
with Image.open(io.BytesIO(image_raw)) as image:
w, h = image.size
if w > user_config['width'] or h > user_config['height']:
logging.warning(f'user sent img of size {image.size}')
image.thumbnail(
(user_config['width'], user_config['height']))
logging.warning(f'resized it to {image.size}')
image_loc = 'ipfs-staging/image.png'
image.save(image_loc, format='PNG')
ipfs_info = await ipfs_node.add(image_loc)
ipfs_hash = ipfs_info['Hash']
await ipfs_node.pin(ipfs_hash)
logging.info(f'published input image {ipfs_hash} on ipfs')
logging.info(f'mid: {message.id}')
params = {
'prompt': prompt,
**user_config
}
await db_call(
'update_user_stats',
user.id,
'img2img',
last_file=file_id,
last_prompt=prompt,
last_binary=ipfs_hash
)
success = await work_request(
user, status_msg, 'img2img', params,
file_id=file_id,
inputs=ipfs_hash
)
if success:
await db_call('increment_generated', user.id)
# generic redo handler
async def _redo(message_or_query):
is_query = False
if isinstance(message_or_query, CallbackQuery):
is_query = True
query = message_or_query
message = query.message
user = query.from_user
chat = query.message.chat
elif isinstance(message_or_query, Message):
message = message_or_query
user = message.from_user
chat = message.chat
if chat.type == 'private':
return
init_msg = 'started processing redo request...'
if is_query:
status_msg = await bot.send_message(chat.id, init_msg)
else:
status_msg = await bot.reply_to(message, init_msg)
method = await db_call('get_last_method_of', user.id)
prompt = await db_call('get_last_prompt_of', user.id)
file_id = None
binary = ''
if method == 'img2img':
file_id = await db_call('get_last_file_of', user.id)
binary = await db_call('get_last_binary_of', user.id)
if not prompt:
await bot.reply_to(
message,
'no last prompt found, do a txt2img cmd first!'
)
return
user_row = await db_call('get_or_create_user', user.id)
await db_call(
'new_user_request', user.id, message.id, status_msg.id, status=init_msg)
user_config = {**user_row}
del user_config['id']
if user_config['autoconf']:
user_config = perform_auto_conf(user_config)
params = {
'prompt': prompt,
**user_config
}
success = await work_request(
user, status_msg, 'redo', params,
file_id=file_id,
inputs=binary
)
if success:
await db_call('increment_generated', user.id)
# "proxy" handlers just request routers
@bot.message_handler(commands=['txt2img'])
async def send_txt2img(message):
await _generic_txt2img(message)
@bot.message_handler(func=lambda message: True, content_types=[
'photo', 'document'])
async def send_img2img(message):
await _generic_img2img(message)
@bot.message_handler(commands=['img2img'])
async def img2img_missing_image(message):
await bot.reply_to(
message,
'seems you tried to do an img2img command without sending image'
)
@bot.message_handler(commands=['redo'])
async def redo(message):
await _redo(message)
@bot.callback_query_handler(func=lambda call: True)
async def callback_query(call):
msg = json.loads(call.data)
logging.info(call.data)
method = msg.get('method')
match method:
case 'redo':
await _redo(call)
# catch all handler for things we dont support
@bot.message_handler(func=lambda message: True)
async def echo_message(message):
if message.text[0] == '/':
await bot.reply_to(message, UNKNOWN_CMD_TEXT)

View File

@ -1,105 +0,0 @@
import json
import logging
import traceback
from datetime import datetime, timezone
from telebot.types import InlineKeyboardButton, InlineKeyboardMarkup
from telebot.async_telebot import ExceptionHandler
from telebot.formatting import hlink
from skynet.constants import *
def timestamp_pretty():
    # Current UTC wall-clock time rendered as 'HH:MM:SS'.
    now_utc = datetime.now(timezone.utc)
    return now_utc.strftime('%H:%M:%S')
def tg_user_pretty(tguser):
    # Human-readable label for a telegram user: prefer the @handle,
    # otherwise fall back to first name plus numeric id.
    username = tguser.username
    if username:
        return f'@{username}'
    return f'{tguser.first_name} id: {tguser.id}'
class SKYExceptionHandler(ExceptionHandler):
    # Telebot exception hook: print the full traceback of any error raised
    # inside a handler instead of swallowing it.

    def handle(self, exception):
        # BUG FIX: the original signature was `handle(exception)` (no self),
        # so the bound call `handler.handle(e)` raised a TypeError.
        traceback.print_exc()
def build_redo_menu():
    # Single-button inline keyboard whose callback payload triggers
    # the 'redo' method in the callback-query handler.
    redo_payload = json.dumps({'method': 'redo'})
    markup = InlineKeyboardMarkup()
    markup.add(InlineKeyboardButton("Redo", callback_data=redo_payload))
    return markup
def prepare_metainfo_caption(tguser, worker: str, reward: str, meta: dict) -> str:
    '''HTML caption block describing who requested the image, which worker
    produced it, the reward, and the generation parameters from `meta`.
    '''
    prompt = meta["prompt"]
    if len(prompt) > 256:
        # cap very long prompts in the caption
        prompt = prompt[:256]

    parts = [
        f'<u>by {tg_user_pretty(tguser)}</u>\n',
        f'<i>performed by {worker}</i>\n',
        f'<b><u>reward: {reward}</u></b>\n',
        f'<code>prompt:</code> {prompt}\n',
        f'<code>seed: {meta["seed"]}</code>\n',
        f'<code>step: {meta["step"]}</code>\n',
        f'<code>guidance: {meta["guidance"]}</code>\n',
    ]

    # strength / upscaler only appear when set (img2img / upscaled runs)
    if meta['strength']:
        parts.append(f'<code>strength: {meta["strength"]}</code>\n')
    parts.append(f'<code>algo: {meta["model"]}</code>\n')
    if meta['upscaler']:
        parts.append(f'<code>upscaler: {meta["upscaler"]}</code>\n')

    parts.append(f'<b><u>Made with Skynet v{VERSION}</u></b>\n')
    parts.append(f'<b>JOIN THE SWARM: @skynetgpu</b>')
    return ''.join(parts)
def generate_reply_caption(
    tguser,  # telegram user
    params: dict,
    tx_hash: str,
    worker: str,
    reward: str,
    explorer_domain: str
):
    '''Final HTML caption for a finished request: a block-explorer link to
    the submit transaction followed by the parameter metainfo.
    '''
    explorer_link = hlink(
        'SKYNET Transaction Explorer',
        f'https://{explorer_domain}/v2/explore/transaction/{tx_hash}'
    )

    meta_info = prepare_metainfo_caption(tguser, worker, reward, params)

    # BUG FIX: the original built an intermediate "Worker finished your
    # task!" message here and immediately overwrote it; that dead
    # assignment was removed.
    final_msg = '\n'.join([
        f'<b><i>{explorer_link}</i></b>',
        f'{meta_info}'
    ])

    logging.info(final_msg)
    return final_msg
async def get_global_config(cleos):
    # Fetch the single row of the gpu.scd 'config' singleton table.
    rows = await cleos.aget_table('gpu.scd', 'gpu.scd', 'config')
    return rows[0]
async def get_user_nonce(cleos, user: str):
    # Exact-match lookup (lower_bound == upper_bound == user) on the
    # gpu.scd 'users' table; return that account's nonce counter.
    rows = await cleos.aget_table(
        'gpu.scd', 'gpu.scd', 'users',
        index_position=1,
        key_type='name',
        lower_bound=user,
        upper_bound=user
    )
    return rows[0]['nonce']

View File

@ -1,8 +1,9 @@
import io
import logging
from pathlib import Path
import httpx
from PIL import Image
class IPFSClientException(Exception):
...
@ -53,14 +54,37 @@ class AsyncIPFSHTTP:
params=kwargs
))['Peers']
async def publish(self, raw, type: str = 'png'):
stage = Path('/tmp/ipfs-staging')
stage.mkdir(exist_ok=True)
logging.info('publish_on_ipfs')
async def get_ipfs_file(ipfs_link: str, timeout: int = 60 * 5):
target_file = ''
match type:
case 'png':
raw: Image
target_file = stage / 'image.png'
raw.save(target_file)
case _:
raise ValueError(f'Unsupported output type: {type}')
file_info = await self.add(Path(target_file))
file_cid = file_info['Hash']
logging.info(f'added file to ipfs, CID: {file_cid}')
await self.pin(file_cid)
logging.info(f'pinned {file_cid}')
return file_cid
async def get_ipfs_img(ipfs_link: str, timeout: int = 3) -> Image:
logging.info(f'attempting to get image at {ipfs_link}')
resp = None
for _ in range(timeout):
try:
async with httpx.AsyncClient() as client:
resp = await client.get(ipfs_link, timeout=3)
resp = await client.get(ipfs_link, timeout=timeout)
except httpx.RequestError as e:
logging.warning(f'Request error: {e}')
@ -71,6 +95,14 @@ async def get_ipfs_file(ipfs_link: str, timeout: int = 60 * 5):
if resp:
logging.info(f'status_code: {resp.status_code}')
else:
logging.error(f'timeout')
logging.error('timeout')
return None
return resp
if resp.status_code != 200:
logging.warning(f'couldn\'t get ipfs binary data at {ipfs_link}!')
return resp
img = Image.open(io.BytesIO(resp.read()))
logging.info('decoded img successfully')
return img

View File

@ -1,4 +1,5 @@
from enum import StrEnum
from hashlib import sha256
from msgspec import Struct
@ -39,6 +40,27 @@ class ConfigV0:
token_contract: str
token_symbol: str
'''
ConfigV1
singleton containing global info about system, definition:
```rust
#[chain(table="config", singleton)]
pub struct Config {
token_account: Name,
token_symbol: Symbol,
global_nonce: u64
}
```
'''
class ConfigV1(Struct):
    # Python-side mirror of the V1 on-chain `config` singleton row.
    token_account: str  # account name of the token contract
    token_symbol: str   # symbol string, e.g. '4,TLOS'
    global_nonce: int   # global counter (replaces per-user nonce of V0)
type Config = ConfigV0 | ConfigV1
'''
RequestV0
@ -85,6 +107,7 @@ class BodyV0Params(Struct):
strength: str | float | None = None
output_type: str | None = 'png'
upscaler: str | None = None
autoconf: bool | None = None
class BodyV0(Struct):
@ -102,6 +125,50 @@ class RequestV0(Struct):
binary_data: str
timestamp: str
def hash_v0(self) -> str:
hash_str = (
str(self.nonce)
+
self.body
+
self.binary_data
)
return sha256(hash_str.encode('utf-8')).hexdigest()
'''
RequestV1
a request placed on the queue, definition:
NEW: nonce field removed
scope: self.receiver
```rust
#[chain(table="queue")]
pub struct Request {
#[chain(primary)]
id: u64,
user: Name,
reward: Asset,
min_verification: u32,
body: String,
binary_data: String,
#[chain(secondary)]
timestamp: TimePointSec
}
```
'''
class RequestV1(Struct):
    # Python-side mirror of a V1 on-chain `queue` row (the per-request
    # nonce field of V0 was removed).
    id: int                # primary key
    user: str              # requesting account name
    reward: str            # asset string, e.g. '1.0000 TLOS'
    min_verification: int  # presumably results required before settling -- confirm with contract
    body: str              # serialized request body
    binary_data: str       # binary input reference (ipfs hash in frontends)
    timestamp: str         # time_point_sec of enqueue (secondary index)
type Request = RequestV0 | RequestV1
'''
AccountV0
@ -127,6 +194,28 @@ class AccountV0(Struct):
balance: str
nonce: int
'''
AccountV1
a user account, users must deposit tokens in order to enqueue requests, definition:
scope: self.receiver
```rust
#[chain(table="users")]
pub struct Account {
#[chain(primary)]
user: Name,
balance: Asset
}
```
'''
class AccountV1(Struct):
    # Python-side mirror of a V1 on-chain `users` row (nonce field of V0
    # removed); users must deposit tokens before enqueueing requests.
    user: str     # account name (primary key)
    balance: str  # deposited balance as an asset string
type Account = AccountV0 | AccountV1
'''
WorkerV0

View File

@ -1,6 +1,7 @@
import pytest
from skynet.ipfs import AsyncIPFSHTTP
from skynet.contract import GPUContractAPI
from skynet._testing import override_dgpu_config
@ -24,9 +25,11 @@ def skynet_cleos(cleos_bs):
# cleos.import_key('gpu.scd', priv)
cleos.new_account('gpu.scd', ram=4200000)
contract_path = 'tests/contracts/skygpu-contract/target'
cleos.deploy_contract_from_path(
'gpu.scd',
'tests/contracts/gpu.scd',
contract_path,
contract_name='skygpu',
create_account=False
)
@ -37,9 +40,13 @@ def skynet_cleos(cleos_bs):
'gpu.scd'
)
cleos.new_account('testworker')
testworker_key = '5KRPFxF4RJebqPXqRzwStmCaEWeRfp3pR7XUNoA3zCHt5fnPu3s'
pub_key = cleos.import_key('testworker', testworker_key)
cleos.new_account('testworker', key=pub_key)
yield cleos
cleos.wait_blocks(1)
yield GPUContractAPI(cleos), cleos
@pytest.fixture

View File

@ -1,416 +0,0 @@
{
"____comment": "This file was generated with eosio-abigen. DO NOT EDIT ",
"version": "eosio::abi/1.2",
"types": [],
"structs": [
{
"name": "account",
"base": "",
"fields": [
{
"name": "user",
"type": "name"
},
{
"name": "balance",
"type": "asset"
},
{
"name": "nonce",
"type": "uint64"
}
]
},
{
"name": "card",
"base": "",
"fields": [
{
"name": "id",
"type": "uint64"
},
{
"name": "owner",
"type": "name"
},
{
"name": "card_name",
"type": "string"
},
{
"name": "version",
"type": "string"
},
{
"name": "total_memory",
"type": "uint64"
},
{
"name": "mp_count",
"type": "uint32"
},
{
"name": "extra",
"type": "string"
}
]
},
{
"name": "clean",
"base": "",
"fields": []
},
{
"name": "config",
"base": "",
"fields": [
{
"name": "token_contract",
"type": "name"
},
{
"name": "token_symbol",
"type": "symbol"
}
]
},
{
"name": "dequeue",
"base": "",
"fields": [
{
"name": "user",
"type": "name"
},
{
"name": "request_id",
"type": "uint64"
}
]
},
{
"name": "enqueue",
"base": "",
"fields": [
{
"name": "user",
"type": "name"
},
{
"name": "request_body",
"type": "string"
},
{
"name": "binary_data",
"type": "string"
},
{
"name": "reward",
"type": "asset"
},
{
"name": "min_verification",
"type": "uint32"
}
]
},
{
"name": "global_configuration_struct",
"base": "",
"fields": [
{
"name": "token_contract",
"type": "name"
},
{
"name": "token_symbol",
"type": "symbol"
}
]
},
{
"name": "submit",
"base": "",
"fields": [
{
"name": "worker",
"type": "name"
},
{
"name": "request_id",
"type": "uint64"
},
{
"name": "request_hash",
"type": "checksum256"
},
{
"name": "result_hash",
"type": "checksum256"
},
{
"name": "ipfs_hash",
"type": "string"
}
]
},
{
"name": "withdraw",
"base": "",
"fields": [
{
"name": "user",
"type": "name"
},
{
"name": "quantity",
"type": "asset"
}
]
},
{
"name": "work_request_struct",
"base": "",
"fields": [
{
"name": "id",
"type": "uint64"
},
{
"name": "user",
"type": "name"
},
{
"name": "reward",
"type": "asset"
},
{
"name": "min_verification",
"type": "uint32"
},
{
"name": "nonce",
"type": "uint64"
},
{
"name": "body",
"type": "string"
},
{
"name": "binary_data",
"type": "string"
},
{
"name": "timestamp",
"type": "time_point_sec"
}
]
},
{
"name": "work_result_struct",
"base": "",
"fields": [
{
"name": "id",
"type": "uint64"
},
{
"name": "request_id",
"type": "uint64"
},
{
"name": "user",
"type": "name"
},
{
"name": "worker",
"type": "name"
},
{
"name": "result_hash",
"type": "checksum256"
},
{
"name": "ipfs_hash",
"type": "string"
},
{
"name": "submited",
"type": "time_point_sec"
}
]
},
{
"name": "workbegin",
"base": "",
"fields": [
{
"name": "worker",
"type": "name"
},
{
"name": "request_id",
"type": "uint64"
},
{
"name": "max_workers",
"type": "uint32"
}
]
},
{
"name": "workcancel",
"base": "",
"fields": [
{
"name": "worker",
"type": "name"
},
{
"name": "request_id",
"type": "uint64"
},
{
"name": "reason",
"type": "string"
}
]
},
{
"name": "worker",
"base": "",
"fields": [
{
"name": "account",
"type": "name"
},
{
"name": "joined",
"type": "time_point_sec"
},
{
"name": "left",
"type": "time_point_sec"
},
{
"name": "url",
"type": "string"
}
]
},
{
"name": "worker_status_struct",
"base": "",
"fields": [
{
"name": "worker",
"type": "name"
},
{
"name": "status",
"type": "string"
},
{
"name": "started",
"type": "time_point_sec"
}
]
}
],
"actions": [
{
"name": "clean",
"type": "clean",
"ricardian_contract": ""
},
{
"name": "config",
"type": "config",
"ricardian_contract": ""
},
{
"name": "dequeue",
"type": "dequeue",
"ricardian_contract": ""
},
{
"name": "enqueue",
"type": "enqueue",
"ricardian_contract": ""
},
{
"name": "submit",
"type": "submit",
"ricardian_contract": ""
},
{
"name": "withdraw",
"type": "withdraw",
"ricardian_contract": ""
},
{
"name": "workbegin",
"type": "workbegin",
"ricardian_contract": ""
},
{
"name": "workcancel",
"type": "workcancel",
"ricardian_contract": ""
}
],
"tables": [
{
"name": "cards",
"type": "card",
"index_type": "i64",
"key_names": [],
"key_types": []
},
{
"name": "config",
"type": "global_configuration_struct",
"index_type": "i64",
"key_names": [],
"key_types": []
},
{
"name": "queue",
"type": "work_request_struct",
"index_type": "i64",
"key_names": [],
"key_types": []
},
{
"name": "results",
"type": "work_result_struct",
"index_type": "i64",
"key_names": [],
"key_types": []
},
{
"name": "status",
"type": "worker_status_struct",
"index_type": "i64",
"key_names": [],
"key_types": []
},
{
"name": "users",
"type": "account",
"index_type": "i64",
"key_names": [],
"key_types": []
},
{
"name": "workers",
"type": "worker",
"index_type": "i64",
"key_names": [],
"key_types": []
}
],
"ricardian_clauses": [],
"variants": [],
"action_results": []
}

Binary file not shown.

@ -0,0 +1 @@
Subproject commit 87794ffb45340103159450694c56f241615431b2

View File

@ -1,13 +1,231 @@
import trio
import pytest
from leap.errors import TransactionPushError
from msgspec import json
from skynet.contract import RequestNotFound, ConfigNotFound
from skynet.types import BodyV0, BodyV0Params
from skynet._testing import open_test_worker
async def test_system(skynet_cleos):
    '''End-to-end table lifecycle test: populate users, queue, worker and
    result tables, then verify `clean_tables()` (soft) empties only the
    per-request tables and `clean_tables(nuke=True)` wipes everything.
    '''
    gpu, cleos = skynet_cleos
    # assert config can only be init once (done by fixture)
    with pytest.raises(TransactionPushError):
        await gpu.init_config('eosio.token', '4,TLOS')
    # test clean function
    # fill tables with data
    # accounts
    users = []
    quantity = '1.0000 TLOS'
    usr_num = 3
    for _ in range(usr_num):
        test_user = cleos.new_account()
        cleos.transfer_token('eosio', test_user, quantity, 'clean testing')
        await gpu.deposit(test_user, quantity)
        # will throw if user not found
        await gpu.get_user(test_user)
        users.append(test_user)
    # queue
    for user in users:
        await gpu.enqueue(
            user,
            BodyV0(
                method='txt2img',
                params=BodyV0Params(
                    prompt='ayy lmao',
                    model='skygpu/mocker',
                    step=1,
                    seed=0,
                    guidance=10.0
                )
            ),
            min_verification=2  # make reqs stay after 1 result
        )
    # check requests are in queue
    queue = await gpu.get_queue()
    assert len(queue) == usr_num
    # workers
    workers = []
    quantity = '1.0000 TLOS'
    wrk_num = 3
    for _ in range(wrk_num):
        worker = cleos.new_account()
        await gpu.register_worker(worker, 'http://localhost')
        # will throw if worker not found
        await gpu.get_worker(worker)
        workers.append(worker)
    # status
    for i in range(wrk_num):
        req = queue[i]
        worker = workers[i]
        await gpu.accept_work(worker, req.id)
        # will throw if status not found
        await gpu.get_worker_status_for_request(req.id, worker)
    # results
    # make one of the workers finish to populate result
    await gpu.submit_work(
        workers[0],
        queue[0].id,
        'ff' * 32,
        'null hash'
    )
    results = await gpu.get_results(queue[0].id)
    assert len(results) == 1
    # all tables populated
    # run clean nuke == false
    await gpu.clean_tables()
    # assert tables empty
    assert len(await gpu.get_queue()) == 0
    for req in queue:
        assert len(await gpu.get_statuses_for_request(req.id)) == 0
        assert len(await gpu.get_results(req.id)) == 0
    # check config, accounts and workers still there
    await gpu.get_config()  # raises if not found
    assert len(await gpu.get_users()) == usr_num
    assert len(await gpu.get_workers()) == wrk_num
    # test nuke
    await gpu.clean_tables(nuke=True)
    with pytest.raises(ConfigNotFound):
        await gpu.get_config()
    assert len(await gpu.get_users()) == 0
    assert len(await gpu.get_workers()) == 0
    # re init config in case other tests run
    await gpu.init_config('eosio.token', '4,TLOS')
async def test_balance(skynet_cleos):
    '''Deposit / withdraw round trip for a fresh account, including the
    two failure modes (no account registered, over-withdrawal).
    '''
    gpu, cleos = skynet_cleos
    # create fresh account
    account = cleos.new_account()
    # try call withdraw with no user account reg'd
    with pytest.raises(TransactionPushError):
        await gpu.withdraw(account, '1.0000 TLOS')
    # give tokens and deposit to gpu
    quantity = '1000.0000 TLOS'
    cleos.transfer_token('eosio', account, quantity)
    await gpu.deposit(account, quantity)
    # check if balance increased
    account_row = await gpu.get_user(account)
    assert account_row.balance == quantity
    # try call withdraw with more than deposited
    with pytest.raises(TransactionPushError):
        await gpu.withdraw(account, '1000.0001 TLOS')
    # withdraw full correct amount
    await gpu.withdraw(account, quantity)
    # check if balance decreased
    account_row = await gpu.get_user(account)
    assert account_row.balance == '0.0000 TLOS'
async def test_worker_reg(skynet_cleos):
    '''Worker register / unregister lifecycle; double registration and
    double unregistration must both be rejected by the contract.
    '''
    gpu, cleos = skynet_cleos
    # create fresh account
    worker = cleos.new_account()
    url = 'https://nvidia.com'
    await gpu.register_worker(worker, url)
    # find and check vals
    worker_row = await gpu.get_worker(worker)
    assert worker_row.account == worker
    assert worker_row.url == url
    # epoch sentinel means "never"; joined must be set, left must not be
    assert worker_row.joined != '1970-01-01T00:00:00'
    assert worker_row.left == '1970-01-01T00:00:00'
    # attempt to register twice
    with pytest.raises(TransactionPushError):
        await gpu.register_worker(worker, url)
    # unregister
    reason = 'testing'
    await gpu.unregister_worker(worker, reason)
    worker_row = await gpu.get_worker(worker)
    assert worker_row.account == worker
    assert worker_row.url == url
    assert worker_row.left != '1970-01-01T00:00:00'
    # attempt to unreg twice
    with pytest.raises(TransactionPushError):
        await gpu.unregister_worker(worker, reason)
async def test_queue(skynet_cleos):
    '''Enqueue / dequeue flow: deposit is required before enqueue, the
    request is findable by id and by timestamp window, dequeueing an
    unknown id fails, and a real dequeue deletes the row.
    '''
    gpu, cleos = skynet_cleos
    body = BodyV0(
        method='txt2img',
        params=BodyV0Params(
            prompt='cyberpunk hacker travis bickle dystopic alley graffiti',
            model='skygpu/mocker',
            step=4,
            seed=0,
            guidance=10.0
        )
    )
    # create account
    account = cleos.new_account()
    quantity = '1000.0000 TLOS'
    cleos.transfer_token('eosio', account, quantity)
    # attempt to create request without prev deposit
    with pytest.raises(TransactionPushError):
        await gpu.enqueue(account, body)
    # deposit tokens into gpu
    await gpu.deposit(account, quantity)
    # finally enqueue
    req = await gpu.enqueue(account, body)
    # search by id
    req_found = await gpu.get_request(req.id)
    assert req == req_found
    # search by timestamp (last hour)
    reqs = await gpu.get_requests_since(60 * 60)
    assert len(reqs) == 1
    assert reqs[0] == req
    # attempt to dequeue wrong req
    with pytest.raises(TransactionPushError):
        await gpu.dequeue(account, 999999)
    # dequeue correctly
    await gpu.dequeue(account, req.id)
    # check deletion
    with pytest.raises(RequestNotFound):
        await gpu.get_request(req.id)
async def test_full_flow(inject_mockers, skynet_cleos, ipfs_node):
cleos = skynet_cleos
gpu, cleos = skynet_cleos
# create account and deposit tokens into gpu
account = cleos.new_account()
@ -47,6 +265,6 @@ async def test_full_flow(inject_mockers, skynet_cleos, ipfs_node):
)
# open worker and fill request
async with open_test_worker(cleos, ipfs_node) as (_conn, state_mngr):
async with open_test_worker(cleos, ipfs_node) as (_contract, _ipfs_api, state_mngr):
while state_mngr.queue_len > 0:
await trio.sleep(1)

175
uv.lock
View File

@ -1,4 +1,5 @@
version = 1
revision = 1
requires-python = ">=3.10, <3.13"
resolution-markers = [
"python_full_version >= '3.12' and sys_platform == 'darwin'",
@ -442,14 +443,14 @@ wheels = [
[[package]]
name = "discord-py"
version = "2.4.0"
version = "2.5.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohttp" },
]
sdist = { url = "https://files.pythonhosted.org/packages/39/af/80cab4015722d3bee175509b7249a11d5adf77b5ff4c27f268558079d149/discord_py-2.4.0.tar.gz", hash = "sha256:d07cb2a223a185873a1d0ee78b9faa9597e45b3f6186df21a95cec1e9bcdc9a5", size = 1027707 }
sdist = { url = "https://files.pythonhosted.org/packages/59/7e/a778257411e86d834c94ea6d67abacca8f0efac62996882898d55e475748/discord_py-2.5.0.tar.gz", hash = "sha256:f6827909b87ea89bdb2cc49d475cb1fada2e73235a3e4568fc8b113660340c73", size = 1054589 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/23/10/3c44e9331a5ec3bae8b2919d51f611a5b94e179563b1b89eb6423a8f43eb/discord.py-2.4.0-py3-none-any.whl", hash = "sha256:b8af6711c70f7e62160bfbecb55be699b5cb69d007426759ab8ab06b1bd77d1d", size = 1125988 },
{ url = "https://files.pythonhosted.org/packages/8a/d5/2f54c110f6707bf563957349f8ca9cdf883f420699cd61ec8b48018754bb/discord.py-2.5.0-py3-none-any.whl", hash = "sha256:8e1e3b3ff5a112a4ab3a615059a285238eea34edff6de8737db6e1f72ea05195", size = 1154805 },
]
[[package]]
@ -1467,59 +1468,59 @@ wheels = [
[[package]]
name = "propcache"
version = "0.2.1"
version = "0.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/20/c8/2a13f78d82211490855b2fb303b6721348d0787fdd9a12ac46d99d3acde1/propcache-0.2.1.tar.gz", hash = "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64", size = 41735 }
sdist = { url = "https://files.pythonhosted.org/packages/92/76/f941e63d55c0293ff7829dd21e7cf1147e90a526756869a9070f287a68c9/propcache-0.3.0.tar.gz", hash = "sha256:a8fd93de4e1d278046345f49e2238cdb298589325849b2645d4a94c53faeffc5", size = 42722 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/a5/0ea64c9426959ef145a938e38c832fc551843481d356713ececa9a8a64e8/propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6", size = 79296 },
{ url = "https://files.pythonhosted.org/packages/76/5a/916db1aba735f55e5eca4733eea4d1973845cf77dfe67c2381a2ca3ce52d/propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2", size = 45622 },
{ url = "https://files.pythonhosted.org/packages/2d/62/685d3cf268b8401ec12b250b925b21d152b9d193b7bffa5fdc4815c392c2/propcache-0.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea", size = 45133 },
{ url = "https://files.pythonhosted.org/packages/4d/3d/31c9c29ee7192defc05aa4d01624fd85a41cf98e5922aaed206017329944/propcache-0.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212", size = 204809 },
{ url = "https://files.pythonhosted.org/packages/10/a1/e4050776f4797fc86140ac9a480d5dc069fbfa9d499fe5c5d2fa1ae71f07/propcache-0.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3", size = 219109 },
{ url = "https://files.pythonhosted.org/packages/c9/c0/e7ae0df76343d5e107d81e59acc085cea5fd36a48aa53ef09add7503e888/propcache-0.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d", size = 217368 },
{ url = "https://files.pythonhosted.org/packages/fc/e1/e0a2ed6394b5772508868a977d3238f4afb2eebaf9976f0b44a8d347ad63/propcache-0.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634", size = 205124 },
{ url = "https://files.pythonhosted.org/packages/50/c1/e388c232d15ca10f233c778bbdc1034ba53ede14c207a72008de45b2db2e/propcache-0.2.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2", size = 195463 },
{ url = "https://files.pythonhosted.org/packages/0a/fd/71b349b9def426cc73813dbd0f33e266de77305e337c8c12bfb0a2a82bfb/propcache-0.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958", size = 198358 },
{ url = "https://files.pythonhosted.org/packages/02/f2/d7c497cd148ebfc5b0ae32808e6c1af5922215fe38c7a06e4e722fe937c8/propcache-0.2.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c", size = 195560 },
{ url = "https://files.pythonhosted.org/packages/bb/57/f37041bbe5e0dfed80a3f6be2612a3a75b9cfe2652abf2c99bef3455bbad/propcache-0.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583", size = 196895 },
{ url = "https://files.pythonhosted.org/packages/83/36/ae3cc3e4f310bff2f064e3d2ed5558935cc7778d6f827dce74dcfa125304/propcache-0.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf", size = 207124 },
{ url = "https://files.pythonhosted.org/packages/8c/c4/811b9f311f10ce9d31a32ff14ce58500458443627e4df4ae9c264defba7f/propcache-0.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034", size = 210442 },
{ url = "https://files.pythonhosted.org/packages/18/dd/a1670d483a61ecac0d7fc4305d91caaac7a8fc1b200ea3965a01cf03bced/propcache-0.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b", size = 203219 },
{ url = "https://files.pythonhosted.org/packages/f9/2d/30ced5afde41b099b2dc0c6573b66b45d16d73090e85655f1a30c5a24e07/propcache-0.2.1-cp310-cp310-win32.whl", hash = "sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4", size = 40313 },
{ url = "https://files.pythonhosted.org/packages/23/84/bd9b207ac80da237af77aa6e153b08ffa83264b1c7882495984fcbfcf85c/propcache-0.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba", size = 44428 },
{ url = "https://files.pythonhosted.org/packages/bc/0f/2913b6791ebefb2b25b4efd4bb2299c985e09786b9f5b19184a88e5778dd/propcache-0.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16", size = 79297 },
{ url = "https://files.pythonhosted.org/packages/cf/73/af2053aeccd40b05d6e19058419ac77674daecdd32478088b79375b9ab54/propcache-0.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717", size = 45611 },
{ url = "https://files.pythonhosted.org/packages/3c/09/8386115ba7775ea3b9537730e8cf718d83bbf95bffe30757ccf37ec4e5da/propcache-0.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3", size = 45146 },
{ url = "https://files.pythonhosted.org/packages/03/7a/793aa12f0537b2e520bf09f4c6833706b63170a211ad042ca71cbf79d9cb/propcache-0.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9", size = 232136 },
{ url = "https://files.pythonhosted.org/packages/f1/38/b921b3168d72111769f648314100558c2ea1d52eb3d1ba7ea5c4aa6f9848/propcache-0.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787", size = 239706 },
{ url = "https://files.pythonhosted.org/packages/14/29/4636f500c69b5edea7786db3c34eb6166f3384b905665ce312a6e42c720c/propcache-0.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465", size = 238531 },
{ url = "https://files.pythonhosted.org/packages/85/14/01fe53580a8e1734ebb704a3482b7829a0ef4ea68d356141cf0994d9659b/propcache-0.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af", size = 231063 },
{ url = "https://files.pythonhosted.org/packages/33/5c/1d961299f3c3b8438301ccfbff0143b69afcc30c05fa28673cface692305/propcache-0.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7", size = 220134 },
{ url = "https://files.pythonhosted.org/packages/00/d0/ed735e76db279ba67a7d3b45ba4c654e7b02bc2f8050671ec365d8665e21/propcache-0.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f", size = 220009 },
{ url = "https://files.pythonhosted.org/packages/75/90/ee8fab7304ad6533872fee982cfff5a53b63d095d78140827d93de22e2d4/propcache-0.2.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54", size = 212199 },
{ url = "https://files.pythonhosted.org/packages/eb/ec/977ffaf1664f82e90737275873461695d4c9407d52abc2f3c3e24716da13/propcache-0.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505", size = 214827 },
{ url = "https://files.pythonhosted.org/packages/57/48/031fb87ab6081764054821a71b71942161619549396224cbb242922525e8/propcache-0.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82", size = 228009 },
{ url = "https://files.pythonhosted.org/packages/1a/06/ef1390f2524850838f2390421b23a8b298f6ce3396a7cc6d39dedd4047b0/propcache-0.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca", size = 231638 },
{ url = "https://files.pythonhosted.org/packages/38/2a/101e6386d5a93358395da1d41642b79c1ee0f3b12e31727932b069282b1d/propcache-0.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e", size = 222788 },
{ url = "https://files.pythonhosted.org/packages/db/81/786f687951d0979007e05ad9346cd357e50e3d0b0f1a1d6074df334b1bbb/propcache-0.2.1-cp311-cp311-win32.whl", hash = "sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034", size = 40170 },
{ url = "https://files.pythonhosted.org/packages/cf/59/7cc7037b295d5772eceb426358bb1b86e6cab4616d971bd74275395d100d/propcache-0.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3", size = 44404 },
{ url = "https://files.pythonhosted.org/packages/4c/28/1d205fe49be8b1b4df4c50024e62480a442b1a7b818e734308bb0d17e7fb/propcache-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a", size = 79588 },
{ url = "https://files.pythonhosted.org/packages/21/ee/fc4d893f8d81cd4971affef2a6cb542b36617cd1d8ce56b406112cb80bf7/propcache-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0", size = 45825 },
{ url = "https://files.pythonhosted.org/packages/4a/de/bbe712f94d088da1d237c35d735f675e494a816fd6f54e9db2f61ef4d03f/propcache-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d", size = 45357 },
{ url = "https://files.pythonhosted.org/packages/7f/14/7ae06a6cf2a2f1cb382586d5a99efe66b0b3d0c6f9ac2f759e6f7af9d7cf/propcache-0.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4", size = 241869 },
{ url = "https://files.pythonhosted.org/packages/cc/59/227a78be960b54a41124e639e2c39e8807ac0c751c735a900e21315f8c2b/propcache-0.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d", size = 247884 },
{ url = "https://files.pythonhosted.org/packages/84/58/f62b4ffaedf88dc1b17f04d57d8536601e4e030feb26617228ef930c3279/propcache-0.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5", size = 248486 },
{ url = "https://files.pythonhosted.org/packages/1c/07/ebe102777a830bca91bbb93e3479cd34c2ca5d0361b83be9dbd93104865e/propcache-0.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24", size = 243649 },
{ url = "https://files.pythonhosted.org/packages/ed/bc/4f7aba7f08f520376c4bb6a20b9a981a581b7f2e385fa0ec9f789bb2d362/propcache-0.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff", size = 229103 },
{ url = "https://files.pythonhosted.org/packages/fe/d5/04ac9cd4e51a57a96f78795e03c5a0ddb8f23ec098b86f92de028d7f2a6b/propcache-0.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f", size = 226607 },
{ url = "https://files.pythonhosted.org/packages/e3/f0/24060d959ea41d7a7cc7fdbf68b31852331aabda914a0c63bdb0e22e96d6/propcache-0.2.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec", size = 221153 },
{ url = "https://files.pythonhosted.org/packages/77/a7/3ac76045a077b3e4de4859a0753010765e45749bdf53bd02bc4d372da1a0/propcache-0.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348", size = 222151 },
{ url = "https://files.pythonhosted.org/packages/e7/af/5e29da6f80cebab3f5a4dcd2a3240e7f56f2c4abf51cbfcc99be34e17f0b/propcache-0.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6", size = 233812 },
{ url = "https://files.pythonhosted.org/packages/8c/89/ebe3ad52642cc5509eaa453e9f4b94b374d81bae3265c59d5c2d98efa1b4/propcache-0.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6", size = 238829 },
{ url = "https://files.pythonhosted.org/packages/e9/2f/6b32f273fa02e978b7577159eae7471b3cfb88b48563b1c2578b2d7ca0bb/propcache-0.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518", size = 230704 },
{ url = "https://files.pythonhosted.org/packages/5c/2e/f40ae6ff5624a5f77edd7b8359b208b5455ea113f68309e2b00a2e1426b6/propcache-0.2.1-cp312-cp312-win32.whl", hash = "sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246", size = 40050 },
{ url = "https://files.pythonhosted.org/packages/3b/77/a92c3ef994e47180862b9d7d11e37624fb1c00a16d61faf55115d970628b/propcache-0.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1", size = 44117 },
{ url = "https://files.pythonhosted.org/packages/41/b6/c5319caea262f4821995dca2107483b94a3345d4607ad797c76cb9c36bcc/propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54", size = 11818 },
{ url = "https://files.pythonhosted.org/packages/8d/f0/dc9ec44d2e63c13f816a16398c039329736712440ff82b682dd9a78d2258/propcache-0.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:efa44f64c37cc30c9f05932c740a8b40ce359f51882c70883cc95feac842da4d", size = 79574 },
{ url = "https://files.pythonhosted.org/packages/99/3a/33a207dfcb3ee1131ea23a2aeb726c3c4994f89546d7eadf8c50627c8b63/propcache-0.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2383a17385d9800b6eb5855c2f05ee550f803878f344f58b6e194de08b96352c", size = 45898 },
{ url = "https://files.pythonhosted.org/packages/af/68/0bde765c9f5dc02b4466d2838600af38c81b184c26c6d3cd44643ac668e3/propcache-0.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3e7420211f5a65a54675fd860ea04173cde60a7cc20ccfbafcccd155225f8bc", size = 45418 },
{ url = "https://files.pythonhosted.org/packages/06/a6/c682669bae41199358e16cc7b1c818f91c5f9e925cc863dabd98ce32716a/propcache-0.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3302c5287e504d23bb0e64d2a921d1eb4a03fb93a0a0aa3b53de059f5a5d737d", size = 205116 },
{ url = "https://files.pythonhosted.org/packages/fb/ae/82cfb50267d9a1baa0340728eb9e32245a68538fef929d7bb786d01c11a8/propcache-0.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e2e068a83552ddf7a39a99488bcba05ac13454fb205c847674da0352602082f", size = 219405 },
{ url = "https://files.pythonhosted.org/packages/ab/16/7b6b2bf8c207cfd0e5ca3d41aea397392de9899867ec024f88c94f9ae2ab/propcache-0.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d913d36bdaf368637b4f88d554fb9cb9d53d6920b9c5563846555938d5450bf", size = 217656 },
{ url = "https://files.pythonhosted.org/packages/f4/eb/41447de61eb5454891658d0fb9b1d7d35d49a4a5dd2e0c86f2c332e8b7e1/propcache-0.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ee1983728964d6070ab443399c476de93d5d741f71e8f6e7880a065f878e0b9", size = 205414 },
{ url = "https://files.pythonhosted.org/packages/03/b6/9719878f8b5b20d37ee663a40f8dcbf888559e4d3be2ba2fe5c790fc28d2/propcache-0.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:36ca5e9a21822cc1746023e88f5c0af6fce3af3b85d4520efb1ce4221bed75cc", size = 195746 },
{ url = "https://files.pythonhosted.org/packages/bb/ec/b79c3210ba459800d1a8f1afeb81d7b503893555a7b79c24082ff26d3314/propcache-0.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9ecde3671e62eeb99e977f5221abcf40c208f69b5eb986b061ccec317c82ebd0", size = 198651 },
{ url = "https://files.pythonhosted.org/packages/48/f6/2b0140bc47013e43575973068e72ad51ee9f22f2dad42e6d6e362d715125/propcache-0.3.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d383bf5e045d7f9d239b38e6acadd7b7fdf6c0087259a84ae3475d18e9a2ae8b", size = 195858 },
{ url = "https://files.pythonhosted.org/packages/97/3d/2fa19303d87aa21f9a42dcd870d6088a2a776ff5518e394d50412c3679a6/propcache-0.3.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8cb625bcb5add899cb8ba7bf716ec1d3e8f7cdea9b0713fa99eadf73b6d4986f", size = 197181 },
{ url = "https://files.pythonhosted.org/packages/09/f3/a2170ffc9fa774c1dfd52294113c0fa6cdc5b71dbfd7129bb9378fdd8b42/propcache-0.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5fa159dcee5dba00c1def3231c249cf261185189205073bde13797e57dd7540a", size = 207411 },
{ url = "https://files.pythonhosted.org/packages/d6/1e/cb8a6c82178efffa0b00dc463f36cd086f747345585140aeb95d5cb93666/propcache-0.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7080b0159ce05f179cfac592cda1a82898ca9cd097dacf8ea20ae33474fbb25", size = 210724 },
{ url = "https://files.pythonhosted.org/packages/2b/72/6e273543337a3e22cf462eb836f065a9830b4d41baeb1f58db2695c934f3/propcache-0.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ed7161bccab7696a473fe7ddb619c1d75963732b37da4618ba12e60899fefe4f", size = 203511 },
{ url = "https://files.pythonhosted.org/packages/f3/ea/7412c79bcec06597c967d49789f5a1f7fd76a8654908feeaefafb7447c9a/propcache-0.3.0-cp310-cp310-win32.whl", hash = "sha256:bf0d9a171908f32d54f651648c7290397b8792f4303821c42a74e7805bfb813c", size = 40600 },
{ url = "https://files.pythonhosted.org/packages/a3/42/488c90190491f3e61bd2c2fb0b3d91c1c78778270dde2f0b6633fc9ff723/propcache-0.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:42924dc0c9d73e49908e35bbdec87adedd651ea24c53c29cac103ede0ea1d340", size = 44714 },
{ url = "https://files.pythonhosted.org/packages/45/c9/cf09ff7e6d09f14149094f7cd50d2dec032b24e61af21fc4540da2b17bfb/propcache-0.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9ddd49258610499aab83b4f5b61b32e11fce873586282a0e972e5ab3bcadee51", size = 79568 },
{ url = "https://files.pythonhosted.org/packages/c8/32/2424d89da88cd81b7d148e0d2b3131461b570a02aa9d84a2e567509adb0d/propcache-0.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2578541776769b500bada3f8a4eeaf944530516b6e90c089aa368266ed70c49e", size = 45895 },
{ url = "https://files.pythonhosted.org/packages/f6/91/ee5b6aa7aa31754fefcf0c5180e09223cac380ef195c4ddc8c266eb641ea/propcache-0.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8074c5dd61c8a3e915fa8fc04754fa55cfa5978200d2daa1e2d4294c1f136aa", size = 45427 },
{ url = "https://files.pythonhosted.org/packages/bf/73/38f0128462b8b616181d8c53bd5d04eac41c50c449b07615c65d56ba0a9b/propcache-0.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b58229a844931bca61b3a20efd2be2a2acb4ad1622fc026504309a6883686fbf", size = 232427 },
{ url = "https://files.pythonhosted.org/packages/59/82/f3d4e84f4539dcfc9c3d338282b9e915f5b63c921986ecfdf7af2d12f87c/propcache-0.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e45377d5d6fefe1677da2a2c07b024a6dac782088e37c0b1efea4cfe2b1be19b", size = 239985 },
{ url = "https://files.pythonhosted.org/packages/42/e8/029f58cccbae83c9969a7ee7a06558d5b83a93dfc54e0f4f70234bbaea1b/propcache-0.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ec5060592d83454e8063e487696ac3783cc48c9a329498bafae0d972bc7816c9", size = 238827 },
{ url = "https://files.pythonhosted.org/packages/8b/a2/c373561777c0cb9b9e7b9b9a10b9b3a7b6bde75a2535b962231cecc8fdb8/propcache-0.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15010f29fbed80e711db272909a074dc79858c6d28e2915704cfc487a8ac89c6", size = 231348 },
{ url = "https://files.pythonhosted.org/packages/d7/d2/4673f715beedf6038b485bcd976813149231d9df5bb6196cb69a09c185c9/propcache-0.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a254537b9b696ede293bfdbc0a65200e8e4507bc9f37831e2a0318a9b333c85c", size = 220426 },
{ url = "https://files.pythonhosted.org/packages/e0/f6/1da65f900927bafd4675a16e890618ec7643f2f922bf0e4d84bb38645618/propcache-0.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2b975528998de037dfbc10144b8aed9b8dd5a99ec547f14d1cb7c5665a43f075", size = 220294 },
{ url = "https://files.pythonhosted.org/packages/ff/86/620451bdc02e91b1712cd71890c17077ee97e2a28493836a87e47b8e70ff/propcache-0.3.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:19d36bb351ad5554ff20f2ae75f88ce205b0748c38b146c75628577020351e3c", size = 212492 },
{ url = "https://files.pythonhosted.org/packages/6e/1b/e8f86921ed4016da80faf3b8f515f7829decabdbff106736bfff353bceba/propcache-0.3.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6032231d4a5abd67c7f71168fd64a47b6b451fbcb91c8397c2f7610e67683810", size = 215113 },
{ url = "https://files.pythonhosted.org/packages/1a/95/a61d86cc49aa0945f6c06f3a4614fc543e311a50558c92861f5e9691a37c/propcache-0.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6985a593417cdbc94c7f9c3403747335e450c1599da1647a5af76539672464d3", size = 228330 },
{ url = "https://files.pythonhosted.org/packages/8f/7d/10dbae48ff2bb189e92c2b3487a48f3229146a25941ad0d485934d1104d4/propcache-0.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6a1948df1bb1d56b5e7b0553c0fa04fd0e320997ae99689488201f19fa90d2e7", size = 231942 },
{ url = "https://files.pythonhosted.org/packages/39/ce/82d16aec96c5513ae7db13ab901a65a1e54c915292fb5b2390e33275b61d/propcache-0.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8319293e85feadbbfe2150a5659dbc2ebc4afdeaf7d98936fb9a2f2ba0d4c35c", size = 223077 },
{ url = "https://files.pythonhosted.org/packages/c8/e0/cb077e8e7a583c733df7f53327fcbdb92e42be59b976ce60bf1d904a0efe/propcache-0.3.0-cp311-cp311-win32.whl", hash = "sha256:63f26258a163c34542c24808f03d734b338da66ba91f410a703e505c8485791d", size = 40455 },
{ url = "https://files.pythonhosted.org/packages/d8/35/57abeb6146fe3c19081eeaf3d9d4cfea256f87f1e5101acf80d3332c1820/propcache-0.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:cacea77ef7a2195f04f9279297684955e3d1ae4241092ff0cfcef532bb7a1c32", size = 44705 },
{ url = "https://files.pythonhosted.org/packages/8d/2c/921f15dc365796ec23975b322b0078eae72995c7b4d49eba554c6a308d70/propcache-0.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e53d19c2bf7d0d1e6998a7e693c7e87300dd971808e6618964621ccd0e01fe4e", size = 79867 },
{ url = "https://files.pythonhosted.org/packages/11/a5/4a6cc1a559d1f2fb57ea22edc4245158cdffae92f7f92afcee2913f84417/propcache-0.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a61a68d630e812b67b5bf097ab84e2cd79b48c792857dc10ba8a223f5b06a2af", size = 46109 },
{ url = "https://files.pythonhosted.org/packages/e1/6d/28bfd3af3a567ad7d667348e7f46a520bda958229c4d545ba138a044232f/propcache-0.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fb91d20fa2d3b13deea98a690534697742029f4fb83673a3501ae6e3746508b5", size = 45635 },
{ url = "https://files.pythonhosted.org/packages/73/20/d75b42eaffe5075eac2f4e168f6393d21c664c91225288811d85451b2578/propcache-0.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67054e47c01b7b349b94ed0840ccae075449503cf1fdd0a1fdd98ab5ddc2667b", size = 242159 },
{ url = "https://files.pythonhosted.org/packages/a5/fb/4b537dd92f9fd4be68042ec51c9d23885ca5fafe51ec24c58d9401034e5f/propcache-0.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:997e7b8f173a391987df40f3b52c423e5850be6f6df0dcfb5376365440b56667", size = 248163 },
{ url = "https://files.pythonhosted.org/packages/e7/af/8a9db04ac596d531ca0ef7dde518feaadfcdabef7b17d6a5ec59ee3effc2/propcache-0.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d663fd71491dde7dfdfc899d13a067a94198e90695b4321084c6e450743b8c7", size = 248794 },
{ url = "https://files.pythonhosted.org/packages/9d/c4/ecfc988879c0fd9db03228725b662d76cf484b6b46f7e92fee94e4b52490/propcache-0.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8884ba1a0fe7210b775106b25850f5e5a9dc3c840d1ae9924ee6ea2eb3acbfe7", size = 243912 },
{ url = "https://files.pythonhosted.org/packages/04/a2/298dd27184faa8b7d91cc43488b578db218b3cc85b54d912ed27b8c5597a/propcache-0.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa806bbc13eac1ab6291ed21ecd2dd426063ca5417dd507e6be58de20e58dfcf", size = 229402 },
{ url = "https://files.pythonhosted.org/packages/be/0d/efe7fec316ca92dbf4bc4a9ba49ca889c43ca6d48ab1d6fa99fc94e5bb98/propcache-0.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6f4d7a7c0aff92e8354cceca6fe223973ddf08401047920df0fcb24be2bd5138", size = 226896 },
{ url = "https://files.pythonhosted.org/packages/60/63/72404380ae1d9c96d96e165aa02c66c2aae6072d067fc4713da5cde96762/propcache-0.3.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9be90eebc9842a93ef8335291f57b3b7488ac24f70df96a6034a13cb58e6ff86", size = 221447 },
{ url = "https://files.pythonhosted.org/packages/9d/18/b8392cab6e0964b67a30a8f4dadeaff64dc7022b5a34bb1d004ea99646f4/propcache-0.3.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bf15fc0b45914d9d1b706f7c9c4f66f2b7b053e9517e40123e137e8ca8958b3d", size = 222440 },
{ url = "https://files.pythonhosted.org/packages/6f/be/105d9ceda0f97eff8c06bac1673448b2db2a497444de3646464d3f5dc881/propcache-0.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5a16167118677d94bb48bfcd91e420088854eb0737b76ec374b91498fb77a70e", size = 234104 },
{ url = "https://files.pythonhosted.org/packages/cb/c9/f09a4ec394cfcce4053d8b2a04d622b5f22d21ba9bb70edd0cad061fa77b/propcache-0.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:41de3da5458edd5678b0f6ff66691507f9885f5fe6a0fb99a5d10d10c0fd2d64", size = 239086 },
{ url = "https://files.pythonhosted.org/packages/ea/aa/96f7f9ed6def82db67c972bdb7bd9f28b95d7d98f7e2abaf144c284bf609/propcache-0.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:728af36011bb5d344c4fe4af79cfe186729efb649d2f8b395d1572fb088a996c", size = 230991 },
{ url = "https://files.pythonhosted.org/packages/5a/11/bee5439de1307d06fad176f7143fec906e499c33d7aff863ea8428b8e98b/propcache-0.3.0-cp312-cp312-win32.whl", hash = "sha256:6b5b7fd6ee7b54e01759f2044f936dcf7dea6e7585f35490f7ca0420fe723c0d", size = 40337 },
{ url = "https://files.pythonhosted.org/packages/e4/17/e5789a54a0455a61cb9efc4ca6071829d992220c2998a27c59aeba749f6f/propcache-0.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:2d15bc27163cd4df433e75f546b9ac31c1ba7b0b128bfb1b90df19082466ff57", size = 44404 },
{ url = "https://files.pythonhosted.org/packages/b5/35/6c4c6fc8774a9e3629cd750dc24a7a4fb090a25ccd5c3246d127b70f9e22/propcache-0.3.0-py3-none-any.whl", hash = "sha256:67dda3c7325691c2081510e92c561f465ba61b975f481735aefdfc845d2cd043", size = 12101 },
]
[[package]]
@ -1538,17 +1539,17 @@ wheels = [
[[package]]
name = "psutil"
version = "6.1.1"
version = "7.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/1f/5a/07871137bb752428aa4b659f910b399ba6f291156bdea939be3e96cae7cb/psutil-6.1.1.tar.gz", hash = "sha256:cf8496728c18f2d0b45198f06895be52f36611711746b7f30c464b422b50e2f5", size = 508502 }
sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/61/99/ca79d302be46f7bdd8321089762dd4476ee725fce16fc2b2e1dbba8cac17/psutil-6.1.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed7fe2231a444fc219b9c42d0376e0a9a1a72f16c5cfa0f68d19f1a0663e8", size = 247511 },
{ url = "https://files.pythonhosted.org/packages/0b/6b/73dbde0dd38f3782905d4587049b9be64d76671042fdcaf60e2430c6796d/psutil-6.1.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0bdd4eab935276290ad3cb718e9809412895ca6b5b334f5a9111ee6d9aff9377", size = 248985 },
{ url = "https://files.pythonhosted.org/packages/17/38/c319d31a1d3f88c5b79c68b3116c129e5133f1822157dd6da34043e32ed6/psutil-6.1.1-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6e06c20c05fe95a3d7302d74e7097756d4ba1247975ad6905441ae1b5b66003", size = 284488 },
{ url = "https://files.pythonhosted.org/packages/9c/39/0f88a830a1c8a3aba27fededc642da37613c57cbff143412e3536f89784f/psutil-6.1.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97f7cb9921fbec4904f522d972f0c0e1f4fabbdd4e0287813b21215074a0f160", size = 287477 },
{ url = "https://files.pythonhosted.org/packages/47/da/99f4345d4ddf2845cb5b5bd0d93d554e84542d116934fde07a0c50bd4e9f/psutil-6.1.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33431e84fee02bc84ea36d9e2c4a6d395d479c9dd9bba2376c1f6ee8f3a4e0b3", size = 289017 },
{ url = "https://files.pythonhosted.org/packages/38/53/bd755c2896f4461fd4f36fa6a6dcb66a88a9e4b9fd4e5b66a77cf9d4a584/psutil-6.1.1-cp37-abi3-win32.whl", hash = "sha256:eaa912e0b11848c4d9279a93d7e2783df352b082f40111e078388701fd479e53", size = 250602 },
{ url = "https://files.pythonhosted.org/packages/7b/d7/7831438e6c3ebbfa6e01a927127a6cb42ad3ab844247f3c5b96bea25d73d/psutil-6.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:f35cfccb065fff93529d2afb4a2e89e363fe63ca1e4a5da22b603a85833c2649", size = 254444 },
{ url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051 },
{ url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535 },
{ url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004 },
{ url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986 },
{ url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544 },
{ url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053 },
{ url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885 },
]
[[package]]
@ -1598,7 +1599,7 @@ wheels = [
[[package]]
name = "py-leap"
version = "0.1a35"
source = { git = "https://github.com/guilledk/py-leap.git?branch=struct_unwrap#18b3c73e724922a060db5f8ea2b9d9727b6152cc" }
source = { git = "https://github.com/guilledk/py-leap.git?branch=struct_unwrap#20f2e1f74e98e3d75984e8e1eee13c3100c17652" }
dependencies = [
{ name = "base58" },
{ name = "cryptos" },
@ -1633,8 +1634,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/61/74/49f5d20c514ccc631b940cc9dfec45dcce418dc84a98463a2e2ebec33904/pycryptodomex-3.21.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:52e23a0a6e61691134aa8c8beba89de420602541afaae70f66e16060fdcd677e", size = 2257982 },
{ url = "https://files.pythonhosted.org/packages/92/4b/d33ef74e2cc0025a259936661bb53432c5bbbadc561c5f2e023bcd73ce4c/pycryptodomex-3.21.0-cp36-abi3-win32.whl", hash = "sha256:a3d77919e6ff56d89aada1bd009b727b874d464cb0e2e3f00a49f7d2e709d76e", size = 1779052 },
{ url = "https://files.pythonhosted.org/packages/5b/be/7c991840af1184009fc86267160948350d1bf875f153c97bb471ad944e40/pycryptodomex-3.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:b0e9765f93fe4890f39875e6c90c96cb341767833cfa767f41b490b506fa9ec0", size = 1816307 },
{ url = "https://files.pythonhosted.org/packages/af/ac/24125ad36778914a36f08d61ba5338cb9159382c638d9761ee19c8de822c/pycryptodomex-3.21.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:feaecdce4e5c0045e7a287de0c4351284391fe170729aa9182f6bd967631b3a8", size = 1694999 },
{ url = "https://files.pythonhosted.org/packages/93/73/be7a54a5903508070e5508925ba94493a1f326cfeecfff750e3eb250ea28/pycryptodomex-3.21.0-pp27-pypy_73-win32.whl", hash = "sha256:365aa5a66d52fd1f9e0530ea97f392c48c409c2f01ff8b9a39c73ed6f527d36c", size = 1769437 },
{ url = "https://files.pythonhosted.org/packages/e5/9f/39a6187f3986841fa6a9f35c6fdca5030ef73ff708b45a993813a51d7d10/pycryptodomex-3.21.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3efddfc50ac0ca143364042324046800c126a1d63816d532f2e19e6f2d8c0c31", size = 1619607 },
{ url = "https://files.pythonhosted.org/packages/f8/70/60bb08e9e9841b18d4669fb69d84b64ce900aacd7eb0ebebd4c7b9bdecd3/pycryptodomex-3.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0df2608682db8279a9ebbaf05a72f62a321433522ed0e499bc486a6889b96bf3", size = 1653571 },
{ url = "https://files.pythonhosted.org/packages/c9/6f/191b73509291c5ff0dddec9cc54797b1d73303c12b2e4017b24678e57099/pycryptodomex-3.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5823d03e904ea3e53aebd6799d6b8ec63b7675b5d2f4a4bd5e3adcb512d03b37", size = 1691548 },
@ -1950,7 +1949,7 @@ wheels = [
[[package]]
name = "scikit-image"
version = "0.25.1"
version = "0.25.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "imageio" },
@ -1962,23 +1961,23 @@ dependencies = [
{ name = "scipy" },
{ name = "tifffile" },
]
sdist = { url = "https://files.pythonhosted.org/packages/83/e5/496a74ccfc1206666b9c7164a16657febdfeb6df0e458cb61286b20102c9/scikit_image-0.25.1.tar.gz", hash = "sha256:d4ab30540d114d37c35fe5c837f89b94aaba2a7643afae8354aa353319e9bbbb", size = 22697578 }
sdist = { url = "https://files.pythonhosted.org/packages/c7/a8/3c0f256012b93dd2cb6fda9245e9f4bff7dc0486880b248005f15ea2255e/scikit_image-0.25.2.tar.gz", hash = "sha256:e5a37e6cd4d0c018a7a55b9d601357e3382826d3888c10d0213fc63bff977dde", size = 22693594 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4c/16/f662cd3bdbe4ca8a20e2ffd47fdb758f164ac01ea48c4e69d2a09d8fae97/scikit_image-0.25.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:40763a3a089617e6f00f92d46b3475368b9783588a165c2aa854da95b66bb4ff", size = 13985311 },
{ url = "https://files.pythonhosted.org/packages/76/ca/2912515df1e08a60d378d3572edf61248012747eeb593869289ecc47174d/scikit_image-0.25.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:7c6b69f33e5512ee7fc53361b064430f146583f08dc75317667e81d5f8fcd0c6", size = 13188177 },
{ url = "https://files.pythonhosted.org/packages/d0/90/42d55f46fd3d9c7d4495025367bcb10033904f65d512143fa39179fa2de2/scikit_image-0.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9187347d115776ff0ddba3e5d2a04638d291b1a62e3c315d17b71eea351cde8", size = 14153693 },
{ url = "https://files.pythonhosted.org/packages/04/53/2822fe04ae5fc69ea1eba65b8e30a691b7257f93c6ca5621d3d94747d83e/scikit_image-0.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdfca713979ad1873a4b55d94bb1eb4bc713f0c10165b261bf6f7e606f44a00c", size = 14768517 },
{ url = "https://files.pythonhosted.org/packages/86/9c/cf681f591bc17c0eed560d674223ef11c1d63561fd54b8c33ab0822e17fa/scikit_image-0.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:167fb146de80bb2a1493d1a760a9ac81644a8a5de254c3dd12a95d1b662d819c", size = 12809084 },
{ url = "https://files.pythonhosted.org/packages/1c/8a/698138616b782d368d24061339226089f29c42878a9b18046c6a2d9d6422/scikit_image-0.25.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c1bde2d5f1dfb23b3c72ef9fcdb2dd5f42fa353e8bd606aea63590eba5e79565", size = 13999468 },
{ url = "https://files.pythonhosted.org/packages/64/dd/ff4d4123547a59bc156a192c8cd52ea9cfcf178b70d1f48afec9d26ab6f4/scikit_image-0.25.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5112d95cccaa45c434e57efc20c1f721ab439e516e2ed49709ddc2afb7c15c70", size = 13175810 },
{ url = "https://files.pythonhosted.org/packages/1e/28/4d76f333cd0c86ccf34ab74517877117914413d307f936eb8df74ca365aa/scikit_image-0.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f5e313b028f5d7a9f3888ad825ddf4fb78913d7762891abb267b99244b4dd31", size = 14145156 },
{ url = "https://files.pythonhosted.org/packages/27/05/265b62ace7626de13edb7e97f0429a4faae2a95bbc2adb15a28fd5680aba/scikit_image-0.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39ad76aeff754048dabaff83db752aa0655dee425f006678d14485471bdb459d", size = 14784715 },
{ url = "https://files.pythonhosted.org/packages/35/80/faf325a7aef1d07067dab5ff7a890da229b42a641d2e85c98f3675cd36a2/scikit_image-0.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:8dc8b06176c1a2316fa8bc539fd7e96155721628ae5cf51bc1a2c62cb9786581", size = 12788033 },
{ url = "https://files.pythonhosted.org/packages/c5/a8/7d56f4401c05a186a5e82aab53977029a3f88cc0f1bd6c1fb4f4dd524262/scikit_image-0.25.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ebf83699d60134909647395a0bf07db3859646de7192b088e656deda6bc15e95", size = 13982151 },
{ url = "https://files.pythonhosted.org/packages/80/0e/d78876faaf552cf575205160aa82849fc493977a5b0cdf093f6bbb1586fe/scikit_image-0.25.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:408086520eed036340e634ab7e4f648e00238f711bac61ab933efeb11464a238", size = 13231342 },
{ url = "https://files.pythonhosted.org/packages/e0/ae/78a8dba652cdaed8a5f5dd56cf8f11ed64e44151a4813e3312916a7dff46/scikit_image-0.25.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bd709faa87795869ccd21f32490c37989ca5846571495822f4b9430fb42c34c", size = 14173769 },
{ url = "https://files.pythonhosted.org/packages/d7/77/6d1da74cb0b7ba07750d6ef7e48f87807b53df1cf4a090775115dd9cc5ea/scikit_image-0.25.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b15c0265c072a46ff4720784d756d8f8e5d63567639aa8451f6673994d6846", size = 15002945 },
{ url = "https://files.pythonhosted.org/packages/df/ad/cddec5c0bcde8936c15f07593419f6d94ed33b058737948a0d59fb1142a0/scikit_image-0.25.1-cp312-cp312-win_amd64.whl", hash = "sha256:a689a0d091e0bd97d7767309abdeb27c43be210d075abb34e71657add920c22b", size = 12895262 },
{ url = "https://files.pythonhosted.org/packages/11/cb/016c63f16065c2d333c8ed0337e18a5cdf9bc32d402e4f26b0db362eb0e2/scikit_image-0.25.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d3278f586793176599df6a4cf48cb6beadae35c31e58dc01a98023af3dc31c78", size = 13988922 },
{ url = "https://files.pythonhosted.org/packages/30/ca/ff4731289cbed63c94a0c9a5b672976603118de78ed21910d9060c82e859/scikit_image-0.25.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5c311069899ce757d7dbf1d03e32acb38bb06153236ae77fcd820fd62044c063", size = 13192698 },
{ url = "https://files.pythonhosted.org/packages/39/6d/a2aadb1be6d8e149199bb9b540ccde9e9622826e1ab42fe01de4c35ab918/scikit_image-0.25.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be455aa7039a6afa54e84f9e38293733a2622b8c2fb3362b822d459cc5605e99", size = 14153634 },
{ url = "https://files.pythonhosted.org/packages/96/08/916e7d9ee4721031b2f625db54b11d8379bd51707afaa3e5a29aecf10bc4/scikit_image-0.25.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c464b90e978d137330be433df4e76d92ad3c5f46a22f159520ce0fdbea8a09", size = 14767545 },
{ url = "https://files.pythonhosted.org/packages/5f/ee/c53a009e3997dda9d285402f19226fbd17b5b3cb215da391c4ed084a1424/scikit_image-0.25.2-cp310-cp310-win_amd64.whl", hash = "sha256:60516257c5a2d2f74387c502aa2f15a0ef3498fbeaa749f730ab18f0a40fd054", size = 12812908 },
{ url = "https://files.pythonhosted.org/packages/c4/97/3051c68b782ee3f1fb7f8f5bb7d535cf8cb92e8aae18fa9c1cdf7e15150d/scikit_image-0.25.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f4bac9196fb80d37567316581c6060763b0f4893d3aca34a9ede3825bc035b17", size = 14003057 },
{ url = "https://files.pythonhosted.org/packages/19/23/257fc696c562639826065514d551b7b9b969520bd902c3a8e2fcff5b9e17/scikit_image-0.25.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d989d64ff92e0c6c0f2018c7495a5b20e2451839299a018e0e5108b2680f71e0", size = 13180335 },
{ url = "https://files.pythonhosted.org/packages/ef/14/0c4a02cb27ca8b1e836886b9ec7c9149de03053650e9e2ed0625f248dd92/scikit_image-0.25.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2cfc96b27afe9a05bc92f8c6235321d3a66499995675b27415e0d0c76625173", size = 14144783 },
{ url = "https://files.pythonhosted.org/packages/dd/9b/9fb556463a34d9842491d72a421942c8baff4281025859c84fcdb5e7e602/scikit_image-0.25.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24cc986e1f4187a12aa319f777b36008764e856e5013666a4a83f8df083c2641", size = 14785376 },
{ url = "https://files.pythonhosted.org/packages/de/ec/b57c500ee85885df5f2188f8bb70398481393a69de44a00d6f1d055f103c/scikit_image-0.25.2-cp311-cp311-win_amd64.whl", hash = "sha256:b4f6b61fc2db6340696afe3db6b26e0356911529f5f6aee8c322aa5157490c9b", size = 12791698 },
{ url = "https://files.pythonhosted.org/packages/35/8c/5df82881284459f6eec796a5ac2a0a304bb3384eec2e73f35cfdfcfbf20c/scikit_image-0.25.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8db8dd03663112783221bf01ccfc9512d1cc50ac9b5b0fe8f4023967564719fb", size = 13986000 },
{ url = "https://files.pythonhosted.org/packages/ce/e6/93bebe1abcdce9513ffec01d8af02528b4c41fb3c1e46336d70b9ed4ef0d/scikit_image-0.25.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:483bd8cc10c3d8a7a37fae36dfa5b21e239bd4ee121d91cad1f81bba10cfb0ed", size = 13235893 },
{ url = "https://files.pythonhosted.org/packages/53/4b/eda616e33f67129e5979a9eb33c710013caa3aa8a921991e6cc0b22cea33/scikit_image-0.25.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d1e80107bcf2bf1291acfc0bf0425dceb8890abe9f38d8e94e23497cbf7ee0d", size = 14178389 },
{ url = "https://files.pythonhosted.org/packages/6b/b5/b75527c0f9532dd8a93e8e7cd8e62e547b9f207d4c11e24f0006e8646b36/scikit_image-0.25.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a17e17eb8562660cc0d31bb55643a4da996a81944b82c54805c91b3fe66f4824", size = 15003435 },
{ url = "https://files.pythonhosted.org/packages/34/e3/49beb08ebccda3c21e871b607c1cb2f258c3fa0d2f609fed0a5ba741b92d/scikit_image-0.25.2-cp312-cp312-win_amd64.whl", hash = "sha256:bdd2b8c1de0849964dbc54037f36b4e9420157e67e45a8709a80d727f52c7da2", size = 12899474 },
]
[[package]]
@ -2195,7 +2194,7 @@ wheels = [
[[package]]
name = "tb-nightly"
version = "2.19.0a20250211"
version = "2.20.0a20250221"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "absl-py" },
@ -2210,7 +2209,7 @@ dependencies = [
{ name = "werkzeug" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/5a/90/f0c8effd911d95e4eba1c10836fd446a9cac1f68e62b858b2177aaae963d/tb_nightly-2.19.0a20250211-py3-none-any.whl", hash = "sha256:d2ba2da592308980a4380da77777c0634e618ef2541155717a903986dc0d7adf", size = 5503304 },
{ url = "https://files.pythonhosted.org/packages/a7/14/ef9359b4282f09ef288459c9af073d1671b6204e2310e7038caa9e537b2e/tb_nightly-2.20.0a20250221-py3-none-any.whl", hash = "sha256:86ccbd57ac4e4494b341b394d6e0ee346e3f1da0ea32db6a1406cdccb533dd98", size = 5503543 },
]
[[package]]
@ -2225,14 +2224,14 @@ wheels = [
[[package]]
name = "tifffile"
version = "2025.1.10"
version = "2025.2.18"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d5/fc/697d8dac6936a81eda88e7d4653d567fcb0d504efad3fd28f5272f96fcf9/tifffile-2025.1.10.tar.gz", hash = "sha256:baaf0a3b87bf7ec375fa1537503353f70497eabe1bdde590f2e41cc0346e612f", size = 365585 }
sdist = { url = "https://files.pythonhosted.org/packages/de/1f/96d743b3417425f958dfed2518ad271b346a072d27b6859bb158e601bc21/tifffile-2025.2.18.tar.gz", hash = "sha256:8d731789e691b468746c1615d989bc550ac93cf753e9210865222e90a5a95d11", size = 365412 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/59/50/7bef6a1259a2c4b81823653a69d2d51074f7b8095db2abae5abee962ab87/tifffile-2025.1.10-py3-none-any.whl", hash = "sha256:ed24cf4c99fb13b4f5fb29f8a0d5605e60558c950bccbdca2a6470732a27cfb3", size = 227551 },
{ url = "https://files.pythonhosted.org/packages/63/70/6f363ab13f9903557a567a4471a28ee231b962e34af8e1dd8d1b0f17e64e/tifffile-2025.2.18-py3-none-any.whl", hash = "sha256:54b36c4d5e5b8d8920134413edfe5a7cfb1c7617bb50cddf7e2772edb7149043", size = 226358 },
]
[[package]]