infernet-1.0.0 update

arshan-ritual 2024-06-06 13:18:48 -04:00
parent 2a11fd3953
commit 40a6c590da
98 changed files with 879 additions and 506 deletions

.gitignore
@@ -38,3 +38,10 @@ venv
 # sync scripts
 remote_sync
+# forge generated files
+**/broadcast
+**/out
+# secrets
+*-key.json

.gitmodules
@@ -3,16 +3,16 @@
 url = https://github.com/foundry-rs/forge-std
 [submodule "projects/hello-world/contracts/lib/infernet-sdk"]
 path = projects/hello-world/contracts/lib/infernet-sdk
-url = https://github.com/ritual-net/infernet-sdk
+url = https://github.com/ritual-net/infernet-sdk.git
 [submodule "projects/torch-iris/contracts/lib/infernet-sdk"]
 path = projects/torch-iris/contracts/lib/infernet-sdk
-url = https://github.com/ritual-net/infernet-sdk
+url = https://github.com/ritual-net/infernet-sdk.git
 [submodule "projects/torch-iris/contracts/lib/forge-std"]
 path = projects/torch-iris/contracts/lib/forge-std
 url = https://github.com/foundry-rs/forge-std
 [submodule "projects/onnx-iris/contracts/lib/infernet-sdk"]
 path = projects/onnx-iris/contracts/lib/infernet-sdk
-url = https://github.com/ritual-net/infernet-sdk
+url = https://github.com/ritual-net/infernet-sdk.git
 [submodule "projects/onnx-iris/contracts/lib/forge-std"]
 path = projects/onnx-iris/contracts/lib/forge-std
 url = https://github.com/foundry-rs/forge-std
@@ -21,13 +21,13 @@
 url = https://github.com/foundry-rs/forge-std
 [submodule "projects/prompt-to-nft/contracts/lib/infernet-sdk"]
 path = projects/prompt-to-nft/contracts/lib/infernet-sdk
-url = https://github.com/ritual-net/infernet-sdk
+url = https://github.com/ritual-net/infernet-sdk.git
 [submodule "projects/prompt-to-nft/contracts/lib/solmate"]
 path = projects/prompt-to-nft/contracts/lib/solmate
 url = https://github.com/transmissions11/solmate
 [submodule "projects/gpt4/contracts/lib/infernet-sdk"]
 path = projects/gpt4/contracts/lib/infernet-sdk
-url = https://github.com/ritual-net/infernet-sdk
+url = https://github.com/ritual-net/infernet-sdk.git
 [submodule "projects/gpt4/contracts/lib/forge-std"]
 path = projects/gpt4/contracts/lib/forge-std
 url = https://github.com/foundry-rs/forge-std
@@ -36,4 +36,10 @@
 url = https://github.com/foundry-rs/forge-std
 [submodule "projects/tgi-llm/contracts/lib/infernet-sdk"]
 path = projects/tgi-llm/contracts/lib/infernet-sdk
+url = https://github.com/ritual-net/infernet-sdk.git
+[submodule "projects/payment/contracts/lib/forge-std"]
+path = projects/payment/contracts/lib/forge-std
+url = https://github.com/foundry-rs/forge-std
+[submodule "projects/payment/contracts/lib/infernet-sdk"]
+path = projects/payment/contracts/lib/infernet-sdk
 url = https://github.com/ritual-net/infernet-sdk

CHANGELOG.md (new file)
@@ -0,0 +1,24 @@
# Changelog
All notable changes to this project will be documented in this file.
- ##### The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- ##### This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [1.0.0] - 2024-06-06
### Added
- New project `payment` for an end-to-end flow of the payments feature of `infernet 1.0.0`.
### Changed
- All workflows are updated to use `infernet-ml 1.0.0`
- All contracts are updated to use `infernet-sdk 1.0.0`
### Fixed
- Recursive submodule cloning issue with forge's libraries.
## [0.1.0] - 2024-03-21
### Added
- Initial release of the Infernet Container Starter repository.

@@ -1,5 +1,9 @@
-build-container:
-	$(MAKE) -C ./projects/$(project)/container build
+include internal.mk
+index_url ?= ''
+build-container: get_index_url
+	$(MAKE) -C ./projects/$(project)/container build index_url=$(index_url)
 remove-containers:
 	docker compose -f deploy/docker-compose.yaml down || true
@@ -8,10 +12,18 @@ remove-containers:
 build-multiplatform:
 	$(MAKE) -C ./projects/$(project)/container build-multiplatform
-deploy-container:
-	$(MAKE) remove-containers
+deploy-container: stop-container
 	cp ./projects/$(project)/container/config.json deploy/config.json
 	docker compose -f deploy/docker-compose.yaml up -d
+	docker logs infernet-node -f
+stop-container:
+	docker compose -f deploy/docker-compose.yaml kill || true
+	docker compose -f deploy/docker-compose.yaml rm -f || true
+	docker kill $(project) || true
+	docker rm $(project) || true
+watch-logs:
 	docker compose -f deploy/docker-compose.yaml logs -f
 deploy-contracts:
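Taken together, the new targets give a simple local loop. A sketch of typical usage (the `project=hello-world` value is illustrative; note that `build-container` now pulls in `get_index_url` from `internal.mk`, so either have `gcloud` configured or leave `index_url` at its empty default):

```bash
# build the chosen project's container image (index_url is forwarded to docker build)
make build-container project=hello-world

# (re)deploy the node stack for that project and follow the node's logs
make deploy-container project=hello-world

# tail all compose logs, or tear the stack down again
make watch-logs
make stop-container project=hello-world
```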

PUBLISHING.md (new file)
@@ -0,0 +1,6 @@
## Get Rid Of
- [ ] build versions in libraries
- [ ] Node version in dockerfiles
- [ ] `get_index_url`, `index_url` can stay
- [ ] change ci not to use extra index url (requires pypi release)

@@ -2,28 +2,30 @@ version: '3'
 services:
   node:
-    image: ritualnetwork/infernet-node:latest
+    image: ritualnetwork/infernet-node:1.0.0
     ports:
       - "0.0.0.0:4000:4000"
     volumes:
       - ./config.json:/app/config.json
       - node-logs:/logs
       - /var/run/docker.sock:/var/run/docker.sock
+    tty: true
     networks:
       - network
     depends_on:
       - redis
+      - infernet-anvil
    restart:
       on-failure
     extra_hosts:
       - "host.docker.internal:host-gateway"
     stop_grace_period: 1m
-    tty: true
+    container_name: infernet-node
   redis:
     image: redis:latest
-    expose:
-      - "6379"
+    ports:
+      - "6379:6379"
     networks:
       - network
     volumes:
@@ -46,10 +48,18 @@ services:
     restart:
       on-failure
+  infernet-anvil:
+    image: ritualnetwork/infernet-anvil:1.0.0
+    command: --host 0.0.0.0 --port 3000 --load-state infernet_deployed.json -b 1
+    ports:
+      - "8545:3000"
+    networks:
+      - network
+    container_name: infernet-anvil
 networks:
   network:
 volumes:
   node-logs:
   redis-data:

internal.mk (new file)
@@ -0,0 +1,10 @@
ifneq ("$(wildcard gcp.env)","")
include gcp.env
endif
get_index_url:
$(eval token := $(shell gcloud auth print-access-token))
$(eval index_url := "https://_token:$(token)@$(artifact_location)-python.pkg.dev/$(gcp_project)/$(artifact_repo)/simple")
generate-uv-env-file: get_index_url
@echo "`echo $(export_prefix)`UV_EXTRA_INDEX_URL=$(index_url)" > uv.env
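For context, a sketch of how this is meant to be driven, assuming a `gcp.env` that defines the `artifact_location`, `gcp_project`, and `artifact_repo` variables referenced above (all values below are placeholders):

```bash
# gcp.env (placeholder values, not part of the repo)
#   artifact_location=us-central1
#   gcp_project=my-gcp-project
#   artifact_repo=my-python-repo

# derive the token-authenticated index URL and write it to uv.env
make generate-uv-env-file

# build-container (top-level Makefile) depends on get_index_url, so the same
# index_url is forwarded into `docker build --build-arg index_url=...` automatically
make build-container project=hello-world
```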

@@ -7,12 +7,15 @@ ENV PYTHONDONTWRITEBYTECODE 1
 ENV PIP_NO_CACHE_DIR 1
 ENV RUNTIME docker
 ENV PYTHONPATH src
+ARG index_url
+ENV UV_EXTRA_INDEX_URL ${index_url}
 RUN apt-get update
 RUN apt-get install -y git curl
 # install uv
-ADD --chmod=755 https://astral.sh/uv/install.sh /install.sh
+ADD https://astral.sh/uv/install.sh /install.sh
+RUN chmod 755 /install.sh
 RUN /install.sh && rm /install.sh
 COPY src/requirements.txt .

@@ -5,8 +5,7 @@ TAG := $(DOCKER_ORG)/example-$(EXAMPLE_NAME)-infernet:latest
 .phony: build run build-multiplatform try-prompt
 build:
-	mkdir -p root-config
-	@docker build -t $(TAG) .
+	@docker build -t $(TAG) --build-arg index_url=$(index_url) .
 run: build
 	@docker run --env-file $(EXAMPLE_NAME).env -p 3000:3000 $(TAG)

@@ -16,5 +16,5 @@ make run
 ## Test the Container
 ```bash
 curl -X POST localhost:3000/service_output -H "Content-Type: application/json" \
-     -d '{"source": 1, "data": {"text": "can shrimps actually fry rice?"}}'
+     -d '{"source": 1, "data": {"prompt": "can shrimps actually fry rice?"}}'
 ```

@@ -7,7 +7,7 @@
     "enabled": true,
     "trail_head_blocks": 0,
     "rpc_url": "http://host.docker.internal:8545",
-    "coordinator_address": "0x5FbDB2315678afecb367f032d93F642f64180aa3",
+    "registry_address": "0x663F3ad617193148711d28f5334eE4Ed07016602",
     "wallet": {
       "max_gas_limit": 4000000,
       "private_key": "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
@@ -23,6 +23,10 @@
     "port": 6379
   },
   "forward_stats": true,
+  "snapshot_sync": {
+    "sleep": 3,
+    "batch_size": 100
+  },
   "containers": [
     {
       "id": "gpt4",
@@ -34,19 +38,9 @@
       "allowed_ips": [],
       "command": "--bind=0.0.0.0:3000 --workers=2",
       "env": {
-        "OPENAI_API_KEY": "barabeem baraboom"
-      }
-    },
-    {
-      "id": "anvil-node",
-      "image": "ritualnetwork/infernet-anvil:0.0.0",
-      "external": true,
-      "port": "8545",
-      "allowed_delegate_addresses": [],
-      "allowed_addresses": [],
-      "allowed_ips": [],
-      "command": "",
-      "env": {}
+        "OPENAI_API_KEY": "your-key"
+      },
+      "accepted_payments": {}
     }
   ]
 }

@@ -1,8 +1,16 @@
 import logging
+import os
 from typing import Any, cast
 from eth_abi import decode, encode  # type: ignore
-from infernet_ml.utils.service_models import InfernetInput, InfernetInputSource
+from infernet_ml.utils.css_mux import (
+    ConvoMessage,
+    CSSCompletionParams,
+    CSSRequest,
+    Provider,
+)
+from infernet_ml.utils.service_models import InfernetInput
+from infernet_ml.utils.service_models import JobLocation
 from infernet_ml.workflows.inference.css_inference_workflow import CSSInferenceWorkflow
 from quart import Quart, request
@@ -12,7 +20,9 @@ log = logging.getLogger(__name__)
 def create_app() -> Quart:
     app = Quart(__name__)
-    workflow = CSSInferenceWorkflow(provider="OPENAI", endpoint="completions")
+    workflow = CSSInferenceWorkflow(
+        api_keys={Provider.OPENAI: os.environ["OPENAI_API_KEY"]}
+    )
     workflow.setup()
@@ -24,7 +34,7 @@ def create_app() -> Quart:
         return "GPT4 Example Program"
     @app.route("/service_output", methods=["POST"])
-    async def inference() -> dict[str, Any]:
+    async def inference() -> Any:
         req_data = await request.get_json()
         """
         InfernetInput has the format:
@@ -33,52 +43,62 @@ def create_app() -> Quart:
         """
         infernet_input: InfernetInput = InfernetInput(**req_data)
-        if infernet_input.source == InfernetInputSource.OFFCHAIN:
-            prompt = cast(dict[str, Any], infernet_input.data).get("prompt")
-        else:
-            # On-chain requests are sent as a generalized hex-string which we will
-            # decode to the appropriate format.
-            (prompt,) = decode(
-                ["string"], bytes.fromhex(cast(str, infernet_input.data))
-            )
+        match infernet_input:
+            case InfernetInput(source=JobLocation.OFFCHAIN):
+                prompt = cast(dict[str, Any], infernet_input.data).get("prompt")
+            case InfernetInput(source=JobLocation.ONCHAIN):
+                # On-chain requests are sent as a generalized hex-string which we will
+                # decode to the appropriate format.
+                (prompt,) = decode(
+                    ["string"], bytes.fromhex(cast(str, infernet_input.data))
+                )
+            case _:
+                raise ValueError("Invalid source")
-        result: dict[str, Any] = workflow.inference(
-            {
-                "model": "gpt-4-0613",
-                "params": {
-                    "endpoint": "completions",
-                    "messages": [
-                        {"role": "system", "content": "You are a helpful assistant."},
-                        {"role": "user", "content": prompt},
-                    ],
-                },
-            }
+        result = workflow.inference(
+            CSSRequest(
+                provider=Provider.OPENAI,
+                endpoint="completions",
+                model="gpt-4-0613",
+                params=CSSCompletionParams(
+                    messages=[
+                        ConvoMessage(
+                            role="system", content="you are a helpful " "assistant."
+                        ),
+                        ConvoMessage(role="user", content=cast(str, prompt)),
+                    ]
+                ),
+            )
         )
-        if infernet_input.source == InfernetInputSource.OFFCHAIN:
-            """
-            In case of an off-chain request, the result is returned as is.
-            """
-            return {"message": result}
-        else:
-            """
-            In case of an on-chain request, the result is returned in the format:
-            {
-                "raw_input": str,
-                "processed_input": str,
-                "raw_output": str,
-                "processed_output": str,
-                "proof": str,
-            }
-            refer to: https://docs.ritual.net/infernet/node/containers for more info.
-            """
-            return {
-                "raw_input": "",
-                "processed_input": "",
-                "raw_output": encode(["string"], [result]).hex(),
-                "processed_output": "",
-                "proof": "",
-            }
+        match infernet_input:
+            case InfernetInput(destination=JobLocation.OFFCHAIN):
+                """
+                In case of an off-chain request, the result is returned as is.
+                """
+                return {"message": result}
+            case InfernetInput(destination=JobLocation.ONCHAIN):
+                """
+                In case of an on-chain request, the result is returned in the format:
+                {
+                    "raw_input": str,
+                    "processed_input": str,
+                    "raw_output": str,
+                    "processed_output": str,
+                    "proof": str,
+                }
+                refer to: https://docs.ritual.net/infernet/node/containers for more
+                info.
+                """
+                return {
+                    "raw_input": "",
+                    "processed_input": "",
+                    "raw_output": encode(["string"], [result]).hex(),
+                    "processed_output": "",
+                    "proof": "",
+                }
+            case _:
+                raise ValueError("Invalid destination")
     return app

@@ -1,5 +1,4 @@
 quart==0.19.4
-infernet_ml==0.1.0
-PyArweave @ git+https://github.com/ritual-net/pyarweave.git
+infernet-ml==1.0.0
+infernet-ml[css_inference]==1.0.0
 web3==6.15.0
-retry2==0.9.5

@@ -1 +1 @@
-Subproject commit e4aef94c1768803a16fe19f7ce8b65defd027cfd
+Subproject commit 52715a217dc51d0de15877878ab8213f6cbbbab5

@@ -1 +1 @@
-Subproject commit 2d04a7f5ed64738218941e5d7a7270382f191a01
+Subproject commit 8e6cd6f5cbd66dc9baacb895a2ed8fe2c9ee3b6f

@@ -10,7 +10,7 @@ contract CallContract is Script {
         uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY");
         vm.startBroadcast(deployerPrivateKey);
-        PromptsGPT promptsGpt = PromptsGPT(0x663F3ad617193148711d28f5334eE4Ed07016602);
+        PromptsGPT promptsGpt = PromptsGPT(0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e);
         promptsGpt.promptGPT(vm.envString("prompt"));

@@ -14,10 +14,10 @@ contract Deploy is Script {
         address deployerAddress = vm.addr(deployerPrivateKey);
         console2.log("Loaded deployer: ", deployerAddress);
-        address coordinator = 0x5FbDB2315678afecb367f032d93F642f64180aa3;
+        address registry = 0x663F3ad617193148711d28f5334eE4Ed07016602;
         // Create consumer
-        PromptsGPT promptsGPT = new PromptsGPT(coordinator);
+        PromptsGPT promptsGPT = new PromptsGPT(registry);
         console2.log("Deployed PromptsGPT: ", address(promptsGPT));
         // Execute

@@ -12,15 +12,17 @@ contract PromptsGPT is CallbackConsumer {
         "| _ / | | | | | | | |/ /\\ \\ | | \n"
         "| | \\ \\ _| |_ | | | |__| / ____ \\| |____ \n"
         "|_| \\_\\_____| |_| \\____/_/ \\_\\______| \n\n";
-    constructor(address coordinator) CallbackConsumer(coordinator) {}
+    constructor(address registry) CallbackConsumer(registry) {}
     function promptGPT(string calldata prompt) public {
         _requestCompute(
             "gpt4",
             abi.encode(prompt),
-            20 gwei,
-            1_000_000,
-            1
+            1, // redundancy
+            address(0), // paymentToken
+            0, // paymentAmount
+            address(0), // wallet
+            address(0) // prover
         );
     }
@@ -31,7 +33,9 @@ contract PromptsGPT is CallbackConsumer {
         address node,
         bytes calldata input,
         bytes calldata output,
-        bytes calldata proof
+        bytes calldata proof,
+        bytes32 containerId,
+        uint256 index
     ) internal override {
         console2.log(EXTREMELY_COOL_BANNER);
         (bytes memory raw_output, bytes memory processed_output) = abi.decode(output, (bytes, bytes));

@@ -129,7 +129,7 @@ curl -X GET http://127.0.0.1:4000/api/jobs\?id\=cab6eea8-8b1e-4144-9a70-f905c5ef
 And if you have `jq` installed and piped the last output to a file, you can instead run:
 ```bash
-curl -X GET "http://127.0.0.1:4000/api/jobs?id=$(cat last-request.uuid)" | jq .
+curl -X GET "http://127.0.0.1:4000/api/jobs?id=$(cat last-job.uuid)" | jq .
 # returns something like:
 [
   {

@@ -7,12 +7,15 @@ ENV PYTHONDONTWRITEBYTECODE 1
 ENV PIP_NO_CACHE_DIR 1
 ENV RUNTIME docker
 ENV PYTHONPATH src
+ARG index_url
+ENV UV_EXTRA_INDEX_URL ${index_url}
 RUN apt-get update
 RUN apt-get install -y git curl
 # install uv
-ADD --chmod=755 https://astral.sh/uv/install.sh /install.sh
+ADD https://astral.sh/uv/install.sh /install.sh
+RUN chmod 755 /install.sh
 RUN /install.sh && rm /install.sh
 COPY src/requirements.txt .

@@ -4,7 +4,7 @@ TAG := $(DOCKER_ORG)/hello-world-infernet:latest
 .phony: build run publish
 build:
-	@docker build -t $(TAG) .
+	@docker build -t $(TAG) --build-arg index_url=$(index_url) .
 update-tag:
 	jq ".containers[0].image = \"$(TAG)\"" config.json > updated_config.json && mv updated_config.json config.json

@@ -50,7 +50,7 @@ file with the following content:
 ```
 Flask>=3.0.0,<4.0.0
-gunicorn>=21.2.0,<22.0.0
+gunicorn>=22.0.0,<23.0.0
 ```
 ## Step 2: create a Dockerfile

@@ -1,50 +1,44 @@
 {
   "log_path": "infernet_node.log",
   "server": {
     "port": 4000
-    },
-    "chain": {
-        "enabled": true,
-        "trail_head_blocks": 0,
-        "rpc_url": "http://host.docker.internal:8545",
-        "coordinator_address": "0x5FbDB2315678afecb367f032d93F642f64180aa3",
-        "wallet": {
-            "max_gas_limit": 4000000,
-            "private_key": "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
-        }
-    },
-    "startup_wait": 1.0,
-    "docker": {
-        "username": "your-username",
-        "password": ""
-    },
-    "redis": {
-        "host": "redis",
-        "port": 6379
-    },
-    "forward_stats": true,
-    "containers": [
-        {
-            "id": "hello-world",
-            "image": "ritualnetwork/hello-world-infernet:latest",
-            "external": true,
-            "port": "3000",
-            "allowed_delegate_addresses": [],
-            "allowed_addresses": [],
-            "allowed_ips": [],
-            "command": "--bind=0.0.0.0:3000 --workers=2",
-            "env": {}
-        },
-        {
-            "id": "anvil-node",
-            "image": "ritualnetwork/infernet-anvil:0.0.0",
-            "external": true,
-            "port": "8545",
-            "allowed_delegate_addresses": [],
-            "allowed_addresses": [],
-            "allowed_ips": [],
-            "command": "",
-            "env": {}
-        }
-    ]
+  },
+  "chain": {
+    "enabled": true,
+    "trail_head_blocks": 0,
+    "rpc_url": "http://host.docker.internal:8545",
+    "registry_address": "0x663F3ad617193148711d28f5334eE4Ed07016602",
+    "wallet": {
+      "max_gas_limit": 4000000,
+      "private_key": "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
+    }
+  },
+  "startup_wait": 1.0,
+  "docker": {
+    "username": "your-username",
+    "password": ""
+  },
+  "redis": {
+    "host": "redis",
+    "port": 6379
+  },
+  "forward_stats": true,
+  "snapshot_sync": {
+    "sleep": 3,
+    "batch_size": 100
+  },
+  "containers": [
+    {
+      "id": "hello-world",
+      "image": "ritualnetwork/hello-world-infernet:latest",
+      "external": true,
+      "port": "3000",
+      "allowed_delegate_addresses": [],
+      "allowed_addresses": [],
+      "allowed_ips": [],
+      "command": "--bind=0.0.0.0:3000 --workers=2",
+      "env": {},
+      "accepted_payments": {}
+    }
+  ]
 }

@@ -1,2 +1,2 @@
 Flask>=3.0.0,<4.0.0
-gunicorn>=21.2.0,<22.0.0
+gunicorn>=22.0.0,<23.0.0

@@ -58,7 +58,7 @@ import {console2} from "forge-std/console2.sol";
 import {CallbackConsumer} from "infernet-sdk/consumer/Callback.sol";
 contract SaysGM is CallbackConsumer {
-    constructor(address coordinator) CallbackConsumer(coordinator) {}
+    constructor(address registry) CallbackConsumer(registry) {}
     function sayGM() public {
         _requestCompute(
@@ -128,9 +128,9 @@ contract Deploy is Script {
         address deployerAddress = vm.addr(deployerPrivateKey);
         console2.log("Loaded deployer: ", deployerAddress);
-        address coordinator = 0x5FbDB2315678afecb367f032d93F642f64180aa3;
+        address registry = 0x663F3ad617193148711d28f5334eE4Ed07016602;
         // Create consumer
-        SaysGM saysGm = new SaysGM(coordinator);
+        SaysGM saysGm = new SaysGM(registry);
         console2.log("Deployed SaysHello: ", address(saysGm));
         // Execute
@@ -160,7 +160,7 @@ contract CallContract is Script {
         uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY");
         vm.startBroadcast(deployerPrivateKey);
-        SaysGM saysGm = SaysGM(0x663F3ad617193148711d28f5334eE4Ed07016602);
+        SaysGM saysGm = SaysGM(0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e);
         saysGm.sayGM();

@@ -1 +1 @@
-Subproject commit e4aef94c1768803a16fe19f7ce8b65defd027cfd
+Subproject commit 52715a217dc51d0de15877878ab8213f6cbbbab5

@@ -1 +1 @@
-Subproject commit 2d04a7f5ed64738218941e5d7a7270382f191a01
+Subproject commit 8e6cd6f5cbd66dc9baacb895a2ed8fe2c9ee3b6f

@@ -1,2 +1,3 @@
 forge-std/=lib/forge-std/src
 infernet-sdk/=lib/infernet-sdk/src
+solady/=lib/infernet-sdk/lib/solady/src

@@ -10,7 +10,7 @@ contract CallContract is Script {
         uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY");
         vm.startBroadcast(deployerPrivateKey);
-        SaysGM saysGm = SaysGM(0x663F3ad617193148711d28f5334eE4Ed07016602);
+        SaysGM saysGm = SaysGM(0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e);
         saysGm.sayGM();

@@ -14,9 +14,9 @@ contract Deploy is Script {
         address deployerAddress = vm.addr(deployerPrivateKey);
         console2.log("Loaded deployer: ", deployerAddress);
-        address coordinator = 0x5FbDB2315678afecb367f032d93F642f64180aa3;
+        address registry = 0x663F3ad617193148711d28f5334eE4Ed07016602;
         // Create consumer
-        SaysGM saysGm = new SaysGM(coordinator);
+        SaysGM saysGm = new SaysGM(registry);
         console2.log("Deployed SaysHello: ", address(saysGm));
         // Execute

@@ -5,15 +5,17 @@ import {console2} from "forge-std/console2.sol";
 import {CallbackConsumer} from "infernet-sdk/consumer/Callback.sol";
 contract SaysGM is CallbackConsumer {
-    constructor(address coordinator) CallbackConsumer(coordinator) {}
+    constructor(address registry) CallbackConsumer(registry) {}
     function sayGM() public {
         _requestCompute(
             "hello-world",
             bytes("Good morning!"),
-            20 gwei,
-            1_000_000,
-            1
+            1, // redundancy
+            address(0), // paymentToken
+            0, // paymentAmount
+            address(0), // wallet
+            address(0) // prover
         );
     }
@@ -24,7 +26,9 @@ contract SaysGM is CallbackConsumer {
         address node,
         bytes calldata input,
         bytes calldata output,
-        bytes calldata proof
+        bytes calldata proof,
+        bytes32 containerId,
+        uint256 index
     ) internal override {
         console2.log("\n\n"
             "_____ _____ _______ _ _ _\n"
@@ -43,6 +47,8 @@ contract SaysGM is CallbackConsumer {
         console2.logBytes(input);
         console2.log("output:");
         console2.logBytes(output);
+        (string memory decoded) = abi.decode(output, (string));
+        console2.log("decoded output: ", decoded);
         console2.log("proof:");
         console2.logBytes(proof);
     }

@@ -78,7 +78,7 @@ This project already comes with a pre-filled config file. The config file for th
 [here](container/config.json):
 ```bash
-projects/hello-world/config.json
+projects/hello-world/container/config.json
 ```
 ## Requesting an on-chain job
@@ -92,7 +92,7 @@ We already have a public [anvil node](https://hub.docker.com/r/ritualnetwork/inf
 corresponding infernet sdk contracts deployed, along with a node that has
 registered itself to listen to on-chain subscription events.
-* Coordinator Address: `0x5FbDB2315678afecb367f032d93F642f64180aa3`
+* Registry Address: `0x663F3ad617193148711d28f5334eE4Ed07016602`
 * Node Address: `0x70997970C51812dc3A010C7d01b50e0d17dc79C8` (This is the second account in the anvil's accounts.)
 ### Deploying Infernet Node & Infernet's Anvil Testnet
@@ -140,7 +140,7 @@ eth_sendRawTransaction
 eth_getTransactionReceipt
 Transaction: 0x23ca6b1d1823ad5af175c207c2505112f60038fc000e1e22509816fa29a3afd6
-Contract created: 0x663f3ad617193148711d28f5334ee4ed07016602
+Contract created: 0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e
 Gas used: 476669
 Block Number: 1
@@ -152,7 +152,7 @@ eth_getTransactionReceipt
 eth_blockNumber
 ```
-We can see that a new contract has been created at `0x663f3ad617193148711d28f5334ee4ed07016602`.
+We can see that a new contract has been created at `0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e`.
 That's the address of the `SaysGM` contract.
 ### Calling the contract

@@ -7,12 +7,15 @@ ENV PYTHONDONTWRITEBYTECODE 1
 ENV PIP_NO_CACHE_DIR 1
 ENV RUNTIME docker
 ENV PYTHONPATH src
+ARG index_url
+ENV UV_EXTRA_INDEX_URL ${index_url}
 RUN apt-get update
 RUN apt-get install -y git curl
 # install uv
-ADD --chmod=755 https://astral.sh/uv/install.sh /install.sh
+ADD https://astral.sh/uv/install.sh /install.sh
+RUN chmod 755 /install.sh
 RUN /install.sh && rm /install.sh
 COPY src/requirements.txt .

@@ -5,7 +5,7 @@ TAG := $(DOCKER_ORG)/example-$(EXAMPLE_NAME)-infernet:latest
 .phony: build run build-multiplatform
 build:
-	@docker build -t $(TAG) .
+	@docker build -t $(TAG) --build-arg index_url=$(index_url) .
 run:
 	docker run -p 3000:3000 $(TAG)

@@ -8,9 +8,9 @@ repository.
 ## Overview
 We're making use of
-the [ONNXInferenceWorkflow](https://github.com/ritual-net/infernet-ml-internal/blob/main/src/ml/workflows/inference/onnx_inference_workflow.py)
+the [ONNXInferenceWorkflow](https://github.com/ritual-net/infernet-ml/blob/main/src/ml/workflows/inference/onnx_inference_workflow.py)
 class to run the model. This is one of many workflows that we currently support in our
-[infernet-ml](https://github.com/ritual-net/infernet-ml-internal). Consult the library's
+[infernet-ml](https://github.com/ritual-net/infernet-ml). Consult the library's
 documentation for more info on workflows that
 are supported.

@@ -7,7 +7,7 @@
     "enabled": true,
     "trail_head_blocks": 0,
     "rpc_url": "http://host.docker.internal:8545",
-    "coordinator_address": "0x5FbDB2315678afecb367f032d93F642f64180aa3",
+    "registry_address": "0x663F3ad617193148711d28f5334eE4Ed07016602",
     "wallet": {
       "max_gas_limit": 4000000,
      "private_key": "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
@@ -23,6 +23,10 @@
     "port": 6379
   },
   "forward_stats": true,
+  "snapshot_sync": {
+    "sleep": 3,
+    "batch_size": 100
+  },
   "containers": [
     {
       "id": "onnx-iris",
@@ -33,18 +37,8 @@
       "allowed_addresses": [],
       "allowed_ips": [],
       "command": "--bind=0.0.0.0:3000 --workers=2",
-      "env": {}
-    },
-    {
-      "id": "anvil-node",
-      "image": "ritualnetwork/infernet-anvil:0.0.0",
-      "external": true,
-      "port": "8545",
-      "allowed_delegate_addresses": [],
-      "allowed_addresses": [],
-      "allowed_ips": [],
-      "command": "",
-      "env": {}
+      "env": {},
+      "accepted_payments": {}
     }
   ]
 }

@@ -1,12 +1,18 @@
 import logging
 from typing import Any, cast, List
+from infernet_ml.utils.common_types import TensorInput
 import numpy as np
 from eth_abi import decode, encode  # type: ignore
-from infernet_ml.utils.model_loader import ModelSource
-from infernet_ml.utils.service_models import InfernetInput, InfernetInputSource
+from infernet_ml.utils.model_loader import (
+    HFLoadArgs,
+    ModelSource,
+)
+from infernet_ml.utils.service_models import InfernetInput, JobLocation
 from infernet_ml.workflows.inference.onnx_inference_workflow import (
     ONNXInferenceWorkflow,
+    ONNXInferenceInput,
+    ONNXInferenceResult,
 )
 from quart import Quart, request
 from quart.json.provider import DefaultJSONProvider
@@ -29,10 +35,11 @@ def create_app() -> Quart:
     app = Quart(__name__)
     # we are downloading the model from the hub.
     # model repo is located at: https://huggingface.co/Ritual-Net/iris-dataset
-    model_source = ModelSource.HUGGINGFACE_HUB
-    model_args = {"repo_id": "Ritual-Net/iris-dataset", "filename": "iris.onnx"}
-    workflow = ONNXInferenceWorkflow(model_source=model_source, model_args=model_args)
+    workflow = ONNXInferenceWorkflow(
+        model_source=ModelSource.HUGGINGFACE_HUB,
+        load_args=HFLoadArgs(repo_id="Ritual-Net/iris-dataset", filename="iris.onnx"),
+    )
     workflow.setup()
     @app.route("/")
@@ -43,7 +50,7 @@ def create_app() -> Quart:
         return "ONNX Iris Classifier Example Program"
     @app.route("/service_output", methods=["POST"])
-    async def inference() -> dict[str, Any]:
+    async def inference() -> Any:
         req_data = await request.get_json()
         """
         InfernetInput has the format:
@@ -52,50 +59,56 @@ def create_app() -> Quart:
         """
         infernet_input: InfernetInput = InfernetInput(**req_data)
-        if infernet_input.source == InfernetInputSource.OFFCHAIN:
-            web2_input = cast(dict[str, Any], infernet_input.data)
-            values = cast(List[List[float]], web2_input["input"])
-        else:
-            # On-chain requests are sent as a generalized hex-string which we will
-            # decode to the appropriate format.
-            web3_input: List[int] = decode(
-                ["uint256[]"], bytes.fromhex(cast(str, infernet_input.data))
-            )[0]
-            values = [[float(v) / 1e6 for v in web3_input]]
+        match infernet_input:
+            case InfernetInput(source=JobLocation.OFFCHAIN):
+                web2_input = cast(dict[str, Any], infernet_input.data)
+                values = cast(List[List[float]], web2_input["input"])
+            case InfernetInput(source=JobLocation.ONCHAIN):
+                web3_input: List[int] = decode(
+                    ["uint256[]"], bytes.fromhex(cast(str, infernet_input.data))
+                )[0]
+                values = [[float(v) / 1e6 for v in web3_input]]
         """
         The input to the onnx inference workflow needs to conform to ONNX runtime's
         input_feed format. For more information refer to:
         https://docs.ritual.net/ml-workflows/inference-workflows/onnx_inference_workflow
         """
-        result: dict[str, Any] = workflow.inference({"input": values})
+        _input = ONNXInferenceInput(
+            inputs={"input": TensorInput(shape=(1, 4), dtype="float", values=values)},
+        )
+        result: ONNXInferenceResult = workflow.inference(_input)
-        if infernet_input.source == InfernetInputSource.OFFCHAIN:
-            """
-            In case of an off-chain request, the result is returned as is.
-            """
-            return result
-        else:
-            """
-            In case of an on-chain request, the result is returned in the format:
-            {
-                "raw_input": str,
-                "processed_input": str,
-                "raw_output": str,
-                "processed_output": str,
-                "proof": str,
-            }
-            refer to: https://docs.ritual.net/infernet/node/containers for more info.
-            """
-            predictions = cast(List[List[List[float]]], result)
-            predictions_normalized = [int(p * 1e6) for p in predictions[0][0]]
-            return {
-                "raw_input": "",
-                "processed_input": "",
-                "raw_output": encode(["uint256[]"], [predictions_normalized]).hex(),
-                "processed_output": "",
-                "proof": "",
-            }
+        match infernet_input:
+            case InfernetInput(destination=JobLocation.OFFCHAIN):
+                """
+                In case of an off-chain request, the result is returned as is.
+                """
+                return result
+            case InfernetInput(destination=JobLocation.ONCHAIN):
+                """
+                In case of an on-chain request, the result is returned in the format:
+                {
+                    "raw_input": str,
+                    "processed_input": str,
+                    "raw_output": str,
+                    "processed_output": str,
+                    "proof": str,
+                }
+                refer to: https://docs.ritual.net/infernet/node/containers for more
+                info.
+                """
+                predictions = result[0]
+                predictions_normalized = [int(p * 1e6) for p in predictions.values]
+                return {
+                    "raw_input": "",
+                    "processed_input": "",
+                    "raw_output": encode(["uint256[]"], [predictions_normalized]).hex(),
+                    "processed_output": "",
+                    "proof": "",
+                }
+            case _:
+                raise ValueError("Invalid destination")
     return app

@@ -1,7 +1,4 @@
 quart==0.19.4
-infernet_ml==0.1.0
-PyArweave @ git+https://github.com/ritual-net/pyarweave.git
+infernet-ml==1.0.0
+infernet-ml[onnx_inference]==1.0.0
 web3==6.15.0
-onnx==1.15.0
-onnxruntime==1.16.3
-torch==2.1.2

@@ -1 +1 @@
-Subproject commit e4aef94c1768803a16fe19f7ce8b65defd027cfd
+Subproject commit 52715a217dc51d0de15877878ab8213f6cbbbab5

@@ -1 +1 @@
-Subproject commit 2d04a7f5ed64738218941e5d7a7270382f191a01
+Subproject commit 8e6cd6f5cbd66dc9baacb895a2ed8fe2c9ee3b6f

@@ -10,7 +10,7 @@ contract CallContract is Script {
         uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY");
         vm.startBroadcast(deployerPrivateKey);
-        IrisClassifier irisClassifier = IrisClassifier(0x663F3ad617193148711d28f5334eE4Ed07016602);
+        IrisClassifier irisClassifier = IrisClassifier(0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e);
         irisClassifier.classifyIris();

@@ -14,9 +14,9 @@ contract Deploy is Script {
         address deployerAddress = vm.addr(deployerPrivateKey);
         console2.log("Loaded deployer: ", deployerAddress);
-        address coordinator = 0x5FbDB2315678afecb367f032d93F642f64180aa3;
+        address registry = 0x663F3ad617193148711d28f5334eE4Ed07016602;
         // Create consumer
-        IrisClassifier classifier = new IrisClassifier(coordinator);
+        IrisClassifier classifier = new IrisClassifier(registry);
         console2.log("Deployed IrisClassifier: ", address(classifier));
         // Execute

@@ -14,7 +14,7 @@ contract IrisClassifier is CallbackConsumer {
         "| | \\ \\ _| |_ | | | |__| / ____ \\| |____\n"
         "|_| \\_\\_____| |_| \\____/_/ \\_\\______|\n\n";
-    constructor(address coordinator) CallbackConsumer(coordinator) {}
+    constructor(address registry) CallbackConsumer(registry) {}
     function classifyIris() public {
         /// @dev Iris data is in the following format:
@@ -38,9 +38,11 @@ contract IrisClassifier is CallbackConsumer {
         _requestCompute(
             "onnx-iris",
             abi.encode(iris_data),
-            20 gwei,
-            1_000_000,
-            1
+            1, // redundancy
+            address(0), // paymentToken
+            0, // paymentAmount
+            address(0), // wallet
+            address(0) // prover
         );
     }
@@ -51,7 +53,9 @@ contract IrisClassifier is CallbackConsumer {
         address node,
         bytes calldata input,
         bytes calldata output,
-        bytes calldata proof
+        bytes calldata proof,
+        bytes32 containerId,
+        uint256 index
     ) internal override {
         console2.log(EXTREMELY_COOL_BANNER);
         (bytes memory raw_output, bytes memory processed_output) = abi.decode(output, (bytes, bytes));

@@ -195,7 +195,7 @@ In your anvil logs you should see the following:
 eth_getTransactionReceipt
 Transaction: 0xeed605eacdace39a48635f6d14215b386523766f80a113b4484f542d862889a4
-Contract created: 0x663f3ad617193148711d28f5334ee4ed07016602
+Contract created: 0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e
 Gas used: 714269
 Block Number: 1
@@ -206,7 +206,7 @@ eth_blockNumber
 ```
 beautiful, we can see that a new contract has been created
-at `0x663f3ad617193148711d28f5334ee4ed07016602`. That's the address of
+at `0x663F3ad617193148711d28f5334eE4Ed07016602`. That's the address of
 the `IrisClassifier` contract. We are now going to call this contract. To do so,
 we are using
 the [CallContract.s.sol](contracts/script/CallContract.s.sol)

@@ -0,0 +1,47 @@
{
"log_path": "infernet_node.log",
"server": {
"port": 4000
},
"chain": {
"enabled": true,
"trail_head_blocks": 0,
"rpc_url": "http://host.docker.internal:8545",
"registry_address": "0x663F3ad617193148711d28f5334eE4Ed07016602",
"wallet": {
"max_gas_limit": 4000000,
"private_key": "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d",
"payment_address": "0x60985ee8192B322c3CAbA97A9A9f7298bdc4335C"
}
},
"startup_wait": 1.0,
"docker": {
"username": "your-username",
"password": ""
},
"redis": {
"host": "redis",
"port": 6379
},
"forward_stats": true,
"snapshot_sync": {
"sleep": 3,
"batch_size": 100
},
"containers": [
{
"id": "hello-world",
"image": "ritualnetwork/hello-world-infernet:latest",
"external": true,
"port": "3000",
"allowed_delegate_addresses": [],
"allowed_addresses": [],
"allowed_ips": [],
"command": "--bind=0.0.0.0:3000 --workers=2",
"env": {},
"accepted_payments": {
"0x0000000000000000000000000000000000000000": 1000000000000000000
}
}
]
}

projects/payment/contracts/.gitignore (new file)
@@ -0,0 +1,12 @@
# Compiler files
cache/
out/
# Ignores broadcast logs
broadcast
# Docs
docs/
# Dotenv file
.env

@@ -0,0 +1,14 @@
# phony targets are targets that don't actually create a file
.phony: deploy
# anvil's third default address
sender := 0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a
RPC_URL := http://localhost:8545
# deploying the contract
deploy:
@PRIVATE_KEY=$(sender) forge script script/Deploy.s.sol:Deploy --broadcast --rpc-url $(RPC_URL)
# calling sayGM()
call-contract:
@PRIVATE_KEY=$(sender) forge script script/CallContract.s.sol:CallContract --broadcast --rpc-url $(RPC_URL)
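The `CallContract` script added later in this commit reads `amount` and `wallet` from the environment, so the call target would be driven roughly as follows (the wallet address and amount are placeholders; see the payments documentation linked in this project's README):

```bash
# deploy the SaysGM consumer against the local anvil testnet
make deploy

# request compute with a payment attached: amount is in wei,
# wallet is a placeholder for a funded payment wallet address
amount=1000000000000000000 wallet=0xYourPaymentWalletAddress make call-contract
```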

@@ -0,0 +1,5 @@
## Payment Flow Example
This example contains all of the code for the payments flow example. Refer
to [the official documentation](https://docs.ritual.net/infernet/node/intro_to_payments)
for a step-by-step guide on how to run this example.

@@ -0,0 +1,7 @@
[profile.default]
src = "src"
out = "out"
libs = ["lib"]
via_ir = true
# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options

@@ -0,0 +1 @@
+Subproject commit 52715a217dc51d0de15877878ab8213f6cbbbab5

@@ -0,0 +1 @@
+Subproject commit 8e6cd6f5cbd66dc9baacb895a2ed8fe2c9ee3b6f

@@ -0,0 +1,2 @@
forge-std/=lib/forge-std/src
infernet-sdk/=lib/infernet-sdk/src

@@ -0,0 +1,24 @@
// SPDX-License-Identifier: BSD-3-Clause-Clear
pragma solidity ^0.8.0;
import {Script, console2} from "forge-std/Script.sol";
import {SaysGM} from "../src/SaysGM.sol";
contract CallContract is Script {
function run() public {
// Setup wallet
uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY");
uint256 amount = vm.envUint("amount");
address wallet = vm.envAddress("wallet");
vm.startBroadcast(deployerPrivateKey);
address registry = 0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e;
SaysGM saysGm = SaysGM(registry);
saysGm.sayGM(amount, wallet);
vm.stopBroadcast();
}
}

@@ -0,0 +1,26 @@
// SPDX-License-Identifier: BSD-3-Clause-Clear
pragma solidity ^0.8.13;
import {Script, console2} from "forge-std/Script.sol";
import {SaysGM} from "../src/SaysGM.sol";
contract Deploy is Script {
function run() public {
// Setup wallet
uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY");
vm.startBroadcast(deployerPrivateKey);
// Log address
address deployerAddress = vm.addr(deployerPrivateKey);
console2.log("Loaded deployer: ", deployerAddress);
address registry = 0x663F3ad617193148711d28f5334eE4Ed07016602;
// Create consumer
SaysGM saysGm = new SaysGM(registry);
console2.log("Deployed SaysHello: ", address(saysGm));
// Execute
vm.stopBroadcast();
vm.broadcast();
}
}

@@ -0,0 +1,55 @@
// SPDX-License-Identifier: BSD-3-Clause-Clear
pragma solidity ^0.8.13;
import {console2} from "forge-std/console2.sol";
import {CallbackConsumer} from "infernet-sdk/consumer/Callback.sol";
contract SaysGM is CallbackConsumer {
constructor(address registry) CallbackConsumer(registry) {}
function sayGM(uint256 paymentAmount, address wallet) public {
_requestCompute(
"hello-world",
bytes("Good morning!"),
1, // redundancy
address(0), // paymentToken
paymentAmount,
wallet,
address(0) // prover
);
}
function _receiveCompute(
uint32 subscriptionId,
uint32 interval,
uint16 redundancy,
address node,
bytes calldata input,
bytes calldata output,
bytes calldata proof,
bytes32 containerId,
uint256 index
) internal override {
console2.log("\n\n"
"_____ _____ _______ _ _ _\n"
"| __ \\|_ _|__ __| | | | /\\ | |\n"
"| |__) | | | | | | | | | / \\ | |\n"
"| _ / | | | | | | | |/ /\\ \\ | |\n"
"| | \\ \\ _| |_ | | | |__| / ____ \\| |____\n"
"|_| \\_\\_____| |_| \\____/_/ \\_\\______|\n\n");
console2.log("subscription Id", subscriptionId);
console2.log("interval", interval);
console2.log("redundancy", redundancy);
console2.log("node", node);
console2.log("input:");
console2.logBytes(input);
console2.log("output:");
console2.logBytes(output);
(string memory decoded)= abi.decode(output, (string));
console2.log("decoded output: ", decoded);
console2.log("proof:");
console2.logBytes(proof);
}
}

@@ -7,12 +7,15 @@ ENV PYTHONDONTWRITEBYTECODE 1
 ENV PIP_NO_CACHE_DIR 1
 ENV RUNTIME docker
 ENV PYTHONPATH src
+ARG index_url
+ENV UV_EXTRA_INDEX_URL ${index_url}
 RUN apt-get update
 RUN apt-get install -y git curl
 # install uv
-ADD --chmod=755 https://astral.sh/uv/install.sh /install.sh
+ADD https://astral.sh/uv/install.sh /install.sh
+RUN chmod 755 /install.sh
 RUN /install.sh && rm /install.sh
 COPY src/requirements.txt .

@@ -9,7 +9,7 @@ ifdef CI
 	mkdir -p wallet # in CI we don't have a wallet directory. This enables to bypass that and ensure that the image
 	# is built successfully
 endif
-	@docker build -t $(TAG) .
+	@docker build -t $(TAG) --build-arg index_url=$(index_url) .
 wallet_dir ?= /app/wallet

@@ -7,7 +7,7 @@
     "enabled": true,
     "trail_head_blocks": 0,
     "rpc_url": "http://host.docker.internal:8545",
-    "coordinator_address": "0x5FbDB2315678afecb367f032d93F642f64180aa3",
+    "registry_address": "0x663F3ad617193148711d28f5334eE4Ed07016602",
     "wallet": {
       "max_gas_limit": 4000000,
       "private_key": "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
@@ -23,6 +23,10 @@
     "port": 6379
   },
   "forward_stats": true,
+  "snapshot_sync": {
+    "sleep": 3,
+    "batch_size": 100
+  },
   "containers": [
     {
       "id": "prompt-to-nft",
@@ -36,18 +40,8 @@
       "env": {
         "ARWEAVE_WALLET_FILE_PATH": "wallet/keyfile-arweave.json",
         "IMAGE_GEN_SERVICE_URL": "http://your.services.ip:port"
-      }
-    },
-    {
-      "id": "anvil-node",
-      "image": "ritualnetwork/infernet-anvil:0.0.0",
-      "external": true,
-      "port": "8545",
-      "allowed_delegate_addresses": [],
-      "allowed_addresses": [],
-      "allowed_ips": [],
-      "command": "",
-      "env": {}
+      },
+      "accepted_payments": {}
     }
   ]
 }

@@ -5,9 +5,9 @@ from typing import Any, cast
 import aiohttp
 from eth_abi import decode, encode  # type: ignore
-from infernet_ml.utils.arweave import upload, load_wallet
-from infernet_ml.utils.service_models import InfernetInput, InfernetInputSource
+from infernet_ml.utils.service_models import InfernetInput, JobLocation
 from quart import Quart, request
+from ritual_arweave.file_manager import FileManager
 log = logging.getLogger(__name__)
@@ -29,7 +29,6 @@ async def run_inference(prompt: str, output_path: str) -> None:
 def ensure_env_vars() -> None:
     if not os.getenv("IMAGE_GEN_SERVICE_URL"):
         raise ValueError("IMAGE_GEN_SERVICE_URL environment variable not set")
-    load_wallet()
 def create_app() -> Quart:
@@ -54,50 +53,59 @@ def create_app() -> Quart:
         infernet_input: InfernetInput = InfernetInput(**req_data)
         temp_file = "image.png"
-        if infernet_input.source == InfernetInputSource.OFFCHAIN:
-            prompt: str = cast(dict[str, str], infernet_input.data)["prompt"]
-        else:
-            # On-chain requests are sent as a generalized hex-string which we will
-            # decode to the appropriate format.
-            (prompt, mintTo) = decode(
-                ["string", "address"], bytes.fromhex(cast(str, infernet_input.data))
-            )
-            log.info("mintTo: %s", mintTo)
-            log.info("prompt: %s", prompt)
+        match infernet_input:
+            case InfernetInput(source=JobLocation.OFFCHAIN):
+                prompt: str = cast(dict[str, str], infernet_input.data)["prompt"]
+            case InfernetInput(source=JobLocation.ONCHAIN):
+                # On-chain requests are sent as a generalized hex-string which we will
+                # decode to the appropriate format.
+                (prompt, mintTo) = decode(
+                    ["string", "address"], bytes.fromhex(cast(str, infernet_input.data))
+                )
+                log.info("mintTo: %s", mintTo)
+                log.info("prompt: %s", prompt)
+            case _:
+                raise ValueError("Invalid source")
         # run the inference and download the image to a temp file
         await run_inference(prompt, temp_file)
-        tx = upload(Path(temp_file), {"Content-Type": "image/png"})
+        tx = FileManager(wallet_path=os.environ["ARWEAVE_WALLET_FILE_PATH"]).upload(
+            Path(temp_file), {"Content-Type": "image/png"}
+        )
-        if infernet_input.source == InfernetInputSource.OFFCHAIN:
-            """
-            In case of an off-chain request, the result is returned as is.
-            """
-            return {
-                "prompt": prompt,
-                "hash": tx.id,
-                "image_url": f"https://arweave.net/{tx.id}",
-            }
-        else:
-            """
-            In case of an on-chain request, the result is returned in the format:
-            {
-                "raw_input": str,
-                "processed_input": str,
-                "raw_output": str,
-                "processed_output": str,
-                "proof": str,
-            }
-            refer to: https://docs.ritual.net/infernet/node/containers for more info.
-            """
-            return {
-                "raw_input": infernet_input.data,
-                "processed_input": "",
-                "raw_output": encode(["string"], [tx.id]).hex(),
-                "processed_output": "",
-                "proof": "",
-            }
+        match infernet_input:
+            case InfernetInput(destination=JobLocation.OFFCHAIN):
+                """
+                In case of an off-chain request, the result is returned as is.
+                """
+                return {
+                    "prompt": prompt,
+                    "hash": tx.id,
+                    "image_url": f"https://arweave.net/{tx.id}",
+                }
+            case InfernetInput(destination=JobLocation.ONCHAIN):
+                """
+                In case of an on-chain request, the result is returned in the format:
+                {
+                    "raw_input": str,
+                    "processed_input": str,
+                    "raw_output": str,
+                    "processed_output": str,
+                    "proof": str,
+                }
+                refer to: https://docs.ritual.net/infernet/node/containers for more
+                info.
+                """
+                return {
+                    "raw_input": infernet_input.data,
+                    "processed_input": "",
+                    "raw_output": encode(["string"], [tx.id]).hex(),
+                    "processed_output": "",
+                    "proof": "",
+                }
+            case _:
+                raise ValueError("Invalid destination")
     return app

@@ -1,5 +1,4 @@
 quart==0.19.4
-infernet_ml==0.1.0
-PyArweave @ git+https://github.com/ritual-net/pyarweave.git
+infernet-ml==1.0.0
 web3==6.15.0
-tqdm==4.66.1
+tqdm==4.66.3

@@ -1 +1 @@
-Subproject commit e4aef94c1768803a16fe19f7ce8b65defd027cfd
+Subproject commit 52715a217dc51d0de15877878ab8213f6cbbbab5

@@ -1 +1 @@
-Subproject commit 2d04a7f5ed64738218941e5d7a7270382f191a01
+Subproject commit 8e6cd6f5cbd66dc9baacb895a2ed8fe2c9ee3b6f

@@ -5,7 +5,8 @@ import {Script, console2} from "forge-std/Script.sol";
 import {DiffusionNFT} from "../src/DiffusionNFT.sol";
 contract CallContract is Script {
     string defaultPrompt = "A picture of a shrimp dunking a basketball";
     function run() public {
         // Setup wallet
         uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY");
@@ -13,7 +14,7 @@ string defaultPrompt = "A picture of a shrimp dunking a basketball";
         string memory prompt = vm.envOr("prompt", defaultPrompt);
         vm.startBroadcast(deployerPrivateKey);
-        DiffusionNFT nft = DiffusionNFT(0x663F3ad617193148711d28f5334eE4Ed07016602);
+        DiffusionNFT nft = DiffusionNFT(0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e);
         nft.mint(prompt, mintTo);

@@ -14,9 +14,9 @@ contract Deploy is Script {
         address deployerAddress = vm.addr(deployerPrivateKey);
         console2.log("Loaded deployer: ", deployerAddress);
-        address coordinator = 0x5FbDB2315678afecb367f032d93F642f64180aa3;
+        address registry = 0x663F3ad617193148711d28f5334eE4Ed07016602;
         // Create consumer
-        DiffusionNFT nft = new DiffusionNFT(coordinator);
+        DiffusionNFT nft = new DiffusionNFT(registry);
         console2.log("Deployed IrisClassifier: ", address(nft));
         // Execute

@@ -11,10 +11,18 @@ contract DiffusionNFT is CallbackConsumer, ERC721 {
         "| _ / | | | | | | | |/ /\\ \\ | |\n" "| | \\ \\ _| |_ | | | |__| / ____ \\| |____\n"
         "|_| \\_\\_____| |_| \\____/_/ \\_\\______|\n\n";
-    constructor(address coordinator) CallbackConsumer(coordinator) ERC721("DiffusionNFT", "DN") {}
+    constructor(address registry) CallbackConsumer(registry) ERC721("DiffusionNFT", "DN") {}
     function mint(string memory prompt, address to) public {
-        _requestCompute("prompt-to-nft", abi.encode(prompt, to), 20 gwei, 1_000_000, 1);
+        _requestCompute(
+            "prompt-to-nft",
+            abi.encode(prompt, to),
+            1, // redundancy
+            address(0), // paymentToken
+            0, // paymentAmount
+            address(0), // wallet
+            address(0) // prover
+        );
     }
     uint256 public counter = 0;
@@ -38,7 +46,6 @@ contract DiffusionNFT is CallbackConsumer, ERC721 {
         return collection;
     }
     function _receiveCompute(
         uint32 subscriptionId,
         uint32 interval,
@@ -46,7 +53,9 @@ contract DiffusionNFT is CallbackConsumer, ERC721 {
         address node,
         bytes calldata input,
         bytes calldata output,
-        bytes calldata proof
+        bytes calldata proof,
+        bytes32 containerId,
+        uint256 index
     ) internal override {
         console2.log(EXTREMELY_COOL_BANNER);
         (bytes memory raw_output, bytes memory processed_output) = abi.decode(output, (bytes, bytes));

View File

@ -262,7 +262,7 @@ Notice that in [one of the steps above](#check-the-running-containers) we have a
By default, the [`anvil-node`](https://hub.docker.com/r/ritualnetwork/infernet-anvil) image used deploys the By default, the [`anvil-node`](https://hub.docker.com/r/ritualnetwork/infernet-anvil) image used deploys the
[Infernet SDK](https://docs.ritual.net/infernet/sdk/introduction) and other relevant contracts for you: [Infernet SDK](https://docs.ritual.net/infernet/sdk/introduction) and other relevant contracts for you:
- Coordinator: `0x5FbDB2315678afecb367f032d93F642f64180aa3` - Registry: `0x663F3ad617193148711d28f5334eE4Ed07016602`
- Primary node: `0x70997970C51812dc3A010C7d01b50e0d17dc79C8` - Primary node: `0x70997970C51812dc3A010C7d01b50e0d17dc79C8`
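As an optional sanity check, you can confirm from Python that the Registry is actually deployed on the local Anvil chain before deploying your own contract. This is a minimal web3.py sketch (not part of the repository's own scripts), assuming the default `anvil-node` RPC at `localhost:8545` and the Registry address listed above:

```python
from web3 import Web3

# Assumptions: the anvil-node container from this walkthrough is running on
# localhost:8545, and the Registry lives at the pre-deployed address above.
w3 = Web3(Web3.HTTPProvider("http://localhost:8545"))
registry = Web3.to_checksum_address("0x663F3ad617193148711d28f5334eE4Ed07016602")

code = w3.eth.get_code(registry)
assert len(code) > 0, "no bytecode at the Registry address; is anvil-node running?"
print(f"Registry bytecode size: {len(code)} bytes")
```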
### Deploy our NFT Consumer contract ### Deploy our NFT Consumer contract
@ -296,7 +296,7 @@ You should expect to see similar Anvil logs:
eth_getTransactionReceipt eth_getTransactionReceipt
Transaction: 0x0577dc98192d971bafb30d53cb217c9a9c16f92ab435d20a697024a4f122c048 Transaction: 0x0577dc98192d971bafb30d53cb217c9a9c16f92ab435d20a697024a4f122c048
Contract created: 0x663f3ad617193148711d28f5334ee4ed07016602 Contract created: 0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e
Gas used: 1582129 Gas used: 1582129
Block Number: 1 Block Number: 1
@ -307,7 +307,7 @@ eth_getTransactionByHash
``` ```
From our logs, we can see that the `DiffusionNFT` contract has been deployed to address From our logs, we can see that the `DiffusionNFT` contract has been deployed to address
`0x663f3ad617193148711d28f5334ee4ed07016602`. `0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e`.
### Call the contract ### Call the contract
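The repository drives this step with the Forge script (`CallContract.s.sol`); purely as an illustration of the same call from Python, here is a hedged web3.py sketch. It assumes the local Anvil RPC, a `PRIVATE_KEY` environment variable (the same key the Forge scripts read), the `DiffusionNFT` address printed in the deploy logs above, and a hand-written one-function ABI for `mint(string,address)`:

```python
import os
from web3 import Web3

# Illustrative sketch only -- the repository's supported path is the Forge script.
w3 = Web3(Web3.HTTPProvider("http://localhost:8545"))
acct = w3.eth.account.from_key(os.environ["PRIVATE_KEY"])  # same key the scripts use

mint_abi = [{
    "type": "function",
    "name": "mint",
    "stateMutability": "nonpayable",
    "inputs": [{"name": "prompt", "type": "string"}, {"name": "to", "type": "address"}],
    "outputs": [],
}]
nft = w3.eth.contract(
    address=Web3.to_checksum_address("0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e"),
    abi=mint_abi,
)

tx = nft.functions.mint(
    "A picture of a shrimp dunking a basketball",  # default prompt from the script
    acct.address,                                  # mint the NFT to ourselves
).build_transaction({"from": acct.address, "nonce": w3.eth.get_transaction_count(acct.address)})

signed = acct.sign_transaction(tx)
tx_hash = w3.eth.send_raw_transaction(signed.rawTransaction)
print("mint tx:", w3.eth.wait_for_transaction_receipt(tx_hash).transactionHash.hex())
```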


@ -5,6 +5,8 @@ WORKDIR /app
ENV PYTHONUNBUFFERED 1 ENV PYTHONUNBUFFERED 1
ENV PYTHONDONTWRITEBYTECODE 1 ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONPATH src ENV PYTHONPATH src
ARG index_url
ENV UV_EXTRA_INDEX_URL ${index_url}
WORKDIR /app WORKDIR /app
@ -12,7 +14,8 @@ RUN apt-get update
RUN apt-get install -y git curl ffmpeg libsm6 libxext6 RUN apt-get install -y git curl ffmpeg libsm6 libxext6
# install uv # install uv
ADD --chmod=755 https://astral.sh/uv/install.sh /install.sh ADD https://astral.sh/uv/install.sh /install.sh
RUN chmod 755 /install.sh
RUN /install.sh && rm /install.sh RUN /install.sh && rm /install.sh
COPY src/requirements.txt . COPY src/requirements.txt .


@ -5,7 +5,7 @@ TAG := $(DOCKER_ORG)/example-$(EXAMPLE_NAME)-infernet:latest
.phony: build run build-multiplatform .phony: build run build-multiplatform
build: build:
@docker build -t $(TAG) . @docker build -t $(TAG) --build-arg index_url=$(index_url) .
port_mapping ?= 0.0.0.0:3002:3000 port_mapping ?= 0.0.0.0:3002:3000


@ -1,10 +1,9 @@
diffusers~=0.19 diffusers~=0.19
invisible_watermark~=0.1 invisible_watermark~=0.1
transformers==4.36 transformers==4.38.0
accelerate~=0.21 accelerate~=0.21
safetensors~=0.3 safetensors~=0.3
Quart==0.19.4 Quart==0.19.4
jmespath==1.0.1 jmespath==1.0.1
huggingface-hub==0.20.3 huggingface-hub==0.20.3
infernet_ml==0.1.0 infernet-ml==1.0.0
PyArweave @ git+https://github.com/ritual-net/pyarweave.git


@ -11,7 +11,7 @@
"dependencies": { "dependencies": {
"@rainbow-me/rainbowkit": "^2.0.0", "@rainbow-me/rainbowkit": "^2.0.0",
"@tanstack/react-query": "^5.22.2", "@tanstack/react-query": "^5.22.2",
"next": "14.1.0", "next": "14.1.1",
"prettier": "^3.2.5", "prettier": "^3.2.5",
"react": "^18", "react": "^18",
"react-dom": "^18", "react-dom": "^18",
@ -27,6 +27,6 @@
"postcss": "^8", "postcss": "^8",
"tailwindcss": "^3.3.0", "tailwindcss": "^3.3.0",
"eslint": "^8", "eslint": "^8",
"eslint-config-next": "14.1.0" "eslint-config-next": "14.1.1"
} }
} }


@ -7,12 +7,15 @@ ENV PYTHONDONTWRITEBYTECODE 1
ENV PIP_NO_CACHE_DIR 1 ENV PIP_NO_CACHE_DIR 1
ENV RUNTIME docker ENV RUNTIME docker
ENV PYTHONPATH src ENV PYTHONPATH src
ARG index_url
ENV UV_EXTRA_INDEX_URL ${index_url}
RUN apt-get update RUN apt-get update
RUN apt-get install -y git curl RUN apt-get install -y git curl
# install uv # install uv
ADD --chmod=755 https://astral.sh/uv/install.sh /install.sh ADD https://astral.sh/uv/install.sh /install.sh
RUN chmod 755 /install.sh
RUN /install.sh && rm /install.sh RUN /install.sh && rm /install.sh
COPY src/requirements.txt . COPY src/requirements.txt .


@ -5,7 +5,7 @@ TAG := $(DOCKER_ORG)/example-$(EXAMPLE_NAME)-infernet:latest
.phony: build run build-multiplatform .phony: build run build-multiplatform
build: build:
@docker build -t $(TAG) . @docker build -t $(TAG) --build-arg index_url=$(index_url) .
run: run:
docker run -p 3000:3000 --env-file tgi-llm.env $(TAG) docker run -p 3000:3000 --env-file tgi-llm.env $(TAG)


@ -1,52 +1,46 @@
{ {
"log_path": "infernet_node.log", "log_path": "infernet_node.log",
"server": { "server": {
"port": 4000 "port": 4000
},
"chain": {
"enabled": true,
"trail_head_blocks": 0,
"rpc_url": "http://host.docker.internal:8545",
"coordinator_address": "0x5FbDB2315678afecb367f032d93F642f64180aa3",
"wallet": {
"max_gas_limit": 4000000,
"private_key": "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
}
},
"startup_wait": 1.0,
"docker": {
"username": "your-username",
"password": ""
},
"redis": {
"host": "redis",
"port": 6379
},
"forward_stats": true,
"containers": [
{
"id": "tgi-llm",
"image": "ritualnetwork/example-tgi-llm-infernet:latest",
"external": true,
"port": "3000",
"allowed_delegate_addresses": [],
"allowed_addresses": [],
"allowed_ips": [],
"command": "--bind=0.0.0.0:3000 --workers=2",
"env": {
"TGI_SERVICE_URL": "http://{your_service_ip}:{your_service_port}"
}
}, },
{ "chain": {
"id": "anvil-node", "enabled": true,
"image": "ritualnetwork/infernet-anvil:0.0.0", "trail_head_blocks": 0,
"external": true, "rpc_url": "http://host.docker.internal:8545",
"port": "8545", "registry_address": "0x663F3ad617193148711d28f5334eE4Ed07016602",
"allowed_delegate_addresses": [], "wallet": {
"allowed_addresses": [], "max_gas_limit": 4000000,
"allowed_ips": [], "private_key": "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
"command": "", }
"env": {} },
} "startup_wait": 1.0,
] "docker": {
"username": "your-username",
"password": ""
},
"redis": {
"host": "redis",
"port": 6379
},
"forward_stats": true,
"snapshot_sync": {
"sleep": 3,
"batch_size": 100
},
"containers": [
{
"id": "tgi-llm",
"image": "ritualnetwork/example-tgi-llm-infernet:latest",
"external": true,
"port": "3000",
"allowed_delegate_addresses": [],
"allowed_addresses": [],
"allowed_ips": [],
"command": "--bind=0.0.0.0:3000 --workers=2",
"env": {
"TGI_SERVICE_URL": "http://{your_service_ip}:{your_service_port}"
},
"accepted_payments": {}
}
]
} }


@ -2,10 +2,11 @@ import logging
import os import os
from typing import Any, cast from typing import Any, cast
from eth_abi import decode, encode # type: ignore from eth_abi.abi import decode, encode
from infernet_ml.utils.service_models import InfernetInput, InfernetInputSource from infernet_ml.utils.service_models import InfernetInput, JobLocation
from infernet_ml.workflows.inference.tgi_client_inference_workflow import ( from infernet_ml.workflows.inference.tgi_client_inference_workflow import (
TGIClientInferenceWorkflow, TGIClientInferenceWorkflow,
TgiInferenceRequest,
) )
from quart import Quart, request from quart import Quart, request
@ -16,7 +17,7 @@ def create_app() -> Quart:
app = Quart(__name__) app = Quart(__name__)
workflow = TGIClientInferenceWorkflow( workflow = TGIClientInferenceWorkflow(
server_url=cast(str, os.environ.get("TGI_SERVICE_URL")) server_url=os.environ["TGI_SERVICE_URL"],
) )
workflow.setup() workflow.setup()
@ -38,42 +39,51 @@ def create_app() -> Quart:
""" """
infernet_input: InfernetInput = InfernetInput(**req_data) infernet_input: InfernetInput = InfernetInput(**req_data)
if infernet_input.source == InfernetInputSource.OFFCHAIN: match infernet_input:
prompt = cast(dict[str, Any], infernet_input.data).get("prompt") case InfernetInput(source=JobLocation.OFFCHAIN):
else: prompt = cast(dict[str, Any], infernet_input.data).get("prompt")
# On-chain requests are sent as a generalized hex-string which we will case InfernetInput(source=JobLocation.ONCHAIN):
# decode to the appropriate format. # On-chain requests are sent as a generalized hex-string which we will
(prompt,) = decode( # decode to the appropriate format.
["string"], bytes.fromhex(cast(str, infernet_input.data)) (prompt,) = decode(
) ["string"], bytes.fromhex(cast(str, infernet_input.data))
)
case _:
raise ValueError("Invalid source")
result: dict[str, Any] = workflow.inference({"text": prompt}) result: dict[str, Any] = workflow.inference(
TgiInferenceRequest(text=cast(str, prompt))
)
if infernet_input.source == InfernetInputSource.OFFCHAIN: match infernet_input:
""" case InfernetInput(destination=JobLocation.OFFCHAIN):
In case of an off-chain request, the result is returned as a dict. The """
infernet node expects a dict format. In case of an off-chain request, the result is returned as a dict. The
""" infernet node expects a dict format.
return {"data": result} """
else: return {"data": result}
""" case InfernetInput(destination=JobLocation.ONCHAIN):
In case of an on-chain request, the result is returned in the format: """
{ In case of an on-chain request, the result is returned in the format:
"raw_input": str, {
"processed_input": str, "raw_input": str,
"raw_output": str, "processed_input": str,
"processed_output": str, "raw_output": str,
"proof": str, "processed_output": str,
} "proof": str,
refer to: https://docs.ritual.net/infernet/node/containers for more info. }
""" refer to: https://docs.ritual.net/infernet/node/containers for more
return { info.
"raw_input": "", """
"processed_input": "", return {
"raw_output": encode(["string"], [result]).hex(), "raw_input": "",
"processed_output": "", "processed_input": "",
"proof": "", "raw_output": encode(["string"], [result]).hex(),
} "processed_output": "",
"proof": "",
}
case _:
raise ValueError("Invalid destination")
return app return app


@ -1,6 +1,5 @@
quart==0.19.4 quart==0.19.4
infernet_ml==0.1.0 infernet-ml==1.0.0
PyArweave @ git+https://github.com/ritual-net/pyarweave.git infernet-ml[tgi_inference]==1.0.0
web3==6.15.0 web3==6.15.0
retry2==0.9.5 retry2==0.9.5
text-generation==0.6.1

@ -1 +1 @@
Subproject commit e4aef94c1768803a16fe19f7ce8b65defd027cfd Subproject commit 52715a217dc51d0de15877878ab8213f6cbbbab5

@ -1 +1 @@
Subproject commit 2d04a7f5ed64738218941e5d7a7270382f191a01 Subproject commit 8e6cd6f5cbd66dc9baacb895a2ed8fe2c9ee3b6f


@ -10,7 +10,7 @@ contract CallContract is Script {
uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY"); uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY");
vm.startBroadcast(deployerPrivateKey); vm.startBroadcast(deployerPrivateKey);
Prompter prompter = Prompter(0x663F3ad617193148711d28f5334eE4Ed07016602); Prompter prompter = Prompter(0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e);
prompter.promptLLM(vm.envString("prompt")); prompter.promptLLM(vm.envString("prompt"));


@ -14,9 +14,9 @@ contract Deploy is Script {
address deployerAddress = vm.addr(deployerPrivateKey); address deployerAddress = vm.addr(deployerPrivateKey);
console2.log("Loaded deployer: ", deployerAddress); console2.log("Loaded deployer: ", deployerAddress);
address coordinator = 0x5FbDB2315678afecb367f032d93F642f64180aa3; address registry = 0x663F3ad617193148711d28f5334eE4Ed07016602;
// Create consumer // Create consumer
Prompter prompter = new Prompter(coordinator); Prompter prompter = new Prompter(registry);
console2.log("Deployed Prompter: ", address(prompter)); console2.log("Deployed Prompter: ", address(prompter));
// Execute // Execute


@ -13,15 +13,17 @@ contract Prompter is CallbackConsumer {
"| | \\ \\ _| |_ | | | |__| / ____ \\| |____ \n" "| | \\ \\ _| |_ | | | |__| / ____ \\| |____ \n"
"|_| \\_\\_____| |_| \\____/_/ \\_\\______| \n\n"; "|_| \\_\\_____| |_| \\____/_/ \\_\\______| \n\n";
constructor(address coordinator) CallbackConsumer(coordinator) {} constructor(address registry) CallbackConsumer(registry) {}
function promptLLM(string calldata prompt) public { function promptLLM(string calldata prompt) public {
_requestCompute( _requestCompute(
"tgi-llm", "tgi-llm",
abi.encode(prompt), abi.encode(prompt),
20 gwei, 1, // redundancy
1_000_000, address(0), // paymentToken
1 0, // paymentAmount
address(0), // wallet
address(0) // prover
); );
} }
@ -32,7 +34,9 @@ contract Prompter is CallbackConsumer {
address node, address node,
bytes calldata input, bytes calldata input,
bytes calldata output, bytes calldata output,
bytes calldata proof bytes calldata proof,
bytes32 containerId,
uint256 index
) internal override { ) internal override {
console2.log(EXTREMELY_COOL_BANNER); console2.log(EXTREMELY_COOL_BANNER);
(bytes memory raw_output, bytes memory processed_output) = abi.decode(output, (bytes, bytes)); (bytes memory raw_output, bytes memory processed_output) = abi.decode(output, (bytes, bytes));


@ -334,7 +334,7 @@ Notice that in [the step above](#check-the-running-containers) we have an Anvil
By default, the [`anvil-node`](https://hub.docker.com/r/ritualnetwork/infernet-anvil) image used deploys the By default, the [`anvil-node`](https://hub.docker.com/r/ritualnetwork/infernet-anvil) image used deploys the
[Infernet SDK](https://docs.ritual.net/infernet/sdk/introduction) and other relevant contracts for you: [Infernet SDK](https://docs.ritual.net/infernet/sdk/introduction) and other relevant contracts for you:
- Coordinator: `0x5FbDB2315678afecb367f032d93F642f64180aa3` - Registry: `0x663F3ad617193148711d28f5334eE4Ed07016602`
- Primary node: `0x70997970C51812dc3A010C7d01b50e0d17dc79C8` - Primary node: `0x70997970C51812dc3A010C7d01b50e0d17dc79C8`
### Deploy our `Prompter` smart contract ### Deploy our `Prompter` smart contract
@ -367,7 +367,7 @@ You should expect to see similar Anvil logs:
eth_getTransactionReceipt eth_getTransactionReceipt
Transaction: 0x17a9d17cc515d39eef26b6a9427e04ed6f7ce6572d9756c07305c2df78d93ffe Transaction: 0x17a9d17cc515d39eef26b6a9427e04ed6f7ce6572d9756c07305c2df78d93ffe
Contract created: 0x663f3ad617193148711d28f5334ee4ed07016602 Contract created: 0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e
Gas used: 731312 Gas used: 731312
Block Number: 1 Block Number: 1
@ -378,7 +378,7 @@ eth_getTransactionByHash
``` ```
From our logs, we can see that the `Prompter` contract has been deployed to address From our logs, we can see that the `Prompter` contract has been deployed to address
`0x663f3ad617193148711d28f5334ee4ed07016602`. `0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e`.
### Call the contract ### Call the contract
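As with the NFT example, the supported path is the Forge script; for illustration only, the equivalent `promptLLM` call from Python under the same assumptions (local Anvil RPC, `PRIVATE_KEY` env var, the `Prompter` address from the logs above, and a hand-written one-function ABI) would be:

```python
import os
from web3 import Web3

# Illustrative sketch only -- not part of the repository's scripts.
w3 = Web3(Web3.HTTPProvider("http://localhost:8545"))
acct = w3.eth.account.from_key(os.environ["PRIVATE_KEY"])

prompter = w3.eth.contract(
    address=Web3.to_checksum_address("0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e"),
    abi=[{
        "type": "function",
        "name": "promptLLM",
        "stateMutability": "nonpayable",
        "inputs": [{"name": "prompt", "type": "string"}],
        "outputs": [],
    }],
)

tx = prompter.functions.promptLLM("What is 2 + 2?").build_transaction(
    {"from": acct.address, "nonce": w3.eth.get_transaction_count(acct.address)}
)
signed = acct.sign_transaction(tx)
print("tx:", w3.eth.send_raw_transaction(signed.rawTransaction).hex())
```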


@ -5,6 +5,8 @@ WORKDIR /app
ENV PYTHONUNBUFFERED 1 ENV PYTHONUNBUFFERED 1
ENV PYTHONDONTWRITEBYTECODE 1 ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONPATH src ENV PYTHONPATH src
ARG index_url
ENV UV_EXTRA_INDEX_URL ${index_url}
WORKDIR /app WORKDIR /app


@ -5,7 +5,7 @@ TAG := $(DOCKER_ORG)/example-$(EXAMPLE_NAME)-infernet:latest
.phony: build run publish .phony: build run publish
build: build:
@docker build -t $(TAG) . @docker build -t $(TAG) --build-arg index_url=$(index_url) .
run: build run: build
docker run --env-file ./gradio_ui.env -p 3001:7860 $(TAG) docker run --env-file ./gradio_ui.env -p 3001:7860 $(TAG)


@ -1,4 +1,4 @@
python-dotenv==1.0.0 python-dotenv==1.0.0
gradio==3.47.1 gradio==4.19.2
huggingface-hub==0.17.3 huggingface-hub==0.17.3
text-generation==0.6.1 text-generation==0.6.1


@ -7,12 +7,15 @@ ENV PYTHONDONTWRITEBYTECODE 1
ENV PIP_NO_CACHE_DIR 1 ENV PIP_NO_CACHE_DIR 1
ENV RUNTIME docker ENV RUNTIME docker
ENV PYTHONPATH src ENV PYTHONPATH src
ARG index_url
ENV UV_EXTRA_INDEX_URL ${index_url}
RUN apt-get update RUN apt-get update
RUN apt-get install -y git curl RUN apt-get install -y git curl
# install uv # install uv
ADD --chmod=755 https://astral.sh/uv/install.sh /install.sh ADD https://astral.sh/uv/install.sh /install.sh
RUN chmod 755 /install.sh
RUN /install.sh && rm /install.sh RUN /install.sh && rm /install.sh
COPY src/requirements.txt . COPY src/requirements.txt .


@ -5,7 +5,7 @@ TAG := $(DOCKER_ORG)/example-$(EXAMPLE_NAME)-infernet:latest
.phony: build run .phony: build run
build: build:
@docker build -t $(TAG) . @docker build -t $(TAG) --build-arg index_url=$(index_url) .
run: run:
docker run -p 3000:3000 $(TAG) docker run -p 3000:3000 $(TAG)


@ -8,9 +8,9 @@ repository.
## Overview ## Overview
We're making use of We're making use of
the [TorchInferenceWorkflow](https://github.com/ritual-net/infernet-ml-internal/blob/main/src/ml/workflows/inference/torch_inference_workflow.py) the [TorchInferenceWorkflow](https://github.com/ritual-net/infernet-ml/blob/main/src/ml/workflows/inference/torch_inference_workflow.py)
class to run the model. This is one of many workflows that we currently support in our class to run the model. This is one of many workflows that we currently support in our
[infernet-ml](https://github.com/ritual-net/infernet-ml-internal). Consult the library's [infernet-ml](https://github.com/ritual-net/infernet-ml). Consult the library's
documentation for more info on workflows that documentation for more info on workflows that
are supported. are supported.
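Pieced together from the service changes in this commit, a minimal standalone sketch of the 1.0.0-style workflow usage looks roughly like the following. The repo id, filename, and `(1, 4)` input shape mirror the diff below; the sample feature values are placeholders, not real iris measurements:

```python
from infernet_ml.utils.common_types import TensorInput
from infernet_ml.utils.model_loader import HFLoadArgs, ModelSource
from infernet_ml.workflows.inference.torch_inference_workflow import (
    TorchInferenceInput,
    TorchInferenceWorkflow,
)

# The model is pulled from the Hugging Face Hub, as in the updated service code.
workflow = TorchInferenceWorkflow(
    model_source=ModelSource.HUGGINGFACE_HUB,
    load_args=HFLoadArgs(repo_id="Ritual-Net/iris-dataset", filename="iris.torch"),
)
workflow.setup()

# One row of four (scaled) iris features; the values here are placeholders.
features = [[1.0, 1.0, 1.0, 1.0]]
result = workflow.inference(
    TorchInferenceInput(input=TensorInput(dtype="float", shape=(1, 4), values=features))
)
print(result.outputs)
```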


@ -7,7 +7,7 @@
"enabled": true, "enabled": true,
"trail_head_blocks": 0, "trail_head_blocks": 0,
"rpc_url": "http://host.docker.internal:8545", "rpc_url": "http://host.docker.internal:8545",
"coordinator_address": "0x5FbDB2315678afecb367f032d93F642f64180aa3", "registry_address": "0x663F3ad617193148711d28f5334eE4Ed07016602",
"wallet": { "wallet": {
"max_gas_limit": 4000000, "max_gas_limit": 4000000,
"private_key": "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d" "private_key": "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
@ -23,6 +23,10 @@
"port": 6379 "port": 6379
}, },
"forward_stats": true, "forward_stats": true,
"snapshot_sync": {
"sleep": 3,
"batch_size": 100
},
"containers": [ "containers": [
{ {
"id": "torch-iris", "id": "torch-iris",
@ -33,18 +37,8 @@
"allowed_addresses": [], "allowed_addresses": [],
"allowed_ips": [], "allowed_ips": [],
"command": "--bind=0.0.0.0:3000 --workers=2", "command": "--bind=0.0.0.0:3000 --workers=2",
"env": {} "env": {},
}, "accepted_payments": {}
{
"id": "anvil-node",
"image": "ritualnetwork/infernet-anvil:0.0.0",
"external": true,
"port": "8545",
"allowed_delegate_addresses": [],
"allowed_addresses": [],
"allowed_ips": [],
"command": "",
"env": {}
} }
] ]
} }


@ -1,11 +1,16 @@
import logging import logging
from typing import Any, cast, List from typing import Any, cast, List
from infernet_ml.utils.common_types import TensorInput
from eth_abi import decode, encode # type: ignore from eth_abi import decode, encode # type: ignore
from infernet_ml.utils.model_loader import (
HFLoadArgs,
)
from infernet_ml.utils.model_loader import ModelSource from infernet_ml.utils.model_loader import ModelSource
from infernet_ml.utils.service_models import InfernetInput, InfernetInputSource from infernet_ml.utils.service_models import InfernetInput, JobLocation
from infernet_ml.workflows.inference.torch_inference_workflow import ( from infernet_ml.workflows.inference.torch_inference_workflow import (
TorchInferenceWorkflow, TorchInferenceWorkflow,
TorchInferenceInput,
) )
from quart import Quart, request from quart import Quart, request
@ -21,10 +26,10 @@ def create_app() -> Quart:
app = Quart(__name__) app = Quart(__name__)
# we are downloading the model from the hub. # we are downloading the model from the hub.
# model repo is located at: https://huggingface.co/Ritual-Net/iris-dataset # model repo is located at: https://huggingface.co/Ritual-Net/iris-dataset
model_source = ModelSource.HUGGINGFACE_HUB workflow = TorchInferenceWorkflow(
model_args = {"repo_id": "Ritual-Net/iris-dataset", "filename": "iris.torch"} model_source=ModelSource.HUGGINGFACE_HUB,
load_args=HFLoadArgs(repo_id="Ritual-Net/iris-dataset", filename="iris.torch"),
workflow = TorchInferenceWorkflow(model_source=model_source, model_args=model_args) )
workflow.setup() workflow.setup()
@app.route("/") @app.route("/")
@ -46,16 +51,17 @@ def create_app() -> Quart:
""" """
infernet_input: InfernetInput = InfernetInput(**req_data) infernet_input: InfernetInput = InfernetInput(**req_data)
if infernet_input.source == InfernetInputSource.OFFCHAIN: match infernet_input:
web2_input = cast(dict[str, Any], infernet_input.data) case InfernetInput(source=JobLocation.OFFCHAIN):
values = cast(List[List[float]], web2_input["input"]) web2_input = cast(dict[str, Any], infernet_input.data)
else: values = cast(List[List[float]], web2_input["input"])
# On-chain requests are sent as a generalized hex-string which we will case InfernetInput(source=JobLocation.ONCHAIN):
# decode to the appropriate format. web3_input: List[int] = decode(
web3_input: List[int] = decode( ["uint256[]"], bytes.fromhex(cast(str, infernet_input.data))
["uint256[]"], bytes.fromhex(cast(str, infernet_input.data)) )[0]
)[0] values = [[float(v) / 1e6 for v in web3_input]]
values = [[float(v) / 1e6 for v in web3_input]] case _:
raise ValueError("Invalid source")
""" """
The input to the torch inference workflow needs to conform to this format: The input to the torch inference workflow needs to conform to this format:
@ -66,39 +72,52 @@ def create_app() -> Quart:
} }
For more information refer to: For more information refer to:
https://docs.ritual.net/ml-workflows/inference-workflows/torch_inference_workflow https://infernet-ml.docs.ritual.net/reference/infernet_ml/workflows/inference/torch_inference_workflow/?h=torch
""" """ # noqa: E501
inference_result = workflow.inference({"dtype": "float", "values": values}) log.info("Input values: %s", values)
result = [o.detach().numpy().reshape([-1]).tolist() for o in inference_result] _input = TensorInput(
dtype="float",
shape=(1, 4),
values=values,
)
if infernet_input.source == InfernetInputSource.OFFCHAIN: iris_inference_input = TorchInferenceInput(input=_input)
"""
In case of an off-chain request, the result is returned as is. inference_result = workflow.inference(iris_inference_input)
"""
return {"result": result} result = inference_result.outputs
else:
""" match infernet_input:
In case of an on-chain request, the result is returned in the format: case InfernetInput(destination=JobLocation.OFFCHAIN):
{ """
"raw_input": str, In case of an off-chain request, the result is returned as is.
"processed_input": str, """
"raw_output": str, return {"result": result}
"processed_output": str, case InfernetInput(destination=JobLocation.ONCHAIN):
"proof": str, """
} In case of an on-chain request, the result is returned in the format:
refer to: https://docs.ritual.net/infernet/node/containers for more info. {
""" "raw_input": str,
predictions = cast(List[List[float]], result) "processed_input": str,
predictions_normalized = [int(p * 1e6) for p in predictions[0]] "raw_output": str,
return { "processed_output": str,
"raw_input": "", "proof": str,
"processed_input": "", }
"raw_output": encode(["uint256[]"], [predictions_normalized]).hex(), refer to: https://docs.ritual.net/infernet/node/containers for more
"processed_output": "", info.
"proof": "", """
} predictions_normalized = [int(p * 1e6) for p in result]
return {
"raw_input": "",
"processed_input": "",
"raw_output": encode(["uint256[]"], [predictions_normalized]).hex(),
"processed_output": "",
"proof": "",
}
case _:
raise ValueError("Invalid destination")
return app return app


@ -1,6 +1,6 @@
quart==0.19.4 quart==0.19.4
infernet_ml==0.1.0 infernet-ml==1.0.0
PyArweave @ git+https://github.com/ritual-net/pyarweave.git infernet-ml[torch_inference]==1.0.0
huggingface-hub==0.17.3 huggingface-hub==0.17.3
sk2torch==1.2.0 sk2torch==1.2.0
torch==2.1.2 torch==2.1.2

@ -1 +1 @@
Subproject commit e4aef94c1768803a16fe19f7ce8b65defd027cfd Subproject commit 52715a217dc51d0de15877878ab8213f6cbbbab5

@ -1 +1 @@
Subproject commit 2d04a7f5ed64738218941e5d7a7270382f191a01 Subproject commit 8e6cd6f5cbd66dc9baacb895a2ed8fe2c9ee3b6f


@ -10,7 +10,7 @@ contract CallContract is Script {
uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY"); uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY");
vm.startBroadcast(deployerPrivateKey); vm.startBroadcast(deployerPrivateKey);
IrisClassifier irisClassifier = IrisClassifier(0x663F3ad617193148711d28f5334eE4Ed07016602); IrisClassifier irisClassifier = IrisClassifier(0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e);
irisClassifier.classifyIris(); irisClassifier.classifyIris();


@ -14,9 +14,9 @@ contract Deploy is Script {
address deployerAddress = vm.addr(deployerPrivateKey); address deployerAddress = vm.addr(deployerPrivateKey);
console2.log("Loaded deployer: ", deployerAddress); console2.log("Loaded deployer: ", deployerAddress);
address coordinator = 0x5FbDB2315678afecb367f032d93F642f64180aa3; address registry = 0x663F3ad617193148711d28f5334eE4Ed07016602;
// Create consumer // Create consumer
IrisClassifier classifier = new IrisClassifier(coordinator); IrisClassifier classifier = new IrisClassifier(registry);
console2.log("Deployed IrisClassifier: ", address(classifier)); console2.log("Deployed IrisClassifier: ", address(classifier));
// Execute // Execute


@ -14,7 +14,7 @@ contract IrisClassifier is CallbackConsumer {
"| | \\ \\ _| |_ | | | |__| / ____ \\| |____\n" "| | \\ \\ _| |_ | | | |__| / ____ \\| |____\n"
"|_| \\_\\_____| |_| \\____/_/ \\_\\______|\n\n"; "|_| \\_\\_____| |_| \\____/_/ \\_\\______|\n\n";
constructor(address coordinator) CallbackConsumer(coordinator) {} constructor(address registry) CallbackConsumer(registry) {}
function classifyIris() public { function classifyIris() public {
/// @dev Iris data is in the following format: /// @dev Iris data is in the following format:
@ -38,9 +38,11 @@ contract IrisClassifier is CallbackConsumer {
_requestCompute( _requestCompute(
"torch-iris", "torch-iris",
abi.encode(iris_data), abi.encode(iris_data),
20 gwei, 1, // redundancy
1_000_000, address(0), // paymentToken
1 0, // paymentAmount
address(0), // wallet
address(0) // prover
); );
} }
@ -51,7 +53,9 @@ contract IrisClassifier is CallbackConsumer {
address node, address node,
bytes calldata input, bytes calldata input,
bytes calldata output, bytes calldata output,
bytes calldata proof bytes calldata proof,
bytes32 containerId,
uint256 index
) internal override { ) internal override {
console2.log(EXTREMELY_COOL_BANNER); console2.log(EXTREMELY_COOL_BANNER);
(bytes memory raw_output, bytes memory processed_output) = abi.decode(output, (bytes, bytes)); (bytes memory raw_output, bytes memory processed_output) = abi.decode(output, (bytes, bytes));


@ -179,7 +179,7 @@ We already have a public [anvil node](https://hub.docker.com/r/ritualnetwork/inf
corresponding infernet sdk contracts deployed, along with a node that has corresponding infernet sdk contracts deployed, along with a node that has
registered itself to listen to on-chain subscription events. registered itself to listen to on-chain subscription events.
* Coordinator Address: `0x5FbDB2315678afecb367f032d93F642f64180aa3` * Registry Address: `0x663F3ad617193148711d28f5334eE4Ed07016602`
* Node Address: `0x70997970C51812dc3A010C7d01b50e0d17dc79C8` (This is the second account in the anvil's accounts.) * Node Address: `0x70997970C51812dc3A010C7d01b50e0d17dc79C8` (This is the second account in the anvil's accounts.)
### Monitoring the EVM Logs ### Monitoring the EVM Logs
@ -210,7 +210,7 @@ eth_sendRawTransaction
eth_getTransactionReceipt eth_getTransactionReceipt
Transaction: 0x8e7e96d0a062285ee6fea864c43c29af65b962d260955e6284ab79dae145b32c Transaction: 0x8e7e96d0a062285ee6fea864c43c29af65b962d260955e6284ab79dae145b32c
Contract created: 0x663f3ad617193148711d28f5334ee4ed07016602 Contract created: 0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e
Gas used: 725947 Gas used: 725947
Block Number: 1 Block Number: 1
@ -224,7 +224,7 @@ eth_blockNumber
``` ```
beautiful, we can see that a new contract has been created beautiful, we can see that a new contract has been created
at `0x663f3ad617193148711d28f5334ee4ed07016602`. That's the address of at `0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e`. That's the address of
the `IrisClassifier` contract. We are now going to call this contract. To do so, the `IrisClassifier` contract. We are now going to call this contract. To do so,
we are using we are using
the [CallContract.s.sol](contracts/script/CallContract.s.sol) the [CallContract.s.sol](contracts/script/CallContract.s.sol)
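`CallContract.s.sol` remains the repository's way to trigger the classification; purely as an illustration, the same `classifyIris()` call from Python is only a few lines of web3.py, again assuming the local Anvil RPC, a `PRIVATE_KEY` env var, and the contract address from the logs above:

```python
import os
from web3 import Web3

# Illustrative sketch only -- the walkthrough itself uses the Forge script.
w3 = Web3(Web3.HTTPProvider("http://localhost:8545"))
acct = w3.eth.account.from_key(os.environ["PRIVATE_KEY"])

classifier = w3.eth.contract(
    address=Web3.to_checksum_address("0x13D69Cf7d6CE4218F646B759Dcf334D82c023d8e"),
    abi=[{"type": "function", "name": "classifyIris", "stateMutability": "nonpayable",
          "inputs": [], "outputs": []}],
)

tx = classifier.functions.classifyIris().build_transaction(
    {"from": acct.address, "nonce": w3.eth.get_transaction_count(acct.address)}
)
signed = acct.sign_transaction(tx)
print("tx:", w3.eth.send_raw_transaction(signed.rawTransaction).hex())
```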

20
pyproject.toml Normal file

@ -0,0 +1,20 @@
[project]
name = "infernet-container-starter"
version = "1.0.0"
description = "A repository of infernet examples"
authors = [
{ name = "ritual", email = "hello@ritual.net" }
]
readme = "README.md"
requires-python = ">= 3.11"
[tool.mypy]
exclude = ['**/venv', '**/.venv']
[tool.ruff]
line-length = 89
[tool.isort]
profile = "black"
skip = [".venv", "venv", ".gitignore"]


@ -1,8 +1,8 @@
gunicorn==21.2.0 gunicorn==22.0.0
mypy==1.8.0 mypy==1.8.0
mypy-extensions==1.0.0 mypy-extensions==1.0.0
packaging==23.2 packaging==23.2
requests==2.31.0 requests==2.32.0
ruff==0.1.13 ruff==0.1.13
types-click==7.1.8 types-click==7.1.8
types-Flask==1.1.6 types-Flask==1.1.6
@ -13,10 +13,12 @@ types-Werkzeug==1.0.9
typing_extensions==4.9.0 typing_extensions==4.9.0
Flask==3.0.0 Flask==3.0.0
quart==0.19.4 quart==0.19.4
infernet_ml==0.1.0 infernet-ml==1.0.0
PyArweave @ git+https://github.com/ritual-net/pyarweave.git infernet-ml[torch_inference]==1.0.0
infernet-ml[onnx_inference]==1.0.0
infernet-ml[css_inference]==1.0.0
torch==2.2.1 torch==2.2.1
web3==6.15.0 web3==6.15.0
onnx==1.15.0 onnx==1.16.0
onnxruntime==1.17.1 onnxruntime==1.17.1
pre-commit==2.15.0 pre-commit==2.15.0