Remove b7s and add support for offchain-node

Author: conache
Date: 2024-08-07 19:27:04 +03:00
Parent: 5d70e9feee
Commit: d2b1ff68f9

6 changed files with 53 additions and 118 deletions


@@ -0,0 +1,37 @@
ALLORA_OFFCHAIN_NODE_CONFIG_JSON='{
  "wallet": {
    "addressKeyName": "test",
    "addressRestoreMnemonic": "",
    "addressAccountPassphrase": "",
    "alloraHomeDir": "",
    "gas": "1000000",
    "gasAdjustment": 1.0,
    "nodeRpc": "http://localhost:26657",
    "maxRetries": 1,
    "delay": 1,
    "submitTx": false
  },
  "worker": [
    {
      "topicId": 1,
      "inferenceEntrypointName": "api-worker-reputer",
      "loopSeconds": 5,
      "parameters": {
        "InferenceEndpoint": "http://source:8000/inference/{Token}",
        "Token": "ETH"
      }
    }
  ],
  "reputer": [
    {
      "topicId": 1,
      "reputerEntrypointName": "api-worker-reputer",
      "loopSeconds": 30,
      "minStake": 100000,
      "parameters": {
        "SourceOfTruthEndpoint": "http://source:8888/truth/{Token}/{BlockHeight}",
        "Token": "ethereum"
      }
    }
  ]
}'
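
This diff does not include the offchain node itself, so how it consumes the config above is not shown here; the worker entry only suggests a polling pattern in which Token is substituted into InferenceEndpoint and the result is fetched every loopSeconds seconds. A rough Python sketch of that assumed behavior (the helper name and loop are illustrative, not part of allora-offchain-node):

import time
import requests

# Values copied from the worker entry above; the polling logic itself is an assumption.
parameters = {"InferenceEndpoint": "http://source:8000/inference/{Token}", "Token": "ETH"}
loop_seconds = 5

def fetch_inference():
    # Substitute {Token} into the endpoint template and query the inference source.
    url = parameters["InferenceEndpoint"].replace("{Token}", parameters["Token"])
    return requests.get(url, timeout=10).text

while True:
    print(fetch_inference())
    time.sleep(loop_seconds)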

.gitignore

@@ -1,12 +1,12 @@
.DS_Store
__pycache__
*.pyc
.lake_cache/*
logs/*
.env
keys
data
inference-data
worker-data
head-data
lib
offchain-node-data
.env.*
!.env.*.example

Dockerfile_b7s

@@ -1,7 +0,0 @@
FROM alloranetwork/allora-inference-base:latest
USER root
RUN pip install requests
USER appuser
COPY main.py /app/

docker-compose.yml

@@ -3,14 +3,10 @@ services:
container_name: inference-basic-eth-pred
build:
context: .
dockerfile: Dockerfile_inference
command: python -u /app/app.py
ports:
- "8000:8000"
networks:
eth-model-local:
aliases:
- inference
ipv4_address: 172.22.0.4
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8000/inference/ETH"]
interval: 10s
@@ -21,7 +17,9 @@ services:
updater:
container_name: updater-basic-eth-pred
build: .
build:
context: .
dockerfile: Dockerfile_inference
environment:
- INFERENCE_API_ADDRESS=http://inference:8000
command: >
@@ -34,78 +32,17 @@ services:
depends_on:
inference:
condition: service_healthy
networks:
eth-model-local:
aliases:
- updater
ipv4_address: 172.22.0.5
head:
container_name: head-basic-eth-pred
image: alloranetwork/allora-inference-base-head:latest
environment:
- HOME=/data
entrypoint:
- "/bin/bash"
- "-c"
- |
if [ ! -f /data/keys/priv.bin ]; then
echo "Generating new private keys..."
mkdir -p /data/keys
cd /data/keys
allora-keys
fi
allora-node --role=head --peer-db=/data/peerdb --function-db=/data/function-db \
--runtime-path=/app/runtime --runtime-cli=bls-runtime --workspace=/data/workspace \
--private-key=/data/keys/priv.bin --log-level=debug --port=9010 --rest-api=:6000
ports:
- "6000:6000"
volumes:
- ./head-data:/data
working_dir: /data
networks:
eth-model-local:
aliases:
- head
ipv4_address: 172.22.0.100
worker:
container_name: worker-basic-eth-pred
environment:
- INFERENCE_API_ADDRESS=http://inference:8000
- HOME=/data
build:
context: .
dockerfile: Dockerfile_b7s
entrypoint:
- "/bin/bash"
- "-c"
- |
if [ ! -f /data/keys/priv.bin ]; then
echo "Generating new private keys..."
mkdir -p /data/keys
cd /data/keys
allora-keys
fi
# Change boot-nodes below to the key advertised by your head
allora-node --role=worker --peer-db=/data/peerdb --function-db=/data/function-db \
--runtime-path=/app/runtime --runtime-cli=bls-runtime --workspace=/data/workspace \
--private-key=/data/keys/priv.bin --log-level=debug --port=9011 \
--boot-nodes=/ip4/172.22.0.100/tcp/9010/p2p/{HEAD-ID} \
--topic=allora-topic-1-worker
node:
container_name: offchain_node_test
image: allora-offchain-node:latest
volumes:
- ./worker-data:/data
working_dir: /data
- ./offchain-node-data:/data
depends_on:
- inference
- head
networks:
eth-model-local:
aliases:
- worker
ipv4_address: 172.22.0.10
inference:
condition: service_healthy
env_file:
- ./env.offchain-node
networks:
eth-model-local:
@@ -117,4 +54,3 @@ networks:
volumes:
inference-data:
worker-data:
head-data:
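
The node service now gates its startup on the inference container's health check (depends_on with condition: service_healthy) instead of on the removed head container. For reference, the probe that healthcheck performs can be reproduced outside Docker; this is an illustrative sketch only, and it assumes the "8000:8000" port mapping shown above is still published:

import time
import requests

# Same endpoint the compose healthcheck curls; localhost works only if port 8000 is published to the host.
HEALTH_URL = "http://localhost:8000/inference/ETH"

def wait_for_inference(retries=30, delay=10):
    # Poll until the inference API answers successfully, roughly mirroring `curl -f` semantics.
    for _ in range(retries):
        try:
            if requests.get(HEALTH_URL, timeout=5).ok:
                return True
        except requests.RequestException:
            pass
        time.sleep(delay)
    return False

if __name__ == "__main__":
    print("inference ready" if wait_for_inference() else "inference not reachable")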

main.py

@@ -1,31 +0,0 @@
import os
import requests
import sys
import json

INFERENCE_ADDRESS = os.environ["INFERENCE_API_ADDRESS"]


def process(token_name):
    response = requests.get(f"{INFERENCE_ADDRESS}/inference/{token_name}")
    content = response.text
    return content


if __name__ == "__main__":
    # Your code logic with the parsed argument goes here
    try:
        if len(sys.argv) < 5:
            value = json.dumps({"error": f"Not enough arguments provided: {len(sys.argv)}, expected 4 arguments: topic_id, blockHeight, blockHeightEval, default_arg"})
        else:
            topic_id = sys.argv[1]
            blockHeight = sys.argv[2]
            blockHeightEval = sys.argv[3]
            default_arg = sys.argv[4]

            response_inference = process(token_name=default_arg)
            response_dict = {"infererValue": response_inference}
            value = json.dumps(response_dict)
    except Exception as e:
        value = json.dumps({"error": {str(e)}})
    print(value)
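
For context, this script was the b7s function entrypoint: the deleted Dockerfile copied it into the image at /app/, and it expected four positional arguments (topic_id, blockHeight, blockHeightEval, default_arg) before printing a JSON result to stdout. A local invocation would have looked roughly like the following sketch (argument values are examples only, not taken from the repository):

import json
import os
import subprocess

# INFERENCE_API_ADDRESS must be set because main.py reads it at import time.
env = dict(os.environ, INFERENCE_API_ADDRESS="http://localhost:8000")

# Example positional arguments: topic_id, blockHeight, blockHeightEval, default_arg (the token name).
result = subprocess.run(
    ["python", "main.py", "1", "123456", "123455", "ETH"],
    capture_output=True, text=True, env=env,
)
print(json.loads(result.stdout))  # e.g. {"infererValue": "<inference response>"}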