Remove b7s and add support for offchain-node
parent 5d70e9feee
commit d2b1ff68f9

.env.offchain-node.example (new file, 37 lines)
@@ -0,0 +1,37 @@
+ALLORA_OFFCHAIN_NODE_CONFIG_JSON='{
+    "wallet": {
+        "addressKeyName": "test",
+        "addressRestoreMnemonic": "",
+        "addressAccountPassphrase": "",
+        "alloraHomeDir": "",
+        "gas": "1000000",
+        "gasAdjustment": 1.0,
+        "nodeRpc": "http://localhost:26657",
+        "maxRetries": 1,
+        "delay": 1,
+        "submitTx": false
+    },
+    "worker": [
+        {
+            "topicId": 1,
+            "inferenceEntrypointName": "api-worker-reputer",
+            "loopSeconds": 5,
+            "parameters": {
+                "InferenceEndpoint": "http://source:8000/inference/{Token}",
+                "Token": "ETH"
+            }
+        }
+    ],
+    "reputer": [
+        {
+            "topicId": 1,
+            "reputerEntrypointName": "api-worker-reputer",
+            "loopSeconds": 30,
+            "minStake": 100000,
+            "parameters": {
+                "SourceOfTruthEndpoint": "http://source:8888/truth/{Token}/{BlockHeight}",
+                "Token": "ethereum"
+            }
+        }
+    ]
+}'
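The block above is the full node configuration that the compose file later feeds to the container. Below is a minimal sketch of how it could be inspected locally, assuming the offchain node consumes ALLORA_OFFCHAIN_NODE_CONFIG_JSON as a single JSON document and fills the {Token} and {BlockHeight} placeholders from each entry's "parameters" block; the script and its behaviour are illustrative, not the node's own loader.

# check_offchain_config.py -- illustrative only; not part of this commit.
# Assumes ALLORA_OFFCHAIN_NODE_CONFIG_JSON holds the JSON shown in
# .env.offchain-node.example and that {Token}/{BlockHeight} are plain
# string placeholders filled from each entry's "parameters" block.
import json
import os

raw = os.environ["ALLORA_OFFCHAIN_NODE_CONFIG_JSON"]
config = json.loads(raw)

print("wallet RPC:", config["wallet"]["nodeRpc"])

for worker in config.get("worker", []):
    params = worker["parameters"]
    # Expand the endpoint template, e.g. {Token} -> ETH.
    url = params["InferenceEndpoint"].replace("{Token}", params["Token"])
    print(f"topic {worker['topicId']} inference URL: {url}")

for reputer in config.get("reputer", []):
    params = reputer["parameters"]
    # BlockHeight is only known at runtime; use a dummy value here.
    url = (params["SourceOfTruthEndpoint"]
           .replace("{Token}", params["Token"])
           .replace("{BlockHeight}", "1000"))
    print(f"topic {reputer['topicId']} truth URL: {url}")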
.gitignore (8 changes)
@@ -1,12 +1,12 @@
 .DS_Store
 __pycache__
 *.pyc
-.lake_cache/*
 logs/*
 .env
-keys
-data
 inference-data
 worker-data
 head-data
-lib
+offchain-node-data
+
+.env.*
+!.env.*.example
(deleted file, 7 lines)
@@ -1,7 +0,0 @@
-FROM alloranetwork/allora-inference-base:latest
-
-USER root
-RUN pip install requests
-
-USER appuser
-COPY main.py /app/
@@ -3,14 +3,10 @@ services:
     container_name: inference-basic-eth-pred
     build:
       context: .
+      dockerfile: Dockerfile_inference
     command: python -u /app/app.py
     ports:
       - "8000:8000"
-    networks:
-      eth-model-local:
-        aliases:
-          - inference
-        ipv4_address: 172.22.0.4
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000/inference/ETH"]
       interval: 10s
@@ -21,7 +17,9 @@ services:

   updater:
     container_name: updater-basic-eth-pred
-    build: .
+    build:
+      context: .
+      dockerfile: Dockerfile_inference
     environment:
       - INFERENCE_API_ADDRESS=http://inference:8000
     command: >
@@ -34,78 +32,17 @@ services:
     depends_on:
       inference:
         condition: service_healthy
-    networks:
-      eth-model-local:
-        aliases:
-          - updater
-        ipv4_address: 172.22.0.5

-  head:
-    container_name: head-basic-eth-pred
-    image: alloranetwork/allora-inference-base-head:latest
-    environment:
-      - HOME=/data
-    entrypoint:
-      - "/bin/bash"
-      - "-c"
-      - |
-        if [ ! -f /data/keys/priv.bin ]; then
-          echo "Generating new private keys..."
-          mkdir -p /data/keys
-          cd /data/keys
-          allora-keys
-        fi
-        allora-node --role=head --peer-db=/data/peerdb --function-db=/data/function-db \
-          --runtime-path=/app/runtime --runtime-cli=bls-runtime --workspace=/data/workspace \
-          --private-key=/data/keys/priv.bin --log-level=debug --port=9010 --rest-api=:6000
-    ports:
-      - "6000:6000"
-    volumes:
-      - ./head-data:/data
-    working_dir: /data
-    networks:
-      eth-model-local:
-        aliases:
-          - head
-        ipv4_address: 172.22.0.100
-
-  worker:
-    container_name: worker-basic-eth-pred
-    environment:
-      - INFERENCE_API_ADDRESS=http://inference:8000
-      - HOME=/data
-    build:
-      context: .
-      dockerfile: Dockerfile_b7s
-    entrypoint:
-      - "/bin/bash"
-      - "-c"
-      - |
-        if [ ! -f /data/keys/priv.bin ]; then
-          echo "Generating new private keys..."
-          mkdir -p /data/keys
-          cd /data/keys
-          allora-keys
-        fi
-        # Change boot-nodes below to the key advertised by your head
-        allora-node --role=worker --peer-db=/data/peerdb --function-db=/data/function-db \
-          --runtime-path=/app/runtime --runtime-cli=bls-runtime --workspace=/data/workspace \
-          --private-key=/data/keys/priv.bin --log-level=debug --port=9011 \
-          --boot-nodes=/ip4/172.22.0.100/tcp/9010/p2p/{HEAD-ID} \
-          --topic=allora-topic-1-worker
+  node:
+    container_name: offchain_node_test
+    image: allora-offchain-node:latest
     volumes:
-      - ./worker-data:/data
-    working_dir: /data
+      - ./offchain-node-data:/data
     depends_on:
-      - inference
-      - head
-    networks:
-      eth-model-local:
-        aliases:
-          - worker
-        ipv4_address: 172.22.0.10
-
-
+      inference:
+        condition: service_healthy
+    env_file:
+      - ./env.offchain-node

 networks:
   eth-model-local:
@@ -117,4 +54,3 @@ networks:
 volumes:
   inference-data:
   worker-data:
-  head-data:
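With the compose changes above, the b7s head and worker services are gone and a single node service runs the offchain-node image, starting only after the inference service passes its healthcheck. Below is a small smoke test in the same spirit as that healthcheck, assuming the inference API is published on localhost:8000 as in the ports mapping; the script name and usage are illustrative and not part of this commit.

# smoke_test_inference.py -- illustrative only; not part of this commit.
# Assumes the inference container is up and published on localhost:8000
# (as in the compose ports mapping) and that /inference/<token> is the
# same path the compose healthcheck curls.
import sys

import requests

TOKEN = sys.argv[1] if len(sys.argv) > 1 else "ETH"
URL = f"http://localhost:8000/inference/{TOKEN}"

try:
    resp = requests.get(URL, timeout=5)
    resp.raise_for_status()
    print(f"{URL} -> {resp.text}")
except requests.RequestException as exc:
    print(f"inference service not ready: {exc}")
    sys.exit(1)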
main.py (deleted, 31 lines)
@@ -1,31 +0,0 @@
-import os
-import requests
-import sys
-import json
-
-INFERENCE_ADDRESS = os.environ["INFERENCE_API_ADDRESS"]
-
-
-def process(token_name):
-    response = requests.get(f"{INFERENCE_ADDRESS}/inference/{token_name}")
-    content = response.text
-    return content
-
-
-if __name__ == "__main__":
-    # Your code logic with the parsed argument goes here
-    try:
-        if len(sys.argv) < 5:
-            value = json.dumps({"error": f"Not enough arguments provided: {len(sys.argv)}, expected 4 arguments: topic_id, blockHeight, blockHeightEval, default_arg"})
-        else:
-            topic_id = sys.argv[1]
-            blockHeight = sys.argv[2]
-            blockHeightEval = sys.argv[3]
-            default_arg = sys.argv[4]
-
-            response_inference = process(token_name=default_arg)
-            response_dict = {"infererValue": response_inference}
-            value = json.dumps(response_dict)
-    except Exception as e:
-        value = json.dumps({"error": {str(e)}})
-    print(value)