Merge pull request #6 from allora-network/fix/add-inference-serviece-healthcheck
Add inference service healthcheck to ensure the updater service starts
commit 911555f358

.gitignore (vendored): 1 change
@@ -6,6 +6,7 @@ logs/*
 .env
 keys
 data
+inference-data
 worker-data
 head-data
 lib

@@ -13,15 +13,29 @@ services:
         aliases:
           - inference
         ipv4_address: 172.22.0.4
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8000/inference/ETH"]
+      interval: 10s
+      timeout: 5s
+      retries: 12
+    volumes:
+      - ./inference-data:/app/data

   updater:
     container_name: updater-basic-eth-pred
     build: .
     environment:
       - INFERENCE_API_ADDRESS=http://inference:8000
-    command: python -u /app/update_app.py
+    command: >
+      sh -c "
+      while true; do
+        python -u /app/update_app.py;
+        sleep 24h;
+      done
+      "
     depends_on:
-      - inference
+      inference:
+        condition: service_healthy
     networks:
       eth-model-local:
         aliases:
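
The new healthcheck block (in what is presumably docker-compose.yml; the file name is not shown in this view) is what makes the later depends_on condition work: Compose marks the inference service healthy only once "curl -f http://localhost:8000/inference/ETH" succeeds inside the container, and only then starts the updater. The sketch below is a rough host-side equivalent of that probe, useful for manual debugging. It is not part of the commit; the script name wait_for_inference.py is hypothetical, and it assumes the inference container also publishes port 8000 on the host.

# wait_for_inference.py -- host-side sketch of the compose healthcheck (assumption:
# port 8000 is published on localhost; not part of this commit)
import sys
import time

import requests

URL = "http://localhost:8000/inference/ETH"  # same endpoint the healthcheck curls
RETRIES = 12      # retries: 12
INTERVAL = 10     # interval: 10s
TIMEOUT = 5       # timeout: 5s

def wait_for_inference() -> bool:
    for attempt in range(1, RETRIES + 1):
        try:
            resp = requests.get(URL, timeout=TIMEOUT)
            if resp.ok:  # curl -f fails on HTTP >= 400, so mirror that here
                print(f"inference healthy after {attempt} attempt(s)")
                return True
            print(f"attempt {attempt}: HTTP {resp.status_code}")
        except requests.RequestException as exc:
            print(f"attempt {attempt}: {exc}")
        time.sleep(INTERVAL)
    return False

if __name__ == "__main__":
    sys.exit(0 if wait_for_inference() else 1)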

@@ -102,5 +116,6 @@ networks:
         - subnet: 172.22.0.0/24

 volumes:
+  inference-data:
   worker-data:
   head-data:
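
Taken together, the compose changes do two things: the updater now waits for a healthy inference container before it starts, and instead of running the update script once and exiting it loops forever, refreshing the data every 24 hours. Below is a rough Python stand-in for that shell loop, e.g. for running the updater outside Compose; it is not part of the commit, and the script path is simply the one used in the compose command above.

# run_updater_loop.py -- hypothetical Python equivalent of
#   sh -c "while true; do python -u /app/update_app.py; sleep 24h; done"
import subprocess
import time

UPDATE_SCRIPT = "/app/update_app.py"   # path taken from the compose command
SLEEP_SECONDS = 24 * 60 * 60           # sleep 24h

while True:
    # -u keeps Python's stdout unbuffered so logs appear immediately, as in the diff
    result = subprocess.run(["python", "-u", UPDATE_SCRIPT], check=False)
    if result.returncode != 0:
        print(f"update_app.py exited with code {result.returncode}")
    time.sleep(SLEEP_SECONDS)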

@@ -4,6 +4,8 @@ import requests
 inference_address = os.environ["INFERENCE_API_ADDRESS"]
 url = f"{inference_address}/update"

+print("UPDATING INFERENCE WORKER DATA")
+
 response = requests.get(url)
 if response.status_code == 200:
     # Request was successful
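
This hunk is in the update script run by the updater container (presumably update_app.py, given the compose command above); it only adds a log line before the existing call to the inference service's /update endpoint. For a quick manual check of that endpoint from the host, something like the following works, again assuming the inference service publishes port 8000 locally; it is not code from the repo.

# Manual smoke test of the update endpoint (assumption: port 8000 published on the host)
import requests

resp = requests.get("http://localhost:8000/update", timeout=30)
# The script above treats HTTP 200 as a successful data refresh
print(resp.status_code, resp.text)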

updater.py: 12 changes
@@ -5,18 +5,22 @@ from concurrent.futures import ThreadPoolExecutor

 # Function to download the URL, called asynchronously by several child processes
 def download_url(url, download_path):
+    target_file_path = os.path.join(download_path, os.path.basename(url))
+    if os.path.exists(target_file_path):
+        print(f"File already exists: {url}")
+        return
+
     response = requests.get(url)
     if response.status_code == 404:
         print(f"File not exist: {url}")
     else:
-        file_name = os.path.join(download_path, os.path.basename(url))

         # create the entire path if it doesn't exist
-        os.makedirs(os.path.dirname(file_name), exist_ok=True)
+        os.makedirs(os.path.dirname(target_file_path), exist_ok=True)

-        with open(file_name, "wb") as f:
+        with open(target_file_path, "wb") as f:
             f.write(response.content)
-        print(f"Downloaded: {url} to {file_name}")
+        print(f"Downloaded: {url} to {target_file_path}")


 def download_binance_monthly_data(