---
# Docker Compose stack for a basic ETH price-prediction worker:
#   - inference: HTTP prediction service on port 8000
#   - updater:   loops forever, re-running the update script once per day
#   - node:      Allora offchain node image, started once inference is healthy
services:
  inference:
    container_name: inference-basic-eth-pred
    build:
      context: .
      dockerfile: Dockerfile_inference
    command: python -u /app/app.py
    ports:
      - "8000:8000"  # quoted to avoid YAML 1.1 sexagesimal/number parsing traps
    healthcheck:
      # Healthy once the ETH inference endpoint answers; up to 12 tries
      # at 10 s intervals (~2 min grace for startup).
      test: ["CMD", "curl", "-f", "http://localhost:8000/inference/ETH"]
      interval: 10s
      timeout: 5s
      retries: 12
    volumes:
      - ./inference-data:/app/data

  updater:
    container_name: updater-basic-eth-pred
    build:
      context: .
      dockerfile: Dockerfile_inference
    environment:
      - INFERENCE_API_ADDRESS=http://inference:8000
    # Daily refresh loop: run the update script, then sleep 24 h, repeat.
    command: >
      sh -c "
      while true; do
        python -u /app/update_app.py;
        sleep 24h;
      done
      "
    depends_on:
      inference:
        condition: service_healthy

  node:
    container_name: offchain_node_test
    image: allora-offchain-node:latest
    volumes:
      - ./offchain-node-data:/data
    depends_on:
      inference:
        condition: service_healthy
    env_file:
      - ./env.offchain-node

networks:
  # NOTE(review): this network is declared but no service has a `networks:`
  # key, so all services run on the default compose network. Either attach
  # the services here or drop this block — confirm intent before removing.
  eth-model-local:
    driver: bridge
    ipam:
      config:
        - subnet: 172.22.0.0/24

volumes:
  # NOTE(review): both named volumes are currently unused — the services
  # mount host bind paths (./inference-data, ./offchain-node-data) instead.
  # Kept in case external tooling references them; confirm before removing.
  inference-data:
  worker-data: