Compare commits: b631442f3e ... 714bf4c863

2 commits in this comparison: 714bf4c863, e65e0d95ed
Dockerfile (new file, 17 lines)
@@ -0,0 +1,17 @@
# Use an official Python runtime as the base image
FROM amd64/python:3.9-buster as project_env

# Set the working directory in the container
WORKDIR /app

# Install dependencies
COPY requirements.txt requirements.txt
RUN pip install --upgrade pip setuptools \
    && pip install -r requirements.txt

FROM project_env

COPY . /app/

# Set the entrypoint command
CMD ["gunicorn", "--conf", "/app/gunicorn_conf.py", "main:app"]
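A minimal sketch of building and running this image locally (the image tag is an arbitrary example; the published port follows the bind = "0.0.0.0:9000" setting in gunicorn_conf.py below):

    # Build the worker image (tag name is illustrative)
    docker build -t allora-worker .

    # Run it and expose the gunicorn port from gunicorn_conf.py
    docker run --rm -p 9000:9000 allora-worker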
config.py (new file, 5 lines)
@@ -0,0 +1,5 @@
import os

app_base_path = os.getenv("APP_BASE_PATH", default=os.getcwd())
data_base_path = os.path.join(app_base_path, "data")
model_file_path = os.path.join(data_base_path, "model.pkl")
gunicorn_conf.py (new file, 12 lines)
@@ -0,0 +1,12 @@
# Gunicorn config variables
loglevel = "info"
errorlog = "-"  # stderr
accesslog = "-"  # stdout
worker_tmp_dir = "/dev/shm"
graceful_timeout = 120
timeout = 30
keepalive = 5
worker_class = "gthread"
workers = 1
threads = 8
bind = "0.0.0.0:9000"
init.config (new executable file, 43 lines)
@@ -0,0 +1,43 @@
#!/usr/bin/env bash

set -e

if [ ! -f config.json ]; then
    echo "Error: config.json file not found, please provide one"
    exit 1
fi

nodeName=$(jq -r '.wallet.addressKeyName' config.json)
if [ -z "$nodeName" ]; then
    echo "No wallet name provided for the node, please provide your preferred wallet name. config.json >> wallet.addressKeyName"
    exit 1
fi

# Ensure the worker-data directory exists
mkdir -p ./worker-data

json_content=$(cat ./config.json)
stringified_json=$(echo "$json_content" | jq -c .)

mnemonic=$(jq -r '.wallet.addressRestoreMnemonic' config.json)
if [ -n "$mnemonic" ]; then
    echo "ALLORA_OFFCHAIN_NODE_CONFIG_JSON='$stringified_json'" > ./worker-data/env_file
    echo "NAME=$nodeName" >> ./worker-data/env_file
    echo "ENV_LOADED=true" >> ./worker-data/env_file
    echo "wallet mnemonic already provided by you, loading config.json . Please proceed to run docker compose"
    exit 0
fi

if [ ! -f ./worker-data/env_file ]; then
    echo "ENV_LOADED=false" > ./worker-data/env_file
fi

ENV_LOADED=$(grep '^ENV_LOADED=' ./worker-data/env_file | cut -d '=' -f 2)
if [ "$ENV_LOADED" = "false" ]; then
    json_content=$(cat ./config.json)
    stringified_json=$(echo "$json_content" | jq -c .)
    docker run -it --entrypoint=bash -v $(pwd)/worker-data:/data -v $(pwd)/scripts:/scripts -e NAME="${nodeName}" -e ALLORA_OFFCHAIN_NODE_CONFIG_JSON="${stringified_json}" alloranetwork/allora-chain:latest -c "bash /scripts/init.sh"
    echo "config.json saved to ./worker-data/env_file"
else
    echo "config.json is already loaded, skipping the operation. You can set ENV_LOADED variable to false in ./worker-data/env_file to reload the config.json"
fi
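init.config only reads wallet.addressKeyName and wallet.addressRestoreMnemonic from config.json before exporting the whole document as ALLORA_OFFCHAIN_NODE_CONFIG_JSON; a minimal sketch of the expected shape (field values are placeholders, and any additional offchain-node settings the real file carries are omitted here):

    # Write a placeholder config.json next to init.config
    cat > config.json <<'EOF'
    {
      "wallet": {
        "addressKeyName": "my-worker-wallet",
        "addressRestoreMnemonic": ""
      }
    }
    EOF

With the mnemonic left empty, the script falls through to the docker run branch, which generates the key inside the alloranetwork/allora-chain container via scripts/init.sh.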
requirements.txt (new file, 7 lines)
@@ -0,0 +1,7 @@
flask[async]
gunicorn[gthread]
numpy==1.26.2
pandas==2.1.3
Requests==2.32.0
scikit_learn==1.3.2
werkzeug>=3.0.3 # not directly required, pinned by Snyk to avoid a vulnerability
scripts/init.sh (new file, 33 lines)
@@ -0,0 +1,33 @@
#!/bin/bash

set -e

if allorad keys --home=/data/.allorad --keyring-backend test show $NAME > /dev/null 2>&1 ; then
    echo "allora account: $NAME already imported"
else
    echo "creating allora account: $NAME"
    output=$(allorad keys add $NAME --home=/data/.allorad --keyring-backend test 2>&1)
    address=$(echo "$output" | grep 'address:' | sed 's/.*address: //')
    mnemonic=$(echo "$output" | tail -n 1)

    # Parse and update the JSON string
    updated_json=$(echo "$ALLORA_OFFCHAIN_NODE_CONFIG_JSON" | jq --arg name "$NAME" --arg mnemonic "$mnemonic" '
        .wallet.addressKeyName = $name |
        .wallet.addressRestoreMnemonic = $mnemonic
    ')

    stringified_json=$(echo "$updated_json" | jq -c .)

    echo "ALLORA_OFFCHAIN_NODE_CONFIG_JSON='$stringified_json'" > /data/env_file
    echo ALLORA_OFFCHAIN_ACCOUNT_ADDRESS=$address >> /data/env_file
    echo "NAME=$NAME" >> /data/env_file

    echo "Updated ALLORA_OFFCHAIN_NODE_CONFIG_JSON saved to /data/env_file"
fi


if grep -q "ENV_LOADED=false" /data/env_file; then
    sed -i 's/ENV_LOADED=false/ENV_LOADED=true/' /data/env_file
else
    echo "ENV_LOADED=true" >> /data/env_file
fi
update.sh (Normal file → Executable file, 2 lines changed)
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 if [ "$#" -ne 3 ]; then
     echo "Usage: $0 <mnemonic> <wallet> <rpc_url>"
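Judging from the usage message, the script takes three positional arguments; an illustrative call with placeholder values:

    ./update.sh "<wallet mnemonic>" my-wallet https://rpc.example.com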
update_app.py (new file, 22 lines)
@@ -0,0 +1,22 @@
import os
import requests

inference_address = os.environ["INFERENCE_API_ADDRESS"]
url = f"{inference_address}/update"

print("UPDATING INFERENCE WORKER DATA")

response = requests.get(url)
if response.status_code == 200:
    # Request was successful
    content = response.text

    if content == "0":
        print("Response content is '0'")
        exit(0)
    else:
        exit(1)
else:
    # Request failed
    print(f"Request failed with status code: {response.status_code}")
    exit(1)
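A hedged example of triggering the updater against the gunicorn service defined above (the address assumes the inference API is reachable locally on the port from gunicorn_conf.py; adjust if it runs elsewhere):

    export INFERENCE_API_ADDRESS=http://localhost:9000
    python update_app.py

The script exits 0 only when the /update endpoint returns the body "0", so it can serve as a success/failure check in shell scripts or scheduled jobs.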
updater.py (new file, 59 lines)
@@ -0,0 +1,59 @@
import os
import requests
from concurrent.futures import ThreadPoolExecutor


# Function to download the URL, called asynchronously by several child processes
def download_url(url, download_path):
    target_file_path = os.path.join(download_path, os.path.basename(url))
    if os.path.exists(target_file_path):
        # print(f"File already exists: {url}")
        return

    response = requests.get(url)
    if response.status_code == 404:
        # print(f"File not exist: {url}")
        pass
    else:

        # create the entire path if it doesn't exist
        os.makedirs(os.path.dirname(target_file_path), exist_ok=True)

        with open(target_file_path, "wb") as f:
            f.write(response.content)
        # print(f"Downloaded: {url} to {target_file_path}")


def download_binance_monthly_data(
    cm_or_um, symbols, intervals, years, months, download_path
):
    # Verify if CM_OR_UM is correct, if not, exit
    if cm_or_um not in ["cm", "um"]:
        print("CM_OR_UM can be only cm or um")
        return
    base_url = f"https://data.binance.vision/data/futures/{cm_or_um}/monthly/klines"

    # Main loop to iterate over all the arrays and launch child processes
    with ThreadPoolExecutor() as executor:
        for symbol in symbols:
            for interval in intervals:
                for year in years:
                    for month in months:
                        url = f"{base_url}/{symbol}/{interval}/{symbol}-{interval}-{year}-{month}.zip"
                        executor.submit(download_url, url, download_path)


def download_binance_daily_data(
    cm_or_um, symbols, intervals, year, month, download_path
):
    if cm_or_um not in ["cm", "um"]:
        print("CM_OR_UM can be only cm or um")
        return
    base_url = f"https://data.binance.vision/data/futures/{cm_or_um}/daily/klines"

    with ThreadPoolExecutor() as executor:
        for symbol in symbols:
            for interval in intervals:
                for day in range(1, 32):  # Assuming days range from 1 to 31
                    url = f"{base_url}/{symbol}/{interval}/{symbol}-{interval}-{year}-{month:02d}-{day:02d}.zip"
                    executor.submit(download_url, url, download_path)
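A sketch of driving these helpers from a shell, assuming the module is importable from the repository root (symbol, interval, date, and download directory are illustrative):

    # Download ETHUSDT 1m daily klines for June 2024 into data/binance
    python -c 'from updater import download_binance_daily_data; download_binance_daily_data("um", ["ETHUSDT"], ["1m"], 2024, 6, "data/binance")'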