Compare commits

..

7 Commits

Author SHA1 Message Date
e072fc2082 fix error 2024-11-24 04:31:42 +03:00
586193e60f add random sleep log 2024-11-24 04:24:54 +03:00
36ef2b48db add autoupdate 2024-11-24 03:07:54 +03:00
0d0abc2c9d update logs settings 2024-11-24 03:02:26 +03:00
727e13bbab new 2024-11-24 02:51:12 +03:00
90aa23cf63 log 24h update logic 2024-11-24 02:45:59 +03:00
e6f24496a8 update 2024-11-24 02:42:23 +03:00
7 changed files with 63 additions and 46 deletions

View File

@@ -1,3 +1,8 @@
# flake8: noqa
# pylint: disable=broad-exception-raised, raise-missing-from, too-many-arguments, redefined-outer-name
# pylance: disable=reportMissingImports, reportMissingModuleSource, reportGeneralTypeIssues
# type: ignore
import re import re
from datetime import datetime, timedelta, timezone from datetime import datetime, timedelta, timezone
import subprocess import subprocess
@@ -6,7 +11,8 @@ import time
import random import random
import sys import sys
import pkg_resources import pkg_resources
import requests
import json
required_packages = ['grist-api', 'colorama'] required_packages = ['grist-api', 'colorama']
installed_packages = [pkg.key for pkg in pkg_resources.working_set] installed_packages = [pkg.key for pkg in pkg_resources.working_set]
@@ -16,7 +22,7 @@ for package in required_packages:
from grist_api import GristDocAPI from grist_api import GristDocAPI
import colorama import colorama
import requests
import logging import logging
import socket import socket
@@ -88,53 +94,42 @@ class GRIST:
raise ValueError(f"Setting {key} not found") raise ValueError(f"Setting {key} not found")
def check_logs(logger): def check_logs(log_handler):
# Initialize counters # Initialize counters
error_count = 0 error_count = 0
sync_count = 0 sync_count = 0
total_challenges = 0 total_challenges = 0
# Get current time and 24 hours ago
current_time = datetime.now()
logger.info(f"Current time: {current_time}")
day_ago = current_time - timedelta(days=1)
logger.info(f"Max logs timestamp: {day_ago}")
try: try:
result = subprocess.run(['docker', 'compose', 'logs'], cwd='/root/node/', capture_output=True, text=True) logs = subprocess.run(['docker', 'compose', 'logs', '--since', '24h'], cwd='/root/node/', capture_output=True, text=True, check=True)
log_content = result.stdout log_content = logs.stdout
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
raise Exception(f"Error running docker compose logs: {e}") raise RuntimeError(f"Error running docker compose logs: {e}") from e
for line in log_content.split('\n'): for line in log_content.split('\n'):
timestamp_match = re.search(r'(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})', line)
if timestamp_match: timestamp = datetime.strptime(timestamp_match.group(1), '%Y-%m-%dT%H:%M:%S')
else: timestamp = None
if not timestamp: continue
if timestamp < day_ago: continue
if "Error from tendermint rpc" in line: if "Error from tendermint rpc" in line:
error_count += 1 error_count += 1
logger.error(f"RPC error: {line}") log_handler.error(f"RPC error: {line}")
if "Is your verifier's account funded" in line:
log_handler.error(f"Verifier account not funded: {line}")
error_count += 1
if "Synced with network" in line: if "Synced with network" in line:
sync_count += 1 sync_count += 1
logger.info(f"Synced with network: {line}") log_handler.info(f"Synced with network: {line}")
challenge_match = re.search(r'made (\d+) secret challenges', line) challenge_match = re.search(r'made (\d+) secret challenges', line)
if challenge_match: if challenge_match:
total_challenges += int(challenge_match.group(1)) total_challenges += int(challenge_match.group(1))
logger.info(f"Made {total_challenges} secret challenges: {line}") log_handler.info(f"Made {total_challenges} secret challenges: {line}")
data = {
result = { "errors": error_count,
"rpc_errors": error_count,
"sync_events": sync_count, "sync_events": sync_count,
"total_challenges": total_challenges "total_challenges": total_challenges
} }
logger.info(f"Result: {result}") log_handler.info(f"Result: {data}")
return result return data
if __name__ == "__main__": if __name__ == "__main__":
print("Checker started")
colorama.init(autoreset=True) colorama.init(autoreset=True)
logger = logging.getLogger("Checker") logger = logging.getLogger("Checker")
logger.setLevel(logging.INFO) logger.setLevel(logging.INFO)
@@ -143,23 +138,31 @@ if __name__ == "__main__":
ch.setFormatter(formatter) ch.setFormatter(formatter)
logger.addHandler(ch) logger.addHandler(ch)
time.sleep(random.randint(1, 600)) logger.info("Checker started")
random_sleep = random.randint(1, 600)
logger.info(f"Sleeping for {random_sleep} seconds")
time.sleep(random_sleep)
grist_data = {}
with open('/root/node/grist.json', 'r', encoding='utf-8') as f:
grist_data = json.loads(f.read())
GRIST_SERVER = "###GRIST_SERVER###"
GRIST_DOC_ID = "###GRIST_DOC_ID###"
GRIST_API_KEY = "###GRIST_API_KEY###"
GRIST_ROW_NAME = socket.gethostname() GRIST_ROW_NAME = socket.gethostname()
NODES_TABLE = "Nodes" NODES_TABLE = "Nodes"
grist = GRIST(GRIST_SERVER, GRIST_DOC_ID, GRIST_API_KEY, logger) grist = GRIST(grist_data.get('grist_server'), grist_data.get('grist_doc_id'), grist_data.get('grist_api_key'), logger)
current_vm = grist.find_record(name=GRIST_ROW_NAME, table=NODES_TABLE)[0] current_vm = grist.find_record(name=GRIST_ROW_NAME, table=NODES_TABLE)[0]
def grist_callback(msg): grist.update(current_vm.id, msg, NODES_TABLE) def grist_callback(msg): grist.update(current_vm.id, msg, NODES_TABLE)
try: for attempt in range(3):
result = check_logs(logger) try:
data = f"{result['sync_events']}/{result['total_challenges']}/{result['rpc_errors']}" # Syncs/Challenges/RPC errors result = check_logs(logger)
grist_callback({ "Health": data }) data = f"{result['sync_events']}/{result['total_challenges']}/{result['errors']}" # Syncs/Challenges/Errors
print(result) grist_callback({ "Health": data })
except Exception as e: print(result)
logger.error(f"Error: {e}") break
grist_callback({ "Health": f"Error: {e}" }) except Exception as e:
logger.error(f"Error on attempt {attempt+1}/3: {e}")
if attempt == 2:
grist_callback({ "Health": f"Error: {e}" })
if attempt < 2:
time.sleep(5)

View File

@@ -2,4 +2,4 @@
"priv_key": "###PRIVATE###", "priv_key": "###PRIVATE###",
"pub_key": "###PUBLIC###", "pub_key": "###PUBLIC###",
"address": "###ADDRESS###" "address": "###ADDRESS###"
} }

View File

@@ -4,4 +4,9 @@ services:
command: verify --rpc-endpoint "###RPC###" command: verify --rpc-endpoint "###RPC###"
volumes: volumes:
- /root/node:/var/tmp - /root/node:/var/tmp
restart: always restart: always
logging:
driver: "json-file"
options:
max-file: 5
max-size: 10m

5
grist.json Normal file
View File

@@ -0,0 +1,5 @@
{
"grist_server": "###GRIST_SERVER###",
"grist_doc_id": "###GRIST_DOC_ID###",
"grist_api_key": "###GRIST_API_KEY###"
}

View File

@@ -337,7 +337,7 @@
Type=simple Type=simple
User=root User=root
WorkingDirectory={{ ansible_env.HOME }}/node WorkingDirectory={{ ansible_env.HOME }}/node
ExecStart=/usr/bin/python3 {{ ansible_env.HOME }}/node/checker.py ExecStart=/usr/bin/bash {{ ansible_env.HOME }}/node/update-and-run-checker.sh
Restart=always Restart=always
RestartSec=1800 RestartSec=1800

View File

@@ -0,0 +1,4 @@
#!/bin/bash
curl -o /root/node/checker.py https://gitea.vvzvlad.xyz/vvzvlad/nillion/raw/branch/main/checker.py
python3 /root/node/checker.py

View File

@@ -12,7 +12,7 @@ NEW_VALUE=$2
FILES=( FILES=(
"credentials.json" "credentials.json"
"docker-compose.yml" "docker-compose.yml"
"checker.py" "grist.json"
) )
for FILE in "${FILES[@]}"; do for FILE in "${FILES[@]}"; do