From 819b5b1e9d82a6948eecc63b4f3308052e4402ae Mon Sep 17 00:00:00 2001
From: clementupshot <159793462+clementupshot@users.noreply.github.com>
Date: Mon, 17 Jun 2024 10:37:20 +0200
Subject: [PATCH] Major upgrade

---
 app.py  | 10 +++-------
 main.py | 22 +++++++++++++---------
 2 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/app.py b/app.py
index 32424c5..a7a60c3 100644
--- a/app.py
+++ b/app.py
@@ -34,17 +34,13 @@ def generate_inference(token):
     """Generate inference for given token."""
     if not token or token != "ETH":
         error_msg = "Token is required" if not token else "Token not supported"
-        return Response(
-            json.dumps({"error": error_msg}), status=400, mimetype="application/json"
-        )
+        return Response(json.dumps({"error": error_msg}), status=400, mimetype='application/json')
 
     try:
         inference = get_eth_inference()
-        return jsonify({"value": str(inference)})
+        return Response(str(inference), status=200)
     except Exception as e:
-        return Response(
-            json.dumps({"error": str(e)}), status=500, mimetype="application/json"
-        )
+        return Response(json.dumps({"error": str(e)}), status=500, mimetype='application/json')
 
 
 @app.route("/update")
diff --git a/main.py b/main.py
index 4702eb9..85b6611 100644
--- a/main.py
+++ b/main.py
@@ -9,19 +9,23 @@ INFERENCE_ADDRESS = os.environ["INFERENCE_API_ADDRESS"]
 def process(token_name):
     response = requests.get(f"{INFERENCE_ADDRESS}/inference/{token_name}")
     content = response.text
-    print(content)
+    return content
 
 
 if __name__ == "__main__":
     # Your code logic with the parsed argument goes here
     try:
-        if len(sys.argv) >= 3:
-            # Not using to discriminate by topicId for simplicity.
-            # topic_id = sys.argv[1]
-            token_name = sys.argv[2]
+        if len(sys.argv) < 5:
+            value = json.dumps({"error": f"Not enough arguments provided: {len(sys.argv)}, expected 4 arguments: topic_id, blockHeight, blockHeightEval, default_arg"})
         else:
-            token_name = "ETH"
-        process(token_name=token_name)
+            topic_id = sys.argv[1]
+            blockHeight = sys.argv[2]
+            blockHeightEval = sys.argv[3]
+            default_arg = sys.argv[4]
+
+            response_inference = process(token_name=default_arg)
+            response_dict = {"infererValue": response_inference}
+            value = json.dumps(response_dict)
     except Exception as e:
-        response = json.dumps({"error": {str(e)}})
-        print(response)
+        value = json.dumps({"error": {str(e)}})
+    print(value)
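
A minimal usage sketch of the updated main.py entrypoint after this patch. The
INFERENCE_API_ADDRESS value and the four positional argument values below are
illustrative assumptions, not taken from the patch itself:

    # Sketch: invoke main.py with the new argument order
    # (topic_id, blockHeight, blockHeightEval, default_arg).
    import os
    import subprocess

    # Assumed local address for the inference API; adjust to your setup.
    env = dict(os.environ, INFERENCE_API_ADDRESS="http://localhost:8000")
    result = subprocess.run(
        ["python", "main.py", "1", "100", "100", "ETH"],
        capture_output=True,
        text=True,
        env=env,
    )
    # main.py prints a JSON string; on success it has the shape
    # {"infererValue": "<inference string>"}, otherwise an "error" payload.
    print(result.stdout)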