From bd0d576c7e32eaa0e5c097405ee74e804f321159 Mon Sep 17 00:00:00 2001 From: Luke Robles Date: Fri, 19 Apr 2024 10:31:13 -0700 Subject: [PATCH] I'm dumb, don't make 3 requests for the same json file --- app/bot.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/app/bot.py b/app/bot.py index 9e8c4f0a..192bdee3 100755 --- a/app/bot.py +++ b/app/bot.py @@ -157,8 +157,9 @@ async def on_message(ctx): llm_rule_endpoint = ( "http://dragon-bot-json.dragon-bot.svc.cluster.local/rules.json" ) + llm_rules = requests.get(llm_rule_endpoint).json() - if ctx.author.id in requests.get(llm_rule_endpoint).json()["disallowed_users"]: + if ctx.author.id in llm_rules["disallowed_users"]: responses = [ "You cant do that right now", "You cant use this feature right now", @@ -168,7 +169,7 @@ async def on_message(ctx): return url = "http://192.168.1.137:1337/v1/chat/completions" - instructions = requests.get(llm_rule_endpoint).json()["prompt"] + instructions = llm_rules["prompt"] payload = { "messages": [ @@ -183,7 +184,7 @@ async def on_message(ctx): "role": "user", }, ], - "model": requests.get(llm_rule_endpoint).json()["model"], + "model": llm_rules["model"], "stream": False, "max_tokens": 4096, "stop": ["goon"],