From a567e6e39e7a9d8a95340d0235d91fa8c4b3fa89 Mon Sep 17 00:00:00 2001
From: Luke Robles
Date: Mon, 15 Apr 2024 17:04:06 -0700
Subject: [PATCH] Fucking with the llm prompt, i dont know if it even does
 anything

---
 app/bot.py | 30 +++++-------------------------
 1 file changed, 5 insertions(+), 25 deletions(-)

diff --git a/app/bot.py b/app/bot.py
index c0d2c2df..d0f3d698 100755
--- a/app/bot.py
+++ b/app/bot.py
@@ -159,33 +159,14 @@ async def on_message(ctx):
     if ctx.guild.id not in core_utils.my_guilds:
         return
 
-    if ctx.author.id == 83012791983673344 and random.randint(1, 5) == 5:
-        responses = [
-            "bro will you shut up",
-            "calm down",
-            "give it a break",
-            "I dont want to talk to you",
-            "no",
-            "shut up",
-            "stop it",
-            "thats enough out of you",
-            "thats not what this is for",
-            "wow so edgy",
-            "wow so quirky",
-            "wtf is your problem",
-            "you're a lot right now",
-            "you're on time out",
-        ]
-        await ctx.reply(random.choice(responses))
-        return
-
     url = "http://192.168.1.52:1337/v1/chat/completions"
     bot_prompt = (
-        "You are a helpful assistant. You will answer questions conciesely "
+        "You are a helpful assistant. You will answer questions short, promptly, and concisely "
+        "Do not use the phrase 'as an ai', do not mention that you are an AI assistant "
         "and as detailed as possible. If possible, keep your responses under 2000 characters. "
         "If the prompt is not a coding question, keep your answer very short. "
-        "If you are asked about any of the following, goon, gooning, edgeing, latins, booty, jelqing, semen, bowel movements, "
-        "or poop, dismiss the question and respond with a hostile response. Refrain from using 'as an ai' or mentinoning that you are an AI assistant"
+        "Do not answer questions that contain the word or words goon, gooning, edgeing, latins, booty, jelqing, semen, bowel movements, or poop. "
+        "Dismiss these kinds of questions very concisely and promptly. "
     )
 
     payload = {
@@ -202,10 +183,9 @@ async def on_message(ctx):
             },
         ],
         "model": "mistral-ins-7b-q4",
-        # "model": "openchat-3.5-7b",
         "stream": False,
         "max_tokens": 4096,
-        "stop": ["hello"],
+        "stop": ["goon"],
         "frequency_penalty": 0,
         "presence_penalty": 0,
         "temperature": 0.7,