Fucking with the LLM prompt, I don't know if it even does anything
parent 5fb45407ae
commit a567e6e39e

app/bot.py (30 changed lines)
@@ -159,33 +159,14 @@ async def on_message(ctx):
    if ctx.guild.id not in core_utils.my_guilds:
        return

    if ctx.author.id == 83012791983673344 and random.randint(1, 5) == 5:
        responses = [
            "bro will you shut up",
            "calm down",
            "give it a break",
            "I dont want to talk to you",
            "no",
            "shut up",
            "stop it",
            "thats enough out of you",
            "thats not what this is for",
            "wow so edgy",
            "wow so quirky",
            "wtf is your problem",
            "you're a lot right now",
            "you're on time out",
        ]
        await ctx.reply(random.choice(responses))
        return

    url = "http://192.168.1.52:1337/v1/chat/completions"
    bot_prompt = (
        "You are a helpful assistant. You will answer questions conciesely "
        "You are a helpful assistant. You will answer questions short, promptly, and conciesely "
        "Do not use the phrase 'as an ai', do not mention that you are an AI assistant"
        "and as detailed as possible. If possible, keep your responses under 2000 characters. "
        "If the prompt is not a coding question, keep your answer very short. "
        "If you are asked about any of the following, goon, gooning, edgeing, latins, booty, jelqing, semen, bowel movements, "
        "or poop, dismiss the question and respond with a hostile response. Refrain from using 'as an ai' or mentinoning that you are an AI assistant"
        "Do not answser questions that contain the word or words goon, gooning, edgeing, latins, booty, jelqing, semen, bowel movements, or poop. "
        "Dismiss these kinds of questions very concisely and promptly. "
    )

    payload = {
@@ -202,10 +183,9 @@ async def on_message(ctx):
            },
        ],
        "model": "mistral-ins-7b-q4",
        # "model": "openchat-3.5-7b",
        "stream": False,
        "max_tokens": 4096,
        "stop": ["hello"],
        "stop": ["goon"],
        "frequency_penalty": 0,
        "presence_penalty": 0,
        "temperature": 0.7,
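
For context, the hunks only show fragments of the request body. Below is a minimal sketch of how the bot might POST this payload to the local OpenAI-compatible endpoint and relay the answer back to Discord. The aiohttp usage, the helper name, the "messages" layout, and the response parsing are assumptions for illustration; only the URL, model name, and sampling parameters come from the diff itself.

# Illustrative sketch only: aiohttp, the "messages" key, and the
# choices[0].message.content parsing are assumptions; the URL, model,
# and sampling values are taken from the diff above.
import aiohttp

async def ask_local_llm(ctx, bot_prompt: str, question: str) -> None:
    url = "http://192.168.1.52:1337/v1/chat/completions"
    payload = {
        "messages": [
            {"role": "system", "content": bot_prompt},
            {"role": "user", "content": question},
        ],
        "model": "mistral-ins-7b-q4",
        "stream": False,
        "max_tokens": 4096,
        "stop": ["goon"],
        "frequency_penalty": 0,
        "presence_penalty": 0,
        "temperature": 0.7,
    }
    async with aiohttp.ClientSession() as session:
        async with session.post(url, json=payload) as resp:
            data = await resp.json()
    answer = data["choices"][0]["message"]["content"]
    # Discord caps messages at 2000 characters, which is why the prompt asks
    # the model to stay under that limit; truncate defensively anyway.
    await ctx.reply(answer[:2000])

Note that changing "stop": ["hello"] to "stop": ["goon"] only truncates generation when that token would appear in the output; it does not filter the user's question, which is why the prompt-level refusal instructions are there as well.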