/**
 * This file is part of SudoBot.
 *
 * Copyright (C) 2021-2023 OSN Developers.
 *
 * SudoBot is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * SudoBot is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with SudoBot. If not, see <https://www.gnu.org/licenses/>.
 */

import axios from "axios";
import { ChatInputCommandInteraction, EmbedBuilder, SlashCommandBuilder } from "discord.js";
import Command, { CommandReturn } from "../../core/Command";
import { logError } from "../../utils/Logger";
import Pagination from "../../utils/Pagination";
import { chunkedString } from "../../utils/utils";
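
// Minimal structural type for the parts of the OpenAI SDK client used below;
// the "openai" package is optional and is require()'d lazily at runtime.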
type OpenAI = {
    chat: {
        completions: {
            create: (data: {
                messages: Array<{
                    role: "system" | "user";
                    content: string;
                }>;
                model: string;
                user: string;
            }) => Promise<{
                id: string;
                object: string;
                model: string;
                choices: Array<{
                    message: {
                        role: "system" | "user";
                        content: string;
                    };
                    index: number;
                    finish_reason: string;
                }>;
                created: number;
            }>;
        };
    };
};
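
// Structural type for the chat model exposed by the optional "@google/generative-ai" package.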
type GoogleGenerativeModel = {
    startChat: (data: {
        history: Array<{
            role: "user" | "model" | "function";
            parts: Array<{ text: string }>;
        }>;
    }) => {
        sendMessage: (prompt: string) => Promise<{
            response: {
                text: () => string;
                promptFeedback?: {
                    blockReason: string;
                    blockReasonMessage?: string;
                };
            };
        }>;
    };
};
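
// The /ai ("ask") slash command: forwards the user's prompt to whichever provider is
// configured (Google Gemini, a Cloudflare Workers AI endpoint, or OpenAI) and replies
// with the result, paginating responses that do not fit in a single embed.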
export default class AICommand extends Command {
    public readonly name = "ai";
    public readonly permissions = [];
    public readonly aliases = ["ask"];
    public readonly supportsLegacy = false;
    public readonly slashCommandBuilder = new SlashCommandBuilder().addStringOption(option =>
        option
            .setName("prompt")
            .setDescription("Ask something")
            .setMaxLength(1000)
            .setRequired(true)
    );
    public readonly description = "Ask something to the AI.";
    public openai: OpenAI | null = null;
    public googleAi: GoogleGenerativeModel | null = null;
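
    // Routes the prompt to the first configured provider and posts its response.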
    async execute(interaction: ChatInputCommandInteraction): Promise<CommandReturn> {
        await interaction.deferReply();

        const prompt = interaction.options.getString("prompt", true);
        let content = "";

        try {
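            // Providers are checked in a fixed order: Gemini, then Cloudflare Workers AI, then OpenAI.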
            if (process.env.GEMINI_API_KEY) {
                let geminiAvailable = false;

                try {
                    require.resolve("@google/generative-ai");
                    geminiAvailable = true;
                } catch (error) {
                    this.client.logger.error(error);
                }

                if (!geminiAvailable) {
                    logError("@google/generative-ai package is not installed.");

                    await this.error(
                        interaction,
                        "Google Generative AI package is not installed. Run `npm install @google/generative-ai` to install it."
                    );
                    return;
                }
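
                // Build and cache the Gemini model once; every harm category uses the
                // BLOCK_LOW_AND_ABOVE safety threshold.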
                if (!this.googleAi) {
                    const {
                        GoogleGenerativeAI,
                        HarmCategory,
                        HarmBlockThreshold
                    } = require("@google/generative-ai");
                    const generativeAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY);
                    this.googleAi = generativeAI.getGenerativeModel({
                        model: process.env.GEMINI_API_MODEL_CODE ?? "gemini-pro",
                        safetySettings: Object.keys(HarmCategory).map(k => ({
                            category: HarmCategory[k as keyof typeof HarmCategory],
                            threshold: HarmBlockThreshold.BLOCK_LOW_AND_ABOVE
                        }))
                    });
                }

                const chat = this.googleAi!.startChat({
                    history: [
                        {
                            role: "user",
                            parts: [{ text: "Who are you?" }]
                        },
                        {
                            role: "model",
                            parts: [{ text: "I'm SudoBot, a Discord Moderation Bot." }]
                        }
                    ]
                });
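
                // Send the prompt; if Gemini blocks it, report the block reason instead of a response.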
                try {
                    const result = await chat.sendMessage(prompt);
                    const { response } = result;

                    if (result.response.promptFeedback?.blockReason) {
                        const reason =
                            result.response.promptFeedback?.blockReasonMessage ??
                            `This request was cancelled ${
                                {
                                    BLOCKED_REASON_UNSPECIFIED: "for an unspecified reason",
                                    SAFETY: "by the safety filter"
                                }[result.response.promptFeedback?.blockReason] ??
                                "for unknown reasons"
                            }`;

                        await interaction.editReply({
                            content: ` ${reason}.`
                        });

                        return;
                    }

                    content = response.text();
                } catch (error) {
                    if (
                        error &&
                        typeof error === "object" &&
                        "message" in error &&
                        typeof error.message === "string" &&
                        error.message.includes("overloaded")
                    ) {
                        logError(error);

                        await this.error(
                            interaction,
                            "The AI model is currently overloaded. Please try again later."
                        );
                    }

                    throw error;
                }
            } else if (process.env.CF_AI_URL) {
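                // Cloudflare Workers AI: POST the system prompt and the user's prompt to the configured endpoint.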
                const { data } = await axios.post(
                    process.env.CF_AI_URL!,
                    {
                        messages: [
                            {
                                role: "system",
                                content:
                                    "You are a Discord Moderation bot. Your name is SudoBot. You were built at OSN, by open source developers."
                            },
                            { role: "user", content: prompt }
                        ]
                    },
                    {
                        headers: {
                            "Content-Type": "application/json"
                        }
                    }
                );

                content = data.response;
            } else if (process.env.OPENAI_API_KEY) {
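                // OpenAI: the SDK is optional, so verify that it is installed before require()-ing it.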
                let openAIAvailable = false;

                try {
                    require.resolve("openai");
                    openAIAvailable = true;
                } catch (error) {
                    this.client.logger.error(error);
                }

                if (!openAIAvailable) {
                    logError("OpenAI package is not installed.");
                    await this.error(
                        interaction,
                        "OpenAI package is not installed. Run `npm install openai` to install it."
                    );
                    return;
                }

                const apiKey = process.env.OPENAI_API_KEY;

                if (openAIAvailable && !this.openai) {
                    this.openai = new (require("openai").OpenAI)({
                        apiKey
                    });
                }
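
                // Unless OPENAI_MODERATION is set to "none", screen the prompt with the OpenAI
                // moderation endpoint and refuse flagged prompts.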
                if (process.env.OPENAI_MODERATION !== "none") {
                    try {
                        const response = await axios.post(
                            "https://api.openai.com/v1/moderations",
                            {
                                input: prompt
                            },
                            {
                                headers: {
                                    "Content-Type": "application/json",
                                    Authorization: `Bearer ${apiKey}`
                                }
                            }
                        );

                        if (
                            response.data?.results.find((r: Record<string, boolean>) => r.flagged)
                        ) {
                            await this.error(
                                interaction,
                                "Sorry, your prompt was flagged by the OpenAI moderation system."
                            );
                            return;
                        }
                    } catch (error) {
                        logError(error);
                        await this.error(
                            interaction,
                            "An error occurred while trying to moderate the input."
                        );
                        return;
                    }
                }
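
                // Request the chat completion, attributing the request to the invoking user's Discord ID.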
                const completion = await this.openai!.chat.completions.create({
                    messages: [
                        { role: "system", content: "You're SudoBot, a Discord Moderation Bot." },
                        {
                            role: "user",
                            content: prompt
                        }
                    ],
                    model: process.env.OPENAI_MODEL_ID ?? "gpt-3.5-turbo",
                    user: interaction.user.id
                });

                if (!completion.choices[0]?.message.content) {
                    await interaction.editReply({
                        content: "No response was received from the AI model."
                    });

                    return;
                }

                content = completion.choices[0].message.content;
            } else {
                await interaction.editReply({
                    content: "No suitable AI service provider was configured."
                });

                return;
            }
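
            // Split the response into embed-sized chunks; a single chunk is sent directly.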
            const chunks = chunkedString(content);

            if (chunks.length === 1) {
                await interaction.editReply({
                    embeds: [
                        new EmbedBuilder({
                            title: "Response",
                            color: 0x007bff,
                            description: chunks[0],
                            footer: {
                                text: "Responses will not always be complete or correct"
                            },
                            timestamp: new Date().toISOString()
                        })
                    ]
                });

                return;
            }
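
            // Multiple chunks: paginate one chunk per page, interactive for five minutes.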
            const pagination = new Pagination(chunks, {
                limit: 1,
                channelId: interaction.channelId!,
                guildId: interaction.guildId!,
                client: this.client,
                embedBuilder({ currentPage, data: [chunk], maxPages }) {
                    return new EmbedBuilder({
                        title: "Response",
                        color: 0x007bff,
                        description: chunk,
                        footer: {
                            text: `Page ${currentPage} of ${maxPages} • Responses will not always be complete or correct`
                        },
                        timestamp: new Date().toISOString()
                    });
                },
                timeout: 60_000 * 5
            });

            const message = await interaction.editReply(await pagination.getMessageOptions(1));
            await pagination.start(message!);
        } catch (error) {
            logError(error);

            await interaction.editReply({
                content: `${this.emoji(
                    "error"
                )} An error has occurred while trying to communicate with the AI model.`
            });

            return;
        }
    }
}