From 354b7a8be1498a0833f34881b22f9208fcc232d6 Mon Sep 17 00:00:00 2001
From: Atridad Lahiji <88056492+atridadl@users.noreply.github.com>
Date: Sun, 25 Jun 2023 21:20:50 -0600
Subject: [PATCH] new command

---
 src/commands/ask.ts          |  3 +-
 src/commands/prompthelper.ts | 65 ++++++++++++++++++++++++++++++++++++
 2 files changed, 66 insertions(+), 2 deletions(-)
 create mode 100644 src/commands/prompthelper.ts

diff --git a/src/commands/ask.ts b/src/commands/ask.ts
index 3bdfa7c..edf4a69 100644
--- a/src/commands/ask.ts
+++ b/src/commands/ask.ts
@@ -48,8 +48,7 @@ export class UserCommand extends Command {
 					role: 'user',
 					content: prompt
 				}
-			],
-			max_tokens: 420
+			]
 		});
 
 		const content = blockQuote(`> ${prompt}\n${codeBlock(`${chatCompletion.data.choices[0].message?.content}`)}`);
diff --git a/src/commands/prompthelper.ts b/src/commands/prompthelper.ts
new file mode 100644
index 0000000..7db79ed
--- /dev/null
+++ b/src/commands/prompthelper.ts
@@ -0,0 +1,65 @@
+import { ApplyOptions } from '@sapphire/decorators';
+import { Args, Command } from '@sapphire/framework';
+import { Message, blockQuote, codeBlock } from 'discord.js';
+import { Configuration, OpenAIApi } from 'openai';
+
+const configuration = new Configuration({
+	apiKey: process.env.OPENAI_API_KEY
+});
+const openai = new OpenAIApi(configuration);
+
+@ApplyOptions({
+	description: 'AI will help you with AI!',
+	options: ['prompt']
+})
+export class UserCommand extends Command {
+	// Register Chat Input and Context Menu command
+	public override registerApplicationCommands(registry: Command.Registry) {
+		registry.registerChatInputCommand((builder) =>
+			builder //
+				.setName(this.name)
+				.setDescription(this.description)
+				.addStringOption((option) => option.setName('prompt').setDescription('AI will help you with AI!').setRequired(true))
+		);
+	}
+
+	// Message command
+	public async messageRun(message: Message, args: Args) {
+		return this.promptHelper(message, args.getOption('prompt') || message.content.split('!wryna ')[1]);
+	}
+
+	// Chat Input (slash) command
+	public async chatInputRun(interaction: Command.ChatInputCommandInteraction) {
+		return this.promptHelper(interaction, interaction.options.getString('prompt') || 'NOTHING');
+	}
+
+	private async promptHelper(
+		interactionOrMessage: Message | Command.ChatInputCommandInteraction | Command.ContextMenuCommandInteraction,
+		prompt: string
+	) {
+		const askMessage =
+			interactionOrMessage instanceof Message
+				? await interactionOrMessage.channel.send({ content: '🤔 Thinking... 🤔' })
+				: await interactionOrMessage.reply({ content: '🤔 Thinking... 🤔', fetchReply: true });
+
+		const chatCompletion = await openai.createChatCompletion({
+			model: 'gpt-3.5-turbo',
+			messages: [
+				{
+					role: 'user',
+					content: `Can you optimize the following prompt to be used for an image generation model?: ${prompt}`
+				}
+			]
+		});
+
+		const content = blockQuote(`> ${prompt}\n${codeBlock(`${chatCompletion.data.choices[0].message?.content}`)}`);
+
+		if (interactionOrMessage instanceof Message) {
+			return askMessage.edit({ content: content.length <= 2000 ? content : 'Sorry... AI no work good...' });
+		}
+
+		return interactionOrMessage.editReply({
+			content: content.length <= 2000 ? content : 'Sorry... AI no work good...'
+		});
+	}
+}