diff --git a/src/EllieBot/Modules/Games/ChatterBot/CleverBotCommands.cs b/src/EllieBot/Modules/Games/ChatterBot/ChatterBotCommands.cs
similarity index 85%
rename from src/EllieBot/Modules/Games/ChatterBot/CleverBotCommands.cs
rename to src/EllieBot/Modules/Games/ChatterBot/ChatterBotCommands.cs
index 1a41953..371c958 100644
--- a/src/EllieBot/Modules/Games/ChatterBot/CleverBotCommands.cs
+++ b/src/EllieBot/Modules/Games/ChatterBot/ChatterBotCommands.cs
@@ -18,7 +18,8 @@ public partial class Games
         [Cmd]
         [RequireContext(ContextType.Guild)]
         [UserPerm(GuildPerm.ManageMessages)]
-        public async Task Cleverbot()
+        [NoPublicBot]
+        public async Task CleverBot()
         {
             var channel = (ITextChannel)ctx.Channel;
 
@@ -30,7 +31,7 @@ public partial class Games
                     await uow.SaveChangesAsync();
                 }
 
-                await Response().Confirm(strs.cleverbot_disabled).SendAsync();
+                await Response().Confirm(strs.chatbot_disabled).SendAsync();
                 return;
             }
 
@@ -42,7 +43,7 @@ public partial class Games
                 await uow.SaveChangesAsync();
             }
 
-            await Response().Confirm(strs.cleverbot_enabled).SendAsync();
+            await Response().Confirm(strs.chatbot_enabled).SendAsync();
         }
     }
 }
\ No newline at end of file
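
The command above only toggles a per-guild flag and the middle of the method body is elided by the hunks. Purely for orientation, a minimal sketch of what a guild-scoped toggle like this usually looks like -- DbService, GetDbContext, GuildConfigsForId and the property access are assumptions, not code taken from this patch:

// Hypothetical sketch of a per-guild chat bot toggle; only SaveChangesAsync and the
// strs.chatbot_* responses are visible in the hunks above, everything else is assumed.
public async Task ToggleChatBotSketch(DbService db, ulong guildId)
{
    await using var uow = db.GetDbContext();      // assumed unit-of-work accessor
    var gc = uow.GuildConfigsForId(guildId);      // assumed helper returning the guild config
    gc.CleverbotEnabled = !gc.CleverbotEnabled;   // the flag ChatterBotService filters on
    await uow.SaveChangesAsync();
}
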
diff --git a/src/EllieBot/Modules/Games/ChatterBot/ChatterbotService.cs b/src/EllieBot/Modules/Games/ChatterBot/ChatterbotService.cs
index 30dc17e..532dd5a 100644
--- a/src/EllieBot/Modules/Games/ChatterBot/ChatterbotService.cs
+++ b/src/EllieBot/Modules/Games/ChatterBot/ChatterbotService.cs
@@ -15,43 +15,32 @@ public class ChatterBotService : IExecOnMessage
     public int Priority
         => 1;
 
-    private readonly FeatureLimitKey _flKey;
-
     private readonly DiscordSocketClient _client;
     private readonly IPermissionChecker _perms;
-    private readonly CommandHandler _cmd;
     private readonly IBotCredentials _creds;
     private readonly IHttpClientFactory _httpFactory;
-    private readonly IPatronageService _ps;
     private readonly GamesConfigService _gcs;
     private readonly IMessageSenderService _sender;
+    public readonly IPatronageService _ps;
 
     public ChatterBotService(
         DiscordSocketClient client,
         IPermissionChecker perms,
         IBot bot,
-        CommandHandler cmd,
+        IPatronageService ps,
         IHttpClientFactory factory,
         IBotCredentials creds,
-        IPatronageService ps,
         GamesConfigService gcs,
         IMessageSenderService sender)
     {
         _client = client;
         _perms = perms;
-        _cmd = cmd;
         _creds = creds;
         _sender = sender;
         _httpFactory = factory;
-        _ps = ps;
         _perms = perms;
         _gcs = gcs;
-
-        _flKey = new FeatureLimitKey()
-        {
-            Key = CleverBotResponseStr.CLEVERBOT_RESPONSE,
-            PrettyName = "Cleverbot Replies"
-        };
+        _ps = ps;
 
         ChatterBotGuilds = new(bot.AllGuildConfigs
                                   .Where(gc => gc.CleverbotEnabled)
@@ -69,9 +58,9 @@ public class ChatterBotService : IExecOnMessage
                 Log.Information("Cleverbot will not work as the api key is missing");
                 return null;
 
-            case ChatBotImplementation.Gpt3:
+            case ChatBotImplementation.Gpt:
                 if (!string.IsNullOrWhiteSpace(_creds.Gpt3ApiKey))
-                    return new OfficialGpt3Session(_creds.Gpt3ApiKey,
+                    return new OfficialGptSession(_creds.Gpt3ApiKey,
                         _gcs.Data.ChatGpt.ModelName,
                         _gcs.Data.ChatGpt.ChatHistory,
                         _gcs.Data.ChatGpt.MaxTokens,
@@ -87,22 +76,21 @@ public class ChatterBotService : IExecOnMessage
         }
     }
 
-    public string PrepareMessage(IUserMessage msg, out IChatterBotSession cleverbot)
+    public IChatterBotSession GetOrCreateSession(ulong guildId)
     {
-        var channel = msg.Channel as ITextChannel;
-        cleverbot = null;
+        if (ChatterBotGuilds.TryGetValue(guildId, out var lazyChatBot))
+            return lazyChatBot.Value;
 
-        if (channel is null)
-            return null;
+        lazyChatBot = new(() => CreateSession(), true);
+        ChatterBotGuilds.TryAdd(guildId, lazyChatBot);
+        return lazyChatBot.Value;
+    }
 
-        if (!ChatterBotGuilds.TryGetValue(channel.Guild.Id, out var lazyCleverbot))
-            return null;
-
-        cleverbot = lazyCleverbot.Value;
-
-        var ellieId = _client.CurrentUser.Id;
-        var normalMention = $"<@{ellieId}> ";
-        var nickMention = $"<@!{ellieId}> ";
+    public string PrepareMessage(IUserMessage msg)
+    {
+        var nadekoId = _client.CurrentUser.Id;
+        var normalMention = $"<@{nadekoId}> ";
+        var nickMention = $"<@!{nadekoId}> ";
         string message;
         if (msg.Content.StartsWith(normalMention, StringComparison.InvariantCulture))
             message = msg.Content[normalMention.Length..].Trim();
@@ -119,13 +107,31 @@ public class ChatterBotService : IExecOnMessage
         if (guild is not SocketGuild sg)
             return false;
 
+        var channel = usrMsg.Channel as ITextChannel;
+        if (channel is null)
+            return false;
+
+        if (!ChatterBotGuilds.TryGetValue(channel.Guild.Id, out var lazyChatBot))
+            return false;
+
+        var chatBot = lazyChatBot.Value;
+        var message = PrepareMessage(usrMsg);
+        if (message is null)
+            return false;
+
+        return await RunChatterBot(sg, usrMsg, channel, chatBot, message);
+    }
+
+    public async Task<bool> RunChatterBot(
+        SocketGuild guild,
+        IUserMessage usrMsg,
+        ITextChannel channel,
+        IChatterBotSession chatBot,
+        string message)
+    {
         try
         {
-            var message = PrepareMessage(usrMsg, out var cbs);
-            if (message is null || cbs is null)
-                return false;
-
-            var res = await _perms.CheckPermsAsync(sg,
+            var res = await _perms.CheckPermsAsync(guild,
                 usrMsg.Channel,
                 usrMsg.Author,
                 CleverBotResponseStr.CLEVERBOT_RESPONSE,
@@ -134,59 +140,33 @@ public class ChatterBotService : IExecOnMessage
             if (!res.IsAllowed)
                 return false;
 
-            var channel = (ITextChannel)usrMsg.Channel;
-            var conf = _ps.GetConfig();
-            if (!_creds.IsOwner(sg.OwnerId) && conf.IsEnabled)
+            if (!await _ps.LimitHitAsync(LimitedFeatureName.ChatBot, usrMsg.Author.Id, 2048 / 2))
             {
-                var quota = await _ps.TryGetFeatureLimitAsync(_flKey, sg.OwnerId, 0);
-
-                uint? daily = quota.Quota is int dVal and < 0
-                    ? (uint)-dVal
-                    : null;
-
-                uint? monthly = quota.Quota is int mVal and >= 0
-                    ? (uint)mVal
-                    : null;
-
-                var maybeLimit = await _ps.TryIncrementQuotaCounterAsync(sg.OwnerId,
-                    sg.OwnerId == usrMsg.Author.Id,
-                    FeatureType.Limit,
-                    _flKey.Key,
-                    null,
-                    daily,
-                    monthly);
-
-                if (maybeLimit.TryPickT1(out var ql, out var counters))
-                {
-                    if (ql.Quota == 0)
-                    {
-                        await _sender.Response(channel)
-                                     .Error(null,
-                                         text:
-                                         "In order to use the cleverbot feature, the owner of this server should be [Patron Tier X](https://patreon.com/join/elliebot) on patreon.",
-                                         footer:
-                                         "You may disable the cleverbot feature, and this message via '.cleverbot' command")
-                                     .SendAsync();
-
-                        return true;
-                    }
-
-                    await _sender.Response(channel)
-                                 .Error(
-                                     null!,
-                                     $"You've reached your quota limit of **{ql.Quota}** responses {ql.QuotaPeriod.ToFullName()} for the cleverbot feature.",
-                                     footer: "You may wait for the quota reset or .")
-                                 .SendAsync();
-
-                    return true;
-                }
+                // limit exceeded
+                return false;
             }
 
             _ = channel.TriggerTypingAsync();
-            var response = await cbs.Think(message, usrMsg.Author.ToString());
-            await _sender.Response(channel)
-                         .Confirm(response)
-                         .SendAsync();
+            var response = await chatBot.Think(message, usrMsg.Author.ToString());
+
+            if (response.TryPickT0(out var result, out var error))
+            {
+                // calculate the diff in case we overestimated user's usage
+                var inTokens = (result.TokensIn - 2048) / 2;
+
+                // add the output tokens to the limit
+                await _ps.LimitForceHit(LimitedFeatureName.ChatBot,
+                    usrMsg.Author.Id,
+                    (inTokens) + (result.TokensOut / 2 * 3));
+
+                await _sender.Response(channel)
+                             .Confirm(result.Text)
+                             .SendAsync();
+            }
+            else
+            {
+                Log.Warning("Error in chatterbot: {Error}", error);
+            }
 
             Log.Information("""
                             CleverBot Executed
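
The old per-owner Patreon quota block is gone; the service now reserves a flat 2048 / 2 = 1024 units through LimitHitAsync before the request and, once the reply arrives, applies a signed correction through LimitForceHit. In effect (integer arithmetic) a reply appears to cost about TokensIn / 2 plus 1.5 x TokensOut. A small sketch of just that arithmetic, with made-up example token counts and no IPatronageService calls involved:

// Mirrors the bookkeeping in RunChatterBot; the token counts are example values only.
const int reserved = 2048 / 2;                 // 1024, reserved up front by LimitHitAsync
int tokensIn = 700, tokensOut = 120;           // usage reported back by the API

int correction = (tokensIn - 2048) / 2         // -674, refunds most of the reservation
                 + tokensOut / 2 * 3;          // +180, output charged at roughly 1.5x

int totalCharged = reserved + correction;      // 1024 - 674 + 180 = 530 ~= 700/2 + 1.5*120
Console.WriteLine(totalCharged);
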
diff --git a/src/EllieBot/Modules/Games/ChatterBot/_common/Gpt3Response.cs b/src/EllieBot/Modules/Games/ChatterBot/_common/Gpt3Response.cs
index ad8692a..e983338 100644
--- a/src/EllieBot/Modules/Games/ChatterBot/_common/Gpt3Response.cs
+++ b/src/EllieBot/Modules/Games/ChatterBot/_common/Gpt3Response.cs
@@ -3,10 +3,25 @@ using System.Text.Json.Serialization;
 
 namespace EllieBot.Modules.Games.Common.ChatterBot;
 
-public class Gpt3Response
+public class OpenAiCompletionResponse
 {
     [JsonPropertyName("choices")]
    public Choice[] Choices { get; set; }
+
+    [JsonPropertyName("usage")]
+    public OpenAiUsageData Usage { get; set; }
+}
+
+public class OpenAiUsageData
+{
+    [JsonPropertyName("prompt_tokens")]
+    public int PromptTokens { get; set; }
+
+    [JsonPropertyName("completion_tokens")]
+    public int CompletionTokens { get; set; }
+
+    [JsonPropertyName("total_tokens")]
+    public int TotalTokens { get; set; }
 }
 
 public class Choice
diff --git a/src/EllieBot/Modules/Games/ChatterBot/_common/IChatterBotSession.cs b/src/EllieBot/Modules/Games/ChatterBot/_common/IChatterBotSession.cs
index 847d661..0372c87 100644
--- a/src/EllieBot/Modules/Games/ChatterBot/_common/IChatterBotSession.cs
+++ b/src/EllieBot/Modules/Games/ChatterBot/_common/IChatterBotSession.cs
@@ -1,7 +1,10 @@
 #nullable disable
+using OneOf;
+using OneOf.Types;
+
 namespace EllieBot.Modules.Games.Common.ChatterBot;
 
 public interface IChatterBotSession
 {
-    Task<string> Think(string input, string username);
+    Task<OneOf<ThinkResult, Error<string>>> Think(string input, string username);
 }
\ No newline at end of file
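
Think now returns OneOf<ThinkResult, Error<string>> instead of a bare string, so every caller has to branch on success versus error. A minimal consumer sketch, mirroring the TryPickT0 branch used in ChatterBotService; the session is assumed to come from GetOrCreateSession(guildId), and this is not code from the patch itself:

// Sketch of consuming the new Think() signature.
static async Task PrintReplyAsync(IChatterBotSession session, string input, string username)
{
    var outcome = await session.Think(input, username);

    if (outcome.TryPickT0(out var result, out var error))
        Console.WriteLine($"{result.Text} (in: {result.TokensIn}, out: {result.TokensOut})");
    else
        Console.WriteLine($"chat bot error: {error.Value}");   // error is OneOf.Types.Error<string>
}
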
diff --git a/src/EllieBot/Modules/Games/ChatterBot/_common/OfficialCleverbotSession.cs b/src/EllieBot/Modules/Games/ChatterBot/_common/OfficialCleverbotSession.cs
index 83dc060..b20f1d0 100644
--- a/src/EllieBot/Modules/Games/ChatterBot/_common/OfficialCleverbotSession.cs
+++ b/src/EllieBot/Modules/Games/ChatterBot/_common/OfficialCleverbotSession.cs
@@ -1,5 +1,7 @@
 #nullable disable
 using Newtonsoft.Json;
+using OneOf;
+using OneOf.Types;
 
 namespace EllieBot.Modules.Games.Common.ChatterBot;
 
@@ -18,7 +20,7 @@ public class OfficialCleverbotSession : IChatterBotSession
         _httpFactory = factory;
     }
 
-    public async Task<string> Think(string input, string username)
+    public async Task<OneOf<ThinkResult, Error<string>>> Think(string input, string username)
     {
         using var http = _httpFactory.CreateClient();
         var dataString = await http.GetStringAsync(string.Format(QueryString, input, cs ?? ""));
@@ -27,12 +29,17 @@ public class OfficialCleverbotSession : IChatterBotSession
             var data = JsonConvert.DeserializeObject(dataString);
 
             cs = data?.Cs;
-            return data?.Output;
+            return new ThinkResult
+            {
+                Text = data?.Output,
+                TokensIn = 2,
+                TokensOut = 1
+            };
         }
         catch
         {
-            Log.Warning("Unexpected cleverbot response received: {ResponseString}", dataString);
-            return null;
+            Log.Warning("Unexpected response from CleverBot: {ResponseString}", dataString);
+            return new Error<string>("Unexpected CleverBot response received");
         }
     }
 }
\ No newline at end of file
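
Cleverbot reports no token usage, so the session returns fixed values (TokensIn = 2, TokensOut = 1). Plugged into the LimitForceHit correction shown earlier, that hands back almost the entire 1024-unit reservation, so a Cleverbot reply ends up costing roughly one unit:

// Fixed Cleverbot numbers run through the RunChatterBot correction (integer arithmetic).
int correction = (2 - 2048) / 2     // -1023
                 + 1 / 2 * 3;       // 0, because 1 / 2 truncates to 0

int totalCharged = 2048 / 2 + correction;   // 1024 - 1023 = 1
Console.WriteLine(totalCharged);
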
diff --git a/src/EllieBot/Modules/Games/ChatterBot/_common/OfficialGpt3Session.cs b/src/EllieBot/Modules/Games/ChatterBot/_common/OfficialGpt3Session.cs
deleted file mode 100644
index 4711fd6..0000000
--- a/src/EllieBot/Modules/Games/ChatterBot/_common/OfficialGpt3Session.cs
+++ /dev/null
@@ -1,105 +0,0 @@
-#nullable disable
-using Newtonsoft.Json;
-using System.Net.Http.Json;
-using SharpToken;
-
-namespace EllieBot.Modules.Games.Common.ChatterBot;
-
-public class OfficialGpt3Session : IChatterBotSession
-{
-    private string Uri
-        => $"https://api.openai.com/v1/chat/completions";
-
-    private readonly string _apiKey;
-    private readonly string _model;
-    private readonly int _maxHistory;
-    private readonly int _maxTokens;
-    private readonly int _minTokens;
-    private readonly string _ellieUsername;
-    private readonly GptEncoding _encoding;
-    private List<GPTMessage> messages = new();
-    private readonly IHttpClientFactory _httpFactory;
-
-
-
-    public OfficialGpt3Session(
-        string apiKey,
-        ChatGptModel model,
-        int chatHistory,
-        int maxTokens,
-        int minTokens,
-        string personality,
-        string ellieUsername,
-        IHttpClientFactory factory)
-    {
-        _apiKey = apiKey;
-        _httpFactory = factory;
-        switch (model)
-        {
-            case ChatGptModel.Gpt35Turbo:
-                _model = "gpt-3.5-turbo";
-                break;
-            case ChatGptModel.Gpt4:
-                _model = "gpt-4";
-                break;
-            case ChatGptModel.Gpt432k:
-                _model = "gpt-4-32k";
-                break;
-        }
-        _maxHistory = chatHistory;
-        _maxTokens = maxTokens;
-        _minTokens = minTokens;
-        _ellieUsername = ellieUsername;
-        _encoding = GptEncoding.GetEncodingForModel(_model);
-        messages.Add(new GPTMessage(){Role = "user", Content = personality, Name = _ellieUsername});
-    }
-
-    public async Task<string> Think(string input, string username)
-    {
-        messages.Add(new GPTMessage(){Role = "user", Content = input, Name = username});
-        while(messages.Count > _maxHistory + 2){
-            messages.RemoveAt(1);
-        }
-        int tokensUsed = 0;
-        foreach(GPTMessage message in messages){
-            tokensUsed += _encoding.Encode(message.Content).Count;
-        }
-        tokensUsed *= 2; //Unsure why this is the case, but the token count chatgpt reports back is double what I calculate.
-        //check if we have the minimum number of tokens available to use. Remove messages until we have enough, otherwise exit out and inform the user why.
-        while(_maxTokens - tokensUsed <= _minTokens){
-            if(messages.Count > 2){
-                int tokens = _encoding.Encode(messages[1].Content).Count * 2;
-                tokensUsed -= tokens;
-                messages.RemoveAt(1);
-            }
-            else{
-                return "Token count exceeded, please increase the number of tokens in the bot config and restart.";
-            }
-        }
-        using var http = _httpFactory.CreateClient();
-        http.DefaultRequestHeaders.Authorization = new("Bearer", _apiKey);
-        var data = await http.PostAsJsonAsync(Uri, new Gpt3ApiRequest()
-        {
-            Model = _model,
-            Messages = messages,
-            MaxTokens = _maxTokens - tokensUsed,
-            Temperature = 1,
-        });
-        var dataString = await data.Content.ReadAsStringAsync();
-        try
-        {
-            var response = JsonConvert.DeserializeObject<Gpt3Response>(dataString);
-            string message = response?.Choices[0]?.Message?.Content;
-            //Can't rely on the return to except, now that we need to add it to the messages list.
-            _ = message ?? throw new ArgumentNullException(nameof(message));
-            messages.Add(new GPTMessage(){Role = "assistant", Content = message, Name = _ellieUsername});
-            return message;
-        }
-        catch
-        {
-            Log.Warning("Unexpected GPT-3 response received: {ResponseString}", dataString);
-            return null;
-        }
-    }
-}
-
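
Both the deleted session above and its replacement below estimate prompt size with SharpToken before calling the API, then double the figure to match what the API reports back. A standalone sketch of that estimate -- the model name and messages are just example inputs:

using System;
using System.Linq;
using SharpToken;

// Rough prompt-size estimate in the style of the GPT sessions: encode each message's
// content, sum the counts, then double (the empirical factor the original comment mentions).
var encoding = GptEncoding.GetEncodingForModel("gpt-3.5-turbo");

string[] history = { "You are a helpful assistant.", "hello", "hi there, how can I help?" };
var tokensUsed = history.Sum(m => encoding.Encode(m).Count) * 2;

Console.WriteLine(tokensUsed);
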
diff --git a/src/EllieBot/Modules/Games/ChatterBot/_common/OfficialGptSession.cs b/src/EllieBot/Modules/Games/ChatterBot/_common/OfficialGptSession.cs
new file mode 100644
index 0000000..4f7d35a
--- /dev/null
+++ b/src/EllieBot/Modules/Games/ChatterBot/_common/OfficialGptSession.cs
@@ -0,0 +1,141 @@
+#nullable disable
+using Newtonsoft.Json;
+using OneOf.Types;
+using System.Net.Http.Json;
+using SharpToken;
+
+namespace EllieBot.Modules.Games.Common.ChatterBot;
+
+public class OfficialGptSession : IChatterBotSession
+{
+    private string Uri
+        => $"https://api.openai.com/v1/chat/completions";
+
+    private readonly string _apiKey;
+    private readonly string _model;
+    private readonly int _maxHistory;
+    private readonly int _maxTokens;
+    private readonly int _minTokens;
+    private readonly string _nadekoUsername;
+    private readonly GptEncoding _encoding;
+    private List<GPTMessage> messages = new();
+    private readonly IHttpClientFactory _httpFactory;
+
+
+    public OfficialGptSession(
+        string apiKey,
+        ChatGptModel model,
+        int chatHistory,
+        int maxTokens,
+        int minTokens,
+        string personality,
+        string nadekoUsername,
+        IHttpClientFactory factory)
+    {
+        _apiKey = apiKey;
+        _httpFactory = factory;
+
+        _model = model switch
+        {
+            ChatGptModel.Gpt35Turbo => "gpt-3.5-turbo",
+            ChatGptModel.Gpt4o => "gpt-4o",
+            _ => throw new ArgumentException("Unknown, unsupported or obsolete model", nameof(model))
+        };
+
+        _maxHistory = chatHistory;
+        _maxTokens = maxTokens;
+        _minTokens = minTokens;
+        _nadekoUsername = nadekoUsername;
+        _encoding = GptEncoding.GetEncodingForModel(_model);
+        messages.Add(new()
+        {
+            Role = "system",
+            Content = personality,
+            Name = _nadekoUsername
+        });
+    }
+
+    public async Task<OneOf<ThinkResult, Error<string>>> Think(string input, string username)
+    {
+        messages.Add(new()
+        {
+            Role = "user",
+            Content = input,
+            Name = username
+        });
+        while (messages.Count > _maxHistory + 2)
+        {
+            messages.RemoveAt(1);
+        }
+
+        var tokensUsed = messages.Sum(message => _encoding.Encode(message.Content).Count);
+
+        tokensUsed *= 2;
+
+        //check if we have the minimum number of tokens available to use. Remove messages until we have enough, otherwise exit out and inform the user why.
+        while (_maxTokens - tokensUsed <= _minTokens)
+        {
+            if (messages.Count > 2)
+            {
+                var tokens = _encoding.Encode(messages[1].Content).Count * 2;
+                tokensUsed -= tokens;
+                messages.RemoveAt(1);
+            }
+            else
+            {
+                return new Error<string>("Token count exceeded, please increase the number of tokens in the bot config and restart.");
+            }
+        }
+
+        using var http = _httpFactory.CreateClient();
+        http.DefaultRequestHeaders.Authorization = new("Bearer", _apiKey);
+
+        var data = await http.PostAsJsonAsync(Uri,
+            new Gpt3ApiRequest()
+            {
+                Model = _model,
+                Messages = messages,
+                MaxTokens = _maxTokens - tokensUsed,
+                Temperature = 1,
+            });
+
+        var dataString = await data.Content.ReadAsStringAsync();
+        try
+        {
+            var response = JsonConvert.DeserializeObject<OpenAiCompletionResponse>(dataString);
+            var res = response?.Choices?[0];
+            var message = res?.Message?.Content;
+
+            if (message is null)
+            {
+                return new Error<string>("ChatGpt: Received no response.");
+            }
+
+            messages.Add(new()
+            {
+                Role = "assistant",
+                Content = message,
+                Name = _nadekoUsername
+            });
+
+            return new ThinkResult()
+            {
+                Text = message,
+                TokensIn = response.Usage.PromptTokens,
+                TokensOut = response.Usage.CompletionTokens
+            };
+        }
+        catch
+        {
+            Log.Warning("Unexpected response received from OpenAI: {ResponseString}", dataString);
+            return new Error<string>("Unexpected response received");
+        }
+    }
+}
+
+public sealed class ThinkResult
+{
+    public string Text { get; set; }
+    public int TokensIn { get; set; }
+    public int TokensOut { get; set; }
+}
\ No newline at end of file
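
With the default values set further down in GamesConfig.cs (MaxTokens = 100, MinTokens = 30), the trimming loop in Think drops the oldest non-system message whenever the doubled estimate leaves less than 30 tokens of headroom, and whatever remains of the budget is sent as MaxTokens in the request. The same check in isolation, with made-up numbers:

// Headroom check used by Think(): trim history until maxTokens - tokensUsed > minTokens.
int maxTokens = 100, minTokens = 30;   // defaults from GamesConfig.cs below
int tokensUsed = 86;                   // doubled SharpToken estimate of the current history

while (maxTokens - tokensUsed <= minTokens)
{
    // The real session removes messages[1] and subtracts its doubled token count;
    // a single 20-token message is assumed here to keep the example self-contained.
    tokensUsed -= 20;
}

Console.WriteLine(maxTokens - tokensUsed);   // 34 -- the budget passed as MaxTokens to the API
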
diff --git a/src/EllieBot/Modules/Games/GamesConfig.cs b/src/EllieBot/Modules/Games/GamesConfig.cs
index 1502c39..e56cc70 100644
--- a/src/EllieBot/Modules/Games/GamesConfig.cs
+++ b/src/EllieBot/Modules/Games/GamesConfig.cs
@@ -8,7 +8,7 @@ namespace EllieBot.Modules.Games.Common;
 public sealed partial class GamesConfig : ICloneable
 {
     [Comment("DO NOT CHANGE")]
-    public int Version { get; set; } = 3;
+    public int Version { get; set; } = 4;
 
     [Comment("Hangman related settings (.hangman command)")]
     public HangmanConfig Hangman { get; set; } = new()
@@ -105,8 +105,8 @@ public sealed partial class GamesConfig : ICloneable
 
     [Comment(@"Which chatbot API should bot use.
 'cleverbot' - bot will use Cleverbot API.
-'gpt3' - bot will use GPT-3 API")]
-    public ChatBotImplementation ChatBot { get; set; } = ChatBotImplementation.Gpt3;
+'gpt' - bot will use GPT API")]
+    public ChatBotImplementation ChatBot { get; set; } = ChatBotImplementation.Gpt;
 
     public ChatGptConfig ChatGpt { get; set; } = new();
 }
@@ -114,10 +114,10 @@ public sealed partial class GamesConfig : ICloneable
 [Cloneable]
 public sealed partial class ChatGptConfig
 {
-    [Comment(@"Which GPT-3 Model should bot use.
+    [Comment(@"Which GPT Model should bot use.
     gpt35turbo - cheapest
-    gpt4 - 30x more expensive, higher quality
-    gp432k - same model as above, but with a 32k token limit")]
+    gpt4o - more expensive, higher quality
+")]
     public ChatGptModel ModelName { get; set; } = ChatGptModel.Gpt35Turbo;
 
     [Comment(@"How should the chat bot behave, what's its personality? (Usage of this counts towards the max tokens)")]
@@ -126,10 +126,10 @@ public sealed partial class ChatGptConfig
     [Comment(@"The maximum number of messages in a conversation that can be remembered.
 (This will increase the number of tokens used)")]
     public int ChatHistory { get; set; } = 5;
 
-    [Comment(@"The maximum number of tokens to use per GPT-3 API call")]
+    [Comment(@"The maximum number of tokens to use per GPT API call")]
     public int MaxTokens { get; set; } = 100;
 
-    [Comment(@"The minimum number of tokens to use per GPT-3 API call, such that chat history is removed to make room.")]
+    [Comment(@"The minimum number of tokens to use per GPT API call, such that chat history is removed to make room.")]
     public int MinTokens { get; set; } = 30;
 }
 
@@ -163,12 +163,18 @@ public sealed partial class RaceAnimal
 public enum ChatBotImplementation
 {
     Cleverbot,
-    Gpt3
+    Gpt = 1,
+    [Obsolete]
+    Gpt3 = 1,
 }
 
 public enum ChatGptModel
 {
-    Gpt35Turbo,
+    [Obsolete]
     Gpt4,
-    Gpt432k
+    [Obsolete]
+    Gpt432k,
+
+    Gpt35Turbo,
+    Gpt4o,
 }
\ No newline at end of file
diff --git a/src/EllieBot/Modules/Games/GamesConfigService.cs b/src/EllieBot/Modules/Games/GamesConfigService.cs
index 6446c23..06b75c3 100644
--- a/src/EllieBot/Modules/Games/GamesConfigService.cs
+++ b/src/EllieBot/Modules/Games/GamesConfigService.cs
@@ -73,15 +73,6 @@ public sealed class GamesConfigService : ConfigServiceBase
             });
         }
 
-        if (data.Version < 2)
-        {
-            ModifyConfig(c =>
-            {
-                c.Version = 2;
-                c.ChatBot = ChatBotImplementation.Cleverbot;
-            });
-        }
-
         if (data.Version < 3)
         {
             ModifyConfig(c =>
@@ -90,5 +81,19 @@ public sealed class GamesConfigService : ConfigServiceBase
                 c.ChatGpt.ModelName = ChatGptModel.Gpt35Turbo;
             });
         }
+
+        if (data.Version < 4)
+        {
+            ModifyConfig(c =>
+            {
+                c.Version = 4;
+#pragma warning disable CS0612 // Type or member is obsolete
+                c.ChatGpt.ModelName =
+                    c.ChatGpt.ModelName == ChatGptModel.Gpt4 || c.ChatGpt.ModelName == ChatGptModel.Gpt432k
+                        ? ChatGptModel.Gpt4o
+                        : c.ChatGpt.ModelName;
+#pragma warning restore CS0612 // Type or member is obsolete
+            });
+        }
     }
 }
\ No newline at end of file
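
The version-4 migration only rewrites configs that still reference the retired models: Gpt4 and Gpt432k collapse into Gpt4o, Gpt35Turbo is left untouched, and Version is bumped to 4. The mapping, pulled out of ModifyConfig for readability:

// The v4 model migration from GamesConfigService, isolated from ModifyConfig.
#pragma warning disable CS0612 // Gpt4 and Gpt432k are marked [Obsolete]
static ChatGptModel MigrateModel(ChatGptModel current)
    => current is ChatGptModel.Gpt4 or ChatGptModel.Gpt432k
        ? ChatGptModel.Gpt4o
        : current;
#pragma warning restore CS0612
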