Updated Games module

Toastie 2024-06-27 16:41:10 +12:00
parent 3df4f710da
commit 3a70370287
Signed by: toastie_t0ast
GPG key ID: 27F3B6855AFD40A4
9 changed files with 270 additions and 217 deletions

View file

@@ -18,7 +18,8 @@ public partial class Games
     [Cmd]
     [RequireContext(ContextType.Guild)]
     [UserPerm(GuildPerm.ManageMessages)]
-    public async Task Cleverbot()
+    [NoPublicBot]
+    public async Task CleverBot()
     {
         var channel = (ITextChannel)ctx.Channel;
@@ -30,7 +31,7 @@ public partial class Games
             await uow.SaveChangesAsync();
         }

-        await Response().Confirm(strs.cleverbot_disabled).SendAsync();
+        await Response().Confirm(strs.chatbot_disabled).SendAsync();
         return;
     }
@@ -42,7 +43,7 @@ public partial class Games
             await uow.SaveChangesAsync();
         }

-        await Response().Confirm(strs.cleverbot_enabled).SendAsync();
+        await Response().Confirm(strs.chatbot_enabled).SendAsync();
         }
     }
 }

View file

@@ -15,43 +15,32 @@ public class ChatterBotService : IExecOnMessage
     public int Priority
         => 1;

-    private readonly FeatureLimitKey _flKey;
     private readonly DiscordSocketClient _client;
     private readonly IPermissionChecker _perms;
-    private readonly CommandHandler _cmd;
     private readonly IBotCredentials _creds;
     private readonly IHttpClientFactory _httpFactory;
+    private readonly IPatronageService _ps;
     private readonly GamesConfigService _gcs;
     private readonly IMessageSenderService _sender;
-    public readonly IPatronageService _ps;

     public ChatterBotService(
         DiscordSocketClient client,
         IPermissionChecker perms,
         IBot bot,
-        CommandHandler cmd,
+        IPatronageService ps,
         IHttpClientFactory factory,
         IBotCredentials creds,
-        IPatronageService ps,
         GamesConfigService gcs,
         IMessageSenderService sender)
     {
         _client = client;
         _perms = perms;
-        _cmd = cmd;
         _creds = creds;
         _sender = sender;
         _httpFactory = factory;
+        _ps = ps;
         _perms = perms;
         _gcs = gcs;
-        _ps = ps;
-        _flKey = new FeatureLimitKey()
-        {
-            Key = CleverBotResponseStr.CLEVERBOT_RESPONSE,
-            PrettyName = "Cleverbot Replies"
-        };

         ChatterBotGuilds = new(bot.AllGuildConfigs
             .Where(gc => gc.CleverbotEnabled)
@@ -69,9 +58,9 @@ public class ChatterBotService : IExecOnMessage
                 Log.Information("Cleverbot will not work as the api key is missing");
                 return null;
-            case ChatBotImplementation.Gpt3:
+            case ChatBotImplementation.Gpt:
                 if (!string.IsNullOrWhiteSpace(_creds.Gpt3ApiKey))
-                    return new OfficialGpt3Session(_creds.Gpt3ApiKey,
+                    return new OfficialGptSession(_creds.Gpt3ApiKey,
                         _gcs.Data.ChatGpt.ModelName,
                         _gcs.Data.ChatGpt.ChatHistory,
                         _gcs.Data.ChatGpt.MaxTokens,
@@ -87,22 +76,21 @@ public class ChatterBotService : IExecOnMessage
         }
     }

-    public string PrepareMessage(IUserMessage msg, out IChatterBotSession cleverbot)
+    public IChatterBotSession GetOrCreateSession(ulong guildId)
     {
-        var channel = msg.Channel as ITextChannel;
-        cleverbot = null;
-
-        if (channel is null)
-            return null;
-
-        if (!ChatterBotGuilds.TryGetValue(channel.Guild.Id, out var lazyCleverbot))
-            return null;
-
-        cleverbot = lazyCleverbot.Value;
-
-        var ellieId = _client.CurrentUser.Id;
-        var normalMention = $"<@{ellieId}> ";
-        var nickMention = $"<@!{ellieId}> ";
+        if (ChatterBotGuilds.TryGetValue(guildId, out var lazyChatBot))
+            return lazyChatBot.Value;
+
+        lazyChatBot = new(() => CreateSession(), true);
+        ChatterBotGuilds.TryAdd(guildId, lazyChatBot);
+
+        return lazyChatBot.Value;
+    }
+
+    public string PrepareMessage(IUserMessage msg)
+    {
+        var nadekoId = _client.CurrentUser.Id;
+        var normalMention = $"<@{nadekoId}> ";
+        var nickMention = $"<@!{nadekoId}> ";
         string message;
         if (msg.Content.StartsWith(normalMention, StringComparison.InvariantCulture))
             message = msg.Content[normalMention.Length..].Trim();
@@ -119,13 +107,31 @@ public class ChatterBotService : IExecOnMessage
         if (guild is not SocketGuild sg)
             return false;

+        var channel = usrMsg.Channel as ITextChannel;
+        if (channel is null)
+            return false;
+
+        if (!ChatterBotGuilds.TryGetValue(channel.Guild.Id, out var lazyChatBot))
+            return false;
+
+        var chatBot = lazyChatBot.Value;
+        var message = PrepareMessage(usrMsg);
+        if (message is null)
+            return false;
+
+        return await RunChatterBot(sg, usrMsg, channel, chatBot, message);
+    }
+
+    public async Task<bool> RunChatterBot(
+        SocketGuild guild,
+        IUserMessage usrMsg,
+        ITextChannel channel,
+        IChatterBotSession chatBot,
+        string message)
+    {
         try
         {
-            var message = PrepareMessage(usrMsg, out var cbs);
-            if (message is null || cbs is null)
-                return false;
-
-            var res = await _perms.CheckPermsAsync(sg,
+            var res = await _perms.CheckPermsAsync(guild,
                 usrMsg.Channel,
                 usrMsg.Author,
                 CleverBotResponseStr.CLEVERBOT_RESPONSE,
@@ -134,59 +140,33 @@ public class ChatterBotService : IExecOnMessage
             if (!res.IsAllowed)
                 return false;

-            var channel = (ITextChannel)usrMsg.Channel;
-
-            var conf = _ps.GetConfig();
-            if (!_creds.IsOwner(sg.OwnerId) && conf.IsEnabled)
+            if (!await _ps.LimitHitAsync(LimitedFeatureName.ChatBot, usrMsg.Author.Id, 2048 / 2))
             {
-                var quota = await _ps.TryGetFeatureLimitAsync(_flKey, sg.OwnerId, 0);
-
-                uint? daily = quota.Quota is int dVal and < 0
-                    ? (uint)-dVal
-                    : null;
-
-                uint? monthly = quota.Quota is int mVal and >= 0
-                    ? (uint)mVal
-                    : null;
-
-                var maybeLimit = await _ps.TryIncrementQuotaCounterAsync(sg.OwnerId,
-                    sg.OwnerId == usrMsg.Author.Id,
-                    FeatureType.Limit,
-                    _flKey.Key,
-                    null,
-                    daily,
-                    monthly);
-
-                if (maybeLimit.TryPickT1(out var ql, out var counters))
-                {
-                    if (ql.Quota == 0)
-                    {
-                        await _sender.Response(channel)
-                                     .Error(null,
-                                         text:
-                                         "In order to use the cleverbot feature, the owner of this server should be [Patron Tier X](https://patreon.com/join/elliebot) on patreon.",
-                                         footer:
-                                         "You may disable the cleverbot feature, and this message via '.cleverbot' command")
-                                     .SendAsync();
-
-                        return true;
-                    }
-
-                    await _sender.Response(channel)
-                                 .Error(
-                                     null!,
-                                     $"You've reached your quota limit of **{ql.Quota}** responses {ql.QuotaPeriod.ToFullName()} for the cleverbot feature.",
-                                     footer: "You may wait for the quota reset or .")
-                                 .SendAsync();
-
-                    return true;
-                }
+                // limit exceeded
+                return false;
             }

             _ = channel.TriggerTypingAsync();
-            var response = await cbs.Think(message, usrMsg.Author.ToString());
-            await _sender.Response(channel)
-                         .Confirm(response)
-                         .SendAsync();
+            var response = await chatBot.Think(message, usrMsg.Author.ToString());
+
+            if (response.TryPickT0(out var result, out var error))
+            {
+                // calculate the diff in case we overestimated user's usage
+                var inTokens = (result.TokensIn - 2048) / 2;
+
+                // add the output tokens to the limit
+                await _ps.LimitForceHit(LimitedFeatureName.ChatBot,
+                    usrMsg.Author.Id,
+                    (inTokens) + (result.TokensOut / 2 * 3));
+
+                await _sender.Response(channel)
+                             .Confirm(result.Text)
+                             .SendAsync();
+            }
+            else
+            {
+                Log.Warning("Error in chatterbot: {Error}", error);
+            }

             Log.Information("""
                 CleverBot Executed

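The limit flow above charges a flat 2048 / 2 = 1024 tokens up front through LimitHitAsync and then settles against the usage the model actually reports through LimitForceHit. A minimal arithmetic sketch of that settlement (the helper name and example numbers are illustrative, not part of the commit):

    // Mirrors the arithmetic in RunChatterBot: 1024 tokens are charged up front,
    // then the remainder is settled once TokensIn/TokensOut are known.
    static int SettlementTokens(int tokensIn, int tokensOut)
    {
        var inputCorrection = (tokensIn - 2048) / 2; // corrects the up-front estimate of 1024
        var outputCharge = tokensOut / 2 * 3;        // output tokens weighted at 1.5x their raw count
        return inputCorrection + outputCharge;
    }

For example, 3000 prompt tokens and 100 completion tokens settle at (3000 - 2048) / 2 + 100 / 2 * 3 = 476 + 150 = 626, so the total charged is 1024 + 626 = 1650, i.e. half the prompt tokens plus one and a half times the completion tokens.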
View file

@@ -3,10 +3,25 @@ using System.Text.Json.Serialization;

 namespace EllieBot.Modules.Games.Common.ChatterBot;

-public class Gpt3Response
+public class OpenAiCompletionResponse
 {
     [JsonPropertyName("choices")]
     public Choice[] Choices { get; set; }
+
+    [JsonPropertyName("usage")]
+    public OpenAiUsageData Usage { get; set; }
+}
+
+public class OpenAiUsageData
+{
+    [JsonPropertyName("prompt_tokens")]
+    public int PromptTokens { get; set; }
+
+    [JsonPropertyName("completion_tokens")]
+    public int CompletionTokens { get; set; }
+
+    [JsonPropertyName("total_tokens")]
+    public int TotalTokens { get; set; }
 }

 public class Choice

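The new usage block maps OpenAI's token accounting fields via System.Text.Json attributes. A minimal deserialization sketch (the sample payload is assumed from the attribute names, not taken from the commit):

    using System.Text.Json;

    var json = """{"prompt_tokens":57,"completion_tokens":12,"total_tokens":69}""";
    var usage = JsonSerializer.Deserialize<OpenAiUsageData>(json);
    Console.WriteLine(usage.PromptTokens);     // 57
    Console.WriteLine(usage.CompletionTokens); // 12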
View file

@@ -1,7 +1,10 @@
 #nullable disable
+using OneOf;
+using OneOf.Types;
+
 namespace EllieBot.Modules.Games.Common.ChatterBot;

 public interface IChatterBotSession
 {
-    Task<string> Think(string input, string username);
+    Task<OneOf<ThinkResult, Error<string>>> Think(string input, string username);
 }

View file

@@ -1,5 +1,7 @@
 #nullable disable
 using Newtonsoft.Json;
+using OneOf;
+using OneOf.Types;

 namespace EllieBot.Modules.Games.Common.ChatterBot;
@@ -18,7 +20,7 @@ public class OfficialCleverbotSession : IChatterBotSession
         _httpFactory = factory;
     }

-    public async Task<string> Think(string input, string username)
+    public async Task<OneOf<ThinkResult, Error<string>>> Think(string input, string username)
     {
         using var http = _httpFactory.CreateClient();
         var dataString = await http.GetStringAsync(string.Format(QueryString, input, cs ?? ""));
@@ -27,12 +29,17 @@ public class OfficialCleverbotSession : IChatterBotSession
             var data = JsonConvert.DeserializeObject<CleverbotResponse>(dataString);
             cs = data?.Cs;

-            return data?.Output;
+            return new ThinkResult
+            {
+                Text = data?.Output,
+                TokensIn = 2,
+                TokensOut = 1
+            };
         }
         catch
         {
-            Log.Warning("Unexpected cleverbot response received: {ResponseString}", dataString);
-            return null;
+            Log.Warning("Unexpected response from CleverBot: {ResponseString}", dataString);
+            return new Error<string>("Unexpected CleverBot response received");
         }
     }
 }

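With the new signature, callers receive either a ThinkResult or an Error<string> and can branch with OneOf's Match (RunChatterBot above uses TryPickT0 instead). A minimal consumption sketch (the session variable is illustrative):

    // session is any IChatterBotSession
    var reply = await session.Think("hello", "SomeUser");
    var text = reply.Match(
        ok => ok.Text,                         // ThinkResult: use the generated text
        err => $"chatbot error: {err.Value}"); // Error<string>: surface the message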
View file

@@ -1,105 +0,0 @@
-#nullable disable
-using Newtonsoft.Json;
-using System.Net.Http.Json;
-using SharpToken;
-
-namespace EllieBot.Modules.Games.Common.ChatterBot;
-
-public class OfficialGpt3Session : IChatterBotSession
-{
-    private string Uri
-        => $"https://api.openai.com/v1/chat/completions";
-
-    private readonly string _apiKey;
-    private readonly string _model;
-    private readonly int _maxHistory;
-    private readonly int _maxTokens;
-    private readonly int _minTokens;
-    private readonly string _ellieUsername;
-    private readonly GptEncoding _encoding;
-    private List<GPTMessage> messages = new();
-    private readonly IHttpClientFactory _httpFactory;
-
-    public OfficialGpt3Session(
-        string apiKey,
-        ChatGptModel model,
-        int chatHistory,
-        int maxTokens,
-        int minTokens,
-        string personality,
-        string ellieUsername,
-        IHttpClientFactory factory)
-    {
-        _apiKey = apiKey;
-        _httpFactory = factory;
-        switch (model)
-        {
-            case ChatGptModel.Gpt35Turbo:
-                _model = "gpt-3.5-turbo";
-                break;
-            case ChatGptModel.Gpt4:
-                _model = "gpt-4";
-                break;
-            case ChatGptModel.Gpt432k:
-                _model = "gpt-4-32k";
-                break;
-        }
-
-        _maxHistory = chatHistory;
-        _maxTokens = maxTokens;
-        _minTokens = minTokens;
-        _ellieUsername = ellieUsername;
-        _encoding = GptEncoding.GetEncodingForModel(_model);
-        messages.Add(new GPTMessage(){Role = "user", Content = personality, Name = _ellieUsername});
-    }
-
-    public async Task<string> Think(string input, string username)
-    {
-        messages.Add(new GPTMessage(){Role = "user", Content = input, Name = username});
-        while(messages.Count > _maxHistory + 2){
-            messages.RemoveAt(1);
-        }
-        int tokensUsed = 0;
-        foreach(GPTMessage message in messages){
-            tokensUsed += _encoding.Encode(message.Content).Count;
-        }
-        tokensUsed *= 2; //Unsure why this is the case, but the token count chatgpt reports back is double what I calculate.
-        //check if we have the minimum number of tokens available to use. Remove messages until we have enough, otherwise exit out and inform the user why.
-        while(_maxTokens - tokensUsed <= _minTokens){
-            if(messages.Count > 2){
-                int tokens = _encoding.Encode(messages[1].Content).Count * 2;
-                tokensUsed -= tokens;
-                messages.RemoveAt(1);
-            }
-            else{
-                return "Token count exceeded, please increase the number of tokens in the bot config and restart.";
-            }
-        }
-        using var http = _httpFactory.CreateClient();
-        http.DefaultRequestHeaders.Authorization = new("Bearer", _apiKey);
-        var data = await http.PostAsJsonAsync(Uri, new Gpt3ApiRequest()
-        {
-            Model = _model,
-            Messages = messages,
-            MaxTokens = _maxTokens - tokensUsed,
-            Temperature = 1,
-        });
-        var dataString = await data.Content.ReadAsStringAsync();
-        try
-        {
-            var response = JsonConvert.DeserializeObject<Gpt3Response>(dataString);
-            string message = response?.Choices[0]?.Message?.Content;
-            //Can't rely on the return to except, now that we need to add it to the messages list.
-            _ = message ?? throw new ArgumentNullException(nameof(message));
-            messages.Add(new GPTMessage(){Role = "assistant", Content = message, Name = _ellieUsername});
-            return message;
-        }
-        catch
-        {
-            Log.Warning("Unexpected GPT-3 response received: {ResponseString}", dataString);
-            return null;
-        }
-    }
-}

View file

@@ -0,0 +1,141 @@
+#nullable disable
+using Newtonsoft.Json;
+using OneOf.Types;
+using System.Net.Http.Json;
+using SharpToken;
+
+namespace EllieBot.Modules.Games.Common.ChatterBot;
+
+public class OfficialGptSession : IChatterBotSession
+{
+    private string Uri
+        => $"https://api.openai.com/v1/chat/completions";
+
+    private readonly string _apiKey;
+    private readonly string _model;
+    private readonly int _maxHistory;
+    private readonly int _maxTokens;
+    private readonly int _minTokens;
+    private readonly string _nadekoUsername;
+    private readonly GptEncoding _encoding;
+    private List<GPTMessage> messages = new();
+    private readonly IHttpClientFactory _httpFactory;
+
+    public OfficialGptSession(
+        string apiKey,
+        ChatGptModel model,
+        int chatHistory,
+        int maxTokens,
+        int minTokens,
+        string personality,
+        string nadekoUsername,
+        IHttpClientFactory factory)
+    {
+        _apiKey = apiKey;
+        _httpFactory = factory;
+
+        _model = model switch
+        {
+            ChatGptModel.Gpt35Turbo => "gpt-3.5-turbo",
+            ChatGptModel.Gpt4o => "gpt-4o",
+            _ => throw new ArgumentException("Unknown, unsupported or obsolete model", nameof(model))
+        };
+
+        _maxHistory = chatHistory;
+        _maxTokens = maxTokens;
+        _minTokens = minTokens;
+        _nadekoUsername = nadekoUsername;
+        _encoding = GptEncoding.GetEncodingForModel(_model);
+
+        messages.Add(new()
+        {
+            Role = "system",
+            Content = personality,
+            Name = _nadekoUsername
+        });
+    }
+
+    public async Task<OneOf.OneOf<ThinkResult, Error<string>>> Think(string input, string username)
+    {
+        messages.Add(new()
+        {
+            Role = "user",
+            Content = input,
+            Name = username
+        });
+
+        while (messages.Count > _maxHistory + 2)
+        {
+            messages.RemoveAt(1);
+        }
+
+        var tokensUsed = messages.Sum(message => _encoding.Encode(message.Content).Count);
+
+        tokensUsed *= 2;
+
+        //check if we have the minimum number of tokens available to use. Remove messages until we have enough, otherwise exit out and inform the user why.
+        while (_maxTokens - tokensUsed <= _minTokens)
+        {
+            if (messages.Count > 2)
+            {
+                var tokens = _encoding.Encode(messages[1].Content).Count * 2;
+                tokensUsed -= tokens;
+                messages.RemoveAt(1);
+            }
+            else
+            {
+                return new Error<string>("Token count exceeded, please increase the number of tokens in the bot config and restart.");
+            }
+        }
+
+        using var http = _httpFactory.CreateClient();
+        http.DefaultRequestHeaders.Authorization = new("Bearer", _apiKey);
+
+        var data = await http.PostAsJsonAsync(Uri,
+            new Gpt3ApiRequest()
+            {
+                Model = _model,
+                Messages = messages,
+                MaxTokens = _maxTokens - tokensUsed,
+                Temperature = 1,
+            });
+
+        var dataString = await data.Content.ReadAsStringAsync();
+        try
+        {
+            var response = JsonConvert.DeserializeObject<OpenAiCompletionResponse>(dataString);
+
+            var res = response?.Choices?[0];
+            var message = res?.Message?.Content;
+            if (message is null)
+            {
+                return new Error<string>("ChatGpt: Received no response.");
+            }
+
+            messages.Add(new()
+            {
+                Role = "assistant",
+                Content = message,
+                Name = _nadekoUsername
+            });
+
+            return new ThinkResult()
+            {
+                Text = message,
+                TokensIn = response.Usage.PromptTokens,
+                TokensOut = response.Usage.CompletionTokens
+            };
+        }
+        catch
+        {
+            Log.Warning("Unexpected response received from OpenAI: {ResponseString}", dataString);
+            return new Error<string>("Unexpected response received");
+        }
+    }
+}
+
+public sealed class ThinkResult
+{
+    public string Text { get; set; }
+    public int TokensIn { get; set; }
+    public int TokensOut { get; set; }
+}

View file

@@ -8,7 +8,7 @@ namespace EllieBot.Modules.Games.Common;
 public sealed partial class GamesConfig : ICloneable<GamesConfig>
 {
     [Comment("DO NOT CHANGE")]
-    public int Version { get; set; } = 3;
+    public int Version { get; set; } = 4;

     [Comment("Hangman related settings (.hangman command)")]
     public HangmanConfig Hangman { get; set; } = new()
@@ -105,8 +105,8 @@ public sealed partial class GamesConfig : ICloneable<GamesConfig>
     [Comment(@"Which chatbot API should bot use.
 'cleverbot' - bot will use Cleverbot API.
-'gpt3' - bot will use GPT-3 API")]
-    public ChatBotImplementation ChatBot { get; set; } = ChatBotImplementation.Gpt3;
+'gpt' - bot will use GPT API")]
+    public ChatBotImplementation ChatBot { get; set; } = ChatBotImplementation.Gpt;

     public ChatGptConfig ChatGpt { get; set; } = new();
 }
@@ -114,10 +114,10 @@ public sealed partial class GamesConfig : ICloneable<GamesConfig>
 [Cloneable]
 public sealed partial class ChatGptConfig
 {
-    [Comment(@"Which GPT-3 Model should bot use.
+    [Comment(@"Which GPT Model should bot use.
 gpt35turbo - cheapest
-gpt4 - 30x more expensive, higher quality
-gp432k - same model as above, but with a 32k token limit")]
+gpt4o - more expensive, higher quality
+")]
     public ChatGptModel ModelName { get; set; } = ChatGptModel.Gpt35Turbo;

     [Comment(@"How should the chat bot behave, what's its personality? (Usage of this counts towards the max tokens)")]
@@ -126,10 +126,10 @@ public sealed partial class ChatGptConfig
     [Comment(@"The maximum number of messages in a conversation that can be remembered. (This will increase the number of tokens used)")]
     public int ChatHistory { get; set; } = 5;

-    [Comment(@"The maximum number of tokens to use per GPT-3 API call")]
+    [Comment(@"The maximum number of tokens to use per GPT API call")]
     public int MaxTokens { get; set; } = 100;

-    [Comment(@"The minimum number of tokens to use per GPT-3 API call, such that chat history is removed to make room.")]
+    [Comment(@"The minimum number of tokens to use per GPT API call, such that chat history is removed to make room.")]
     public int MinTokens { get; set; } = 30;
 }
@@ -163,12 +163,18 @@ public sealed partial class RaceAnimal
 public enum ChatBotImplementation
 {
     Cleverbot,
-    Gpt3
+    Gpt = 1,
+
+    [Obsolete]
+    Gpt3 = 1,
 }

 public enum ChatGptModel
 {
-    Gpt35Turbo,
-    Gpt4,
-    Gpt432k
+    [Obsolete]
+    Gpt4,
+    [Obsolete]
+    Gpt432k,
+    Gpt35Turbo,
+    Gpt4o,
 }

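Because Gpt and the [Obsolete] Gpt3 member share the value 1, a setting saved under the old name still resolves to the same implementation, assuming the config loader parses enum members by name. A minimal sketch of the aliasing:

    var fromOldConfig = Enum.Parse<ChatBotImplementation>("Gpt3");
    Console.WriteLine(fromOldConfig == ChatBotImplementation.Gpt); // True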
View file

@@ -73,15 +73,6 @@ public sealed class GamesConfigService : ConfigServiceBase<GamesConfig>
             });
         }

-        if (data.Version < 2)
-        {
-            ModifyConfig(c =>
-            {
-                c.Version = 2;
-                c.ChatBot = ChatBotImplementation.Cleverbot;
-            });
-        }
-
         if (data.Version < 3)
         {
             ModifyConfig(c =>
@@ -90,5 +81,19 @@ public sealed class GamesConfigService : ConfigServiceBase<GamesConfig>
                 c.ChatGpt.ModelName = ChatGptModel.Gpt35Turbo;
             });
         }
+
+        if (data.Version < 4)
+        {
+            ModifyConfig(c =>
+            {
+                c.Version = 4;
+#pragma warning disable CS0612 // Type or member is obsolete
+                c.ChatGpt.ModelName =
+                    c.ChatGpt.ModelName == ChatGptModel.Gpt4 || c.ChatGpt.ModelName == ChatGptModel.Gpt432k
+                        ? ChatGptModel.Gpt4o
+                        : c.ChatGpt.ModelName;
+#pragma warning restore CS0612 // Type or member is obsolete
+            });
+        }
     }
 }