From 804d9188e79df57fc2d298107f9f1260c83b1ea1 Mon Sep 17 00:00:00 2001
From: BinkanSalaryman
Date: Sun, 7 Jan 2018 04:33:00 +0100
Subject: [PATCH] Fix flawed bulk message deletion (#872)

* Fix flawed bulk message deletion

https://github.com/RogueException/Discord.Net/issues/871, consider changing DeleteMessagesParams.MessageIds type to I(Readonly)List or IEnumerable to avoid unnecessary copying (batch.ToArray())

* Update code formatting
---
 .../Entities/Channels/ChannelHelper.cs | 28 +++++++++++--------
 1 file changed, 17 insertions(+), 11 deletions(-)

diff --git a/src/Discord.Net.Rest/Entities/Channels/ChannelHelper.cs b/src/Discord.Net.Rest/Entities/Channels/ChannelHelper.cs
index ad5029785..585e0a354 100644
--- a/src/Discord.Net.Rest/Entities/Channels/ChannelHelper.cs
+++ b/src/Discord.Net.Rest/Entities/Channels/ChannelHelper.cs
@@ -188,21 +188,27 @@ namespace Discord.Rest
         public static async Task DeleteMessagesAsync(ITextChannel channel, BaseDiscordClient client,
             IEnumerable<ulong> messageIds, RequestOptions options)
         {
+            const int BATCH_SIZE = 100;
+
             var msgs = messageIds.ToArray();
-            if (msgs.Length < 100)
-            {
-                var args = new DeleteMessagesParams(msgs);
-                await client.ApiClient.DeleteMessagesAsync(channel.Id, args, options).ConfigureAwait(false);
-            }
-            else
+            int batches = msgs.Length / BATCH_SIZE;
+            for (int i = 0; i <= batches; i++)
             {
-                var batch = new ulong[100];
-                for (int i = 0; i < (msgs.Length + 99) / 100; i++)
+                ArraySegment<ulong> batch;
+                if (i < batches)
                 {
-                    Array.Copy(msgs, i * 100, batch, 0, Math.Min(msgs.Length - (100 * i), 100));
-                    var args = new DeleteMessagesParams(batch);
-                    await client.ApiClient.DeleteMessagesAsync(channel.Id, args, options).ConfigureAwait(false);
+                    batch = new ArraySegment<ulong>(msgs, i * BATCH_SIZE, BATCH_SIZE);
                 }
+                else
+                {
+                    batch = new ArraySegment<ulong>(msgs, i * BATCH_SIZE, msgs.Length - batches * BATCH_SIZE);
+                    if (batch.Count == 0)
+                    {
+                        break;
+                    }
+                }
+                var args = new DeleteMessagesParams(batch.ToArray());
+                await client.ApiClient.DeleteMessagesAsync(channel.Id, args, options).ConfigureAwait(false);
             }
         }
 
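
Note (reviewer sketch, not part of the patch): the old code sent anything with 100 or more IDs through a reused ulong[100] scratch buffer, so the final partial batch still carried IDs copied in the previous iteration; the new loop slices the array with ArraySegment<ulong> so each request gets exactly its own IDs. The standalone C# below is a minimal illustration of that batching arithmetic only; BatchingSketch and Chunk are hypothetical names, and the real method additionally calls client.ApiClient.DeleteMessagesAsync once per batch as shown above.

    using System;
    using System.Collections.Generic;
    using System.Linq;

    static class BatchingSketch
    {
        // Split the IDs into chunks of at most batchSize, mirroring the loop in
        // the patch: full batches first, then one (possibly empty) remainder.
        public static IEnumerable<ulong[]> Chunk(ulong[] ids, int batchSize = 100)
        {
            int batches = ids.Length / batchSize;
            for (int i = 0; i <= batches; i++)
            {
                int count = i < batches ? batchSize : ids.Length - batches * batchSize;
                if (count == 0)
                    break; // remainder is empty, e.g. exactly 100, 200, ... IDs
                yield return new ArraySegment<ulong>(ids, i * batchSize, count).ToArray();
            }
        }

        static void Main()
        {
            var ids = Enumerable.Range(1, 250).Select(n => (ulong)n).ToArray();
            foreach (var batch in Chunk(ids))
                Console.WriteLine($"batch of {batch.Length}: {batch.First()}..{batch.Last()}");
            // batch of 100: 1..100
            // batch of 100: 101..200
            // batch of 50: 201..250
        }
    }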