using Confluent.Kafka;
using JiShe.CollectBus.Common.Consts;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using System.Collections.Concurrent;
using System.Text;

namespace JiShe.CollectBus.Kafka.Consumer
{
    public class ConsumerService : IConsumerService, IDisposable
    {
        private readonly ILogger<ConsumerService> _logger;
        private readonly IConfiguration _configuration;
        private readonly ConcurrentDictionary<Type, (object Consumer, CancellationTokenSource CTS)> _consumerStore = new();
        private readonly KafkaOptionConfig _kafkaOptionConfig;

        // Empty marker type used only as the dictionary key identifying the consumer for a given key/value type pair.
        private class KafkaConsumer<TKey, TValue> where TKey : notnull where TValue : class { }

        public ConsumerService(IConfiguration configuration, ILogger<ConsumerService> logger, IOptions<KafkaOptionConfig> kafkaOptionConfig)
        {
            _configuration = configuration;
            _logger = logger;
            _kafkaOptionConfig = kafkaOptionConfig.Value;
        }

        #region Private methods

        /// <summary>
        /// Creates a consumer.
        /// </summary>
        /// <typeparam name="TKey"></typeparam>
        /// <typeparam name="TValue"></typeparam>
        /// <returns></returns>
        private IConsumer<TKey, TValue> CreateConsumer<TKey, TValue>(string? groupId = null) where TKey : notnull where TValue : class
        {
            var config = BuildConsumerConfig(groupId);
            return new ConsumerBuilder<TKey, TValue>(config)
                .SetValueDeserializer(new JsonSerializer<TValue>())
                .SetLogHandler((_, log) => _logger.LogInformation($"Consumer log: {log.Message}"))
                .SetErrorHandler((_, e) => _logger.LogError($"Consumer error: {e.Reason}"))
                .Build();
        }

        private ConsumerConfig BuildConsumerConfig(string? groupId = null)
        {
            var config = new ConsumerConfig
            {
                BootstrapServers = _kafkaOptionConfig.BootstrapServers,
                GroupId = groupId ?? "default",
                AutoOffsetReset = AutoOffsetReset.Earliest,
                EnableAutoCommit = false,        // disable auto-commit; offsets are committed manually
                EnablePartitionEof = true,       // emit a marker when the end of a partition is reached
                //AllowAutoCreateTopics = true,  // allow automatic topic creation
                FetchMaxBytes = 1024 * 1024 * 50 // increase the fetch size (50 MB)
            };

            if (_kafkaOptionConfig.EnableAuthorization)
            {
                config.SecurityProtocol = _kafkaOptionConfig.SecurityProtocol;
                config.SaslMechanism = _kafkaOptionConfig.SaslMechanism;
                config.SaslUsername = _kafkaOptionConfig.SaslUserName;
                config.SaslPassword = _kafkaOptionConfig.SaslPassword;
            }

            return config;
        }

        #endregion
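
        // For reference, BuildConsumerConfig above reads everything from KafkaOptionConfig.
        // A minimal sketch of the matching configuration section, assuming the options are bound
        // from a "Kafka" section in appsettings.json (the section name and sample values are
        // assumptions, not confirmed by this file; only the property names come from the code):
        //
        //   "Kafka": {
        //     "BootstrapServers": "localhost:9092",
        //     "EnableAuthorization": false,
        //     "SaslUserName": "user",
        //     "SaslPassword": "secret",
        //     "EnableFilter": false,
        //     "ServerTagName": "node-1"
        //   }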

        /// <summary>
        /// Subscribes to a topic.
        /// </summary>
        /// <typeparam name="TKey"></typeparam>
        /// <typeparam name="TValue"></typeparam>
        /// <param name="topic"></param>
        /// <param name="messageHandler"></param>
        /// <returns></returns>
        public async Task SubscribeAsync<TKey, TValue>(string topic, Func<TKey, TValue, Task<bool>> messageHandler, string? groupId = null) where TKey : notnull where TValue : class
        {
            await SubscribeAsync<TKey, TValue>(new[] { topic }, messageHandler, groupId);
        }

        /// <summary>
        /// Subscribes to a topic.
        /// </summary>
        /// <typeparam name="TValue"></typeparam>
        /// <param name="topic"></param>
        /// <param name="messageHandler"></param>
        /// <returns></returns>
        public async Task SubscribeAsync<TValue>(string topic, Func<TValue, Task<bool>> messageHandler, string? groupId = null) where TValue : class
        {
            await SubscribeAsync<TValue>(new[] { topic }, messageHandler, groupId);
        }

        /// <summary>
        /// Subscribes to one or more topics.
        /// </summary>
        /// <typeparam name="TKey"></typeparam>
        /// <typeparam name="TValue"></typeparam>
        /// <param name="topics"></param>
        /// <param name="messageHandler"></param>
        /// <returns></returns>
        public async Task SubscribeAsync<TKey, TValue>(string[] topics, Func<TKey, TValue, Task<bool>> messageHandler, string? groupId = null) where TKey : notnull where TValue : class
        {
            var consumerKey = typeof(KafkaConsumer<TKey, TValue>);
            var cts = new CancellationTokenSource();

            var consumer = _consumerStore.GetOrAdd(consumerKey, _ =>
            (
                CreateConsumer<TKey, TValue>(groupId),
                cts
            )).Consumer as IConsumer<TKey, TValue>;

            consumer!.Subscribe(topics);

            await Task.Run(async () =>
            {
                while (!cts.IsCancellationRequested)
                {
                    try
                    {
                        //_logger.LogInformation($"Kafka consumer: {string.Join("", topics)} starting to poll messages....");
                        var result = consumer.Consume(cts.Token);
                        if (result == null || result.Message == null || result.Message.Value == null)
                            continue;

                        if (result.IsPartitionEOF)
                        {
                            _logger.LogInformation("Kafka consumer: topic {Topic}, partition {Partition} fully consumed", result.Topic, result.Partition);
                            await Task.Delay(TimeSpan.FromSeconds(1), cts.Token);
                            continue;
                        }

                        if (_kafkaOptionConfig.EnableFilter)
                        {
                            var headersFilter = new HeadersFilter { { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) } };
                            // Skip messages whose headers do not match the filter
                            if (!headersFilter.Match(result.Message.Headers))
                            {
                                //consumer.Commit(result); // commit the offset
                                continue;
                            }
                        }

                        bool success = await messageHandler(result.Message.Key, result.Message.Value);
                        if (success)
                        {
                            consumer.Commit(result); // manual commit
                        }
                    }
                    catch (ConsumeException ex)
                    {
                        _logger.LogError(ex, $"Failed to consume messages from {string.Join(", ", topics)}: {ex.Error.Reason}");
                    }
                }
            });

            await Task.CompletedTask;
        }
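
        // A minimal usage sketch for the per-message overloads (the event type, topic name,
        // group id and DI wiring below are illustrative assumptions, not part of this library):
        //
        //   var consumerService = serviceProvider.GetRequiredService<IConsumerService>();
        //   await consumerService.SubscribeAsync<OrderCreatedEvent>(
        //       "order-created",
        //       async message =>
        //       {
        //           // return true to commit the offset, false to leave it uncommitted
        //           return await ProcessAsync(message);
        //       },
        //       groupId: "order-service");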

        /// <summary>
        /// Subscribes to one or more topics.
        /// </summary>
        /// <typeparam name="TValue"></typeparam>
        /// <param name="topics"></param>
        /// <param name="messageHandler"></param>
        /// <returns></returns>
        public async Task SubscribeAsync<TValue>(string[] topics, Func<TValue, Task<bool>> messageHandler, string? groupId) where TValue : class
        {
            var consumerKey = typeof(KafkaConsumer<string, TValue>);
            var cts = new CancellationTokenSource();

            var consumer = _consumerStore.GetOrAdd(consumerKey, _ =>
            (
                CreateConsumer<string, TValue>(groupId),
                cts
            )).Consumer as IConsumer<string, TValue>;

            consumer!.Subscribe(topics);

            await Task.Run(async () =>
            {
                while (!cts.IsCancellationRequested)
                {
                    try
                    {
                        //_logger.LogInformation($"Kafka consumer: {string.Join("", topics)} starting to poll messages....");
                        var result = consumer.Consume(cts.Token);
                        if (result == null || result.Message == null || result.Message.Value == null)
                            continue;

                        if (result.IsPartitionEOF)
                        {
                            _logger.LogInformation("Kafka consumer: topic {Topic}, partition {Partition} fully consumed", result.Topic, result.Partition);
                            await Task.Delay(100, cts.Token);
                            continue;
                        }

                        if (_kafkaOptionConfig.EnableFilter)
                        {
                            var headersFilter = new HeadersFilter { { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) } };
                            // Skip messages whose headers do not match the filter
                            if (!headersFilter.Match(result.Message.Headers))
                            {
                                //consumer.Commit(result); // commit the offset
                                continue;
                            }
                        }

                        bool success = await messageHandler(result.Message.Value);
                        if (success)
                            consumer.Commit(result);      // manual commit
                        else
                            consumer.StoreOffset(result); // store the offset without committing it
                    }
                    catch (ConsumeException ex)
                    {
                        _logger.LogError(ex, $"Failed to consume messages from {string.Join(", ", topics)}: {ex.Error.Reason}");
                    }
                }
            });

            await Task.CompletedTask;
        }

        /// <summary>
        /// Subscribes to a topic and handles messages in batches.
        /// </summary>
        /// <typeparam name="TKey">Message key type</typeparam>
        /// <typeparam name="TValue">Message value type</typeparam>
        /// <param name="topic">Topic</param>
        /// <param name="messageBatchHandler">Batch message handler</param>
        /// <param name="groupId">Consumer group id</param>
        /// <param name="batchSize">Batch size</param>
        /// <param name="batchTimeout">Batch timeout</param>
        public async Task SubscribeBatchAsync<TKey, TValue>(string topic, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class
        {
            await SubscribeBatchAsync<TKey, TValue>(new[] { topic }, messageBatchHandler, groupId, batchSize, batchTimeout);
        }

        /// <summary>
        /// Subscribes to one or more topics and handles messages in batches.
        /// </summary>
        /// <typeparam name="TKey">Message key type</typeparam>
        /// <typeparam name="TValue">Message value type</typeparam>
        /// <param name="topics">Topic list</param>
        /// <param name="messageBatchHandler">Batch message handler</param>
        /// <param name="groupId">Consumer group id</param>
        /// <param name="batchSize">Batch size</param>
        /// <param name="batchTimeout">Batch timeout</param>
        public async Task SubscribeBatchAsync<TKey, TValue>(string[] topics, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class
        {
            var consumerKey = typeof(KafkaConsumer<TKey, TValue>);
            var cts = new CancellationTokenSource();

            var consumer = _consumerStore.GetOrAdd(consumerKey, _ =>
            (
                CreateConsumer<TKey, TValue>(groupId),
                cts
            )).Consumer as IConsumer<TKey, TValue>;

            consumer!.Subscribe(topics);

            var timeout = batchTimeout ?? TimeSpan.FromSeconds(5); // default batch timeout of 5 seconds

            _ = Task.Run(async () =>
            {
                var messages = new List<(TValue Value, TopicPartitionOffset Offset)>();
                var startTime = DateTime.UtcNow;

                while (!cts.IsCancellationRequested)
                {
                    try
                    {
                        // Accumulate messages quickly without blocking
                        while (messages.Count < batchSize && (DateTime.UtcNow - startTime) < timeout)
                        {
                            var result = consumer.Consume(TimeSpan.Zero); // non-blocking poll

                            if (result != null)
                            {
                                if (result.IsPartitionEOF)
                                {
                                    _logger.LogInformation("Kafka consumer: topic {Topic}, partition {Partition} fully consumed", result.Topic, result.Partition);
                                    await Task.Delay(TimeSpan.FromSeconds(1), cts.Token);
                                }
                                else if (result.Message.Value != null)
                                {
                                    if (_kafkaOptionConfig.EnableFilter)
                                    {
                                        var headersFilter = new HeadersFilter { { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) } };
                                        // Skip messages whose headers do not match the filter
                                        if (!headersFilter.Match(result.Message.Headers))
                                        {
                                            //consumer.Commit(result); // commit the offset
                                            continue;
                                        }
                                    }
                                    messages.Add((result.Message.Value, result.TopicPartitionOffset));
                                }
                            }
                            else
                            {
                                // No message available; wait briefly
                                await Task.Delay(10, cts.Token);
                            }
                        }

                        // Process the batch
                        if (messages.Count > 0)
                        {
                            bool success = await messageBatchHandler(messages.Select(m => m.Value));
                            if (success)
                            {
                                var offsetsByPartition = new Dictionary<TopicPartition, long>();
                                foreach (var msg in messages)
                                {
                                    var tp = msg.Offset.TopicPartition;
                                    var offset = msg.Offset.Offset;
                                    if (!offsetsByPartition.TryGetValue(tp, out var currentMax) || offset > currentMax)
                                    {
                                        offsetsByPartition[tp] = offset;
                                    }
                                }

                                var offsetsToCommit = offsetsByPartition
                                    .Select(kv => new TopicPartitionOffset(kv.Key, new Offset(kv.Value + 1)))
                                    .ToList();
                                consumer.Commit(offsetsToCommit);
                            }
                            messages.Clear();
                        }

                        startTime = DateTime.UtcNow;
                    }
                    catch (ConsumeException ex)
                    {
                        _logger.LogError(ex, $"Failed to consume messages from {string.Join(", ", topics)}: {ex.Error.Reason}");
                    }
                    catch (OperationCanceledException)
                    {
                        // Task was cancelled; exit normally
                    }
                    catch (Exception ex)
                    {
                        _logger.LogError(ex, "Unexpected error while processing a message batch");
                    }
                }
            }, cts.Token);

            await Task.CompletedTask;
        }
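
        // A minimal batch-usage sketch (the MeterReading type, topic and group id are
        // illustrative assumptions; batchSize and batchTimeout are the real parameters):
        //
        //   await consumerService.SubscribeBatchAsync<string, MeterReading>(
        //       new[] { "meter-readings" },
        //       async batch =>
        //       {
        //           await SaveAllAsync(batch);   // handle up to batchSize messages at once
        //           return true;                 // true commits the highest offset per partition + 1
        //       },
        //       groupId: "report-service",
        //       batchSize: 200,
        //       batchTimeout: TimeSpan.FromSeconds(2));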

        /// <summary>
        /// Subscribes to a topic and handles messages in batches.
        /// </summary>
        /// <typeparam name="TValue">Message value type</typeparam>
        /// <param name="topic">Topic</param>
        /// <param name="messageBatchHandler">Batch message handler</param>
        /// <param name="groupId">Consumer group id</param>
        /// <param name="batchSize">Batch size</param>
        /// <param name="batchTimeout">Batch timeout</param>
        /// <param name="consumeTimeout">Consume wait time</param>
        public async Task SubscribeBatchAsync<TValue>(string topic, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null, TimeSpan? consumeTimeout = null) where TValue : class
        {
            await SubscribeBatchAsync<TValue>(new[] { topic }, messageBatchHandler, groupId, batchSize, batchTimeout, consumeTimeout);
        }

        /// <summary>
        /// Subscribes to one or more topics and handles messages in batches.
        /// </summary>
        /// <typeparam name="TValue">Message value type</typeparam>
        /// <param name="topics">Topic list</param>
        /// <param name="messageBatchHandler">Batch message handler</param>
        /// <param name="groupId">Consumer group id</param>
        /// <param name="batchSize">Batch size</param>
        /// <param name="batchTimeout">Batch timeout</param>
        /// <param name="consumeTimeout">Consume wait time</param>
        public async Task SubscribeBatchAsync<TValue>(string[] topics, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null, TimeSpan? consumeTimeout = null) where TValue : class
        {
            var consumerKey = typeof(KafkaConsumer<string, TValue>);
            var cts = new CancellationTokenSource();

            var consumer = _consumerStore.GetOrAdd(consumerKey, _ =>
            (
                CreateConsumer<string, TValue>(groupId),
                cts
            )).Consumer as IConsumer<string, TValue>;

            consumer!.Subscribe(topics);

            var timeout = batchTimeout ?? TimeSpan.FromSeconds(5); // default batch timeout of 5 seconds

            _ = Task.Run(async () =>
            {
                var messages = new List<(TValue Value, TopicPartitionOffset Offset)>();
                var startTime = DateTime.UtcNow;

                while (!cts.IsCancellationRequested)
                {
                    try
                    {
                        // Accumulate messages quickly without blocking
                        while (messages.Count < batchSize && (DateTime.UtcNow - startTime) < timeout)
                        {
                            var result = consumer.Consume(TimeSpan.Zero); // non-blocking poll

                            if (result != null)
                            {
                                if (result.IsPartitionEOF)
                                {
                                    _logger.LogInformation("Kafka consumer: topic {Topic}, partition {Partition} fully consumed", result.Topic, result.Partition);
                                    await Task.Delay(TimeSpan.FromSeconds(1), cts.Token);
                                }
                                else if (result.Message.Value != null)
                                {
                                    if (_kafkaOptionConfig.EnableFilter)
                                    {
                                        var headersFilter = new HeadersFilter { { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) } };
                                        // Skip messages whose headers do not match the filter
                                        if (!headersFilter.Match(result.Message.Headers))
                                        {
                                            //consumer.Commit(result); // commit the offset
                                            continue;
                                        }
                                    }
                                    messages.Add((result.Message.Value, result.TopicPartitionOffset));
                                }
                            }
                            else
                            {
                                // No message available; wait briefly
                                await Task.Delay(10, cts.Token);
                            }
                        }

                        // Process the batch
                        if (messages.Count > 0)
                        {
                            bool success = await messageBatchHandler(messages.Select(m => m.Value));
                            if (success)
                            {
                                var offsetsByPartition = new Dictionary<TopicPartition, long>();
                                foreach (var msg in messages)
                                {
                                    var tp = msg.Offset.TopicPartition;
                                    var offset = msg.Offset.Offset;
                                    if (!offsetsByPartition.TryGetValue(tp, out var currentMax) || offset > currentMax)
                                    {
                                        offsetsByPartition[tp] = offset;
                                    }
                                }

                                var offsetsToCommit = offsetsByPartition
                                    .Select(kv => new TopicPartitionOffset(kv.Key, new Offset(kv.Value + 1)))
                                    .ToList();
                                consumer.Commit(offsetsToCommit);
                            }
                            messages.Clear();
                        }

                        startTime = DateTime.UtcNow;
                    }
                    catch (ConsumeException ex)
                    {
                        _logger.LogError(ex, $"Failed to consume messages: {ex.Error.Reason}");
                    }
                    catch (OperationCanceledException)
                    {
                        // Task was cancelled; exit normally
                    }
                    catch (Exception ex)
                    {
                        _logger.LogError(ex, "Unexpected error while processing a message batch");
                    }
                }
            }, cts.Token);

            await Task.CompletedTask;
        }

        /// <summary>
        /// Cancels a subscription.
        /// </summary>
        /// <typeparam name="TKey"></typeparam>
        /// <typeparam name="TValue"></typeparam>
        public void Unsubscribe<TKey, TValue>() where TKey : notnull where TValue : class
        {
            // Use the same marker type as the subscribe methods; otherwise the lookup never matches.
            var consumerKey = typeof(KafkaConsumer<TKey, TValue>);
            if (_consumerStore.TryRemove(consumerKey, out var entry))
            {
                entry.CTS.Cancel();
                (entry.Consumer as IDisposable)?.Dispose();
                entry.CTS.Dispose();
            }
        }
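
        // A usage sketch for tearing down a subscription (the event type is an illustrative
        // assumption); the type arguments must match the ones used when subscribing, because
        // they form the KafkaConsumer<TKey, TValue> lookup key:
        //
        //   consumerService.Unsubscribe<string, OrderCreatedEvent>();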

        /// <summary>
        /// Releases resources.
        /// </summary>
        public void Dispose()
        {
            foreach (var entry in _consumerStore.Values)
            {
                entry.CTS.Cancel();
                (entry.Consumer as IDisposable)?.Dispose();
                entry.CTS.Dispose();
            }
            _consumerStore.Clear();
        }
    }
}