合并冲突

This commit is contained in:
cli 2025-04-17 16:21:42 +08:00
commit 76ae008e01
54 changed files with 3512 additions and 806 deletions

View File

@ -26,6 +26,7 @@
<ProjectReference Include="..\JiShe.CollectBus.Common\JiShe.CollectBus.Common.csproj" />
<ProjectReference Include="..\JiShe.CollectBus.Domain.Shared\JiShe.CollectBus.Domain.Shared.csproj" />
<ProjectReference Include="..\JiShe.CollectBus.Domain\JiShe.CollectBus.Domain.csproj" />
<ProjectReference Include="..\JiShe.CollectBus.KafkaProducer\JiShe.CollectBus.Kafka.csproj" />
</ItemGroup>
</Project>

View File

@ -0,0 +1,187 @@
using JiShe.CollectBus.Common.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace JiShe.CollectBus.Application.Contracts
{
    /// <summary>
    /// Redis-backed data cache service interface. Each cached item is kept in three
    /// coordinated structures: a Hash holding the serialized payload, a Set index
    /// grouping member ids, and a ZSET (sorted set) index used for score-ordered,
    /// cursor-style paging.
    /// </summary>
    public interface IRedisDataCacheService
    {
        /// <summary>
        /// Inserts a single item into the cache.
        /// </summary>
        /// <typeparam name="T">Cached item type; must derive from <see cref="DeviceCacheBasicModel"/>.</typeparam>
        /// <param name="redisHashCacheKey">Hash cache key for the main data store.</param>
        /// <param name="redisSetIndexCacheKey">Set index cache key.</param>
        /// <param name="redisZSetScoresIndexCacheKey">ZSET index cache key.</param>
        /// <param name="data">Item to cache.</param>
        /// <returns>A task that completes when the item has been written.</returns>
        Task InsertDataAsync<T>(
        string redisHashCacheKey,
        string redisSetIndexCacheKey,
        string redisZSetScoresIndexCacheKey,
        T data) where T : DeviceCacheBasicModel;

        /// <summary>
        /// Inserts a batch of items into the cache.
        /// </summary>
        /// <typeparam name="T">Cached item type; must derive from <see cref="DeviceCacheBasicModel"/>.</typeparam>
        /// <param name="redisHashCacheKey">Hash cache key for the main data store.</param>
        /// <param name="redisSetIndexCacheKey">Set index cache key.</param>
        /// <param name="redisZSetScoresIndexCacheKey">ZSET index cache key.</param>
        /// <param name="items">Collection of items to cache.</param>
        /// <returns>A task that completes when the batch has been written.</returns>
        Task BatchInsertDataAsync<T>(
        string redisHashCacheKey,
        string redisSetIndexCacheKey,
        string redisZSetScoresIndexCacheKey,
        IEnumerable<T> items) where T : DeviceCacheBasicModel;

        /// <summary>
        /// Removes a cached item (payload plus both index entries).
        /// </summary>
        /// <typeparam name="T">Cached item type; must derive from <see cref="DeviceCacheBasicModel"/>.</typeparam>
        /// <param name="redisHashCacheKey">Hash cache key for the main data store.</param>
        /// <param name="redisSetIndexCacheKey">Set index cache key.</param>
        /// <param name="redisZSetScoresIndexCacheKey">ZSET index cache key.</param>
        /// <param name="data">Previously cached item to remove.</param>
        /// <returns>A task that completes when the item has been removed.</returns>
        Task RemoveCacheDataAsync<T>(
        string redisHashCacheKey,
        string redisSetIndexCacheKey,
        string redisZSetScoresIndexCacheKey,
        T data) where T : DeviceCacheBasicModel;

        /// <summary>
        /// Updates a cached item when its index mapping (member id) has NOT changed.
        /// </summary>
        /// <typeparam name="T">Cached item type; must derive from <see cref="DeviceCacheBasicModel"/>.</typeparam>
        /// <param name="redisHashCacheKey">Hash cache key for the main data store.</param>
        /// <param name="redisSetIndexCacheKey">Set index cache key.</param>
        /// <param name="redisZSetScoresIndexCacheKey">ZSET index cache key.</param>
        /// <param name="newData">Updated item to store.</param>
        /// <returns>A task that completes when the item has been updated.</returns>
        Task ModifyDataAsync<T>(
        string redisHashCacheKey,
        string redisSetIndexCacheKey,
        string redisZSetScoresIndexCacheKey,
        T newData) where T : DeviceCacheBasicModel;

        /// <summary>
        /// Updates a cached item when its index mapping HAS changed: the old member id
        /// is dropped from the indexes and the new item is indexed in its place.
        /// </summary>
        /// <typeparam name="T">Cached item type; must derive from <see cref="DeviceCacheBasicModel"/>.</typeparam>
        /// <param name="redisHashCacheKey">Hash cache key for the main data store.</param>
        /// <param name="redisSetIndexCacheKey">Set index cache key.</param>
        /// <param name="oldMemberId">Previous member id (the old index mapping).</param>
        /// <param name="redisZSetScoresIndexCacheKey">ZSET index cache key.</param>
        /// <param name="newData">Updated item to store.</param>
        /// <returns>A task that completes when the item has been re-indexed and updated.</returns>
        Task ModifyDataAsync<T>(
        string redisHashCacheKey,
        string redisSetIndexCacheKey,
        string oldMemberId,
        string redisZSetScoresIndexCacheKey,
        T newData) where T : DeviceCacheBasicModel;

        ///// <summary>
        ///// 通过集中器与表计信息排序索引获取数据
        ///// </summary>
        ///// <typeparam name="T"></typeparam>
        ///// <param name="redisHashCacheKey">主数据存储Hash缓存Key</param>
        ///// <param name="redisZSetScoresIndexCacheKey">ZSET索引缓存Key</param>
        ///// <param name="pageSize">分页尺寸</param>
        ///// <param name="lastScore">最后一个索引</param>
        ///// <param name="lastMember">最后一个唯一标识</param>
        ///// <param name="descending">排序方式</param>
        ///// <returns></returns>
        //Task<BusCacheGlobalPagedResult<T>> GetPagedData<T>(
        //string redisHashCacheKey,
        //string redisZSetScoresIndexCacheKey,
        //IEnumerable<int> focusIds,
        //int pageSize = 10,
        //decimal? lastScore = null,
        //string lastMember = null,
        //bool descending = true)
        //where T : DeviceCacheBasicModel;

        /// <summary>
        /// Pages through data via the ZSET index. Per the original (Chinese) note this
        /// supports datasets on the order of 100,000 entries with processing kept
        /// within roughly 1-3 seconds (original text reads "13秒" — likely a garbled
        /// "1~3秒"; TODO confirm the intended figure).
        /// </summary>
        /// <typeparam name="T">Cached item type; must derive from <see cref="DeviceCacheBasicModel"/>.</typeparam>
        /// <param name="redisHashCacheKey">Hash cache key for the main data store.</param>
        /// <param name="redisZSetScoresIndexCacheKey">ZSET index cache key.</param>
        /// <param name="pageSize">Page size.</param>
        /// <param name="lastScore">Score of the last item of the previous page (paging cursor).</param>
        /// <param name="lastMember">Member id of the last item of the previous page (tie-breaker cursor).</param>
        /// <param name="descending">Sort direction.</param>
        /// <returns>One page of results plus paging state.</returns>
        Task<BusCacheGlobalPagedResult<T>> GetAllPagedData<T>(
        string redisHashCacheKey,
        string redisZSetScoresIndexCacheKey,
        int pageSize = 1000,
        decimal? lastScore = null,
        string lastMember = null,
        bool descending = true)
        where T : DeviceCacheBasicModel;

        /// <summary>
        /// Optimized paged fetch (intended for datasets up to the millions of entries).
        /// Same cursor-based contract as <see cref="GetAllPagedData{T}"/>.
        /// </summary>
        Task<BusCacheGlobalPagedResult<T>> GetAllPagedDataOptimized<T>(
        string redisHashCacheKey,
        string redisZSetScoresIndexCacheKey,
        int pageSize = 1000,
        decimal? lastScore = null,
        string lastMember = null,
        bool descending = true) where T : DeviceCacheBasicModel;

        ///// <summary>
        ///// 游标分页查询
        ///// </summary>
        ///// <param name="redisZSetScoresIndexCacheKey">排序索引ZSET缓存Key</param>
        ///// <param name="pageSize">分页数量</param>
        ///// <param name="startScore">开始索引</param>
        ///// <param name="excludeMember">开始唯一标识</param>
        ///// <param name="descending">排序方式</param>
        ///// <returns></returns>
        //Task<(List<string> Members, bool HasNext)> GetPagedMembers(
        //    string redisZSetScoresIndexCacheKey,
        //    int pageSize,
        //    decimal? startScore,
        //    string excludeMember,
        //    bool descending);

        ///// <summary>
        ///// 批量获取指定分页的数据
        ///// </summary>
        ///// <typeparam name="T"></typeparam>
        ///// <param name="redisHashCacheKey">Hash表缓存key</param>
        ///// <param name="members">Hash表字段集合</param>
        ///// <returns></returns>
        //Task<Dictionary<string, T>> BatchGetData<T>(
        //    string redisHashCacheKey,
        //    IEnumerable<string> members)
        //    where T : DeviceCacheBasicModel;

        ///// <summary>
        ///// 获取下一页游标
        ///// </summary>
        ///// <param name="redisZSetScoresIndexCacheKey">排序索引ZSET缓存Key</param>
        ///// <param name="lastMember">最后一个唯一标识</param>
        ///// <param name="descending">排序方式</param>
        ///// <returns></returns>
        //Task<decimal?> GetNextScore(
        //    string redisZSetScoresIndexCacheKey,
        //    string lastMember,
        //    bool descending);
    }
}

View File

@ -1,16 +1,17 @@
using System.Threading.Tasks;
using JiShe.CollectBus.Common.Models;
using JiShe.CollectBus.IotSystems.MessageReceiveds;
using JiShe.CollectBus.Kafka;
using Volo.Abp.Application.Services;
namespace JiShe.CollectBus.Subscribers
{
public interface ISubscriberAppService : IApplicationService
{
Task LoginIssuedEvent(IssuedEventMessage issuedEventMessage);
Task HeartbeatIssuedEvent(IssuedEventMessage issuedEventMessage);
Task ReceivedEvent(MessageReceived receivedMessage);
Task ReceivedHeartbeatEvent(MessageReceivedHeartbeat receivedHeartbeatMessage);
Task ReceivedLoginEvent(MessageReceivedLogin receivedLoginMessage);
Task<ISubscribeAck> LoginIssuedEvent(IssuedEventMessage issuedEventMessage);
Task<ISubscribeAck> HeartbeatIssuedEvent(IssuedEventMessage issuedEventMessage);
Task<ISubscribeAck> ReceivedEvent(MessageReceived receivedMessage);
Task<ISubscribeAck> ReceivedHeartbeatEvent(MessageReceivedHeartbeat receivedHeartbeatMessage);
Task<ISubscribeAck> ReceivedLoginEvent(MessageReceivedLogin receivedLoginMessage);
}
}

View File

@ -1,6 +1,7 @@
using JiShe.CollectBus.IotSystems.MessageIssueds;
using JiShe.CollectBus.IotSystems.MessageReceiveds;
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
using JiShe.CollectBus.Kafka;
using System.Collections.Generic;
using System.Threading.Tasks;
using Volo.Abp.Application.Services;
@ -19,19 +20,19 @@ namespace JiShe.CollectBus.Subscribers
/// 1分钟采集电表数据下行消息消费订阅
/// </summary>
/// <returns></returns>
Task AmmeterScheduledMeterOneMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage issuedEventMessage);
Task<ISubscribeAck> AmmeterScheduledMeterOneMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage issuedEventMessage);
/// <summary>
/// 5分钟采集电表数据下行消息消费订阅
/// </summary>
/// <returns></returns>
Task AmmeterScheduledMeterFiveMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage issuedEventMessage);
Task<ISubscribeAck> AmmeterScheduledMeterFiveMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage issuedEventMessage);
/// <summary>
/// 15分钟采集电表数据下行消息消费订阅
/// </summary>
/// <returns></returns>
Task AmmeterScheduledMeterFifteenMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage issuedEventMessage);
Task<ISubscribeAck> AmmeterScheduledMeterFifteenMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage issuedEventMessage);
#endregion
#region
@ -39,7 +40,7 @@ namespace JiShe.CollectBus.Subscribers
/// 1分钟采集水表数据下行消息消费订阅
/// </summary>
/// <returns></returns>
Task WatermeterSubscriberWorkerAutoReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage issuedEventMessage);
Task<ISubscribeAck> WatermeterSubscriberWorkerAutoReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage issuedEventMessage);
#endregion
}

View File

@ -79,7 +79,7 @@ public abstract class CollectBusAppService : ApplicationService
{
string key = (string)item[0];
object[] fieldsAndValues = (object[])item[1];
var redisCacheKey = $"{string.Format(RedisConst.CacheMeterInfoKey, systemType, serverTagName, meterType, timeDensity)}";
var redisCacheKey = $"{string.Format(RedisConst.CacheMeterInfoHashKey, systemType, serverTagName, meterType, timeDensity)}";
string focusAddress = key.Replace(redisCacheKey, "");
var meterHashs = new Dictionary<string, T>();
@ -182,7 +182,7 @@ public abstract class CollectBusAppService : ApplicationService
string key = (string)item[0];
object[] fieldsAndValues = (object[])item[1];
var redisCacheKey = string.Format(
RedisConst.CacheMeterInfoKey,
RedisConst.CacheMeterInfoHashKey,
systemType,
serverTagName,
meterType,

View File

@ -22,6 +22,7 @@ using Volo.Abp.BackgroundWorkers;
using Volo.Abp.BackgroundWorkers.Hangfire;
using Volo.Abp.EventBus;
using Volo.Abp.Modularity;
using Microsoft.Extensions.Options;
namespace JiShe.CollectBus;
@ -69,13 +70,14 @@ public class CollectBusApplicationModule : AbpModule
//初始化主题信息
var kafkaAdminClient = context.ServiceProvider.GetRequiredService<IAdminClientService>();
var configuration = context.ServiceProvider.GetRequiredService<IConfiguration>();
var kafkaOptions = context.ServiceProvider.GetRequiredService<IOptions<KafkaOptionConfig>>();
List<string> topics = ProtocolConstExtensions.GetAllTopicNamesByIssued();
topics.AddRange(ProtocolConstExtensions.GetAllTopicNamesByReceived());
foreach (var item in topics)
{
await kafkaAdminClient.CreateTopicAsync(item, configuration.GetValue<int>(CommonConst.NumPartitions), configuration.GetValue<short>(CommonConst.KafkaReplicationFactor));
await kafkaAdminClient.CreateTopicAsync(item, kafkaOptions.Value.NumPartitions, kafkaOptions.Value.KafkaReplicationFactor);
}
}

View File

@ -7,13 +7,16 @@ using System.Threading.Tasks;
using DeviceDetectorNET.Class.Device;
using DotNetCore.CAP;
using JiShe.CollectBus.Common.BuildSendDatas;
using JiShe.CollectBus.Common.Consts;
using JiShe.CollectBus.Common.Enums;
using JiShe.CollectBus.Common.Extensions;
using JiShe.CollectBus.Common.Helpers;
using JiShe.CollectBus.Common.Models;
using JiShe.CollectBus.EnergySystem.Dto;
using JiShe.CollectBus.FreeSql;
using JiShe.CollectBus.IotSystems.PrepayModel;
using JiShe.CollectBus.IotSystems.Records;
using JiShe.CollectBus.Kafka.Producer;
using JiShe.CollectBus.Protocol.Contracts;
using MassTransit;
using Microsoft.AspNetCore.Mvc;
@ -28,14 +31,16 @@ namespace JiShe.CollectBus.EnergySystem
private readonly IRepository<FocusRecord, Guid> _focusRecordRepository;
private readonly IRepository<CsqRecord,Guid> _csqRecordRepository;
private readonly IRepository<ConrOnlineRecord,Guid> _conrOnlineRecordRepository;
private readonly IProducerService _producerService;
private readonly ICapPublisher _capBus;
public EnergySystemAppService(IRepository<FocusRecord, Guid> focusRecordRepository, IRepository<CsqRecord, Guid> csqRecordRepository,
IRepository<ConrOnlineRecord, Guid> conrOnlineRecordRepository, ICapPublisher capBus)
IRepository<ConrOnlineRecord, Guid> conrOnlineRecordRepository, IProducerService producerService, ICapPublisher capBus)
{
_focusRecordRepository = focusRecordRepository;
_csqRecordRepository = csqRecordRepository;
_conrOnlineRecordRepository = conrOnlineRecordRepository;
_producerService = producerService;
_capBus = capBus;
}
@ -70,8 +75,16 @@ namespace JiShe.CollectBus.EnergySystem
if (bytes == null)
return result;
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -108,7 +121,16 @@ namespace JiShe.CollectBus.EnergySystem
foreach (var bytes in bytesList)
{
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -116,6 +138,7 @@ namespace JiShe.CollectBus.EnergySystem
Type = IssuedEventType.Data,
MessageId = NewId.NextGuid().ToString()
});
}
return result;
@ -149,7 +172,15 @@ namespace JiShe.CollectBus.EnergySystem
}).ToList();
var bytes = Build3761SendData.BuildAmmeterParameterSetSendCmd(address, meterParameters);
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -178,7 +209,16 @@ namespace JiShe.CollectBus.EnergySystem
{
var dataUnit = Build645SendData.BuildReadMeterAddressSendDataUnit(detail.MeterAddress);
var bytes =Build3761SendData.BuildTransparentForwardingSendCmd(address, detail.Port, detail.BaudRate.ToString(), dataUnit, StopBit.Stop1, Parity.None);
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -261,7 +301,16 @@ namespace JiShe.CollectBus.EnergySystem
if (bytes != null)
{
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -320,7 +369,16 @@ namespace JiShe.CollectBus.EnergySystem
var bytes = Build3761SendData.BuildCommunicationParametersSetSendCmd(address, masterIP, materPort,
backupIP, backupPort, input.Data.APN);
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -347,7 +405,16 @@ namespace JiShe.CollectBus.EnergySystem
var address = $"{input.AreaCode}{input.Address}";
var bytes = Build3761SendData.BuildTerminalCalendarClockSendCmd(address);
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -375,7 +442,15 @@ namespace JiShe.CollectBus.EnergySystem
bool isManual = !input.AreaCode.Equals("5110");//低功耗集中器不是长连接,在连接的那一刻再发送
var bytes = Build3761SendData.BuildConrCheckTimeSendCmd(address,DateTime.Now, isManual);
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -402,7 +477,15 @@ namespace JiShe.CollectBus.EnergySystem
var address = $"{input.AreaCode}{input.Address}";
var bytes = Build3761SendData.BuildConrRebootSendCmd(address);
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -430,7 +513,15 @@ namespace JiShe.CollectBus.EnergySystem
var address = $"{input.AreaCode}{input.Address}";
var pnList = input.Data.Split(',').Select(it => int.Parse(it)).ToList();
var bytes = Build3761SendData.BuildAmmeterParameterReadingSendCmd(address, pnList);
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -479,7 +570,16 @@ namespace JiShe.CollectBus.EnergySystem
foreach (var bytes in bytesList)
{
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -548,7 +648,15 @@ namespace JiShe.CollectBus.EnergySystem
foreach (var bytes in bytesList)
{
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -577,7 +685,15 @@ namespace JiShe.CollectBus.EnergySystem
var address = $"{code.AreaCode}{code.Address}";
var bytes = Build3761SendData.BuildAmmeterReportCollectionItemsSetSendCmd(address,input.Detail.Pn, input.Detail.Unit,input.Detail.Cycle,input.Detail.BaseTime,
input.Detail.CurveRatio,input.Detail.Details.Select(it => new PnFn(it.Pn,it.Fn)).ToList());
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -605,7 +721,15 @@ namespace JiShe.CollectBus.EnergySystem
{
var address = $"{code.AreaCode}{code.Address}";
var bytes = Build3761SendData.BuildAmmeterAutoUpSwitchSetSendCmd(address, input.Detail.Pn,input.Detail.IsOpen);
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -631,7 +755,15 @@ namespace JiShe.CollectBus.EnergySystem
var result = new BaseResultDto();
var address = $"{input.AreaCode}{input.Address}";
var bytes = Build3761SendData.BuildAmmeterReadAutoUpSwitchSendCmd(address, input.Detail.Pn);
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -658,7 +790,15 @@ namespace JiShe.CollectBus.EnergySystem
{
var address = $"{data.AreaCode}{data.Address}";
var bytes = Build3761SendData.BuildTerminalVersionInfoReadingSendCmd(address);
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,
@ -713,7 +853,15 @@ namespace JiShe.CollectBus.EnergySystem
foreach (var bytes in bytesList)
{
await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//await _capBus.PublishAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
//{
// //ClientId = messageReceived.ClientId,
// DeviceNo = address,
// Message = bytes,
// Type = IssuedEventType.Data,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerManualValveReadingIssuedEventName, new IssuedEventMessage
{
//ClientId = messageReceived.ClientId,
DeviceNo = address,

View File

@ -15,6 +15,7 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="DotNetCore.CAP" Version="8.3.1" />
<PackageReference Include="MassTransit.Kafka" Version="8.4.0" />
<PackageReference Include="Volo.Abp.AspNetCore.Mvc" Version="8.3.3" />
@ -23,7 +24,6 @@
<PackageReference Include="Volo.Abp.Ddd.Application" Version="8.3.3" />
<PackageReference Include="TouchSocket" Version="3.0.19" />
<PackageReference Include="TouchSocket.Hosting" Version="3.0.19" />
<PackageReference Include="DotNetCore.CAP" Version="8.3.1" />
<PackageReference Include="Volo.Abp.EventBus.Kafka" Version="8.3.3" />
<ProjectReference Include="..\JiShe.CollectBus.Application.Contracts\JiShe.CollectBus.Application.Contracts.csproj" />

View File

@ -5,12 +5,15 @@ using System.Threading.Tasks;
using DeviceDetectorNET.Parser.Device;
using DotNetCore.CAP;
using JiShe.CollectBus.Ammeters;
using JiShe.CollectBus.Common.Consts;
using JiShe.CollectBus.Common.Enums;
using JiShe.CollectBus.Common.Extensions;
using JiShe.CollectBus.Common.Helpers;
using JiShe.CollectBus.Enums;
using JiShe.CollectBus.Interceptors;
using JiShe.CollectBus.IotSystems.Devices;
using JiShe.CollectBus.IotSystems.MessageReceiveds;
using JiShe.CollectBus.Kafka.Producer;
using JiShe.CollectBus.Protocol.Contracts;
using MassTransit;
using Microsoft.Extensions.Logging;
@ -27,6 +30,7 @@ namespace JiShe.CollectBus.Plugins
public partial class TcpMonitor : PluginBase, ITransientDependency, ITcpReceivedPlugin, ITcpConnectingPlugin, ITcpConnectedPlugin, ITcpClosedPlugin
{
private readonly ICapPublisher _producerBus;
private readonly IProducerService _producerService;
private readonly ILogger<TcpMonitor> _logger;
private readonly IRepository<Device, Guid> _deviceRepository;
private readonly IDistributedCache<AmmeterInfo> _ammeterInfoCache;
@ -34,16 +38,17 @@ namespace JiShe.CollectBus.Plugins
/// <summary>
///
/// </summary>
/// <param name="producerBus"></param>
/// <param name="producerService"></param>
/// <param name="logger"></param>
/// <param name="deviceRepository"></param>
/// <param name="ammeterInfoCache"></param>
public TcpMonitor(ICapPublisher producerBus,
public TcpMonitor(ICapPublisher producerBus, IProducerService producerService,
ILogger<TcpMonitor> logger,
IRepository<Device, Guid> deviceRepository,
IDistributedCache<AmmeterInfo> ammeterInfoCache)
{
_producerBus = producerBus;
_producerService = producerService;
_logger = logger;
_deviceRepository = deviceRepository;
_ammeterInfoCache = ammeterInfoCache;
@ -170,7 +175,11 @@ namespace JiShe.CollectBus.Plugins
DeviceNo = deviceNo,
MessageId = NewId.NextGuid().ToString()
};
await _producerBus.PublishAsync(ProtocolConst.SubscriberLoginReceivedEventName, messageReceivedLoginEvent);
//await _producerBus.PublishAsync(ProtocolConst.SubscriberLoginReceivedEventName, messageReceivedLoginEvent);
await _producerService.ProduceAsync(ProtocolConst.SubscriberLoginReceivedEventName, messageReceivedLoginEvent);
//await _producerBus.Publish( messageReceivedLoginEvent);
}
@ -217,7 +226,9 @@ namespace JiShe.CollectBus.Plugins
DeviceNo = deviceNo,
MessageId = NewId.NextGuid().ToString()
};
await _producerBus.PublishAsync(ProtocolConst.SubscriberHeartbeatReceivedEventName, messageReceivedHeartbeatEvent);
//await _producerBus.PublishAsync(ProtocolConst.SubscriberHeartbeatReceivedEventName, messageReceivedHeartbeatEvent);
await _producerService.ProduceAsync(ProtocolConst.SubscriberHeartbeatReceivedEventName, messageReceivedHeartbeatEvent);
//await _producerBus.Publish(messageReceivedHeartbeatEvent);
}
@ -244,8 +255,16 @@ namespace JiShe.CollectBus.Plugins
//string topicName = string.Format(ProtocolConst.AFNTopicNameFormat, aFn);
//todo 如何确定时标?目前集中器的采集频率,都是固定,数据上报的时候,根据当前时间,往后推测出应当采集的时间点作为时标。但是如果由于网络问题,数据一直没上报的情况改怎么计算?
await _producerBus.PublishAsync(ProtocolConst.SubscriberReceivedEventName, new MessageReceived
//await _producerBus.PublishAsync(ProtocolConst.SubscriberReceivedEventName, new MessageReceived
//{
// ClientId = client.Id,
// ClientIp = client.IP,
// ClientPort = client.Port,
// MessageHexString = messageHexString,
// DeviceNo = deviceNo,
// MessageId = NewId.NextGuid().ToString()
//});
await _producerService.ProduceAsync(ProtocolConst.SubscriberReceivedEventName, new MessageReceived
{
ClientId = client.Id,
ClientIp = client.IP,

View File

@ -0,0 +1,940 @@
using Confluent.Kafka;
using FreeRedis;
using JiShe.CollectBus.Application.Contracts;
using JiShe.CollectBus.Common.Extensions;
using JiShe.CollectBus.Common.Helpers;
using JiShe.CollectBus.Common.Models;
using JiShe.CollectBus.FreeRedisProvider;
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Volo.Abp.DependencyInjection;
using static FreeSql.Internal.GlobalFilter;
using static System.Runtime.InteropServices.JavaScript.JSType;
using static Volo.Abp.UI.Navigation.DefaultMenuNames.Application;
namespace JiShe.CollectBus.RedisDataCache
{
/// <summary>
/// 数据缓存服务接口
/// </summary>
public class RedisDataCacheService : IRedisDataCacheService, ITransientDependency
{
private readonly IFreeRedisProvider _freeRedisProvider;
private readonly ILogger<RedisDataCacheService> _logger;
private RedisClient Instance { get; set; }
/// <summary>
/// Creates the Redis data cache service and resolves the shared
/// <see cref="RedisClient"/> instance from the provider once, up front.
/// </summary>
/// <param name="freeRedisProvider">Provider that owns the Redis client.</param>
/// <param name="logger">Logger for cache-operation diagnostics.</param>
public RedisDataCacheService(IFreeRedisProvider freeRedisProvider,
ILogger<RedisDataCacheService> logger)
{
    _freeRedisProvider = freeRedisProvider;
    _logger = logger;
    Instance = freeRedisProvider.Instance;
}
/// <summary>
/// Inserts a single item into the cache, writing the Hash payload and both
/// index entries atomically inside one Redis MULTI/EXEC transaction.
/// </summary>
/// <typeparam name="T">Cached item type; must derive from <see cref="DeviceCacheBasicModel"/>.</typeparam>
/// <param name="redisHashCacheKey">Hash cache key for the main data store.</param>
/// <param name="redisSetIndexCacheKey">Set index cache key.</param>
/// <param name="redisZSetScoresIndexCacheKey">ZSET index cache key.</param>
/// <param name="data">Item to cache.</param>
/// <returns>A completed task; failures are logged rather than thrown.</returns>
public async Task InsertDataAsync<T>(
string redisHashCacheKey,
string redisSetIndexCacheKey,
string redisZSetScoresIndexCacheKey,
T data) where T : DeviceCacheBasicModel
{
    // Guard clause: a null payload or any missing key makes the write meaningless.
    bool argumentsInvalid = data == null
        || string.IsNullOrWhiteSpace(redisHashCacheKey)
        || string.IsNullOrWhiteSpace(redisSetIndexCacheKey)
        || string.IsNullOrWhiteSpace(redisZSetScoresIndexCacheKey);
    if (argumentsInvalid)
    {
        _logger.LogError($"{nameof(InsertDataAsync)} 参数异常,-101");
        return;
    }

    // All three writes are queued on one MULTI transaction so they commit atomically.
    using var transaction = Instance.Multi();

    // Main payload: serialized item stored under its member id in the Hash.
    transaction.HSet(redisHashCacheKey, data.MemberId, data.Serialize());
    // Grouping index: member id added to the Set.
    transaction.SAdd(redisSetIndexCacheKey, data.MemberId);
    // Ordering index: member id scored into the ZSET for cursor paging.
    transaction.ZAdd(redisZSetScoresIndexCacheKey, data.ScoreValue, data.MemberId);

    var execResults = transaction.Exec();
    if (execResults == null || execResults.Length <= 0)
    {
        // EXEC returned nothing — the transaction did not commit; log and move on.
        _logger.LogError($"{nameof(InsertDataAsync)} 添加事务提交失败,-102");
    }

    // No awaitable work in this implementation; keep the async signature honest.
    await Task.CompletedTask;
}
/// <summary>
/// Batch-inserts items into the cache (hash payload + set index + zset score index),
/// pipelining writes in fixed-size chunks with bounded concurrency.
/// </summary>
/// <typeparam name="T">Cached model type.</typeparam>
/// <param name="redisHashCacheKey">Hash key holding serialized payloads.</param>
/// <param name="redisSetIndexCacheKey">Set key indexing member ids.</param>
/// <param name="redisZSetScoresIndexCacheKey">Sorted-set key ordering members by score.</param>
/// <param name="items">Items to cache; no-op (with an error log) when null or empty.</param>
public async Task BatchInsertDataAsync<T>(
    string redisHashCacheKey,
    string redisSetIndexCacheKey,
    string redisZSetScoresIndexCacheKey,
    IEnumerable<T> items) where T : DeviceCacheBasicModel
{
    if (items == null
        || !items.Any()
        || string.IsNullOrWhiteSpace(redisHashCacheKey)
        || string.IsNullOrWhiteSpace(redisSetIndexCacheKey)
        || string.IsNullOrWhiteSpace(redisZSetScoresIndexCacheKey))
    {
        _logger.LogError($"{nameof(BatchInsertDataAsync)} 参数异常,-101");
        return;
    }

    const int BATCH_SIZE = 1000; // pipeline chunk size (1000 commands x3 per round-trip)
    using var semaphore = new SemaphoreSlim(Environment.ProcessorCount * 2);
    var pending = new List<Task>();

    foreach (var batch in items.Batch(BATCH_SIZE))
    {
        await semaphore.WaitAsync();
        pending.Add(Task.Run(() =>
        {
            try
            {
                using (var pipe = Instance.StartPipe())
                {
                    foreach (var item in batch)
                    {
                        pipe.HSet(redisHashCacheKey, item.MemberId, item.Serialize());          // payload
                        pipe.SAdd(redisSetIndexCacheKey, item.MemberId);                        // membership index
                        pipe.ZAdd(redisZSetScoresIndexCacheKey, item.ScoreValue, item.MemberId); // score index
                    }
                    pipe.EndPipe();
                }
            }
            finally
            {
                // BUGFIX: Release used to run only on the success path; a pipeline
                // exception would leak the permit and starve later batches.
                semaphore.Release();
            }
        }));
    }

    // BUGFIX: the worker tasks were fire-and-forget, so this method returned before
    // any write completed and the semaphore was never disposed. Await them all.
    await Task.WhenAll(pending);
}
/// <summary>
/// Removes a cached item and its index entries (hash + set + zset) atomically via Lua.
/// </summary>
/// <typeparam name="T">Cached model type.</typeparam>
/// <param name="redisHashCacheKey">Hash key holding serialized payloads.</param>
/// <param name="redisSetIndexCacheKey">Set key indexing member ids.</param>
/// <param name="redisZSetScoresIndexCacheKey">Sorted-set key ordering members by score.</param>
/// <param name="data">Item whose <c>MemberId</c> identifies the entries to delete.</param>
public async Task RemoveCacheDataAsync<T>(
    string redisHashCacheKey,
    string redisSetIndexCacheKey,
    string redisZSetScoresIndexCacheKey,
    T data) where T : DeviceCacheBasicModel
{
    if (data == null
        || string.IsNullOrWhiteSpace(redisHashCacheKey)
        || string.IsNullOrWhiteSpace(redisSetIndexCacheKey)
        || string.IsNullOrWhiteSpace(redisZSetScoresIndexCacheKey))
    {
        _logger.LogError($"{nameof(RemoveCacheDataAsync)} 参数异常,-101");
        return;
    }

    // 'deleted' reports whether the hash field actually existed; the index
    // removals run unconditionally so stale index entries are cleaned up too.
    const string luaScript = @"
        local hashCacheKey = KEYS[1]
        local setIndexCacheKey = KEYS[2]
        local zsetScoresIndexCacheKey = KEYS[3]
        local member = ARGV[1]
        local deleted = 0
        if redis.call('HDEL', hashCacheKey, member) > 0 then
            deleted = 1
        end
        redis.call('SREM', setIndexCacheKey, member)
        redis.call('ZREM', zsetScoresIndexCacheKey, member)
        return deleted
    ";

    var keys = new[]
    {
        redisHashCacheKey,
        redisSetIndexCacheKey,
        redisZSetScoresIndexCacheKey
    };

    var result = await Instance.EvalAsync(luaScript, keys, new[] { data.MemberId });

    // BUGFIX: EvalAsync boxes Lua integer replies as Int64 (see the (long) casts used
    // on script results elsewhere in this class); a direct (int) unbox cast throws
    // InvalidCastException. Convert handles the boxed long safely.
    if (Convert.ToInt64(result) == 0)
    {
        _logger.LogError($"{nameof(RemoveCacheDataAsync)} 删除指定Key{redisHashCacheKey}的{data.MemberId}数据失败,-102");
    }
}
/// <summary>
/// Updates a cached item in place when its member id (index mapping) is unchanged.
/// Only the hash payload is rewritten; set/zset indexes stay as they are.
/// </summary>
/// <typeparam name="T">Cached model type.</typeparam>
/// <param name="redisHashCacheKey">Hash key holding serialized payloads.</param>
/// <param name="redisSetIndexCacheKey">Set index key (not written here; kept for a uniform signature).</param>
/// <param name="redisZSetScoresIndexCacheKey">Sorted-set index key (not written here; kept for a uniform signature).</param>
/// <param name="newData">Replacement payload; must keep the existing <c>MemberId</c>.</param>
public async Task ModifyDataAsync<T>(
    string redisHashCacheKey,
    string redisSetIndexCacheKey,
    string redisZSetScoresIndexCacheKey,
    T newData) where T : DeviceCacheBasicModel
{
    if (newData == null
        || string.IsNullOrWhiteSpace(redisHashCacheKey)
        || string.IsNullOrWhiteSpace(redisSetIndexCacheKey)
        || string.IsNullOrWhiteSpace(redisZSetScoresIndexCacheKey))
    {
        _logger.LogError($"{nameof(ModifyDataAsync)} 参数异常,-101");
        return;
    }

    // HEXISTS guard: a modify must never silently turn into an insert.
    var luaScript = @"
        local hashCacheKey = KEYS[1]
        local member = ARGV[1]
        local newData = ARGV[2]
        --
        if redis.call('HEXISTS', hashCacheKey, member) == 0 then
            return 0
        end
        --
        redis.call('HSET', hashCacheKey, member, newData)
        return 1
    ";

    var result = await Instance.EvalAsync(luaScript,
        new[]
        {
            redisHashCacheKey
        },
        new object[]
        {
            newData.MemberId,
            newData.Serialize()
        });

    // BUGFIX: the Lua integer reply comes back boxed as Int64; the previous (int)
    // unbox cast would throw InvalidCastException. Convert handles the boxed long.
    if (Convert.ToInt64(result) == 0)
    {
        _logger.LogError($"{nameof(ModifyDataAsync)} 更新指定Key{redisHashCacheKey}的{newData.MemberId}数据失败,-102");
    }
}
/// <summary>
/// Updates a cached item whose member id (index mapping) has changed: deletes the
/// old hash field, writes the new one, and — when a score is supplied — swaps the
/// old member for the new one in both the set and sorted-set indexes, atomically.
/// </summary>
/// <typeparam name="T">Cached model type.</typeparam>
/// <param name="redisHashCacheKey">Hash key holding serialized payloads.</param>
/// <param name="redisSetIndexCacheKey">Set key indexing member ids.</param>
/// <param name="oldMemberId">Previous member id to remove from hash and indexes.</param>
/// <param name="redisZSetScoresIndexCacheKey">Sorted-set key ordering members by score.</param>
/// <param name="newData">Replacement payload carrying the new <c>MemberId</c> and score.</param>
public async Task ModifyDataAsync<T>(
    string redisHashCacheKey,
    string redisSetIndexCacheKey,
    string oldMemberId,
    string redisZSetScoresIndexCacheKey,
    T newData) where T : DeviceCacheBasicModel
{
    if (newData == null
        || string.IsNullOrWhiteSpace(redisHashCacheKey)
        || string.IsNullOrWhiteSpace(redisSetIndexCacheKey)
        || string.IsNullOrWhiteSpace(oldMemberId)
        || string.IsNullOrWhiteSpace(redisZSetScoresIndexCacheKey))
    {
        _logger.LogError($"{nameof(ModifyDataAsync)} 参数异常,-101");
        return;
    }

    // The old field must exist, otherwise the remap is rejected (returns 0).
    // Index swapping is skipped when the score argument is the empty string.
    var luaScript = @"
        local hashCacheKey = KEYS[1]
        local setIndexCacheKey = KEYS[2]
        local zsetScoresIndexCacheKey = KEYS[3]
        local member = ARGV[1]
        local oldMember = ARGV[2]
        local newData = ARGV[3]
        local newScore = ARGV[4]
        --
        if redis.call('HEXISTS', hashCacheKey, oldMember) == 0 then
            return 0
        end
        --
        redis.call('HDEL', hashCacheKey, oldMember)
        --
        redis.call('HSET', hashCacheKey, member, newData)
        --
        if newScore ~= '' then
            --
            redis.call('SREM', setIndexCacheKey, oldMember)
            redis.call('ZREM', zsetScoresIndexCacheKey, oldMember)
            --
            redis.call('SADD', setIndexCacheKey, member)
            redis.call('ZADD', zsetScoresIndexCacheKey, newScore, member)
        end
        return 1
    ";

    var result = await Instance.EvalAsync(luaScript,
        new[]
        {
            redisHashCacheKey,
            redisSetIndexCacheKey,
            redisZSetScoresIndexCacheKey
        },
        new object[]
        {
            newData.MemberId,
            oldMemberId,
            newData.Serialize(),
            // ToString() never yields null (nullable value types stringify to ""),
            // so the previous `?? ""` was dead code and has been dropped.
            newData.ScoreValue.ToString()
        });

    // BUGFIX: the Lua integer reply comes back boxed as Int64; the previous (int)
    // unbox cast would throw InvalidCastException. Convert handles the boxed long.
    if (Convert.ToInt64(result) == 0)
    {
        _logger.LogError($"{nameof(ModifyDataAsync)} 更新指定Key{redisHashCacheKey}的{newData.MemberId}数据失败,-102");
    }
}
/// <summary>
/// Pages data for a specific set of concentrator ids via the concentrator/meter
/// sort index. NOT IMPLEMENTED YET — always throws <see cref="NotImplementedException"/>.
/// </summary>
/// <typeparam name="T">Cached model type.</typeparam>
/// <param name="redisCacheKey">Hash key holding serialized payloads.</param>
/// <param name="redisCacheFocusScoresIndexKey">Concentrator/meter sort-index zset key.</param>
/// <param name="focusIds">Concentrator ids to page over.</param>
/// <param name="pageSize">Page size.</param>
/// <param name="lastScore">Score anchor of the previous page's last item.</param>
/// <param name="lastMember">Member anchor of the previous page's last item.</param>
/// <param name="descending">True for high-to-low score order.</param>
/// <exception cref="NotImplementedException">Always; the method is a stub.</exception>
public async Task<BusCacheGlobalPagedResult<T>> GetPagedData<T>(
    string redisCacheKey,
    string redisCacheFocusScoresIndexKey,
    IEnumerable<int> focusIds,
    int pageSize = 10,
    decimal? lastScore = null,
    string lastMember = null,
    bool descending = true)
    where T : DeviceCacheBasicModel
{
    await Task.CompletedTask;
    // BUGFIX: a bare `throw new Exception()` is indistinguishable from a genuine
    // runtime failure; NotImplementedException is the idiomatic stub marker and
    // still satisfies any `catch (Exception)` a caller may have.
    throw new NotImplementedException(
        $"{nameof(GetPagedData)} is not implemented yet.");
}
/// <summary>
/// Optimized cursor-based paging over the zset-indexed hash cache (the original
/// note says it targets million-scale data). A single Lua round-trip range-scans
/// the zset (over-fetching 5x the page size to survive anchor skipping), drops
/// everything at or before the (lastScore, lastMember) anchor, takes one page of
/// (member, score) pairs, and bulk-loads the matching payloads with HMGET.
/// </summary>
/// <typeparam name="T">Cached model type, deserialized from the hash values.</typeparam>
/// <param name="redisHashCacheKey">Hash key holding serialized payloads.</param>
/// <param name="redisZSetScoresIndexCacheKey">Sorted-set key ordering members by score.</param>
/// <param name="pageSize">Requested page size; clamped to [1, 10000].</param>
/// <param name="lastScore">Score anchor from the previous page (null for the first page).</param>
/// <param name="lastMember">Member anchor from the previous page (null for the first page).</param>
/// <param name="descending">True for high-to-low score order.</param>
/// <returns>A page of items plus the next-cursor anchors; an empty page on bad input or error.</returns>
public async Task<BusCacheGlobalPagedResult<T>> GetAllPagedDataOptimized<T>(
    string redisHashCacheKey,
    string redisZSetScoresIndexCacheKey,
    int pageSize = 1000,
    decimal? lastScore = null,
    string lastMember = null,
    bool descending = true) where T : DeviceCacheBasicModel
{
    // Invalid keys yield an empty page (not an exception), matching the catch below.
    if (string.IsNullOrWhiteSpace(redisHashCacheKey) ||
        string.IsNullOrWhiteSpace(redisZSetScoresIndexCacheKey))
    {
        _logger.LogError("Invalid parameters in {Method}", nameof(GetAllPagedDataOptimized));
        return new BusCacheGlobalPagedResult<T> { Items = new List<T>() };
    }
    pageSize = Math.Clamp(pageSize, 1, 10000);
    // Lua contract: ARGV = [command, range_start, range_end, limit, last_score, last_member];
    // KEYS = [zset index, payload hash]. Returns {count, members[], scores[], hash_data[]}.
    const string luaScript = @"
        local command = ARGV[1]
        local range_start = ARGV[2]
        local range_end = ARGV[3]
        local limit = tonumber(ARGV[4])
        local last_score = ARGV[5]
        local last_member = ARGV[6]
        -- 5
        local members
        if command == 'ZRANGEBYSCORE' then
            members = redis.call('ZRANGEBYSCORE', KEYS[1], range_start, range_end,
                'WITHSCORES', 'LIMIT', 0, limit * 5)
        else
            members = redis.call('ZREVRANGEBYSCORE', KEYS[1], range_start, range_end,
                'WITHSCORES', 'LIMIT', 0, limit * 5)
        end
        --
        local filtered = {}
        local count = 0
        local start_index = 1
        --
        if last_score ~= '' and last_member ~= '' then
            for i=1,#members,2 do
                local score = members[i+1]
                local member = members[i]
                if command == 'ZRANGEBYSCORE' then
                    if tonumber(score) > tonumber(last_score) then
                        start_index = i
                        break
                    elseif tonumber(score) == tonumber(last_score) then
                        if member > last_member then
                            start_index = i
                            break
                        end
                    end
                else
                    if tonumber(score) < tonumber(last_score) then
                        start_index = i
                        break
                    elseif tonumber(score) == tonumber(last_score) then
                        if member < last_member then
                            start_index = i
                            break
                        end
                    end
                end
            end
        end
        --
        for i=start_index,#members,2 do
            if count >= limit then break end
            table.insert(filtered, members[i])
            table.insert(filtered, members[i+1])
            count = count + 1
        end
        --
        local result_members = {}
        local result_scores = {}
        for i=1,#filtered,2 do
            table.insert(result_members, filtered[i])
            table.insert(result_scores, filtered[i+1])
        end
        if #result_members == 0 then return {0,{},{},{}} end
        -- Hash数据
        local hash_data = redis.call('HMGET', KEYS[2], unpack(result_members))
        return {#result_members, result_members, result_scores, hash_data}";
    // Build the score range; the anchor score is INCLUDED here — the Lua
    // start_index scan is what actually skips past the anchor pair.
    string rangeStart, rangeEnd;
    if (descending)
    {
        rangeStart = lastScore.HasValue ? lastScore.Value.ToString() : "+inf";
        rangeEnd = "-inf";
    }
    else
    {
        rangeStart = lastScore.HasValue ? lastScore.Value.ToString() : "-inf";
        rangeEnd = "+inf";
    }
    try
    {
        var scriptResult = (object[])await Instance.EvalAsync(
            luaScript,
            new[] { redisZSetScoresIndexCacheKey, redisHashCacheKey },
            new object[]
            {
                descending ? "ZREVRANGEBYSCORE" : "ZRANGEBYSCORE",
                rangeStart,
                rangeEnd,
                pageSize,
                lastScore?.ToString() ?? "",
                lastMember ?? ""
            });
        // Lua integer replies arrive boxed as Int64.
        var itemCount = (long)scriptResult[0];
        if (itemCount == 0)
            return new BusCacheGlobalPagedResult<T> { Items = new List<T>() };
        // Unpack the three parallel arrays (members, scores, serialized payloads).
        var members = ((object[])scriptResult[1]).Cast<string>().ToList();
        var scores = ((object[])scriptResult[2]).Cast<string>()
            .Select(decimal.Parse).ToList();
        var hashData = ((object[])scriptResult[3]).Cast<string>().ToList();
        // Deserialize in parallel; entries that fail to parse are silently dropped.
        var validItems = members.AsParallel()
            .Select((m, i) =>
            {
                try { return BusJsonSerializer.Deserialize<T>(hashData[i]); }
                catch { return null; }
            })
            .Where(x => x != null)
            .ToList();
        // A full page is treated as "probably more"; an exactly-full last page costs
        // one extra (empty) fetch by the caller.
        var hasNext = validItems.Count >= pageSize;
        var actualItems = validItems.Take(pageSize).ToList();
        // Next-page anchor, taken from the ORIGINAL (unfiltered) member/score lists.
        // NOTE(review): if deserialization dropped entries, this anchor may not match
        // the last item actually returned — confirm that skipping such gaps is intended.
        decimal? nextScore = null;
        string nextMember = null;
        if (hasNext && actualItems.Count > 0)
        {
            var lastValidIndex = Math.Min(pageSize - 1, members.Count - 1);
            nextScore = scores[lastValidIndex];
            nextMember = members[lastValidIndex];
        }
        return new BusCacheGlobalPagedResult<T>
        {
            Items = actualItems,
            HasNext = hasNext,
            NextScore = nextScore,
            NextMember = nextMember,
            TotalCount = await GetTotalCount(redisZSetScoresIndexCacheKey),
            PageSize = pageSize
        };
    }
    catch (Exception ex)
    {
        // Any Redis/parse failure degrades to an empty page rather than propagating.
        _logger.LogError(ex, "分页查询异常");
        return new BusCacheGlobalPagedResult<T> { Items = new List<T>() };
    }
}
/// <summary>
/// Cursor-based paging over the zset-indexed hash cache (original note: handles
/// ~100k rows in 1–3 seconds). One Lua round-trip range-scans the zset with a 2x
/// over-fetch, filters out everything at or before the (lastScore, lastMember)
/// anchor, takes pageSize+1 pairs (the +1 detects a next page), and HMGETs the
/// matching payloads.
/// </summary>
/// <typeparam name="T">Cached model type, deserialized from the hash values.</typeparam>
/// <param name="redisHashCacheKey">Hash key holding serialized payloads.</param>
/// <param name="redisZSetScoresIndexCacheKey">Sorted-set key ordering members by score.</param>
/// <param name="pageSize">Requested page size; must be in [1, 10000].</param>
/// <param name="lastScore">Score anchor from the previous page (null for the first page).</param>
/// <param name="lastMember">Member anchor from the previous page (null for the first page).</param>
/// <param name="descending">True for high-to-low score order.</param>
/// <returns>A page plus next-cursor anchors; null when the arguments are invalid.</returns>
public async Task<BusCacheGlobalPagedResult<T>> GetAllPagedData<T>(
    string redisHashCacheKey,
    string redisZSetScoresIndexCacheKey,
    int pageSize = 1000,
    decimal? lastScore = null,
    string lastMember = null,
    bool descending = true)
    where T : DeviceCacheBasicModel
{
    // NOTE(review): unlike GetAllPagedDataOptimized, invalid input returns null
    // here (not an empty page) — callers must null-check.
    if (string.IsNullOrWhiteSpace(redisHashCacheKey) || string.IsNullOrWhiteSpace(redisZSetScoresIndexCacheKey))
    {
        _logger.LogError($"{nameof(GetAllPagedData)} 参数异常,-101");
        return null;
    }
    if (pageSize < 1 || pageSize > 10000)
    {
        _logger.LogError($"{nameof(GetAllPagedData)} 分页大小应在1-10000之间,-102");
        return null;
    }
    // Lua contract: ARGV = [command, range_start, range_end, limit, last_score, last_member];
    // KEYS = [zset index, payload hash]. Returns {count, members[], scores[], hash_data[]}.
    // NOTE(review): the anchor filter compares score STRINGS with == before falling
    // back to tonumber; '10' vs '10.0' would compare unequal and take the numeric
    // branch with equal values, excluding ties — confirm score formatting is canonical.
    var luaScript = @"
        local command = ARGV[1]
        local range_start = ARGV[2]
        local range_end = ARGV[3]
        local limit = tonumber(ARGV[4])
        local last_score = ARGV[5]
        local last_member = ARGV[6]
        --
        local members
        if command == 'ZRANGEBYSCORE' then
            members = redis.call(command, KEYS[1], range_start, range_end, 'WITHSCORES', 'LIMIT', 0, limit * 2)
        else
            members = redis.call('ZREVRANGEBYSCORE', KEYS[1], range_start, range_end, 'WITHSCORES', 'LIMIT', 0, limit * 2)
        end
        --
        local filtered_members = {}
        local count = 0
        for i = 1, #members, 2 do
            local member = members[i]
            local score = members[i+1]
            local include = true
            if last_score ~= '' and last_member ~= '' then
                if command == 'ZRANGEBYSCORE' then
                    -- score > last_score (score == last_score member > last_member)
                    if score == last_score then
                        include = member > last_member
                    else
                        include = tonumber(score) > tonumber(last_score)
                    end
                else
                    -- score < last_score (score == last_score member < last_member)
                    if score == last_score then
                        include = member < last_member
                    else
                        include = tonumber(score) < tonumber(last_score)
                    end
                end
            end
            if include then
                table.insert(filtered_members, member)
                table.insert(filtered_members, score)
                count = count + 1
                if count >= limit then
                    break
                end
            end
        end
        --
        local result_members, result_scores = {}, {}
        for i=1,#filtered_members,2 do
            table.insert(result_members, filtered_members[i])
            table.insert(result_scores, filtered_members[i+1])
        end
        if #result_members == 0 then return {0,{},{},{}} end
        -- Hash数据
        local hash_data = redis.call('HMGET', KEYS[2], unpack(result_members))
        return {#result_members, result_members, result_scores, hash_data}";
    // Build the score range; the anchor score is INCLUDED (no '(' exclusive prefix) —
    // the Lua-side filter is what removes the anchor pair itself.
    string rangeStart, rangeEnd;
    if (descending)
    {
        rangeStart = lastScore.HasValue ? lastScore.Value.ToString() : "+inf";
        rangeEnd = "-inf";
    }
    else
    {
        rangeStart = lastScore.HasValue ? lastScore.Value.ToString() : "-inf";
        rangeEnd = "+inf";
    }
    var scriptResult = (object[])await Instance.EvalAsync(luaScript,
        new[] { redisZSetScoresIndexCacheKey, redisHashCacheKey },
        new object[]
        {
            descending ? "ZREVRANGEBYSCORE" : "ZRANGEBYSCORE",
            rangeStart,
            rangeEnd,
            (pageSize + 1).ToString(), // fetch pageSize+1 rows so a next page can be detected
            lastScore?.ToString() ?? "",
            lastMember ?? ""
        });
    // Lua integer replies arrive boxed as Int64.
    if ((long)scriptResult[0] == 0)
        return new BusCacheGlobalPagedResult<T> { Items = new List<T>() };
    // Unpack the three parallel arrays (members, scores, serialized payloads).
    var members = ((object[])scriptResult[1]).Cast<string>().ToList();
    var scores = ((object[])scriptResult[2]).Cast<string>().Select(decimal.Parse).ToList();
    var hashData = ((object[])scriptResult[3]).Cast<string>().ToList();
    // Deserialize in parallel; entries that fail to parse are silently dropped.
    var validItems = members.AsParallel()
        .Select((m, i) =>
        {
            try { return BusJsonSerializer.Deserialize<T>(hashData[i]); }
            catch { return null; }
        })
        .Where(x => x != null)
        .ToList();
    // More than pageSize survivors means the extra probe row exists -> next page.
    var hasNext = validItems.Count > pageSize;
    var actualItems = hasNext ? validItems.Take(pageSize) : validItems;
    // Next-page anchor indexes into the ORIGINAL members/scores lists.
    // NOTE(review): if deserialization dropped rows, this index no longer lines up
    // with the last item actually returned — confirm the skip is acceptable.
    decimal? nextScore = null;
    string nextMember = null;
    if (hasNext && actualItems.Any())
    {
        var lastIndex = actualItems.Count() - 1; // last index of the truncated page
        nextScore = scores[lastIndex];
        nextMember = members[lastIndex];
    }
    return new BusCacheGlobalPagedResult<T>
    {
        Items = actualItems.ToList(),
        HasNext = hasNext,
        NextScore = nextScore,
        NextMember = nextMember,
        TotalCount = await GetTotalCount(redisZSetScoresIndexCacheKey),
        PageSize = pageSize,
    };
}
///// <summary>
///// 通过集中器与表计信息排序索引获取数据
///// </summary>
///// <typeparam name="T"></typeparam>
///// <param name="redisHashCacheKey">主数据存储Hash缓存Key</param>
///// <param name="redisZSetScoresIndexCacheKey">ZSET索引缓存Key</param>
///// <param name="pageSize">分页尺寸</param>
///// <param name="lastScore">最后一个索引</param>
///// <param name="lastMember">最后一个唯一标识</param>
///// <param name="descending">排序方式</param>
///// <returns></returns>
//public async Task<BusCacheGlobalPagedResult<T>> GetAllPagedData<T>(
//string redisHashCacheKey,
//string redisZSetScoresIndexCacheKey,
//int pageSize = 1000,
//decimal? lastScore = null,
//string lastMember = null,
//bool descending = true)
//where T : DeviceCacheBasicModel
//{
// // 参数校验增强
// if (string.IsNullOrWhiteSpace(redisHashCacheKey) || string.IsNullOrWhiteSpace(redisZSetScoresIndexCacheKey))
// {
// _logger.LogError($"{nameof(GetAllPagedData)} 参数异常,-101");
// return null;
// }
// if (pageSize < 1 || pageSize > 10000)
// {
// _logger.LogError($"{nameof(GetAllPagedData)} 分页大小应在1-10000之间-102");
// return null;
// }
// //// 分页参数解析
// //var (startScore, excludeMember) = descending
// // ? (lastScore ?? decimal.MaxValue, lastMember)
// // : (lastScore ?? 0, lastMember);
// //执行分页查询(整合游标处理)
// var pageResult = await GetPagedMembers(
// redisZSetScoresIndexCacheKey,
// pageSize,
// lastScore,
// lastMember,
// descending);
// // 批量获取数据(优化内存分配)
// var dataDict = await BatchGetData<T>(redisHashCacheKey, pageResult.Members);
// return new BusCacheGlobalPagedResult<T>
// {
// Items = pageResult.Members.Select(m => dataDict.TryGetValue(m, out var v) ? v : default)
// .Where(x => x != null).ToList(),
// HasNext = pageResult.HasNext,
// NextScore = pageResult.NextScore,
// NextMember = pageResult.NextMember,
// TotalCount = await GetTotalCount(redisZSetScoresIndexCacheKey),
// PageSize = pageSize,
// };
//}
/// <summary>
/// Cursor-style paging over the sort-index zset: repeatedly pulls batches via
/// <see cref="GetNextBatch"/> until pageSize+1 members are gathered (the extra
/// one detects a next page) or the source is exhausted.
/// NOTE(review): the cursor advances by whole score units via GetNextScore, which
/// can skip members with fractional scores in between, and the returned NextMember
/// is the last FETCHED member, not the last member of the truncated page — confirm
/// both behaviors against callers before relying on this path.
/// </summary>
/// <param name="redisZSetScoresIndexCacheKey">Sorted-set index key.</param>
/// <param name="pageSize">Members per page.</param>
/// <param name="lastScore">Score cursor from the previous page (null for the first page).</param>
/// <param name="lastMember">Member cursor from the previous page.</param>
/// <param name="descending">True for high-to-low score order.</param>
/// <returns>Page members, next-page flag, and the cursor pair for the following call.</returns>
private async Task<(List<string> Members, bool HasNext, decimal? NextScore, string NextMember)> GetPagedMembers(
    string redisZSetScoresIndexCacheKey,
    int pageSize,
    decimal? lastScore,
    string lastMember,
    bool descending)
{
    // Seed the cursor at the extreme end for the chosen direction.
    long initialScore = descending ? long.MaxValue : 0;
    decimal? currentScore = lastScore ?? initialScore;
    string currentMember = lastMember;
    var members = new List<string>(pageSize + 1);
    // Keep fetching until we hold one more member than a full page, or run dry.
    while (members.Count < pageSize + 1 && currentScore.HasValue)
    {
        var (batch, hasMore) = await GetNextBatch(
            redisZSetScoresIndexCacheKey,
            pageSize + 1 - members.Count,
            currentScore.Value,
            currentMember,
            descending);
        if (!batch.Any()) break;
        members.AddRange(batch);
        // Advance the cursor past the last member we saw.
        currentMember = batch.LastOrDefault();
        currentScore = await GetNextScore(redisZSetScoresIndexCacheKey, currentMember, descending);
    }
    // The extra (pageSize+1-th) member, if present, signals a next page.
    bool hasNext = members.Count > pageSize;
    var resultMembers = members.Take(pageSize).ToList();
    return (
        resultMembers,
        hasNext,
        currentScore,
        currentMember
    );
}
/// <summary>
/// Fetches the payloads for a page of members with one pipelined HGET per member.
/// </summary>
/// <typeparam name="T">Cached model type.</typeparam>
/// <param name="redisHashCacheKey">Hash key holding serialized payloads.</param>
/// <param name="members">Hash fields to read; each becomes a dictionary key.</param>
/// <returns>Member-to-payload map, in pipeline reply order.</returns>
private async Task<Dictionary<string, T>> BatchGetData<T>(
    string redisHashCacheKey,
    IEnumerable<string> members)
    where T : DeviceCacheBasicModel
{
    // Queue every read on one pipeline, then flush with a single round-trip.
    using var pipe = Instance.StartPipe();
    foreach (var member in members)
    {
        pipe.HGet<T>(redisHashCacheKey, member);
    }
    var replies = pipe.EndPipe();

    // Replies come back positionally, one per queued command; pair them with
    // the requested members. Add() preserves the original throw-on-duplicate
    // behavior for repeated member ids.
    var map = new Dictionary<string, T>();
    var index = 0;
    foreach (var member in members)
    {
        map.Add(member, (T)replies[index]);
        index++;
    }
    return await Task.FromResult(map);
}
/// <summary>
/// Fetches the next batch of members from the sort-index zset, starting at the
/// given score cursor (inclusive) in the requested direction.
/// NOTE(review): the ascending path caps at long.MaxValue and the descending path
/// bottoms out at 0 — this assumes non-negative scores; confirm the score domain.
/// </summary>
/// <param name="zsetKey">Sorted-set index key.</param>
/// <param name="limit">Maximum members to return.</param>
/// <param name="score">Score cursor to start from (inclusive).</param>
/// <param name="excludeMember">Currently unused; reserved for anchor exclusion.</param>
/// <param name="descending">True for high-to-low score order.</param>
/// <returns>The batch, plus a heuristic "more may remain" flag (batch was full).</returns>
private async Task<(string[] Batch, bool HasMore)> GetNextBatch(
    string zsetKey,
    int limit,
    decimal score,
    string excludeMember,
    bool descending)
{
    string[] batch;
    if (descending)
    {
        batch = await Instance.ZRevRangeByScoreAsync(
            zsetKey,
            max: score,
            min: 0,
            offset: 0,
            count: limit);
    }
    else
    {
        batch = await Instance.ZRangeByScoreAsync(
            zsetKey,
            min: score,
            max: long.MaxValue,
            offset: 0,
            count: limit);
    }

    // A completely full batch suggests the range was not exhausted.
    return (batch, batch.Length >= limit);
}
/// <summary>
/// Computes the score cursor for the next page from the last member's score.
/// NOTE(review): stepping by a whole unit can skip members whose scores fall
/// strictly between the step — confirm scores are integral before relying on this.
/// </summary>
/// <param name="redisZSetScoresIndexCacheKey">Sorted-set index key.</param>
/// <param name="lastMember">Last member of the current page.</param>
/// <param name="descending">True for high-to-low score order.</param>
/// <returns>The adjusted score, or null when the member is missing/blank.</returns>
private async Task<decimal?> GetNextScore(
    string redisZSetScoresIndexCacheKey,
    string lastMember,
    bool descending)
{
    if (string.IsNullOrEmpty(lastMember)) return null;

    var score = await Instance.ZScoreAsync(redisZSetScoresIndexCacheKey, lastMember);
    if (score == null) return null;

    // Descending pages move the cursor down one unit; ascending pages move it up.
    var step = descending ? -1m : 1m;
    return score.Value + step;
}
/// <summary>
/// Counts zset members in the [min, max] score interval, memoizing the answer
/// in a per-interval string key for 60 seconds to spare repeated ZCOUNTs.
/// </summary>
/// <param name="zsetKey">Sorted-set index key.</param>
/// <param name="min">Lower score bound (inclusive).</param>
/// <param name="max">Upper score bound (inclusive).</param>
/// <returns>The (possibly up-to-60s-stale) member count.</returns>
public async Task<long> GetCount(string zsetKey, long min, long max)
{
    // Serve from the short-lived count cache when available.
    var cacheKey = $"{zsetKey}_count_{min}_{max}";
    var cached = await Instance.GetAsync<long?>(cacheKey);
    if (cached != null)
    {
        return cached.Value;
    }

    var count = await Instance.ZCountAsync(zsetKey, min, max);
    await Instance.SetExAsync(cacheKey, 60, count); // cache for 60 seconds
    return count;
}
/// <summary>
/// Counts all members of the sort-index zset (scores in [0, decimal.MaxValue]),
/// memoizing the answer for 30 seconds.
/// </summary>
/// <param name="redisZSetScoresIndexCacheKey">Sorted-set index key.</param>
/// <returns>The (possibly up-to-30s-stale) total member count.</returns>
private async Task<long> GetTotalCount(string redisZSetScoresIndexCacheKey)
{
    // Serve from the short-lived count cache when available.
    var cacheKey = $"{redisZSetScoresIndexCacheKey}_total_count";
    var cached = await Instance.GetAsync<long?>(cacheKey);
    if (cached != null)
    {
        return cached.Value;
    }

    var count = await Instance.ZCountAsync(redisZSetScoresIndexCacheKey, 0, decimal.MaxValue);
    await Instance.SetExAsync(cacheKey, 30, count); // cache for 30 seconds
    return count;
}
}
}

View File

@ -23,6 +23,9 @@ using JiShe.CollectBus.Common.DeviceBalanceControl;
using JiShe.CollectBus.Kafka.Attributes;
using System.Text.Json;
using JiShe.CollectBus.Kafka;
using JiShe.CollectBus.Application.Contracts;
using JiShe.CollectBus.Common.Models;
using System.Diagnostics;
namespace JiShe.CollectBus.Samples;
@ -32,17 +35,23 @@ public class SampleAppService : CollectBusAppService, ISampleAppService, IKafkaS
private readonly IIoTDBProvider _iotDBProvider;
private readonly IoTDBRuntimeContext _dbContext;
private readonly IoTDBOptions _options;
private readonly IRedisDataCacheService _redisDataCacheService;
public SampleAppService(IIoTDBProvider iotDBProvider, IOptions<IoTDBOptions> options,
IoTDBRuntimeContext dbContext, ILogger<SampleAppService> logger)
IoTDBRuntimeContext dbContext, ILogger<SampleAppService> logger, IRedisDataCacheService redisDataCacheService)
{
_iotDBProvider = iotDBProvider;
_options = options.Value;
_dbContext = dbContext;
_logger = logger;
_redisDataCacheService = redisDataCacheService;
}
/// <summary>
/// 测试 UseSessionPool
/// </summary>
/// <param name="timestamps"></param>
/// <returns></returns>
[HttpGet]
public async Task UseSessionPool(long timestamps)
{
@ -72,7 +81,10 @@ public class SampleAppService : CollectBusAppService, ISampleAppService, IKafkaS
await _iotDBProvider.InsertAsync(meter);
}
/// <summary>
/// 测试Session切换
/// </summary>
/// <returns></returns>
[HttpGet]
public async Task UseTableSessionPool()
{
@ -125,7 +137,7 @@ public class SampleAppService : CollectBusAppService, ISampleAppService, IKafkaS
var timeDensity = "15";
//获取缓存中的电表信息
var redisKeyList = $"{string.Format(RedisConst.CacheMeterInfoKey, "Energy", "JiSheCollectBus", MeterTypeEnum.Ammeter.ToString(), timeDensity)}*";
var redisKeyList = $"{string.Format(RedisConst.CacheMeterInfoHashKey, "Energy", "JiSheCollectBus", MeterTypeEnum.Ammeter.ToString(), timeDensity)}*";
var oneMinutekeyList = await FreeRedisProvider.Instance.KeysAsync(redisKeyList);
var meterInfos = await GetMeterRedisCacheListData<AmmeterInfo>(oneMinutekeyList, "Energy", "JiSheCollectBus", timeDensity, MeterTypeEnum.Ammeter);
@ -178,6 +190,44 @@ public class SampleAppService : CollectBusAppService, ISampleAppService, IKafkaS
await _iotDBProvider.InsertAsync(meter);
}
/// <summary>
/// Benchmark endpoint: pages ~100k meter records out of the Redis cache via
/// GetAllPagedData and logs the elapsed time.
/// </summary>
/// <returns>A task that completes when the full scan has finished.</returns>
[HttpGet]
public async Task TestRedisCacheGetAllPagedData()
{
    var timeDensity = "15";
    string SystemType = "Energy";
    string ServerTagName = "JiSheCollectBus2";

    // Hash key holds the payloads; the zset key drives score-ordered paging.
    // (The set-index key was computed here before but never used — removed.)
    var redisCacheMeterInfoHashKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoHashKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}";
    var redisCacheMeterInfoZSetScoresIndexKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoZSetScoresIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}";

    var timer1 = Stopwatch.StartNew();
    decimal? cursor = null;
    string member = null;
    bool hasNext;
    List<AmmeterInfo> meterInfos = new List<AmmeterInfo>();
    do
    {
        // Walk the cursor until the cache reports no further page.
        var page = await _redisDataCacheService.GetAllPagedData<AmmeterInfo>(
            redisCacheMeterInfoHashKeyTemp,
            redisCacheMeterInfoZSetScoresIndexKeyTemp,
            pageSize: 1000,
            lastScore: cursor,
            lastMember: member);
        meterInfos.AddRange(page.Items);
        cursor = page.HasNext ? page.NextScore : null;
        member = page.HasNext ? page.NextMember : null;
        hasNext = page.HasNext;
    } while (hasNext);
    timer1.Stop();

    // BUGFIX: this is informational timing output, not an error condition;
    // it was previously logged at Error level.
    _logger.LogInformation($"读取数据更花费时间{timer1.ElapsedMilliseconds}毫秒");
}
public Task<SampleDto> GetAsync()
{
@ -215,11 +265,12 @@ public class SampleAppService : CollectBusAppService, ISampleAppService, IKafkaS
return aa == null;
}
[KafkaSubscribe(["test-topic"])]
//[KafkaSubscribe("test-topic1")]
public async Task<bool> KafkaSubscribeAsync(string obj)
public async Task<ISubscribeAck> KafkaSubscribeAsync(object obj)
{
_logger.LogWarning($"收到订阅消息: {obj}");
return await Task.FromResult(true);
return SubscribeAck.Success();
}
}

View File

@ -1,25 +1,32 @@
using DotNetCore.CAP;
using JiShe.CollectBus.Ammeters;
using JiShe.CollectBus.Application.Contracts;
using JiShe.CollectBus.Common.BuildSendDatas;
using JiShe.CollectBus.Common.Consts;
using JiShe.CollectBus.Common.DeviceBalanceControl;
using JiShe.CollectBus.Common.Enums;
using JiShe.CollectBus.Common.Extensions;
using JiShe.CollectBus.Common.Helpers;
using JiShe.CollectBus.Common.Models;
using JiShe.CollectBus.GatherItem;
using JiShe.CollectBus.IoTDBProvider;
using JiShe.CollectBus.IotSystems.MessageIssueds;
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
using JiShe.CollectBus.IotSystems.Watermeter;
using JiShe.CollectBus.Kafka;
using JiShe.CollectBus.Kafka.Producer;
using JiShe.CollectBus.Protocol.Contracts;
using JiShe.CollectBus.RedisDataCache;
using JiShe.CollectBus.Repository.MeterReadingRecord;
using Mapster;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;
using static FreeSql.Internal.GlobalFilter;
namespace JiShe.CollectBus.ScheduledMeterReading
{
@ -29,24 +36,26 @@ namespace JiShe.CollectBus.ScheduledMeterReading
public abstract class BasicScheduledMeterReadingService : CollectBusAppService, IScheduledMeterReadingService
{
private readonly ILogger<BasicScheduledMeterReadingService> _logger;
private readonly ICapPublisher _producerBus;
private readonly IIoTDBProvider _dbProvider;
private readonly IMeterReadingRecordRepository _meterReadingRecordRepository;
private readonly IProducerService _producerService;
private readonly IRedisDataCacheService _redisDataCacheService;
private readonly KafkaOptionConfig _kafkaOptions;
public BasicScheduledMeterReadingService(
ILogger<BasicScheduledMeterReadingService> logger,
ICapPublisher producerBus,
IMeterReadingRecordRepository meterReadingRecordRepository,
IProducerService producerService,
IIoTDBProvider dbProvider)
IRedisDataCacheService redisDataCacheService,
IIoTDBProvider dbProvider,
IOptions<KafkaOptionConfig> kafkaOptions)
{
_producerBus = producerBus;
_logger = logger;
_dbProvider = dbProvider;
_meterReadingRecordRepository = meterReadingRecordRepository;
_producerService = producerService;
_redisDataCacheService = redisDataCacheService;
_kafkaOptions = kafkaOptions.Value;
}
/// <summary>
@ -117,31 +126,44 @@ namespace JiShe.CollectBus.ScheduledMeterReading
continue;
}
//获取缓存中的表信息
var redisKeyList = $"{string.Format(RedisConst.CacheMeterInfoKey, SystemType, ServerTagName, meteryType, timeDensity)}*";
var oneMinutekeyList = await FreeRedisProvider.Instance.KeysAsync(redisKeyList);
if (oneMinutekeyList == null || oneMinutekeyList.Length <= 0)
{
_logger.LogError($"{nameof(CreateToBeIssueTasks)} {timeDensity}分钟采集待下发任务创建失败,没有获取到缓存信息,-104");
return;
}
var meterTypes = EnumExtensions.ToEnumDictionary<MeterTypeEnum>();
if (meteryType == MeterTypeEnum.Ammeter.ToString())
{
// 解析结果(结果为嵌套数组)
var meterInfos = await GetMeterRedisCacheListData<AmmeterInfo>(oneMinutekeyList, SystemType, ServerTagName, $"{timeDensity}", meterTypes[meteryType]);
var timer = Stopwatch.StartNew();
//获取对应频率中的所有电表信息
var redisCacheMeterInfoHashKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoHashKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}";
var redisCacheMeterInfoSetIndexKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoSetIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}";
var redisCacheMeterInfoZSetScoresIndexKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoZSetScoresIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}";
List<AmmeterInfo> meterInfos = new List<AmmeterInfo>();
decimal? cursor = null;
string member = null;
bool hasNext;
do
{
var page = await _redisDataCacheService.GetAllPagedData<AmmeterInfo>(
redisCacheMeterInfoHashKeyTemp,
redisCacheMeterInfoZSetScoresIndexKeyTemp,
pageSize: 1000,
lastScore: cursor,
lastMember: member);
meterInfos.AddRange(page.Items);
cursor = page.HasNext ? page.NextScore : null;
member = page.HasNext ? page.NextMember : null;
hasNext = page.HasNext;
} while (hasNext);
if (meterInfos == null || meterInfos.Count <= 0)
{
timer.Stop();
_logger.LogError($"{nameof(CreateToBeIssueTasks)} {timeDensity}分钟采集待下发任务创建失败,没有获取到缓存信息,-105");
return;
}
//await AmmerterScheduledMeterReadingIssued(timeDensity, meterInfos);
var timer = Stopwatch.StartNew();
//处理数据
//await DeviceGroupBalanceControl.ProcessGenericListAsync(
@ -158,14 +180,14 @@ namespace JiShe.CollectBus.ScheduledMeterReading
await DeviceGroupBalanceControl.ProcessWithThrottleAsync(
items: meterInfos,
deviceIdSelector: data => data.FocusAddress,
processor: data =>
processor: (data,groupIndex) =>
{
_ = AmmerterCreatePublishTask(timeDensity, data);
_ = AmmerterCreatePublishTask(timeDensity, data, groupIndex,tasksToBeIssueModel.NextTaskTime.ToString("yyyyMMddHHmmss"));
}
);
timer.Stop();
_logger.LogInformation($"{nameof(CreateToBeIssueTasks)} {timeDensity}分钟采集待下发任务创建完成,{timer.ElapsedMilliseconds},{oneMinutekeyList.Length}");
_logger.LogInformation($"{nameof(CreateToBeIssueTasks)} {timeDensity}分钟采集待下发任务创建完成,{timer.ElapsedMilliseconds},总共{meterInfos.Count}表计信息");
}
else if (meteryType == MeterTypeEnum.WaterMeter.ToString())
@ -208,25 +230,86 @@ namespace JiShe.CollectBus.ScheduledMeterReading
public virtual async Task InitAmmeterCacheData(string gatherCode = "")
{
#if DEBUG
var timeDensity = "15";
//获取缓存中的电表信息
var redisKeyList = $"{string.Format(RedisConst.CacheMeterInfoKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}*";
//var timeDensity = "15";
//string tempCacheMeterInfoKey = $"CollectBus:{"{0}:{1}"}:MeterInfo:{"{2}"}:{"{3}"}";
////获取缓存中的电表信息
//var redisKeyList = $"{string.Format(tempCacheMeterInfoKey, SystemType, "JiSheCollectBus", MeterTypeEnum.Ammeter, timeDensity)}*";
var oneMinutekeyList = await FreeRedisProvider.Instance.KeysAsync(redisKeyList);
var meterInfos = await GetMeterRedisCacheListData<AmmeterInfo>(oneMinutekeyList, SystemType, ServerTagName, timeDensity, MeterTypeEnum.Ammeter);
//var oneMinutekeyList = await FreeRedisProvider.Instance.KeysAsync(redisKeyList);
//var tempMeterInfos = await GetMeterRedisCacheListData<AmmeterInfoTemp>(oneMinutekeyList, SystemType, ServerTagName, timeDensity, MeterTypeEnum.Ammeter);
////List<string> focusAddressDataLista = new List<string>();
//List<AmmeterInfo> meterInfos = new List<AmmeterInfo>();
//foreach (var item in tempMeterInfos)
//{
// var tempData = item.Adapt<AmmeterInfo>();
// tempData.FocusId = item.FocusID;
// tempData.MeterId = item.Id;
// meterInfos.Add(tempData);
// //focusAddressDataLista.Add(item.FocusAddress);
//}
var timeDensity = "15";
var redisCacheMeterInfoHashKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoHashKey, SystemType, "JiSheCollectBus2", MeterTypeEnum.Ammeter, timeDensity)}";
var redisCacheMeterInfoSetIndexKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoSetIndexKey, SystemType, "JiSheCollectBus2", MeterTypeEnum.Ammeter, timeDensity)}";
var redisCacheMeterInfoZSetScoresIndexKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoZSetScoresIndexKey, SystemType, "JiSheCollectBus2", MeterTypeEnum.Ammeter, timeDensity)}";
List<AmmeterInfo> meterInfos = new List<AmmeterInfo>();
List<string> focusAddressDataLista = new List<string>();
foreach (var item in meterInfos)
var timer1 = Stopwatch.StartNew();
//decimal? cursor = null;
//string member = null;
//bool hasNext;
//do
//{
// var page = await _redisDataCacheService.GetAllPagedDataOptimized<AmmeterInfo>(
// redisCacheMeterInfoHashKeyTemp,
// redisCacheMeterInfoZSetScoresIndexKeyTemp,
// pageSize: 1000,
// lastScore: cursor,
// lastMember: member);
// meterInfos.AddRange(page.Items);
// cursor = page.HasNext ? page.NextScore : null;
// member = page.HasNext ? page.NextMember : null;
// hasNext = page.HasNext;
//} while (hasNext);
var allIds = new HashSet<string>();
decimal? score = null;
string member = null;
while (true)
{
focusAddressDataLista.Add(item.FocusAddress);
var page = await _redisDataCacheService.GetAllPagedDataOptimized<AmmeterInfo>(
redisCacheMeterInfoHashKeyTemp,
redisCacheMeterInfoZSetScoresIndexKeyTemp,
pageSize: 1000,
lastScore: score,
lastMember: member);
meterInfos.AddRange(page.Items);
focusAddressDataLista.AddRange(page.Items.Select(d=>d.FocusAddress));
foreach (var item in page.Items)
{
if (!allIds.Add(item.MemberId))
throw new Exception("Duplicate data found!");
}
if (!page.HasNext) break;
score = page.NextScore;
member = page.NextMember;
}
DeviceGroupBalanceControl.InitializeCache(focusAddressDataLista);
timer1.Stop();
_logger.LogError($"读取数据更花费时间{timer1.ElapsedMilliseconds}毫秒");
DeviceGroupBalanceControl.InitializeCache(focusAddressDataLista, _kafkaOptions.NumPartitions);
return;
#else
var meterInfos = await GetAmmeterInfoList(gatherCode);
#endif
if (meterInfos == null || meterInfos.Count <= 0)
{
throw new NullReferenceException($"{nameof(InitAmmeterCacheData)} 初始化电表缓存数据时,电表数据为空");
@ -238,6 +321,7 @@ namespace JiShe.CollectBus.ScheduledMeterReading
{
throw new NullReferenceException($"{nameof(InitAmmeterCacheData)} 初始化电表缓存数据时,采集项类型数据为空");
}
var timer = Stopwatch.StartNew();
List<string> focusAddressDataList = new List<string>();//用于处理Kafka主题分区数据的分发和处理。
@ -245,6 +329,11 @@ namespace JiShe.CollectBus.ScheduledMeterReading
var meterInfoGroupByTimeDensity = meterInfos.GroupBy(d => d.TimeDensity);
foreach (var itemTimeDensity in meterInfoGroupByTimeDensity)
{
var redisCacheMeterInfoHashKey = $"{string.Format(RedisConst.CacheMeterInfoHashKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, itemTimeDensity.Key)}";
var redisCacheMeterInfoSetIndexKey = $"{string.Format(RedisConst.CacheMeterInfoSetIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, itemTimeDensity.Key)}";
var redisCacheMeterInfoZSetScoresIndexKey = $"{string.Format(RedisConst.CacheMeterInfoZSetScoresIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, itemTimeDensity.Key)}";
List<AmmeterInfo> ammeterInfos = new List<AmmeterInfo>();
//将表计信息根据集中器分组,获得集中器号
var meterInfoGroup = itemTimeDensity.GroupBy(x => x.FocusAddress).ToList();
foreach (var item in meterInfoGroup)
@ -256,17 +345,17 @@ namespace JiShe.CollectBus.ScheduledMeterReading
focusAddressDataList.Add(item.Key);
var redisCacheKey = $"{string.Format(RedisConst.CacheMeterInfoKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, itemTimeDensity.Key)}{item.Key}";
// var redisCacheKey = $"{string.Format(RedisConst.CacheMeterInfoKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, itemTimeDensity.Key)}{item.Key}";
#if DEBUG
//每次缓存时,删除缓存,避免缓存数据有不准确的问题
//await FreeRedisProvider.Instance.DelAsync(redisCacheKey);
#else
//每次缓存时,删除缓存,避免缓存数据有不准确的问题
await FreeRedisProvider.Instance.DelAsync(redisCacheKey);
//await FreeRedisProvider.Instance.DelAsync(redisCacheKey);
#endif
Dictionary<string, AmmeterInfo> keyValuePairs = new Dictionary<string, AmmeterInfo>();
//Dictionary<string, AmmeterInfo> keyValuePairs = new Dictionary<string, AmmeterInfo>();
foreach (var ammeter in item)
{
//处理ItemCode
@ -311,11 +400,17 @@ namespace JiShe.CollectBus.ScheduledMeterReading
}
}
keyValuePairs.TryAdd($"{ammeter.ID}", ammeter);
ammeterInfos.Add(ammeter);
//keyValuePairs.TryAdd($"{ammeter.MeterId}", ammeter);
}
await FreeRedisProvider.Instance.HSetAsync(redisCacheKey, keyValuePairs);
//await FreeRedisProvider.Instance.HSetAsync(redisCacheKey, keyValuePairs);
}
await _redisDataCacheService.BatchInsertDataAsync<AmmeterInfo>(
redisCacheMeterInfoHashKey,
redisCacheMeterInfoSetIndexKey,
redisCacheMeterInfoZSetScoresIndexKey,ammeterInfos);
//在缓存表信息数据的时候新增下一个时间的自动处理任务1分钟后执行所有的采集频率任务
TasksToBeIssueModel nextTask = new TasksToBeIssueModel()
{
@ -335,10 +430,12 @@ namespace JiShe.CollectBus.ScheduledMeterReading
}
else
{
DeviceGroupBalanceControl.InitializeCache(focusAddressDataList);
DeviceGroupBalanceControl.InitializeCache(focusAddressDataList, _kafkaOptions.NumPartitions);
}
_logger.LogInformation($"{nameof(InitAmmeterCacheData)} 初始化电表缓存数据完成");
timer.Stop();
_logger.LogInformation($"{nameof(InitAmmeterCacheData)} 初始化电表缓存数据完成,耗时{timer.ElapsedMilliseconds}毫秒");
}
/// <summary>
@ -381,7 +478,9 @@ namespace JiShe.CollectBus.ScheduledMeterReading
FocusAddress = ammerterItem.Value.FocusAddress,
TimeDensity = timeDensity.ToString(),
};
_ = _producerBus.PublishDelayAsync(TimeSpan.FromMicroseconds(500), ProtocolConst.AmmeterSubscriberWorkerOneMinuteIssuedEventName, tempMsg);
//_ = _producerBus.PublishDelayAsync(TimeSpan.FromMicroseconds(500), ProtocolConst.AmmeterSubscriberWorkerOneMinuteIssuedEventName, tempMsg);
_ = _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerOneMinuteIssuedEventName, tempMsg);
//_= _producerBus.Publish(tempMsg);
@ -445,7 +544,9 @@ namespace JiShe.CollectBus.ScheduledMeterReading
FocusAddress = ammerterItem.Value.FocusAddress,
TimeDensity = timeDensity.ToString(),
};
_ = _producerBus.PublishDelayAsync(TimeSpan.FromMicroseconds(500), ProtocolConst.AmmeterSubscriberWorkerFiveMinuteIssuedEventName, tempMsg);
//_ = _producerBus.PublishDelayAsync(TimeSpan.FromMicroseconds(500), ProtocolConst.AmmeterSubscriberWorkerFiveMinuteIssuedEventName, tempMsg);
_ = _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerFiveMinuteIssuedEventName, tempMsg);
//_ = _producerBus.Publish(tempMsg);
@ -509,8 +610,9 @@ namespace JiShe.CollectBus.ScheduledMeterReading
FocusAddress = ammerterItem.Value.FocusAddress,
TimeDensity = timeDensity.ToString(),
};
//_ = _producerBus.PublishDelayAsync(TimeSpan.FromMicroseconds(500), ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName, tempMsg);
_ = _producerBus.PublishDelayAsync(TimeSpan.FromMicroseconds(500), ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName, tempMsg);
_ = _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName, tempMsg);
//_ = _producerBus.Publish(tempMsg);
@ -604,9 +706,11 @@ namespace JiShe.CollectBus.ScheduledMeterReading
/// </summary>
/// <param name="timeDensity">采集频率</param>
/// <param name="ammeterInfo">集中器号hash分组的集中器集合数据</param>
/// <param name="groupIndex">集中器所在分组</param>
/// <param name="taskBatch">时间格式的任务批次名称</param>
/// <returns></returns>
private async Task AmmerterCreatePublishTask(int timeDensity
, AmmeterInfo ammeterInfo)
, AmmeterInfo ammeterInfo,int groupIndex,string taskBatch)
{
var handlerPacketBuilder = TelemetryPacketBuilder.AFNHandlersDictionary;
//todo 检查需要待补抄的电表的时间点信息,保存到需要待补抄的缓存中。如果此线程异常,该如何补偿?
@ -614,7 +718,9 @@ namespace JiShe.CollectBus.ScheduledMeterReading
var currentTime = DateTime.Now;
var pendingCopyReadTime = currentTime.AddMinutes(timeDensity);
//构建缓存任务key依然 表计类型+采集频率+集中器地址存hash类型
var redisCacheKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}{ammeterInfo.FocusAddress}";
var redisCacheTelemetryPacketInfoHashKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoHashKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity, groupIndex, taskBatch)}";
var redisCacheTelemetryPacketInfoSetIndexKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoSetIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity, groupIndex, taskBatch)}";
var redisCacheTelemetryPacketInfoZSetScoresIndexKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoZSetScoresIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity, groupIndex, taskBatch)}";
if (string.IsNullOrWhiteSpace(ammeterInfo.ItemCodes))
{
@ -695,7 +801,8 @@ namespace JiShe.CollectBus.ScheduledMeterReading
}
}
Dictionary<string, MeterReadingRecords> keyValuePairs = new Dictionary<string, MeterReadingRecords>();
//Dictionary<string, MeterReadingRecords> keyValuePairs = new Dictionary<string, MeterReadingRecords>();
List<MeterReadingTelemetryPacketInfo> taskList = new List<MeterReadingTelemetryPacketInfo>();
foreach (var tempItem in tempCodes)
{
@ -750,17 +857,17 @@ namespace JiShe.CollectBus.ScheduledMeterReading
var meterReadingRecords = new MeterReadingRecords()
var meterReadingRecords = new MeterReadingTelemetryPacketInfo()
{
ProjectID = ammeterInfo.ProjectID,
DatabaseBusiID = ammeterInfo.DatabaseBusiID,
PendingCopyReadTime = pendingCopyReadTime,
CreationTime = currentTime,
MeterAddress = ammeterInfo.AmmerterAddress,
MeterId = ammeterInfo.ID,
MeterId = ammeterInfo.MeterId,
MeterType = MeterTypeEnum.Ammeter,
FocusAddress = ammeterInfo.FocusAddress,
FocusID = ammeterInfo.FocusID,
FocusId = ammeterInfo.FocusId,
AFN = aFN,
Fn = fn,
ItemCode = tempItem,
@ -770,9 +877,10 @@ namespace JiShe.CollectBus.ScheduledMeterReading
IssuedMessageId = GuidGenerator.Create().ToString(),
IssuedMessageHexString = Convert.ToHexString(dataInfos),
};
meterReadingRecords.CreateDataId(GuidGenerator.Create());
keyValuePairs.TryAdd($"{ammeterInfo.ID}_{tempItem}", meterReadingRecords);
//meterReadingRecords.CreateDataId(GuidGenerator.Create());
taskList.Add(meterReadingRecords);
}
//TimeSpan timeSpan = TimeSpan.FromMicroseconds(5);
//await Task.Delay(timeSpan);
@ -780,14 +888,25 @@ namespace JiShe.CollectBus.ScheduledMeterReading
//return keyValuePairs;
// await FreeRedisProvider.Instance.HSetAsync(redisCacheKey, keyValuePairs);
using (var pipe = FreeRedisProvider.Instance.StartPipe())
//using (var pipe = FreeRedisProvider.Instance.StartPipe())
//{
// pipe.HSet(redisCacheKey, keyValuePairs);
// object[] ret = pipe.EndPipe();
//}
if (taskList == null
|| taskList.Count() <= 0
|| string.IsNullOrWhiteSpace(redisCacheTelemetryPacketInfoHashKey)
|| string.IsNullOrWhiteSpace(redisCacheTelemetryPacketInfoSetIndexKey)
|| string.IsNullOrWhiteSpace(redisCacheTelemetryPacketInfoZSetScoresIndexKey))
{
pipe.HSet(redisCacheKey, keyValuePairs);
object[] ret = pipe.EndPipe();
_logger.LogError($"{nameof(AmmerterCreatePublishTask)} 写入参数异常,{redisCacheTelemetryPacketInfoHashKey}{redisCacheTelemetryPacketInfoSetIndexKey}{redisCacheTelemetryPacketInfoZSetScoresIndexKey}-101");
return;
}
await Task.CompletedTask;
await _redisDataCacheService.BatchInsertDataAsync(
redisCacheTelemetryPacketInfoHashKey,
redisCacheTelemetryPacketInfoSetIndexKey,
redisCacheTelemetryPacketInfoZSetScoresIndexKey,
taskList);
}
/// <summary>
@ -845,8 +964,9 @@ namespace JiShe.CollectBus.ScheduledMeterReading
FocusAddress = ammerterItem.Value.FocusAddress,
TimeDensity = timeDensity.ToString(),
};
//_ = _producerBus.PublishDelayAsync(TimeSpan.FromMicroseconds(500), ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName, tempMsg);
_ = _producerBus.PublishDelayAsync(TimeSpan.FromMicroseconds(500), ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName, tempMsg);
_ = _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName, tempMsg);
//_ = _producerBus.Publish(tempMsg);
@ -876,7 +996,7 @@ namespace JiShe.CollectBus.ScheduledMeterReading
foreach (var focusInfo in focusGroup)
{
//构建缓存任务key依然 表计类型+采集频率+集中器地址存hash类型
var redisCacheKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}{focusInfo.Key}";
var redisCacheKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoHashKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}{focusInfo.Key}";
foreach (var ammeterInfo in focusInfo.Value)
{
@ -910,22 +1030,22 @@ namespace JiShe.CollectBus.ScheduledMeterReading
if (string.IsNullOrWhiteSpace(ammeter.AreaCode))
{
_logger.LogError($"{nameof(AmmerterCreatePublishTask)} 表ID:{ammeter.ID},集中器通信区号为空");
_logger.LogError($"{nameof(AmmerterCreatePublishTask)} 表ID:{ammeter.MeterId},集中器通信区号为空");
continue;
}
if (string.IsNullOrWhiteSpace(ammeter.Address))
{
_logger.LogError($"{nameof(AmmerterCreatePublishTask)} 表ID:{ammeter.ID},集中器通信地址为空");
_logger.LogError($"{nameof(AmmerterCreatePublishTask)} 表ID:{ammeter.MeterId},集中器通信地址为空");
continue;
}
if (Convert.ToInt32(ammeter.Address) > 65535)
{
_logger.LogError($"{nameof(AmmerterCreatePublishTask)} 表ID:{ammeter.ID},集中器通信地址无效,确保大于65535");
_logger.LogError($"{nameof(AmmerterCreatePublishTask)} 表ID:{ammeter.MeterId},集中器通信地址无效,确保大于65535");
continue;
}
if (ammeter.MeteringCode <= 0 || ammeter.MeteringCode > 2033)
{
_logger.LogError($"{nameof(AmmerterCreatePublishTask)} 表ID:{ammeter.ID},非有效测量点号({ammeter.MeteringCode})");
_logger.LogError($"{nameof(AmmerterCreatePublishTask)} 表ID:{ammeter.MeterId},非有效测量点号({ammeter.MeteringCode})");
continue;
}
@ -1023,10 +1143,10 @@ namespace JiShe.CollectBus.ScheduledMeterReading
PendingCopyReadTime = pendingCopyReadTime,
CreationTime = currentTime,
MeterAddress = ammeter.AmmerterAddress,
MeterId = ammeter.ID,
MeterId = ammeter.MeterId,
MeterType = MeterTypeEnum.Ammeter,
FocusAddress = ammeter.FocusAddress,
FocusID = ammeter.FocusID,
FocusID = ammeter.FocusId,
AFN = aFN,
Fn = fn,
ItemCode = tempItem,
@ -1036,9 +1156,9 @@ namespace JiShe.CollectBus.ScheduledMeterReading
IssuedMessageId = GuidGenerator.Create().ToString(),
IssuedMessageHexString = Convert.ToHexString(dataInfos),
};
meterReadingRecords.CreateDataId(GuidGenerator.Create());
//meterReadingRecords.CreateDataId(GuidGenerator.Create());
keyValuePairs.TryAdd($"{ammeter.ID}_{tempItem}", meterReadingRecords);
keyValuePairs.TryAdd($"{ammeter.MeterId}_{tempItem}", meterReadingRecords);
}
await FreeRedisProvider.Instance.HSetAsync(redisCacheKey, keyValuePairs);
}
@ -1092,12 +1212,12 @@ namespace JiShe.CollectBus.ScheduledMeterReading
continue;
}
var redisCacheKey = $"{string.Format(RedisConst.CacheMeterInfoKey, SystemType, ServerTagName, MeterTypeEnum.WaterMeter, itemTimeDensity.Key)}{item.Key}";
var redisCacheKey = $"{string.Format(RedisConst.CacheMeterInfoHashKey, SystemType, ServerTagName, MeterTypeEnum.WaterMeter, itemTimeDensity.Key)}{item.Key}";
Dictionary<string, WatermeterInfo> keyValuePairs = new Dictionary<string, WatermeterInfo>();
foreach (var subItem in item)
{
keyValuePairs.TryAdd($"{subItem.ID}", subItem);
keyValuePairs.TryAdd($"{subItem.MeterId}", subItem);
}
await FreeRedisProvider.Instance.HSetAsync(redisCacheKey, keyValuePairs);
}
@ -1153,6 +1273,7 @@ namespace JiShe.CollectBus.ScheduledMeterReading
FocusAddress = ammerterItem.Value.FocusAddress,
TimeDensity = timeDensity.ToString(),
};
//await _producerBus.PublishAsync(ProtocolConst.WatermeterSubscriberWorkerAutoReadingIssuedEventName, tempMsg);
//_ = _producerBus.Publish(tempMsg);
@ -1239,7 +1360,7 @@ namespace JiShe.CollectBus.ScheduledMeterReading
/// <returns></returns>
private string GetTelemetryPacketCacheKeyPrefix(int timeDensity, MeterTypeEnum meterType)
{
return $"{string.Format(RedisConst.CacheTelemetryPacketInfoKey, SystemType, ServerTagName, meterType, timeDensity)}*";
return $"{string.Format(RedisConst.CacheTelemetryPacketInfoHashKey, SystemType, ServerTagName, meterType, timeDensity)}*";
}
#endregion

View File

@ -4,6 +4,7 @@ using System.Threading.Tasks;
using Confluent.Kafka;
using DotNetCore.CAP;
using JiShe.CollectBus.Ammeters;
using JiShe.CollectBus.Application.Contracts;
using JiShe.CollectBus.Common.Consts;
using JiShe.CollectBus.Common.DeviceBalanceControl;
using JiShe.CollectBus.Common.Helpers;
@ -14,6 +15,7 @@ using JiShe.CollectBus.IotSystems.Devices;
using JiShe.CollectBus.IotSystems.MessageIssueds;
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
using JiShe.CollectBus.IotSystems.Watermeter;
using JiShe.CollectBus.Kafka;
using JiShe.CollectBus.Kafka.Producer;
using JiShe.CollectBus.Repository;
using JiShe.CollectBus.Repository.MeterReadingRecord;
@ -22,6 +24,7 @@ using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Volo.Abp.Domain.Repositories;
using Volo.Abp.Uow;
@ -35,10 +38,21 @@ namespace JiShe.CollectBus.ScheduledMeterReading
public class EnergySystemScheduledMeterReadingService : BasicScheduledMeterReadingService
{
string serverTagName = string.Empty;
public EnergySystemScheduledMeterReadingService(ILogger<EnergySystemScheduledMeterReadingService> logger,
ICapPublisher producerBus, IIoTDBProvider dbProvider, IMeterReadingRecordRepository meterReadingRecordRepository,IConfiguration configuration, IProducerService producerService) : base(logger, producerBus, meterReadingRecordRepository, producerService,dbProvider)
public EnergySystemScheduledMeterReadingService(
ILogger<EnergySystemScheduledMeterReadingService> logger,
IIoTDBProvider dbProvider,
IMeterReadingRecordRepository meterReadingRecordRepository,
IOptions<KafkaOptionConfig> kafkaOptions,
IProducerService producerService,
IRedisDataCacheService redisDataCacheService)
: base(logger,
meterReadingRecordRepository,
producerService,
redisDataCacheService,
dbProvider,
kafkaOptions)
{
serverTagName = configuration.GetValue<string>(CommonConst.ServerTagName)!;
serverTagName = kafkaOptions.Value.ServerTagName;
}
public sealed override string SystemType => SystemTypeConst.Energy;
@ -80,11 +94,11 @@ namespace JiShe.CollectBus.ScheduledMeterReading
// Baudrate = 2400,
// FocusAddress = "402440506",
// Name = "张家祠工务(三相电表)",
// FocusID = 95780,
// FocusId = 95780,
// DatabaseBusiID = 1,
// MeteringCode = 1,
// AmmerterAddress = "402410040506",
// ID = 127035,
// MeterId = 127035,
// TypeName = 3,
// DataTypes = "449,503,581,582,583,584,585,586,587,588,589,590,591,592,593,594,597,598,599,600,601,602,603,604,605,606,607,608,661,663,677,679",
// TimeDensity = 15,
@ -94,11 +108,11 @@ namespace JiShe.CollectBus.ScheduledMeterReading
// Baudrate = 2400,
// FocusAddress = "542400504",
// Name = "五号配(长芦二所四排)(单相电表)",
// FocusID = 69280,
// FocusId = 69280,
// DatabaseBusiID = 1,
// MeteringCode = 2,
// AmmerterAddress = "542410000504",
// ID = 95594,
// MeterId = 95594,
// TypeName = 1,
// DataTypes = "581,589,592,597,601",
// TimeDensity = 15,
@ -106,7 +120,7 @@ namespace JiShe.CollectBus.ScheduledMeterReading
//return ammeterInfos;
string sql = $@"SELECT C.ID,C.Name,C.FocusID,C.SingleRate,C.MeteringCode,C.Code AS BrandType,C.Baudrate,C.Password,C.MeteringPort,C.[Address] AS AmmerterAddress,C.TypeName,C.Protocol,C.TripState,C.[State],B.[Address],B.AreaCode,B.AutomaticReport,D.DataTypes,B.TimeDensity,A.GatherCode,C.Special,C.[ProjectID],B.AbnormalState,B.LastTime,CONCAT(B.AreaCode, B.[Address]) AS FocusAddress,(select top 1 DatabaseBusiID from TB_Project where ID = B.ProjectID) AS DatabaseBusiID
string sql = $@"SELECT C.ID as MeterId,C.Name,C.FocusID as FocusId,C.SingleRate,C.MeteringCode,C.Code AS BrandType,C.Baudrate,C.Password,C.MeteringPort,C.[Address] AS AmmerterAddress,C.TypeName,C.Protocol,C.TripState,C.[State],B.[Address],B.AreaCode,B.AutomaticReport,D.DataTypes,B.TimeDensity,A.GatherCode,C.Special,C.[ProjectID],B.AbnormalState,B.LastTime,CONCAT(B.AreaCode, B.[Address]) AS FocusAddress,(select top 1 DatabaseBusiID from TB_Project where ID = B.ProjectID) AS DatabaseBusiID
FROM TB_GatherInfo(NOLOCK) AS A
INNER JOIN TB_FocusInfo(NOLOCK) AS B ON A.ID = B.GatherInfoID AND B.RemoveState >= 0 AND B.State>=0
INNER JOIN TB_AmmeterInfo(NOLOCK) AS C ON B.ID = C.FocusID AND C.State>= 0 AND C.State<100
@ -133,9 +147,9 @@ namespace JiShe.CollectBus.ScheduledMeterReading
public override async Task<List<WatermeterInfo>> GetWatermeterInfoList(string gatherCode = "V4-Gather-8890")
{
string sql = $@"SELECT
A.ID,
A.ID as MeterId,
A.Name,
A.FocusID,
A.FocusID as FocusId,
A.MeteringCode,
A.Baudrate,
A.MeteringPort,

View File

@ -1,4 +1,5 @@
using DotNetCore.CAP;
using JiShe.CollectBus.Common.Consts;
using JiShe.CollectBus.Common.Enums;
using JiShe.CollectBus.Common.Helpers;
using JiShe.CollectBus.Common.Models;
@ -6,6 +7,8 @@ using JiShe.CollectBus.IoTDBProvider;
using JiShe.CollectBus.IotSystems.Devices;
using JiShe.CollectBus.IotSystems.MessageReceiveds;
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
using JiShe.CollectBus.Kafka;
using JiShe.CollectBus.Kafka.Attributes;
using JiShe.CollectBus.Protocol.Contracts;
using JiShe.CollectBus.Protocol.Contracts.Interfaces;
using JiShe.CollectBus.Protocol.Contracts.Models;
@ -20,7 +23,7 @@ using Volo.Abp.Domain.Repositories;
namespace JiShe.CollectBus.Subscribers
{
public class SubscriberAppService : CollectBusAppService, ISubscriberAppService, ICapSubscribe
public class SubscriberAppService : CollectBusAppService, ISubscriberAppService, ICapSubscribe, IKafkaSubscribe
{
private readonly ILogger<SubscriberAppService> _logger;
private readonly ITcpService _tcpService;
@ -63,9 +66,11 @@ namespace JiShe.CollectBus.Subscribers
_dbProvider = dbProvider;
}
[CapSubscribe(ProtocolConst.SubscriberLoginIssuedEventName)]
public async Task LoginIssuedEvent(IssuedEventMessage issuedEventMessage)
[KafkaSubscribe(ProtocolConst.SubscriberLoginIssuedEventName)]
//[CapSubscribe(ProtocolConst.SubscriberLoginIssuedEventName)]
public async Task<ISubscribeAck> LoginIssuedEvent(IssuedEventMessage issuedEventMessage)
{
bool isAck = false;
switch (issuedEventMessage.Type)
{
case IssuedEventType.Heartbeat:
@ -76,6 +81,7 @@ namespace JiShe.CollectBus.Subscribers
loginEntity.AckTime = Clock.Now;
loginEntity.IsAck = true;
await _messageReceivedLoginEventRepository.UpdateAsync(loginEntity);
isAck = true;
break;
case IssuedEventType.Data:
break;
@ -90,11 +96,14 @@ namespace JiShe.CollectBus.Subscribers
//}
await _tcpService.SendAsync(issuedEventMessage.ClientId, issuedEventMessage.Message);
return isAck? SubscribeAck.Success(): SubscribeAck.Fail();
}
[CapSubscribe(ProtocolConst.SubscriberHeartbeatIssuedEventName)]
public async Task HeartbeatIssuedEvent(IssuedEventMessage issuedEventMessage)
[KafkaSubscribe(ProtocolConst.SubscriberHeartbeatIssuedEventName)]
//[CapSubscribe(ProtocolConst.SubscriberHeartbeatIssuedEventName)]
public async Task<ISubscribeAck> HeartbeatIssuedEvent(IssuedEventMessage issuedEventMessage)
{
bool isAck = false;
switch (issuedEventMessage.Type)
{
case IssuedEventType.Heartbeat:
@ -103,6 +112,7 @@ namespace JiShe.CollectBus.Subscribers
heartbeatEntity.AckTime = Clock.Now;
heartbeatEntity.IsAck = true;
await _messageReceivedHeartbeatEventRepository.UpdateAsync(heartbeatEntity);
isAck = true;
break;
case IssuedEventType.Data:
break;
@ -117,10 +127,12 @@ namespace JiShe.CollectBus.Subscribers
//}
await _tcpService.SendAsync(issuedEventMessage.ClientId, issuedEventMessage.Message);
return isAck ? SubscribeAck.Success() : SubscribeAck.Fail();
}
[CapSubscribe(ProtocolConst.SubscriberReceivedEventName)]
public async Task ReceivedEvent(MessageReceived receivedMessage)
[KafkaSubscribe(ProtocolConst.SubscriberReceivedEventName)]
//[CapSubscribe(ProtocolConst.SubscriberReceivedEventName)]
public async Task<ISubscribeAck> ReceivedEvent(MessageReceived receivedMessage)
{
var currentTime = Clock.Now;
@ -137,13 +149,13 @@ namespace JiShe.CollectBus.Subscribers
if(fN == null)
{
Logger.LogError($"数据解析失败:{receivedMessage.Serialize()}");
return;
return SubscribeAck.Success();
}
var tb3761FN = fN.FnList.FirstOrDefault();
if (tb3761FN == null)
{
Logger.LogError($"数据解析失败:{receivedMessage.Serialize()}");
return;
return SubscribeAck.Success();
}
//报文入库
@ -169,11 +181,15 @@ namespace JiShe.CollectBus.Subscribers
//todo 查找是否有下发任务
//await _messageReceivedEventRepository.InsertAsync(receivedMessage);
}
return SubscribeAck.Success();
}
[CapSubscribe(ProtocolConst.SubscriberHeartbeatReceivedEventName)]
public async Task ReceivedHeartbeatEvent(MessageReceivedHeartbeat receivedHeartbeatMessage)
[KafkaSubscribe(ProtocolConst.SubscriberHeartbeatReceivedEventName)]
//[CapSubscribe(ProtocolConst.SubscriberHeartbeatReceivedEventName)]
public async Task<ISubscribeAck> ReceivedHeartbeatEvent(MessageReceivedHeartbeat receivedHeartbeatMessage)
{
var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
if (protocolPlugin == null)
@ -185,10 +201,12 @@ namespace JiShe.CollectBus.Subscribers
await protocolPlugin.HeartbeatAsync(receivedHeartbeatMessage);
await _messageReceivedHeartbeatEventRepository.InsertAsync(receivedHeartbeatMessage);
}
return SubscribeAck.Success();
}
[CapSubscribe(ProtocolConst.SubscriberLoginReceivedEventName)]
public async Task ReceivedLoginEvent(MessageReceivedLogin receivedLoginMessage)
[KafkaSubscribe(ProtocolConst.SubscriberLoginReceivedEventName)]
//[CapSubscribe(ProtocolConst.SubscriberLoginReceivedEventName)]
public async Task<ISubscribeAck> ReceivedLoginEvent(MessageReceivedLogin receivedLoginMessage)
{
var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
if (protocolPlugin == null)
@ -200,6 +218,7 @@ namespace JiShe.CollectBus.Subscribers
await protocolPlugin.LoginAsync(receivedLoginMessage);
await _messageReceivedLoginEventRepository.InsertAsync(receivedLoginMessage);
}
return SubscribeAck.Success();
}
}
}

View File

@ -3,11 +3,14 @@ using System.Collections.Generic;
using System.Threading.Tasks;
using DeviceDetectorNET.Parser.Device;
using DotNetCore.CAP;
using JiShe.CollectBus.Common.Consts;
using JiShe.CollectBus.Common.Enums;
using JiShe.CollectBus.IotSystems.Devices;
using JiShe.CollectBus.IotSystems.MessageIssueds;
using JiShe.CollectBus.IotSystems.MessageReceiveds;
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
using JiShe.CollectBus.Kafka;
using JiShe.CollectBus.Kafka.Attributes;
using JiShe.CollectBus.Protocol.Contracts;
using JiShe.CollectBus.Protocol.Contracts.Interfaces;
using JiShe.CollectBus.Repository.MeterReadingRecord;
@ -24,7 +27,7 @@ namespace JiShe.CollectBus.Subscribers
/// 定时抄读任务消息消费订阅
/// </summary>
[Route($"/worker/app/subscriber")]
public class WorkerSubscriberAppService : CollectBusAppService, IWorkerSubscriberAppService,ICapSubscribe
public class WorkerSubscriberAppService : CollectBusAppService, IWorkerSubscriberAppService, ICapSubscribe, IKafkaSubscribe
{
private readonly ILogger<WorkerSubscriberAppService> _logger;
private readonly ITcpService _tcpService;
@ -63,8 +66,9 @@ namespace JiShe.CollectBus.Subscribers
/// <returns></returns>
[HttpPost]
[Route("ammeter/oneminute/issued-event")]
[CapSubscribe(ProtocolConst.AmmeterSubscriberWorkerOneMinuteIssuedEventName)]
public async Task AmmeterScheduledMeterOneMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage receivedMessage)
[KafkaSubscribe(ProtocolConst.AmmeterSubscriberWorkerOneMinuteIssuedEventName)]
//[CapSubscribe(ProtocolConst.AmmeterSubscriberWorkerOneMinuteIssuedEventName)]
public async Task<ISubscribeAck> AmmeterScheduledMeterOneMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage receivedMessage)
{
_logger.LogInformation("1分钟采集电表数据下行消息消费队列开始处理");
var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
@ -81,6 +85,7 @@ namespace JiShe.CollectBus.Subscribers
}
}
return SubscribeAck.Success();
}
/// <summary>
@ -90,8 +95,9 @@ namespace JiShe.CollectBus.Subscribers
/// <returns></returns>
[HttpPost]
[Route("ammeter/fiveminute/issued-event")]
[CapSubscribe(ProtocolConst.AmmeterSubscriberWorkerFiveMinuteIssuedEventName)]
public async Task AmmeterScheduledMeterFiveMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage receivedMessage)
[KafkaSubscribe(ProtocolConst.AmmeterSubscriberWorkerFiveMinuteIssuedEventName)]
//[CapSubscribe(ProtocolConst.AmmeterSubscriberWorkerFiveMinuteIssuedEventName)]
public async Task<ISubscribeAck> AmmeterScheduledMeterFiveMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage receivedMessage)
{
_logger.LogInformation("5分钟采集电表数据下行消息消费队列开始处理");
var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
@ -108,6 +114,7 @@ namespace JiShe.CollectBus.Subscribers
}
}
return SubscribeAck.Success();
}
/// <summary>
@ -117,8 +124,9 @@ namespace JiShe.CollectBus.Subscribers
/// <returns></returns>
[HttpPost]
[Route("ammeter/fifteenminute/issued-event")]
[CapSubscribe(ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName)]
public async Task AmmeterScheduledMeterFifteenMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage receivedMessage)
[KafkaSubscribe(ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName)]
//[CapSubscribe(ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName)]
public async Task<ISubscribeAck> AmmeterScheduledMeterFifteenMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage receivedMessage)
{
_logger.LogInformation("15分钟采集电表数据下行消息消费队列开始处理");
try
@ -137,6 +145,7 @@ namespace JiShe.CollectBus.Subscribers
}
}
return SubscribeAck.Success();
}
catch (Exception ex)
{
@ -155,8 +164,9 @@ namespace JiShe.CollectBus.Subscribers
/// <returns></returns>
[HttpPost]
[Route("watermeter/fifteenminute/issued-event")]
[CapSubscribe(ProtocolConst.WatermeterSubscriberWorkerAutoReadingIssuedEventName)]
public async Task WatermeterSubscriberWorkerAutoReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage receivedMessage)
[KafkaSubscribe(ProtocolConst.WatermeterSubscriberWorkerAutoReadingIssuedEventName)]
//[CapSubscribe(ProtocolConst.WatermeterSubscriberWorkerAutoReadingIssuedEventName)]
public async Task<ISubscribeAck> WatermeterSubscriberWorkerAutoReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage receivedMessage)
{
_logger.LogInformation("15分钟采集水表数据下行消息消费队列开始处理");
var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
@ -172,6 +182,7 @@ namespace JiShe.CollectBus.Subscribers
await _tcpService.SendAsync(device.ClientId, Convert.FromHexString(receivedMessage.MessageHexString));
}
}
return SubscribeAck.Success();
}
#endregion
}

View File

@ -4,7 +4,7 @@ using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace JiShe.CollectBus.Protocol.Contracts
namespace JiShe.CollectBus.Common.Consts
{
public class ProtocolConst
{

View File

@ -28,25 +28,49 @@ namespace JiShe.CollectBus.Common.Consts
/// </summary>
public const string FifteenMinuteAcquisitionTimeInterval = "Fifteen";
public const string MeterInfo = "MeterInfo";
/// <summary>
/// 缓存表计信息,{0}=>系统类型,{1}=>应用服务部署标记,{2}=>表计类别,{3}=>采集频率
/// </summary>
public const string CacheMeterInfoKey = $"{CacheBasicDirectoryKey}{"{0}:{1}"}:MeterInfo:{"{2}"}:{"{3}"}:";
public const string CacheMeterInfoHashKey = $"{CacheBasicDirectoryKey}{"{0}:{1}"}:{MeterInfo}:{"{2}"}:{"{3}"}";
/// <summary>
/// 缓存表计信息索引Set缓存Key,{0}=>系统类型,{1}=>应用服务部署标记,{2}=>表计类别,{3}=>采集频率
/// </summary>
public const string CacheMeterInfoSetIndexKey = $"{CacheBasicDirectoryKey}{"{0}:{1}"}:{MeterInfo}:{"{2}"}:SetIndex:{"{3}"}";
/// <summary>
/// 缓存表计信息排序索引ZSET缓存Key,{0}=>系统类型,{1}=>应用服务部署标记,{2}=>表计类别,{3}=>采集频率
/// </summary>
public const string CacheMeterInfoZSetScoresIndexKey = $"{CacheBasicDirectoryKey}{"{0}:{1}"}:{MeterInfo}:{"{2}"}:ZSetScoresIndex:{"{3}"}";
public const string TaskInfo = "TaskInfo";
/// <summary>
/// 缓存待下发的指令生产任务数据,{0}=>系统类型,{1}=>应用服务部署标记,{2}=>表计类别,{3}=>采集频率
/// </summary>
public const string CacheTasksToBeIssuedKey = $"{CacheBasicDirectoryKey}{"{0}:{1}"}:TaskInfo:{"{2}"}:{"{3}"}";
public const string CacheTasksToBeIssuedKey = $"{CacheBasicDirectoryKey}{"{0}:{1}"}:{TaskInfo}:{"{2}"}:{"{3}"}";
public const string TelemetryPacket = "TelemetryPacket";
/// <summary>
/// 缓存表计下发指令数据集,{0}=>系统类型,{1}=>应用服务部署标记,{2}=>表计类别,{3}=>采集频率,{4}=>集中器所在分组,{5}=>时间格式的任务批次
/// </summary>
public const string CacheTelemetryPacketInfoHashKey = $"{CacheBasicDirectoryKey}{"{0}:{1}"}:{TelemetryPacket}:{"{2}"}:{"{3}"}:{"{4}"}:{"{5}"}";
/// <summary>
/// 缓存表计下发指令数据集,{0}=>系统类型,{1}=>应用服务部署标记,{2}=>表计类别,{3}=>采集频率
/// 缓存表计下发指令数据集索引Set缓存Key,{0}=>系统类型,{1}=>应用服务部署标记,{2}=>表计类别,{3}=>采集频率{4}=>集中器所在分组,{5}=>时间格式的任务批次
/// </summary>
public const string CacheTelemetryPacketInfoKey = $"{CacheBasicDirectoryKey}{"{0}:{1}"}:TelemetryPacket:{"{2}"}:{"{3}"}:";
public const string CacheTelemetryPacketInfoSetIndexKey = $"{CacheBasicDirectoryKey}{"{0}:{1}"}:{TelemetryPacket}:{"{2}"}:SetIndex:{"{3}"}:{"{4}"}:{"{5}"}";
/// <summary>
/// 缓存设备平衡关系映射结果,{0}=>系统类型,{1}=>应用服务部署标记
/// 缓存表计下发指令数据集排序索引ZSET缓存Key,{0}=>系统类型,{1}=>应用服务部署标记{2}=>表计类别,{3}=>采集频率,{4}=>集中器所在分组,{5}=>时间格式的任务批次
/// </summary>
public const string CacheDeviceBalanceRelationMapResultKey = $"{CacheBasicDirectoryKey}{"{0}:{1}"}:RelationMap";
public const string CacheTelemetryPacketInfoZSetScoresIndexKey = $"{CacheBasicDirectoryKey}{"{0}:{1}"}:{TelemetryPacket}:{"{2}"}:ZSetScoresIndex:{"{3}"}:{"{4}"}:{"{5}"}";
///// <summary>
///// 缓存设备平衡关系映射结果,{0}=>系统类型,{1}=>应用服务部署标记
///// </summary>
//public const string CacheDeviceBalanceRelationMapResultKey = $"{CacheBasicDirectoryKey}{"{0}:{1}"}:RelationMap";
public const string CacheAmmeterFocusKey = "CacheAmmeterFocusKey";
}

View File

@ -161,7 +161,6 @@ namespace JiShe.CollectBus.Common.DeviceBalanceControl
MaxDegreeOfParallelism = maxThreads.Value,
};
TimeSpan timeSpan = TimeSpan.FromMicroseconds(5);
await Task.Run(() =>
{
Parallel.For(0, cache.CachedGroups.Length, options, async groupId =>
@ -169,8 +168,7 @@ namespace JiShe.CollectBus.Common.DeviceBalanceControl
var queue = groupQueues[groupId];
while (queue.TryDequeue(out T item))
{
await Task.Delay(timeSpan);
processor(item, Thread.CurrentThread.ManagedThreadId);
processor(item, groupId);
}
});
});
@ -183,14 +181,14 @@ namespace JiShe.CollectBus.Common.DeviceBalanceControl
/// <typeparam name="T">已经分组的设备信息</typeparam>
/// <param name="items">部分或者全部的已经分组的设备集合</param>
/// <param name="deviceIdSelector">从泛型对象提取deviceId</param>
/// <param name="processor">处理委托(参数:当前对象,线程ID</param>
/// <param name="processor">处理委托(参数:当前对象,分组ID</param>
/// <param name="maxConcurrency">可选最佳并发度</param>
/// <returns></returns>
/// <exception cref="InvalidOperationException"></exception>
public static async Task ProcessWithThrottleAsync<T>(
List<T> items,
Func<T, string> deviceIdSelector,
Action<T> processor,
Action<T,int> processor,
int? maxConcurrency = null)
{
var cache = _currentCache ?? throw new InvalidOperationException("缓存未初始化");
@ -244,7 +242,7 @@ namespace JiShe.CollectBus.Common.DeviceBalanceControl
/// <summary>
/// 分组异步处理(带节流)
/// </summary>
private static async Task ProcessItemAsync<T>(T item, Action<T> processor, int groupId)
private static async Task ProcessItemAsync<T>(T item, Action<T,int> processor, int groupId)
{
// 使用内存缓存降低CPU负载
await Task.Yield(); // 立即释放当前线程
@ -255,7 +253,7 @@ namespace JiShe.CollectBus.Common.DeviceBalanceControl
{
ExecutionContext.Run(context!, state =>
{
processor(item);
processor(item,groupId);
}, null);
});
}

View File

@ -89,5 +89,26 @@ namespace JiShe.CollectBus.Common.Extensions
if (buffer.Count > 0)
yield return buffer;
}
//public static IEnumerable<IEnumerable<T>> Batch<T>(this IEnumerable<T> source, int batchSize)
//{
// if (batchSize <= 0)
// throw new ArgumentOutOfRangeException(nameof(batchSize));
// using var enumerator = source.GetEnumerator();
// while (enumerator.MoveNext())
// {
// yield return GetBatch(enumerator, batchSize);
// }
//}
//private static IEnumerable<T> GetBatch<T>(IEnumerator<T> enumerator, int batchSize)
//{
// do
// {
// yield return enumerator.Current;
// batchSize--;
// } while (batchSize > 0 && enumerator.MoveNext());
//}
}
}

View File

@ -1,4 +1,5 @@
using JiShe.CollectBus.Common.Enums;
using JiShe.CollectBus.Common.Consts;
using JiShe.CollectBus.Common.Enums;
using JiShe.CollectBus.Common.Extensions;
using System;
using System.Collections.Generic;
@ -7,7 +8,7 @@ using System.Reflection;
using System.Text;
using System.Threading.Tasks;
namespace JiShe.CollectBus.Protocol.Contracts
namespace JiShe.CollectBus.Common.Extensions
{
public class ProtocolConstExtensions
{

View File

@ -37,7 +37,8 @@ namespace JiShe.CollectBus.Common.Helpers
ReadCommentHandling = JsonCommentHandling.Skip, // 忽略注释
PropertyNameCaseInsensitive = true, // 属性名称大小写不敏感
PropertyNamingPolicy = JsonNamingPolicy.CamelCase, // 属性名称使用驼峰命名规则
Converters = { new DateTimeJsonConverter() } // 注册你的自定义转换器,
Converters = { new DateTimeJsonConverter() }, // 注册你的自定义转换器,
DefaultBufferSize = 4096,
};
}
@ -77,7 +78,8 @@ namespace JiShe.CollectBus.Common.Helpers
ReadCommentHandling = JsonCommentHandling.Skip, // 忽略注释
PropertyNameCaseInsensitive = true, // 属性名称大小写不敏感
PropertyNamingPolicy = JsonNamingPolicy.CamelCase, // 属性名称使用驼峰命名规则
Converters = { new DateTimeJsonConverter() } // 注册你的自定义转换器,
Converters = { new DateTimeJsonConverter() }, // 注册你的自定义转换器,
DefaultBufferSize = 4096,
};
}

View File

@ -17,6 +17,7 @@
<ItemGroup>
<PackageReference Include="JetBrains.Annotations" Version="2024.2.0" />
<PackageReference Include="Mapster" Version="7.4.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="8.0.1" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="8.0.2" />

View File

@ -0,0 +1,56 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace JiShe.CollectBus.Common.Models
{
/// <summary>
/// 缓存全局分页结果
/// </summary>
/// <typeparam name="T"></typeparam>
/// <summary>
/// Global paged result wrapper for cursor-based cache (Redis ZSET) queries.
/// </summary>
/// <typeparam name="T">Type of the items contained in one page.</typeparam>
public class BusCacheGlobalPagedResult<T>
{
    /// <summary>
    /// The items of the current page. Defaults to an empty list so callers
    /// never need a null check.
    /// </summary>
    public List<T> Items { get; set; } = new List<T>();

    /// <summary>
    /// Total number of matching records across all pages.
    /// </summary>
    public long TotalCount { get; set; }

    /// <summary>
    /// Number of records per page.
    /// </summary>
    public int PageSize { get; set; }

    /// <summary>
    /// Total number of pages. Returns 0 when <see cref="PageSize"/> is not
    /// positive, avoiding the division-by-zero / NaN cast of a naive formula.
    /// </summary>
    public int PageCount
    {
        get
        {
            if (PageSize <= 0)
            {
                return 0;
            }
            return (int)Math.Ceiling((double)TotalCount / PageSize);
        }
    }

    /// <summary>
    /// Whether another page follows the current one.
    /// </summary>
    public bool HasNext { get; set; }

    /// <summary>
    /// Cursor score of the next page (ZSET score-based paging); null when there is no next page.
    /// </summary>
    public decimal? NextScore { get; set; }

    /// <summary>
    /// Cursor member of the next page (ZSET member-based paging).
    /// </summary>
    public string NextMember { get; set; }
}
}

View File

@ -1,31 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace JiShe.CollectBus.Common.Models
{
/// <summary>
/// Generic cursor-based paged result (score/member cursor pair for ZSET paging).
/// </summary>
public class GlobalPagedResult<T>
{
    /// <summary>
    /// The items of the current page.
    /// </summary>
    public List<T> Items { get; set; }
    /// <summary>
    /// Whether another page follows the current one.
    /// </summary>
    public bool HasNext { get; set; }
    /// <summary>
    /// Cursor score of the next page (ZSET score); null when there is no next page.
    /// </summary>
    public long? NextScore { get; set; }
    /// <summary>
    /// Cursor member of the next page (ZSET member).
    /// </summary>
    public string NextMember { get; set; }
}
}

View File

@ -31,5 +31,20 @@ namespace JiShe.CollectBus.Common.Models
/// 数据集合
/// </summary>
public IEnumerable<T> Items { get; set; }
/// <summary>
/// 是否有下一页
/// </summary>
public bool HasNext { get; set; }
/// <summary>
/// 下一页的分页索引
/// </summary>
public decimal? NextScore { get; set; }
/// <summary>
/// 下一页的分页索引
/// </summary>
public string NextMember { get; set; }
}
}

View File

@ -9,7 +9,7 @@ namespace JiShe.CollectBus.Common.Models
/// <summary>
/// 设备缓存基础模型
/// </summary>
public class DeviceCacheBasicModel
public abstract class DeviceCacheBasicModel
{
/// <summary>
/// 集中器Id
@ -20,5 +20,15 @@ namespace JiShe.CollectBus.Common.Models
/// 表Id
/// </summary>
public int MeterId { get; set; }
/// <summary>
/// 关系映射标识用于ZSet的Member字段和Set的Value字段具体值可以根据不同业务场景进行定义
/// </summary>
public virtual string MemberId => $"{FocusId}:{MeterId}";
/// <summary>
/// ZSet排序索引分数值具体值可以根据不同业务场景进行定义例如时间戳
/// </summary>
public virtual long ScoreValue=> ((long)FocusId << 32) | (uint)MeterId;
}
}

View File

@ -1,4 +1,6 @@
using System;
using FreeSql.DataAnnotations;
using JiShe.CollectBus.Common.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
@ -6,23 +8,25 @@ using System.Threading.Tasks;
namespace JiShe.CollectBus.Ammeters
{
public class AmmeterInfo
public class AmmeterInfo: DeviceCacheBasicModel
{
/// <summary>
/// 电表ID
/// 关系映射标识用于ZSet的Member字段和Set的Value字段具体值可以根据不同业务场景进行定义
/// </summary>
public int ID { get; set; }
[Column(IsIgnore = true)]
public override string MemberId => $"{FocusId}:{MeterId}";
/// <summary>
/// ZSet排序索引分数值具体值可以根据不同业务场景进行定义例如时间戳
/// </summary>
[Column(IsIgnore = true)]
public override long ScoreValue => ((long)FocusId << 32) | (uint)DateTime.Now.Ticks;
/// <summary>
/// 电表名称
/// </summary>
public string Name { get; set; }
/// <summary>
/// 集中器ID
/// </summary>
public int FocusID { get; set; }
/// <summary>
/// 集中器地址
/// </summary>

View File

@ -0,0 +1,152 @@
using JiShe.CollectBus.Common.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace JiShe.CollectBus.Ammeters
{
/// <summary>
/// Temporary (flat) ammeter record, mirroring the legacy database schema
/// before mapping onto the cache model.
/// </summary>
public class AmmeterInfoTemp
{
    /// <summary>
    /// Concentrator Id.
    /// </summary>
    public int FocusID { get; set; }
    /// <summary>
    /// Meter Id.
    /// </summary>
    public int Id { get; set; }
    /// <summary>
    /// Ammeter name.
    /// </summary>
    public string Name { get; set; }
    /// <summary>
    /// Concentrator address.
    /// </summary>
    public string FocusAddress { get; set; }
    /// <summary>
    /// Address. NOTE(review): the original comment duplicated "concentrator
    /// address"; presumably this is the meter/device address — confirm against
    /// the database schema.
    /// </summary>
    public string Address { get; set; }
    /// <summary>
    /// Concentrator area code.
    /// </summary>
    public string AreaCode { get; set; }
    /// <summary>
    /// Meter category: 1 single-phase, 2 three-phase three-wire, 3 three-phase four-wire.
    /// DL/T 645-2007 protocol open/close commands (1A trip/open, 1C single-phase close, 1B multi-phase close).
    /// 1997 protocol: true (close) / false (trip) => "9966" : "3355"; the 1997
    /// protocol does not distinguish single-/multi-phase.
    /// </summary>
    public int TypeName { get; set; }
    /// <summary>
    /// Trip/close state: 0 closed (powered), 1 open (tripped).
    /// </summary>
    public int TripState { get; set; }
    /// <summary>
    /// Protocol of the ammeter, default 30: 1 = 1997 protocol, 30 = 2007 protocol.
    /// </summary>
    public int? Protocol { get; set; }
    /// <summary>
    /// Metering code (PN); must be unique within one concentrator.
    /// </summary>
    public int MeteringCode { get; set; }
    /// <summary>
    /// Ammeter communication address.
    /// </summary>
    public string AmmerterAddress { get; set; }
    /// <summary>
    /// Baud rate, default 2400.
    /// </summary>
    public int Baudrate { get; set; }
    /// <summary>
    /// Metering port; the set of valid ports is a small enumerable range.
    /// </summary>
    public int MeteringPort { get; set; }
    /// <summary>
    /// Ammeter password.
    /// </summary>
    public string Password { get; set; }
    /// <summary>
    /// Acquisition interval in minutes (e.g. 15).
    /// </summary>
    public int TimeDensity { get; set; }
    /// <summary>
    /// Acquisition item codes of this meter's plan, JSON array, e.g. ["0D_80","0D_80"].
    /// </summary>
    public string ItemCodes { get; set; }
    /// <summary>
    /// Meter state:
    /// 0 newly installed / archive not yet issued, 1 running (set when archive
    /// issue succeeds), 2 paused, 100 decommissioned (may be re-enabled later).
    /// Special value -1 means deleted.
    /// </summary>
    public int State { get; set; }
    /// <summary>
    /// Whether the meter reports automatically (0 = polled on demand, 1 = automatic report).
    /// </summary>
    public int AutomaticReport { get; set; }
    /// <summary>
    /// Acquisition item type codes of this meter's plan.
    /// </summary>
    public string DataTypes { get; set; }
    /// <summary>
    /// Brand / model.
    /// </summary>
    public string BrandType { get; set; }
    /// <summary>
    /// Collector (gatherer) code.
    /// </summary>
    public string GatherCode { get; set; }
    /// <summary>
    /// Whether this is a special meter (1 = special ammeter).
    /// </summary>
    public int Special { get; set; }
    /// <summary>
    /// Tariff type: true = single-rate, false = multi-rate.
    /// SingleRate ? "单" : "复" (single vs. composite); unrelated to <see cref="TypeName"/>.
    /// Must stay consistent with PayPlanID: TB_PayPlan.Type is 1 multi-rate, 2 single-rate.
    /// </summary>
    public bool SingleRate { get; set; }
    /// <summary>
    /// Project Id.
    /// </summary>
    public int ProjectID { get; set; }
    /// <summary>
    /// Business database Id.
    /// </summary>
    public int DatabaseBusiID { get; set; }
    /// <summary>
    /// Whether the concentrator is abnormal: 0 normal, 1 abnormal.
    /// </summary>
    public int AbnormalState { get; set; }
    /// <summary>
    /// Last activity time. NOTE(review): no comment in the original; presumably
    /// the last communication/reading time — confirm against callers.
    /// </summary>
    public DateTime LastTime { get; set; }
}
}

View File

@ -0,0 +1,141 @@
using JiShe.CollectBus.Common.Enums;
using JiShe.CollectBus.Common.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Volo.Abp.Domain.Entities;
using Volo.Abp.Domain.Entities.Auditing;
namespace JiShe.CollectBus.IotSystems.MeterReadingRecords
{
/// <summary>
/// 抄读任务Redis缓存数据记录
/// </summary>
/// <summary>
/// Meter-reading task record cached in Redis (one issued telemetry command and
/// its eventual reply).
/// </summary>
public class MeterReadingTelemetryPacketInfo : DeviceCacheBasicModel
{
    /// <summary>
    /// Relation-mapping key, used as the ZSET member and the SET value; the
    /// concrete format can vary per business scenario.
    /// </summary>
    public override string MemberId => $"{FocusId}:{MeterId}:{ItemCode}";
    /// <summary>
    /// ZSET ordering score (concentrator id in the high 32 bits, truncated
    /// tick count in the low 32 bits).
    /// NOTE(review): this reads DateTime.Now on every access, so repeated
    /// reads yield different scores; also (uint)Ticks truncates. Confirm that
    /// callers only read it once per add operation.
    /// </summary>
    public override long ScoreValue => ((long)FocusId << 32) | (uint)DateTime.Now.Ticks;
    /// <summary>
    /// Whether the operation was triggered manually.
    /// </summary>
    public bool ManualOrNot { get; set; }
    /// <summary>
    /// Unique mark of the task data.
    /// </summary>
    public string TaskMark { get; set; }
    /// <summary>
    /// Timestamp mark for the IoTDB time column; on report the unique mark is
    /// rebuilt to match this timestamp.
    /// </summary>
    public long Timestamps { get; set; }
    /// <summary>
    /// Whether the task has timed out.
    /// </summary>
    public bool IsTimeout { get; set; } = false;
    /// <summary>
    /// Time at which the reading is scheduled to be performed.
    /// </summary>
    public DateTime PendingCopyReadTime { get; set; }
    /// <summary>
    /// Concentrator address.
    /// </summary>
    public string FocusAddress { get; set; }
    /// <summary>
    /// Meter address.
    /// </summary>
    public string MeterAddress { get; set; }
    /// <summary>
    /// Meter type.
    /// </summary>
    public MeterTypeEnum MeterType { get; set; }
    /// <summary>
    /// Project Id.
    /// </summary>
    public int ProjectID { get; set; }
    /// <summary>
    /// Business database Id.
    /// </summary>
    public int DatabaseBusiID { get; set; }
    /// <summary>
    /// AFN function code.
    /// </summary>
    public AFN AFN { get; set; }
    /// <summary>
    /// Reading function code (Fn).
    /// </summary>
    public int Fn { get; set; }
    /// <summary>
    /// Reading metering point (Pn).
    /// </summary>
    public int Pn { get; set; }
    /// <summary>
    /// Acquisition item code.
    /// </summary>
    public string ItemCode { get; set;}
    /// <summary>
    /// Creation time.
    /// </summary>
    public DateTime CreationTime { get; set; }
    /// <summary>
    /// Issued message payload as a hex string.
    /// </summary>
    public string IssuedMessageHexString { get; set; }
    /// <summary>
    /// Issued message Id.
    /// </summary>
    public string IssuedMessageId { get; set; }
    /// <summary>
    /// Reported (received) message payload as a hex string; null until a reply arrives.
    /// </summary>
    public string? ReceivedMessageHexString { get; set; }
    /// <summary>
    /// Time the report was received; null until a reply arrives.
    /// </summary>
    public DateTime? ReceivedTime { get; set; }
    /// <summary>
    /// Reported message Id.
    /// </summary>
    public string ReceivedMessageId { get; set; }
    /// <summary>
    /// Parsing remark for the reported frame; only populated on abnormal cases.
    /// </summary>
    public string ReceivedRemark { get; set; }
}
}

View File

@ -1,4 +1,5 @@
using JiShe.CollectBus.Common.Enums;
using JiShe.CollectBus.Common.Models;
using System;
using System.Collections.Generic;
using System.Linq;
@ -10,13 +11,8 @@ namespace JiShe.CollectBus.IotSystems.Watermeter
/// <summary>
/// 水表信息
/// </summary>
public class WatermeterInfo
public class WatermeterInfo: DeviceCacheBasicModel
{
/// <summary>
/// 水表ID
/// </summary>
public int ID { get; set; }
/// <summary>
/// 水表名称
/// </summary>
@ -26,11 +22,6 @@ namespace JiShe.CollectBus.IotSystems.Watermeter
/// </summary>
public string Password { get; set; }
/// <summary>
/// 集中器ID
/// </summary>
public int FocusID { get; set; }
/// <summary>
/// 集中器地址
/// </summary>

View File

@ -1,11 +1,14 @@
using FreeRedis;
using JiShe.CollectBus.Common.Helpers;
using JiShe.CollectBus.Common.Models;
using JiShe.CollectBus.Common.Extensions;
using JiShe.CollectBus.FreeRedisProvider.Options;
using Microsoft.Extensions.Options;
using System.Diagnostics;
using System.Text.Json;
using Volo.Abp.DependencyInjection;
using static System.Runtime.InteropServices.JavaScript.JSType;
using System.Collections.Concurrent;
namespace JiShe.CollectBus.FreeRedisProvider
{
@ -33,7 +36,7 @@ namespace JiShe.CollectBus.FreeRedisProvider
public IRedisClient GetInstance()
{
var connectionString = $"{_option.Configuration},defaultdatabase={_option.DefaultDB}";
var connectionString = $"{_option.Configuration},defaultdatabase={_option.DefaultDB},MaxPoolSize={_option.MaxPoolSize}";
Instance = new RedisClient(connectionString);
Instance.Serialize = obj => BusJsonSerializer.Serialize(obj);
Instance.Deserialize = (json, type) => BusJsonSerializer.Deserialize(json, type);
@ -41,442 +44,464 @@ namespace JiShe.CollectBus.FreeRedisProvider
return Instance;
}
//public async Task AddMeterZSetCacheData<T>(string redisCacheKey, string redisCacheIndexKey, decimal score, T data)
///// <summary>
///// 单个添加数据
///// </summary>
///// <typeparam name="T"></typeparam>
///// <param name="redisCacheKey">主数据存储Hash缓存Key</param>
///// <param name="redisCacheFocusIndexKey">集中器索引Set缓存Key</param>
///// <param name="redisCacheScoresIndexKey">集中器排序索引ZSET缓存Key</param>
///// <param name="redisCacheGlobalIndexKey">集中器采集频率分组全局索引ZSet缓存Key</param>
///// <param name="data">表计信息</param>
///// <param name="timestamp">可选时间戳</param>
///// <returns></returns>
//public async Task AddMeterCacheData<T>(
//string redisCacheKey,
//string redisCacheFocusIndexKey,
//string redisCacheScoresIndexKey,
//string redisCacheGlobalIndexKey,
//T data,
//DateTimeOffset? timestamp = null) where T : DeviceCacheBasicModel
//{
// if (score < 0 || data == null || string.IsNullOrWhiteSpace(redisCacheKey) || string.IsNullOrWhiteSpace(redisCacheIndexKey))
// // 参数校验增强
// if (data == null || string.IsNullOrWhiteSpace(redisCacheKey)
// || string.IsNullOrWhiteSpace(redisCacheFocusIndexKey)
// || string.IsNullOrWhiteSpace(redisCacheScoresIndexKey)
// || string.IsNullOrWhiteSpace(redisCacheGlobalIndexKey))
// {
// throw new Exception($"{nameof(AddMeterZSetCacheData)} 参数异常,-101");
// throw new ArgumentException($"{nameof(AddMeterCacheData)} 参数异常,-101");
// }
// // 生成唯一member标识
// var member = data.Serialize();
// // 计算组合score分类ID + 时间戳)
// var actualTimestamp = timestamp ?? DateTimeOffset.UtcNow;
// // 计算score范围
// decimal dataScore = (long)score << 32;
// long scoreValue = ((long)data.FocusId << 32) | (uint)actualTimestamp.Ticks;
// //// 事务操作
// //using (var tran = FreeRedisProvider.Instance.Multi())
// //{
// // await tran.ZAddAsync(cacheKey, score,member);
// // await tran.SAddAsync($"cat_index:{categoryId}", member);
// // object[] ret = tran.Exec();
// //}
// //全局索引写入
// long globalScore = actualTimestamp.ToUnixTimeMilliseconds();
// using (var pipe = Instance.StartPipe())
// // 使用事务保证原子性
// using (var trans = Instance.Multi())
// {
// pipe.ZAdd(redisCacheKey, dataScore, member);
// pipe.SAdd(redisCacheIndexKey, member);
// object[] ret = pipe.EndPipe();
// // 主数据存储Hash
// trans.HSet(redisCacheKey, data.MemberID, data.Serialize());
// // 分类索引
// trans.SAdd(redisCacheFocusIndexKey, data.MemberID);
// // 排序索引使用ZSET
// trans.ZAdd(redisCacheScoresIndexKey, scoreValue, data.MemberID);
// //全局索引
// trans.ZAdd(redisCacheGlobalIndexKey, globalScore, data.MemberID);
// var results = trans.Exec();
// if (results == null || results.Length <= 0)
// throw new Exception($"{nameof(AddMeterCacheData)} 事务提交失败,-102");
// }
// await Task.CompletedTask;
//}
//public async Task<BusPagedResult<T>> GetMeterZSetPagedData<T>(
///// <summary>
///// 批量添加数据
///// </summary>
///// <typeparam name="T"></typeparam>
///// <param name="redisCacheKey">主数据存储Hash缓存Key</param>
///// <param name="redisCacheFocusIndexKey">集中器索引Set缓存Key</param>
///// <param name="redisCacheScoresIndexKey">集中器排序索引ZSET缓存Key</param>
///// <param name="redisCacheGlobalIndexKey">集中器采集频率分组全局索引ZSet缓存Key</param>
///// <param name="items">数据集合</param>
///// <param name="timestamp">可选时间戳</param>
///// <returns></returns>
//public async Task BatchAddMeterData<T>(
//string redisCacheKey,
//string redisCacheIndexKey,
//decimal score,
//int pageSize = 10,
//int pageIndex = 1)
//string redisCacheFocusIndexKey,
//string redisCacheScoresIndexKey,
//string redisCacheGlobalIndexKey,
//IEnumerable<T> items,
//DateTimeOffset? timestamp = null) where T : DeviceCacheBasicModel
//{
// if (score < 0 || string.IsNullOrWhiteSpace(redisCacheKey) || string.IsNullOrWhiteSpace(redisCacheIndexKey))
// if (items == null
// || items.Count() <=0
// || string.IsNullOrWhiteSpace(redisCacheKey)
// || string.IsNullOrWhiteSpace(redisCacheFocusIndexKey)
// || string.IsNullOrWhiteSpace(redisCacheScoresIndexKey)
// || string.IsNullOrWhiteSpace(redisCacheGlobalIndexKey))
// {
// throw new Exception($"{nameof(GetMeterZSetPagedData)} 参数异常,-101");
// throw new ArgumentException($"{nameof(BatchAddMeterData)} 参数异常,-101");
// }
// // 计算score范围
// decimal minScore = (long)score << 32;
// decimal maxScore = ((long)score + 1) << 32;
// const int BATCH_SIZE = 1000; // 每批1000条
// var semaphore = new SemaphoreSlim(Environment.ProcessorCount * 2);
// // 分页参数
// foreach (var batch in items.Batch(BATCH_SIZE))
// {
// await semaphore.WaitAsync();
// _ = Task.Run(() =>
// {
// using (var pipe = Instance.StartPipe())
// {
// foreach (var item in batch)
// {
// // 计算组合score分类ID + 时间戳)
// var actualTimestamp = timestamp ?? DateTimeOffset.UtcNow;
// long scoreValue = ((long)item.FocusId << 32) | (uint)actualTimestamp.Ticks;
// //全局索引写入
// long globalScore = actualTimestamp.ToUnixTimeMilliseconds();
// // 主数据存储Hash
// pipe.HSet(redisCacheKey, item.MemberID, item.Serialize());
// // 分类索引Set
// pipe.SAdd(redisCacheFocusIndexKey, item.MemberID);
// // 排序索引使用ZSET
// pipe.ZAdd(redisCacheScoresIndexKey, scoreValue, item.MemberID);
// //全局索引
// pipe.ZAdd(redisCacheGlobalIndexKey, globalScore, item.MemberID);
// }
// pipe.EndPipe();
// }
// semaphore.Release();
// });
// }
// await Task.CompletedTask;
//}
///// <summary>
///// 删除指定redis缓存key的缓存数据
///// </summary>
///// <typeparam name="T"></typeparam>
///// <param name="redisCacheKey">主数据存储Hash缓存Key</param>
///// <param name="redisCacheFocusIndexKey">集中器索引Set缓存Key</param>
///// <param name="redisCacheScoresIndexKey">集中器排序索引ZSET缓存Key</param>
///// <param name="redisCacheGlobalIndexKey">集中器采集频率分组全局索引ZSet缓存Key</param>
///// <param name="data">表计信息</param>
///// <returns></returns>
//public async Task RemoveMeterData<T>(
//string redisCacheKey,
//string redisCacheFocusIndexKey,
//string redisCacheScoresIndexKey,
//string redisCacheGlobalIndexKey,
//T data) where T : DeviceCacheBasicModel
//{
// if (data == null
// || string.IsNullOrWhiteSpace(redisCacheKey)
// || string.IsNullOrWhiteSpace(redisCacheFocusIndexKey)
// || string.IsNullOrWhiteSpace(redisCacheScoresIndexKey)
// || string.IsNullOrWhiteSpace(redisCacheGlobalIndexKey))
// {
// throw new ArgumentException($"{nameof(RemoveMeterData)} 参数异常,-101");
// }
// const string luaScript = @"
// local mainKey = KEYS[1]
// local focusIndexKey = KEYS[2]
// local scoresIndexKey = KEYS[3]
// local globalIndexKey = KEYS[4]
// local member = ARGV[1]
// local deleted = 0
// if redis.call('HDEL', mainKey, member) > 0 then
// deleted = 1
// end
// redis.call('SREM', focusIndexKey, member)
// redis.call('ZREM', scoresIndexKey, member)
// redis.call('ZREM', globalIndexKey, member)
// return deleted
// ";
// var keys = new[]
// {
// redisCacheKey,
// redisCacheFocusIndexKey,
// redisCacheScoresIndexKey,
// redisCacheGlobalIndexKey
// };
// var result = await Instance.EvalAsync(luaScript, keys, new[] { data.MemberID });
// if ((int)result == 0)
// throw new KeyNotFoundException("指定数据不存在");
//}
///// <summary>
///// 修改表计缓存信息
///// </summary>
///// <typeparam name="T"></typeparam>
///// <param name="redisCacheKey">主数据存储Hash缓存Key</param>
///// <param name="oldRedisCacheFocusIndexKey">旧集中器索引Set缓存Key</param>
///// <param name="newRedisCacheFocusIndexKey">新集中器索引Set缓存Key</param>
///// <param name="redisCacheScoresIndexKey">集中器排序索引ZSET缓存Key</param>
///// <param name="redisCacheGlobalIndexKey">集中器采集频率分组全局索引ZSet缓存Key</param>
///// <param name="newData">表计信息</param>
///// <param name="newTimestamp">可选时间戳</param>
///// <returns></returns>
//public async Task UpdateMeterData<T>(
//string redisCacheKey,
//string oldRedisCacheFocusIndexKey,
//string newRedisCacheFocusIndexKey,
//string redisCacheScoresIndexKey,
//string redisCacheGlobalIndexKey,
//T newData,
//DateTimeOffset? newTimestamp = null) where T : DeviceCacheBasicModel
//{
// if (newData == null
// || string.IsNullOrWhiteSpace(redisCacheKey)
// || string.IsNullOrWhiteSpace(oldRedisCacheFocusIndexKey)
// || string.IsNullOrWhiteSpace(newRedisCacheFocusIndexKey)
// || string.IsNullOrWhiteSpace(redisCacheScoresIndexKey)
// || string.IsNullOrWhiteSpace(redisCacheGlobalIndexKey))
// {
// throw new ArgumentException($"{nameof(UpdateMeterData)} 参数异常,-101");
// }
// var luaScript = @"
// local mainKey = KEYS[1]
// local oldFocusIndexKey = KEYS[2]
// local newFocusIndexKey = KEYS[3]
// local scoresIndexKey = KEYS[4]
// local globalIndexKey = KEYS[5]
// local member = ARGV[1]
// local newData = ARGV[2]
// local newScore = ARGV[3]
// local newGlobalScore = ARGV[4]
// -- 校验存在性
// if redis.call('HEXISTS', mainKey, member) == 0 then
// return 0
// end
// -- 更新主数据
// redis.call('HSET', mainKey, member, newData)
// -- 处理变更
// if newScore ~= '' then
// -- 删除旧索引
// redis.call('SREM', oldFocusIndexKey, member)
// redis.call('ZREM', scoresIndexKey, member)
// -- 添加新索引
// redis.call('SADD', newFocusIndexKey, member)
// redis.call('ZADD', scoresIndexKey, newScore, member)
// end
// -- 更新全局索引
// if newGlobalScore ~= '' then
// -- 删除旧索引
// redis.call('ZREM', globalIndexKey, member)
// -- 添加新索引
// redis.call('ZADD', globalIndexKey, newGlobalScore, member)
// end
// return 1
// ";
// var actualTimestamp = newTimestamp ?? DateTimeOffset.UtcNow;
// var newGlobalScore = actualTimestamp.ToUnixTimeMilliseconds();
// var newScoreValue = ((long)newData.FocusId << 32) | (uint)actualTimestamp.Ticks;
// var result = await Instance.EvalAsync(luaScript,
// new[]
// {
// redisCacheKey,
// oldRedisCacheFocusIndexKey,
// newRedisCacheFocusIndexKey,
// redisCacheScoresIndexKey,
// redisCacheGlobalIndexKey
// },
// new object[]
// {
// newData.MemberID,
// newData.Serialize(),
// newScoreValue.ToString() ?? "",
// newGlobalScore.ToString() ?? ""
// });
// if ((int)result == 0)
// {
// throw new KeyNotFoundException($"{nameof(UpdateMeterData)}指定Key{redisCacheKey}的数据不存在");
// }
//}
//public async Task<BusPagedResult<T>> SingleGetMeterPagedData<T>(
//string redisCacheKey,
//string redisCacheScoresIndexKey,
//int focusId,
//int pageSize = 10,
//int pageIndex = 1,
//bool descending = true)
//{
// // 计算score范围
// long minScore = (long)focusId << 32;
// long maxScore = ((long)focusId + 1) << 32;
// // 分页参数计算
// int start = (pageIndex - 1) * pageSize;
// // 查询主数据
// var members = await Instance.ZRevRangeByScoreAsync(
// redisCacheKey,
// // 获取排序后的member列表
// var members = descending
// ? await Instance.ZRevRangeByScoreAsync(
// redisCacheScoresIndexKey,
// maxScore,
// minScore,
// offset: start,
// count: pageSize
// );
// start,
// pageSize)
// : await Instance.ZRangeByScoreAsync(
// redisCacheScoresIndexKey,
// minScore,
// maxScore,
// start,
// pageSize);
// if (members == null)
// {
// throw new Exception($"{nameof(GetMeterZSetPagedData)} 获取缓存的信息失败,第 {pageIndex + 1} 页数据未返回,-102");
// }
// // 批量获取实际数据
// var dataTasks = members.Select(m =>
// Instance.HGetAsync<T>(redisCacheKey, m)).ToArray();
// await Task.WhenAll(dataTasks);
// // 查询总数
// var total = await Instance.ZCountAsync(redisCacheKey, minScore, maxScore);
// // 总数统计优化
// var total = await Instance.ZCountAsync(
// redisCacheScoresIndexKey,
// minScore,
// maxScore);
// return new BusPagedResult<T>
// {
// Items = members.Select(m =>
// BusJsonSerializer.Deserialize<T>(m)!).ToList(),
// Items = dataTasks.Select(t => t.Result).ToList(),
// TotalCount = total,
// PageIndex = pageIndex,
// PageSize = pageSize
// };
//}
///// <summary>
///// 删除数据示例
///// </summary>
///// <typeparam name="T"></typeparam>
///// <param name="redisCacheKey">分类</param>
///// <param name="redisCacheIndexKey"></param>
///// <param name="data"></param>
///// <returns></returns>
//public async Task RemoveMeterZSetData<T>(
//public async Task<BusPagedResult<T>> GetFocusPagedData<T>(
//string redisCacheKey,
//string redisCacheIndexKey,
//T data)
//string redisCacheScoresIndexKey,
//int focusId,
//int pageSize = 10,
//long? lastScore = null,
//string lastMember = null,
//bool descending = true) where T : DeviceCacheBasicModel
//{
// // 计算分数范围
// long minScore = (long)focusId << 32;
// long maxScore = ((long)focusId + 1) << 32;
// // 查询需要删除的member
// var members = await Instance.SMembersAsync(redisCacheIndexKey);
// var target = members.FirstOrDefault(m =>
// BusJsonSerializer.Deserialize<T>(m) == data);//泛型此处该如何处理?
// // 获取成员列表
// var members = await GetSortedMembers(
// redisCacheScoresIndexKey,
// minScore,
// maxScore,
// pageSize,
// lastScore,
// lastMember,
// descending);
// if (target != null)
// // 批量获取数据
// var dataDict = await Instance.HMGetAsync<T>(redisCacheKey, members.CurrentItems);
// return new BusPagedResult<T>
// {
// using (var trans = Instance.Multi())
// {
// trans.ZRem(redisCacheKey, target);
// trans.SRem(redisCacheIndexKey, target);
// trans.Exec();
// }
// }
// Items = dataDict,
// TotalCount = await GetTotalCount(redisCacheScoresIndexKey, minScore, maxScore),
// HasNext = members.HasNext,
// NextScore = members.NextScore,
// NextMember = members.NextMember
// };
//}
// await Task.CompletedTask;
//private async Task<(string[] CurrentItems, bool HasNext, decimal? NextScore, string NextMember)>
// GetSortedMembers(
// string zsetKey,
// long minScore,
// long maxScore,
// int pageSize,
// long? lastScore,
// string lastMember,
// bool descending)
//{
// var querySize = pageSize + 1;
// var (startScore, exclude) = descending
// ? (lastScore ?? maxScore, lastMember)
// : (lastScore ?? minScore, lastMember);
// var members = descending
// ? await Instance.ZRevRangeByScoreAsync(
// zsetKey,
// max: startScore,
// min: minScore,
// offset: 0,
// count: querySize)
// : await Instance.ZRangeByScoreAsync(
// zsetKey,
// min: startScore,
// max: maxScore,
// offset: 0,
// count: querySize);
// var hasNext = members.Length > pageSize;
// var currentItems = members.Take(pageSize).ToArray();
// var nextCursor = currentItems.Any()
// ? await GetNextCursor(zsetKey, currentItems.Last(), descending)
// : (null, null);
// return (currentItems, hasNext, nextCursor.score, nextCursor.member);
//}
//private async Task<long> GetTotalCount(string zsetKey, long min, long max)
//{
// // 缓存计数优化
// var cacheKey = $"{zsetKey}_count_{min}_{max}";
// var cached = await Instance.GetAsync<long?>(cacheKey);
// if (cached.HasValue)
// return cached.Value;
// var count = await Instance.ZCountAsync(zsetKey, min, max);
// await Instance.SetExAsync(cacheKey, 60, count); // 缓存60秒
// return count;
//}
/// <summary>
/// Adds one meter record to the cache: main data in a Hash, a score index in a
/// ZSET (<c>{redisCacheKey}_scores</c>), a category SET index, and the global
/// ZSET index (<c>global_data_all</c>), all inside one Redis MULTI/EXEC
/// transaction so the four writes stay atomic.
/// </summary>
/// <typeparam name="T">Type of the cached payload.</typeparam>
/// <param name="redisCacheKey">Hash key holding the main data.</param>
/// <param name="redisCacheIndexKey">SET key holding the category index.</param>
/// <param name="categoryId">Category id; forms the high 32 bits of the ZSET score.</param>
/// <param name="data">Payload to cache.</param>
/// <param name="timestamp">Optional timestamp; defaults to UtcNow. Its truncated
/// tick count forms the low 32 bits of the ZSET score.</param>
/// <exception cref="ArgumentException">When any required argument is null/blank.</exception>
/// <exception cref="InvalidOperationException">When the transaction yields no results.</exception>
public async Task AddMeterZSetCacheData<T>(
string redisCacheKey,
string redisCacheIndexKey,
int categoryId,
T data,
DateTimeOffset? timestamp = null)
{
    // Parameter validation, using the codebase's "<method> 参数异常,-101" message convention.
    if (data == null || string.IsNullOrWhiteSpace(redisCacheKey)
        || string.IsNullOrWhiteSpace(redisCacheIndexKey))
    {
        throw new ArgumentException($"{nameof(AddMeterZSetCacheData)} 参数异常,-101");
    }

    // Unique member id (category prefix + random fingerprint).
    var member = $"{categoryId}:{Guid.NewGuid()}";
    var serializedData = data.Serialize();

    // Combined score: category id in the high 32 bits, truncated timestamp ticks in the low 32 bits.
    var actualTimestamp = timestamp ?? DateTimeOffset.UtcNow;
    long scoreValue = ((long)categoryId << 32) | (uint)actualTimestamp.Ticks;

    // Global index score: Unix milliseconds of the same timestamp.
    long globalScore = actualTimestamp.ToUnixTimeMilliseconds();

    // MULTI/EXEC transaction keeps the four writes atomic.
    using (var trans = Instance.Multi())
    {
        // Main data (Hash).
        trans.HSet(redisCacheKey, member, serializedData);
        // Score index (ZSET).
        trans.ZAdd($"{redisCacheKey}_scores", scoreValue, member);
        // Category index (SET).
        trans.SAdd(redisCacheIndexKey, member);
        // Global index (ZSET).
        trans.ZAdd("global_data_all", globalScore, member);
        var results = trans.Exec();
        if (results == null || results.Length <= 0)
            throw new InvalidOperationException($"{nameof(AddMeterZSetCacheData)} 事务提交失败,-102");
    }
    await Task.CompletedTask;
}
/// <summary>
/// Batch-inserts meter records using a Redis pipeline. Writes the same four
/// structures as <c>AddMeterZSetCacheData</c> for every item: payload Hash,
/// per-category score ZSET, global time ZSET and category SET index.
/// (The previous body was merge-conflict residue: it allocated a batch size and a
/// semaphore but wrote nothing to Redis; this restores the intended behavior.)
/// </summary>
/// <param name="redisCacheKey">Hash key that stores the serialized payloads.</param>
/// <param name="indexKey">SET key that stores the category index members.</param>
/// <param name="items">Records to insert.</param>
/// <exception cref="ArgumentException">Thrown when either key is blank.</exception>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="items"/> is null.</exception>
public async Task BatchAddMeterData<T>(
    string redisCacheKey,
    string indexKey,
    IEnumerable<T> items) where T : DeviceCacheBasicModel
{
    if (string.IsNullOrWhiteSpace(redisCacheKey) || string.IsNullOrWhiteSpace(indexKey))
        throw new ArgumentException("Invalid parameters");
    if (items == null)
        throw new ArgumentNullException(nameof(items));

    const int BATCH_SIZE = 1000; // flush the pipeline every 1000 items

    var buffer = new List<T>(BATCH_SIZE);
    foreach (var item in items)
    {
        buffer.Add(item);
        if (buffer.Count >= BATCH_SIZE)
        {
            WriteMeterBatch(redisCacheKey, indexKey, buffer);
            buffer.Clear();
        }
    }
    // Flush the trailing partial batch, if any.
    if (buffer.Count > 0)
    {
        WriteMeterBatch(redisCacheKey, indexKey, buffer);
    }
    await Task.CompletedTask;
}

/// <summary>
/// Writes one batch of records through a single Redis pipeline round-trip.
/// </summary>
private void WriteMeterBatch<T>(string redisCacheKey, string indexKey, IReadOnlyList<T> batch)
    where T : DeviceCacheBasicModel
{
    using (var pipe = Instance.StartPipe())
    {
        foreach (var item in batch)
        {
            // Same member/score scheme as AddMeterZSetCacheData.
            // NOTE(review): assumes DeviceCacheBasicModel exposes CategoryId,
            // Timestamp and Data (per the original commented-out code) — confirm.
            var member = $"{item.CategoryId}:{Guid.NewGuid()}";
            long score = ((long)item.CategoryId << 32) | (uint)item.Timestamp.Ticks;
            // Main payload storage (Hash).
            pipe.HSet(redisCacheKey, member, item.Data.Serialize());
            // Per-category sort index (ZSET).
            pipe.ZAdd($"{redisCacheKey}_scores", score, member);
            // Global time index (ZSET).
            pipe.ZAdd("global_data_all", item.Timestamp.ToUnixTimeMilliseconds(), member);
            // Category membership index (SET).
            pipe.SAdd(indexKey, member);
        }
        pipe.EndPipe();
    }
}
/// <summary>
/// Updates a cached record in place. A Lua script atomically rewrites the payload
/// and, when the category or timestamp changed, moves the member between category
/// indexes and updates its sort score. The global time index is refreshed only
/// after the script confirms the record exists.
/// </summary>
/// <param name="redisCacheKey">Hash key that stores the serialized payloads.</param>
/// <param name="oldCategoryIndexKey">SET index the member currently belongs to.</param>
/// <param name="newCategoryIndexKey">SET index the member should move to.</param>
/// <param name="memberId">Unique id in the "categoryId:GUID" format.</param>
/// <param name="newData">New payload; must not be null.</param>
/// <param name="newCategoryId">New category id, or null to keep the old one.</param>
/// <param name="newTimestamp">New record time, or null to keep the time portion unchanged.</param>
/// <exception cref="ArgumentException">Thrown when <paramref name="memberId"/> is blank or malformed.</exception>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="newData"/> is null.</exception>
/// <exception cref="KeyNotFoundException">Thrown when the member does not exist in the Hash.</exception>
public async Task UpdateMeterData<T>(
    string redisCacheKey,
    string oldCategoryIndexKey,
    string newCategoryIndexKey,
    string memberId, // unique id ("categoryId:GUID")
    T newData,
    int? newCategoryId = null,
    DateTimeOffset? newTimestamp = null)
{
    // Parameter validation.
    if (string.IsNullOrWhiteSpace(memberId))
        throw new ArgumentException("Invalid member ID");
    if (newData == null)
        throw new ArgumentNullException(nameof(newData));

    var luaScript = @"
        local mainKey = KEYS[1]
        local scoreKey = KEYS[2]
        local oldIndex = KEYS[3]
        local newIndex = KEYS[4]
        local member = ARGV[1]
        local newData = ARGV[2]
        local newScore = ARGV[3]
        if redis.call('HEXISTS', mainKey, member) == 0 then
            return 0
        end
        redis.call('HSET', mainKey, member, newData)
        if newScore ~= '' then
            redis.call('SREM', oldIndex, member)
            redis.call('ZADD', scoreKey, newScore, member)
            redis.call('SADD', newIndex, member)
        end
        return 1
    ";

    // Recompute the packed score only when the category or timestamp changes.
    // NOTE(review): the member id keeps its original "oldCategoryId:" prefix even
    // when the category changes — only the score/indexes move; confirm intended.
    long? newScoreValue = null;
    if (newCategoryId.HasValue || newTimestamp.HasValue)
    {
        var parts = memberId.Split(':');
        if (parts.Length == 0 || !int.TryParse(parts[0], out var oldCategoryId))
            throw new ArgumentException("Invalid member ID format, expected \"categoryId:GUID\"");
        var actualCategoryId = newCategoryId ?? oldCategoryId;
        var actualTimestamp = newTimestamp ?? DateTimeOffset.UtcNow;
        newScoreValue = ((long)actualCategoryId << 32) | (uint)actualTimestamp.Ticks;
    }

    var result = await Instance.EvalAsync(luaScript,
        new[]
        {
            redisCacheKey,
            $"{redisCacheKey}_scores",
            oldCategoryIndexKey,
            newCategoryIndexKey
        },
        new[]
        {
            memberId,
            newData.Serialize(),
            newScoreValue?.ToString() ?? ""
        });

    // BUG fix: Redis integer replies come back boxed as Int64; the old `(int)result`
    // unbox would throw InvalidCastException instead of reporting "not found".
    if (Convert.ToInt64(result) == 0)
        throw new KeyNotFoundException("指定数据不存在");

    // BUG fix: update the global time index only AFTER the script confirmed the
    // record exists — previously a failed update still ZADD-ed a dangling member.
    if (newTimestamp.HasValue)
    {
        long newGlobalScore = newTimestamp.Value.ToUnixTimeMilliseconds();
        await Instance.ZAddAsync("global_data_all", newGlobalScore, memberId);
    }
}
/// <summary>
/// Returns one page of records for a category, ordered by the packed ZSET score
/// (category id in the high 32 bits, truncated timestamp ticks in the low 32).
/// </summary>
/// <param name="redisCacheKey">Hash key that stores the serialized payloads.</param>
/// <param name="redisCacheIndexKey">Category SET index key (currently unused by the query path).</param>
/// <param name="categoryId">Category whose records are paged.</param>
/// <param name="pageSize">Items per page; must be positive.</param>
/// <param name="pageIndex">1-based page number; must be positive.</param>
/// <param name="descending">True for newest-first ordering.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown for non-positive paging arguments.</exception>
public async Task<BusPagedResult<T>> GetMeterZSetPagedData<T>(
    string redisCacheKey,
    string redisCacheIndexKey,
    int categoryId,
    int pageSize = 10,
    int pageIndex = 1,
    bool descending = true)
{
    if (pageSize <= 0)
        throw new ArgumentOutOfRangeException(nameof(pageSize));
    if (pageIndex <= 0)
        throw new ArgumentOutOfRangeException(nameof(pageIndex));

    // Score range covered by this category: every score whose high 32 bits equal categoryId.
    long minScore = (long)categoryId << 32;
    // BUG fix: ZRANGEBYSCORE bounds are inclusive, so the old upper bound
    // ((categoryId + 1) << 32) also matched the first entry of the NEXT category;
    // subtract 1 to stay strictly inside this category.
    long maxScore = (((long)categoryId + 1) << 32) - 1;

    // Offset of the first item of the requested page.
    int start = (pageIndex - 1) * pageSize;

    // Fetch the ordered member ids for this page.
    var members = descending
        ? await Instance.ZRevRangeByScoreAsync(
            $"{redisCacheKey}_scores",
            maxScore,
            minScore,
            start,
            pageSize)
        : await Instance.ZRangeByScoreAsync(
            $"{redisCacheKey}_scores",
            minScore,
            maxScore,
            start,
            pageSize);

    // Resolve the payloads concurrently from the Hash.
    var dataTasks = members.Select(m =>
        Instance.HGetAsync<T>(redisCacheKey, m)).ToArray();
    await Task.WhenAll(dataTasks);

    // Total record count for the category (for the pager).
    var total = await Instance.ZCountAsync(
        $"{redisCacheKey}_scores",
        minScore,
        maxScore);

    return new BusPagedResult<T>
    {
        Items = dataTasks.Select(t => t.Result).ToList(),
        TotalCount = total,
        PageIndex = pageIndex,
        PageSize = pageSize
    };
}
/// <summary>
/// Atomically removes one record by its unique member id: deletes the Hash field,
/// the sort-index entry and the category-index entry in a single Lua script.
/// </summary>
/// <param name="redisCacheKey">Hash key that stores the serialized payloads.</param>
/// <param name="redisCacheIndexKey">Category SET index key.</param>
/// <param name="uniqueId">Unique member id ("categoryId:GUID").</param>
/// <exception cref="ArgumentException">Thrown when <paramref name="uniqueId"/> is blank.</exception>
/// <exception cref="Exception">Thrown when the script does not report success.</exception>
public async Task RemoveMeterZSetData<T>(
    string redisCacheKey,
    string redisCacheIndexKey,
    string uniqueId) // delete by unique member id
{
    if (string.IsNullOrWhiteSpace(uniqueId))
        throw new ArgumentException("Invalid unique ID");

    // One Lua script so the three deletes are atomic.
    var luaScript = @"
        local mainKey = KEYS[1]
        local scoreKey = KEYS[2]
        local indexKey = KEYS[3]
        local member = ARGV[1]
        redis.call('HDEL', mainKey, member)
        redis.call('ZREM', scoreKey, member)
        redis.call('SREM', indexKey, member)
        return 1
    ";
    var keys = new[]
    {
        redisCacheKey,
        $"{redisCacheKey}_scores",
        redisCacheIndexKey
    };
    var result = await Instance.EvalAsync(luaScript,
        keys,
        new[] { uniqueId });

    // BUG fix: Redis integer replies are boxed Int64; the old `(int)result` unbox
    // threw InvalidCastException on every call instead of checking the script result.
    if (Convert.ToInt64(result) != 1)
        throw new Exception("删除操作失败");
}
/// <summary>
/// Cursor-based pagination over the global time index ("global_data_all"),
/// ordered by Unix-millisecond score. Fetches pageSize + 1 members to detect
/// whether a following page exists.
/// </summary>
/// <param name="redisCacheKey">Hash key the member payloads are resolved from.</param>
/// <param name="pageSize">Items per page.</param>
/// <param name="lastScore">Score cursor from the previous page, or null for the first page.</param>
/// <param name="lastMember">Member cursor from the previous page (currently informational only).</param>
/// <param name="descending">True for newest-first ordering.</param>
public async Task<GlobalPagedResult<T>> GetGlobalPagedData<T>(
    string redisCacheKey,
    int pageSize = 10,
    long? lastScore = null,
    string lastMember = null,
    bool descending = true)
{
    const string zsetKey = "global_data_all";

    // Cursor start: resume from lastScore when supplied, otherwise from the extreme end.
    // (The old code also deconstructed an `excludeMember` value that was never used.)
    var startScore = descending ? (lastScore ?? long.MaxValue) : (lastScore ?? 0L);
    // NOTE(review): ZRANGEBYSCORE bounds are inclusive and lastMember is not excluded,
    // so members sharing startScore (including lastMember itself) can reappear on the
    // next page — confirm whether callers need boundary de-duplication.

    // Fetch one extra member to know whether another page follows.
    string[] members;
    if (descending)
    {
        members = await Instance.ZRevRangeByScoreAsync(
            zsetKey,
            max: startScore,
            min: 0,
            offset: 0,
            count: pageSize + 1);
    }
    else
    {
        members = await Instance.ZRangeByScoreAsync(
            zsetKey,
            min: startScore,
            max: long.MaxValue,
            offset: 0,
            count: pageSize + 1);
    }

    // Trim the probe row back off.
    bool hasNext = members.Length > pageSize;
    var actualMembers = members.Take(pageSize).ToArray();

    // Resolve payloads concurrently from the Hash.
    var dataTasks = actualMembers
        .Select(m => Instance.HGetAsync<T>(redisCacheKey, m))
        .ToArray();
    await Task.WhenAll(dataTasks);

    // Cursor for the next page: score + member of the last row on this page.
    (long? nextScore, string nextMember) = actualMembers.Any()
        ? await GetNextCursor(zsetKey, actualMembers.Last(), descending)
        : (null, null);

    return new GlobalPagedResult<T>
    {
        Items = dataTasks.Select(t => t.Result).ToList(),
        HasNext = hasNext,
        NextScore = nextScore,
        NextMember = nextMember
    };
}
/// <summary>
/// Builds the next-page cursor for a member: its current score in the sorted set
/// (null if the member no longer exists) plus the member id itself.
/// </summary>
/// <param name="zsetKey">Sorted-set key to look the score up in.</param>
/// <param name="lastMember">Member id of the last row on the current page.</param>
/// <param name="descending">Page direction (not used in the lookup itself).</param>
private async Task<(long? score, string member)> GetNextCursor(
    string zsetKey,
    string lastMember,
    bool descending)
{
    var currentScore = await Instance.ZScoreAsync(zsetKey, lastMember);
    long? nextScore = currentScore.HasValue ? (long)currentScore.Value : (long?)null;
    return (nextScore, lastMember);
}
}
}

View File

@ -1,4 +1,5 @@
using FreeRedis;
using JiShe.CollectBus.Common.Models;
namespace JiShe.CollectBus.FreeRedisProvider
{

View File

@ -13,6 +13,11 @@ namespace JiShe.CollectBus.FreeRedisProvider.Options
/// </summary>
public string? Configuration { get; set; }
/// <summary>
/// 最大连接数
/// </summary>
public string? MaxPoolSize { get; set; }
/// <summary>
/// 默认数据库
/// </summary>

View File

@ -361,45 +361,5 @@ namespace JiShe.CollectBus.Host
});
});
}
/// <summary>
/// 配置Kafka主题
/// </summary>
/// <param name="context"></param>
/// <param name="configuration"></param>
public void ConfigureKafkaTopic(ServiceConfigurationContext context, IConfiguration configuration)
{
var adminClient = new AdminClientBuilder(new AdminClientConfig
{
BootstrapServers = configuration.GetConnectionString(CommonConst.Kafka)
}).Build();
try
{
List<string> topics = ProtocolConstExtensions.GetAllTopicNamesByIssued();
topics.AddRange(ProtocolConstExtensions.GetAllTopicNamesByReceived());
List<TopicSpecification> topicSpecifications = new List<TopicSpecification>();
foreach (var item in topics)
{
topicSpecifications.Add(new TopicSpecification
{
Name = item,
NumPartitions = 3,
ReplicationFactor = 1
});
}
adminClient.CreateTopicsAsync(topicSpecifications).ConfigureAwait(false).GetAwaiter().GetResult();
}
catch (CreateTopicsException e)
{
if (e.Results[0].Error.Code != ErrorCode.TopicAlreadyExists)
{
throw;
}
}
}
}
}

View File

@ -41,6 +41,7 @@
},
"Redis": {
"Configuration": "192.168.1.9:6380,password=1q2w3e!@#,syncTimeout=30000,abortConnect=false,connectTimeout=30000,allowAdmin=true",
"MaxPoolSize": "50",
"DefaultDB": "14",
"HangfireDB": "15"
},
@ -83,11 +84,15 @@
},
"Kafka": {
"BootstrapServers": "192.168.1.9:29092,192.168.1.9:39092,192.168.1.9:49092",
"EnableFilter": true,
"EnableAuthorization": false,
"SecurityProtocol": "SASL_PLAINTEXT",
"SaslMechanism": "PLAIN",
"SecurityProtocol": "SaslPlaintext",
"SaslMechanism": "Plain",
"SaslUserName": "lixiao",
"SaslPassword": "lixiao1980"
"SaslPassword": "lixiao1980",
"KafkaReplicationFactor": 3,
"NumPartitions": 30,
"ServerTagName": "JiSheCollectBus3"
//"Topic": {
// "ReplicationFactor": 3,
// "NumPartitions": 1000
@ -128,9 +133,7 @@
"OpenDebugMode": true,
"UseTableSessionPoolByDefault": false
},
"ServerTagName": "JiSheCollectBus",
"KafkaReplicationFactor": 3,
"NumPartitions": 30,
"ServerTagName": "JiSheCollectBus3",
"Cassandra": {
"ReplicationStrategy": {
"Class": "NetworkTopologyStrategy", //NetworkTopologyStrategySimpleStrategy

View File

@ -1,6 +1,7 @@
using JiShe.CollectBus.IoTDBProvider.Context;
using JiShe.CollectBus.IoTDBProvider.Interface;
using JiShe.CollectBus.IoTDBProvider.Provider;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using System;
using System.Collections.Generic;
@ -8,6 +9,7 @@ using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Volo.Abp.Modularity;
using static Thrift.Server.TThreadPoolAsyncServer;
namespace JiShe.CollectBus.IoTDBProvider
{
@ -15,7 +17,12 @@ namespace JiShe.CollectBus.IoTDBProvider
{
public override void ConfigureServices(ServiceConfigurationContext context)
{
context.Services.Configure<IoTDBOptions>(context.Services.GetConfiguration().GetSection(nameof(IoTDBOptions)));
var configuration = context.Services.GetConfiguration();
Configure<IoTDBOptions>(options =>
{
configuration.GetSection(nameof(IoTDBOptions)).Bind(options);
});
// 注册上下文为Scoped
context.Services.AddScoped<IoTDBRuntimeContext>();

View File

@ -183,6 +183,19 @@ namespace JiShe.CollectBus.Kafka.AdminClient
return partitions.Any(p => p.PartitionId == targetPartition);
}
/// <summary>
/// 获取主题的分区数量
/// </summary>
/// <param name="topic"></param>
/// <returns></returns>
public int GetTopicPartitionsNum(string topic)
{
var metadata = Instance.GetMetadata(topic, TimeSpan.FromSeconds(10));
if (metadata.Topics.Count == 0)
return 0;
return metadata.Topics[0].Partitions.Count;
}
public void Dispose()
{
Instance?.Dispose();

View File

@ -52,5 +52,12 @@ namespace JiShe.CollectBus.Kafka.AdminClient
/// <param name="targetPartition"></param>
/// <returns></returns>
bool CheckPartitionsExist(string topic, int targetPartition);
/// <summary>
/// 获取主题的分区数量
/// </summary>
/// <param name="topic"></param>
/// <returns></returns>
int GetTopicPartitionsNum(string topic);
}
}

View File

@ -12,7 +12,7 @@ namespace JiShe.CollectBus.Kafka.Attributes
/// <summary>
/// 订阅的主题
/// </summary>
public string[] Topics { get; set; }
public string Topic { get; set; }
/// <summary>
/// 分区
@ -24,30 +24,53 @@ namespace JiShe.CollectBus.Kafka.Attributes
/// </summary>
public string GroupId { get; set; }
public KafkaSubscribeAttribute(string[] topics, string groupId = "default")
/// <summary>
/// 任务数(默认是多少个分区多少个任务)
/// 如设置订阅指定Partition则任务数始终为1
/// </summary>
public int TaskCount { get; set; } = -1;
/// <summary>
/// 批量处理数量
/// </summary>
public int BatchSize { get; set; } = 100;
/// <summary>
/// 是否启用批量处理
/// </summary>
public bool EnableBatch { get; set; } = false;
/// <summary>
/// 批次超时时间
/// </summary>
public TimeSpan? BatchTimeout { get; set; }=null;
/// <summary>
/// 订阅主题
/// </summary>
/// <param name="batchTimeout">batchTimeout格式:("00:05:00")</param>
public KafkaSubscribeAttribute(string topic, string groupId = "default", bool enableBatch = false, int batchSize = 100, string? batchTimeout = null)
{
this.Topics = topics;
this.Topic = topic;
this.GroupId = groupId;
this.EnableBatch = enableBatch;
this.BatchSize = batchSize;
this.BatchTimeout = batchTimeout != null? TimeSpan.Parse(batchTimeout): null;
}
public KafkaSubscribeAttribute(string topic, string groupId = "default")
/// <summary>
/// 订阅主题
/// </summary>
/// <param name="batchTimeout">batchTimeout格式:("00:05:00")</param>
public KafkaSubscribeAttribute(string topic, int partition, string groupId = "default", bool enableBatch = false, int batchSize = 100, string? batchTimeout = null)
{
this.Topics = new string[] { topic };
this.GroupId = groupId;
}
public KafkaSubscribeAttribute(string[] topics, int partition, string groupId = "default")
{
this.Topics = topics;
this.Partition = partition;
this.GroupId = groupId;
}
public KafkaSubscribeAttribute(string topic, int partition, string groupId = "default")
{
this.Topics = new string[] { topic };
this.Topic = topic;
this.Partition = partition;
this.GroupId = groupId;
this.TaskCount = 1;
this.EnableBatch = enableBatch;
this.BatchSize = batchSize;
this.BatchTimeout = batchTimeout != null ? TimeSpan.Parse(batchTimeout) : null;
}
}
}

View File

@ -1,4 +1,5 @@
using Confluent.Kafka;
using JiShe.CollectBus.Common.Consts;
using JiShe.CollectBus.Kafka.Consumer;
using JiShe.CollectBus.Kafka.Producer;
using Microsoft.AspNetCore.Builder;
@ -16,17 +17,39 @@ namespace JiShe.CollectBus.Kafka
{
public override void ConfigureServices(ServiceConfigurationContext context)
{
var configuration = context.Services.GetConfiguration();
//var kafkaSection = configuration.GetSection(CommonConst.Kafka);
//KafkaOptionConfig kafkaOptionConfig = new KafkaOptionConfig ();
//kafkaSection.Bind(kafkaOptionConfig);
//if (configuration[CommonConst.ServerTagName] != null)
//{
// kafkaOptionConfig.ServerTagName = configuration[CommonConst.ServerTagName]!;
//}
//context.Services.AddSingleton(kafkaOptionConfig);
//context.Services.Configure<KafkaOptionConfig>(context.Services.GetConfiguration().GetSection(CommonConst.Kafka));
Configure<KafkaOptionConfig>(options =>
{
configuration.GetSection(CommonConst.Kafka).Bind(options);
});
// 注册Producer
context.Services.AddTransient<IProducerService, ProducerService>();
context.Services.AddSingleton<IProducerService, ProducerService>();
// 注册Consumer
context.Services.AddTransient<IConsumerService, ConsumerService>();
context.Services.AddSingleton<IConsumerService, ConsumerService>();
}
public override void OnApplicationInitialization(ApplicationInitializationContext context)
{
var app = context.GetApplicationBuilder();
app.UseKafkaSubscribers(Assembly.Load("JiShe.CollectBus.Application"));
// 注册Subscriber
app.ApplicationServices.UseKafkaSubscribers();
// 获取程序集
//app.UseKafkaSubscribers(Assembly.Load("JiShe.CollectBus.Application"));
}
}
}

View File

@ -1,14 +1,9 @@
using Confluent.Kafka;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using JiShe.CollectBus.Kafka.Attributes;
using Volo.Abp.DependencyInjection;
using JiShe.CollectBus.Kafka.AdminClient;
using static Confluent.Kafka.ConfigPropertyNames;
using Microsoft.Extensions.Options;
using System.Collections.Concurrent;
using System.Text.RegularExpressions;
using NUglify.Html;
using System.Text;
namespace JiShe.CollectBus.Kafka.Consumer
{
@ -18,11 +13,14 @@ namespace JiShe.CollectBus.Kafka.Consumer
private readonly IConfiguration _configuration;
private readonly ConcurrentDictionary<Type, (object Consumer, CancellationTokenSource CTS)>
_consumerStore = new();
private readonly KafkaOptionConfig _kafkaOptionConfig;
private class KafkaConsumer<TKey, TValue> where TKey : notnull where TValue : class { }
public ConsumerService(IConfiguration configuration, ILogger<ConsumerService> logger)
public ConsumerService(IConfiguration configuration, ILogger<ConsumerService> logger, IOptions<KafkaOptionConfig> kafkaOptionConfig)
{
_configuration = configuration;
_logger = logger;
_kafkaOptionConfig = kafkaOptionConfig.Value;
}
#region private
@ -37,28 +35,31 @@ namespace JiShe.CollectBus.Kafka.Consumer
{
var config = BuildConsumerConfig(groupId);
return new ConsumerBuilder<TKey, TValue>(config)
.SetValueDeserializer(new JsonSerializer<TValue>())
.SetLogHandler((_, log) => _logger.LogInformation($"消费者Log: {log.Message}"))
.SetErrorHandler((_, e) => _logger.LogError($"消费者错误: {e.Reason}"))
.Build();
}
private ConsumerConfig BuildConsumerConfig(string? groupId = null)
{
var enableAuth = bool.Parse(_configuration["Kafka:EnableAuthorization"]!);
var config = new ConsumerConfig
{
BootstrapServers = _configuration["Kafka:BootstrapServers"],
BootstrapServers = _kafkaOptionConfig.BootstrapServers,
GroupId = groupId ?? "default",
AutoOffsetReset = AutoOffsetReset.Earliest,
EnableAutoCommit = false // 禁止AutoCommit
EnableAutoCommit = false, // 禁止AutoCommit
EnablePartitionEof = true, // 启用分区末尾标记
//AllowAutoCreateTopics= true, // 启用自动创建
FetchMaxBytes = 1024 * 1024 * 50 // 增加拉取大小50MB
};
if (enableAuth)
if (_kafkaOptionConfig.EnableAuthorization)
{
config.SecurityProtocol = SecurityProtocol.SaslPlaintext;
config.SaslMechanism = SaslMechanism.Plain;
config.SaslUsername = _configuration["Kafka:SaslUserName"];
config.SaslPassword = _configuration["Kafka:SaslPassword"];
config.SecurityProtocol = _kafkaOptionConfig.SecurityProtocol;
config.SaslMechanism = _kafkaOptionConfig.SaslMechanism;
config.SaslUsername = _kafkaOptionConfig.SaslUserName;
config.SaslPassword = _kafkaOptionConfig.SaslPassword;
}
return config;
@ -101,7 +102,7 @@ namespace JiShe.CollectBus.Kafka.Consumer
/// <returns></returns>
public async Task SubscribeAsync<TKey, TValue>(string[] topics, Func<TKey, TValue, Task<bool>> messageHandler, string? groupId = null) where TKey : notnull where TValue : class
{
var consumerKey = typeof((TKey, TValue));
var consumerKey = typeof(KafkaConsumer<TKey, TValue>);
var cts = new CancellationTokenSource();
var consumer = _consumerStore.GetOrAdd(consumerKey, _ =>
@ -119,6 +120,29 @@ namespace JiShe.CollectBus.Kafka.Consumer
try
{
var result = consumer.Consume(cts.Token);
if (result == null || result.Message==null || result.Message.Value == null)
{
_logger.LogWarning($"Kafka消费: {result?.Topic} 分区 {result?.Partition} 值为NULL");
//consumer.Commit(result); // 手动提交
continue;
}
if (result.IsPartitionEOF)
{
_logger.LogInformation("Kafka消费: {Topic} 分区 {Partition} 已消费完", result.Topic, result.Partition);
await Task.Delay(TimeSpan.FromSeconds(1),cts.Token);
continue;
}
if (_kafkaOptionConfig.EnableFilter)
{
var headersFilter = new HeadersFilter { { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) } };
// 检查 Header 是否符合条件
if (!headersFilter.Match(result.Message.Headers))
{
//consumer.Commit(result); // 提交偏移量
// 跳过消息
continue;
}
}
bool sucess= await messageHandler(result.Message.Key, result.Message.Value);
if (sucess)
{
@ -146,14 +170,14 @@ namespace JiShe.CollectBus.Kafka.Consumer
/// <returns></returns>
public async Task SubscribeAsync<TValue>(string[] topics, Func<TValue, Task<bool>> messageHandler, string? groupId) where TValue : class
{
var consumerKey = typeof((Null, TValue));
var consumerKey = typeof(KafkaConsumer<Ignore, TValue>);
var cts = new CancellationTokenSource();
var consumer = _consumerStore.GetOrAdd(consumerKey, _=>
(
CreateConsumer<Null, TValue>(groupId),
CreateConsumer<Ignore, TValue>(groupId),
cts
)).Consumer as IConsumer<Null, TValue>;
)).Consumer as IConsumer<Ignore, TValue>;
consumer!.Subscribe(topics);
@ -164,6 +188,29 @@ namespace JiShe.CollectBus.Kafka.Consumer
try
{
var result = consumer.Consume(cts.Token);
if (result == null || result.Message==null || result.Message.Value == null)
{
_logger.LogWarning($"Kafka消费: {result?.Topic} 分区 {result?.Partition} 值为NULL");
//consumer.Commit(result); // 手动提交
continue;
}
if (result.IsPartitionEOF)
{
_logger.LogInformation("Kafka消费: {Topic} 分区 {Partition} 已消费完", result.Topic, result.Partition);
await Task.Delay(100, cts.Token);
continue;
}
if (_kafkaOptionConfig.EnableFilter)
{
var headersFilter = new HeadersFilter { { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) } };
// 检查 Header 是否符合条件
if (!headersFilter.Match(result.Message.Headers))
{
//consumer.Commit(result); // 提交偏移量
// 跳过消息
continue;
}
}
bool sucess = await messageHandler(result.Message.Value);
if (sucess)
consumer.Commit(result); // 手动提交
@ -177,6 +224,272 @@ namespace JiShe.CollectBus.Kafka.Consumer
await Task.CompletedTask;
}
/// <summary>
/// 批量订阅消息
/// </summary>
/// <typeparam name="TKey">消息Key类型</typeparam>
/// <typeparam name="TValue">消息Value类型</typeparam>
/// <param name="topic">主题</param>
/// <param name="messageBatchHandler">批量消息处理函数</param>
/// <param name="groupId">消费组ID</param>
/// <param name="batchSize">批次大小</param>
/// <param name="batchTimeout">批次超时时间</param>
public async Task SubscribeBatchAsync<TKey, TValue>(string topic, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class
{
await SubscribeBatchAsync<TKey, TValue>(new[] { topic }, messageBatchHandler, groupId, batchSize, batchTimeout);
}
/// <summary>
/// 批量订阅消息
/// </summary>
/// <typeparam name="TKey">消息Key类型</typeparam>
/// <typeparam name="TValue">消息Value类型</typeparam>
/// <param name="topics">主题列表</param>
/// <param name="messageBatchHandler">批量消息处理函数</param>
/// <param name="groupId">消费组ID</param>
/// <param name="batchSize">批次大小</param>
/// <param name="batchTimeout">批次超时时间</param>
public async Task SubscribeBatchAsync<TKey, TValue>(string[] topics,Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null,int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class
{
var consumerKey = typeof(KafkaConsumer<TKey, TValue>);
var cts = new CancellationTokenSource();
var consumer = _consumerStore.GetOrAdd(consumerKey, _ =>
(
CreateConsumer<TKey, TValue>(groupId),
cts
)).Consumer as IConsumer<TKey, TValue>;
consumer!.Subscribe(topics);
var timeout = batchTimeout ?? TimeSpan.FromSeconds(5); // 默认超时时间调整为5秒
_ = Task.Run(async () =>
{
var messages = new List<(TValue Value, TopicPartitionOffset Offset)>();
var startTime = DateTime.UtcNow;
while (!cts.IsCancellationRequested)
{
try
{
// 非阻塞快速累积消息
while (messages.Count < batchSize && (DateTime.UtcNow - startTime) < timeout)
{
var result = consumer.Consume(TimeSpan.Zero); // 非阻塞调用
if (result != null)
{
if (result.IsPartitionEOF)
{
_logger.LogInformation("Kafka消费: {Topic} 分区 {Partition} 已消费完", result.Topic, result.Partition);
await Task.Delay(TimeSpan.FromSeconds(1), cts.Token);
}
else if (result.Message.Value != null)
{
if (_kafkaOptionConfig.EnableFilter)
{
var headersFilter = new HeadersFilter { { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) } };
// 检查 Header 是否符合条件
if (!headersFilter.Match(result.Message.Headers))
{
//consumer.Commit(result); // 提交偏移量
// 跳过消息
continue;
}
}
messages.Add((result.Message.Value, result.TopicPartitionOffset));
//messages.Add(result.Message.Value);
}
}
else
{
// 无消息时短暂等待
await Task.Delay(10, cts.Token);
}
}
// 处理批次
if (messages.Count > 0)
{
bool success = await messageBatchHandler(messages.Select(m => m.Value));
if (success)
{
var offsetsByPartition = new Dictionary<TopicPartition, long>();
foreach (var msg in messages)
{
var tp = msg.Offset.TopicPartition;
var offset = msg.Offset.Offset;
if (!offsetsByPartition.TryGetValue(tp, out var currentMax) || offset > currentMax)
{
offsetsByPartition[tp] = offset;
}
}
var offsetsToCommit = offsetsByPartition
.Select(kv => new TopicPartitionOffset(kv.Key, new Offset(kv.Value + 1)))
.ToList();
consumer.Commit(offsetsToCommit);
}
messages.Clear();
}
startTime = DateTime.UtcNow;
}
catch (ConsumeException ex)
{
_logger.LogError(ex, $"消息消费失败: {ex.Error.Reason}");
}
catch (OperationCanceledException)
{
// 任务取消,正常退出
}
catch (Exception ex)
{
_logger.LogError(ex, "处理批量消息时发生未知错误");
}
}
}, cts.Token);
await Task.CompletedTask;
}
/// <summary>
/// 批量订阅消息
/// </summary>
/// <typeparam name="TValue">消息Value类型</typeparam>
/// <param name="topic">主题列表</param>
/// <param name="messageBatchHandler">批量消息处理函数</param>
/// <param name="groupId">消费组ID</param>
/// <param name="batchSize">批次大小</param>
/// <param name="batchTimeout">批次超时时间</param>
/// <param name="consumeTimeout">消费等待时间</param>
public async Task SubscribeBatchAsync<TValue>(string topic, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null, TimeSpan? consumeTimeout = null) where TValue : class
{
await SubscribeBatchAsync<TValue>(new[] { topic }, messageBatchHandler, groupId, batchSize, batchTimeout, consumeTimeout);
}
/// <summary>
/// 批量订阅消息
/// </summary>
/// <typeparam name="TValue">消息Value类型</typeparam>
/// <param name="topics">主题列表</param>
/// <param name="messageBatchHandler">批量消息处理函数</param>
/// <param name="groupId">消费组ID</param>
/// <param name="batchSize">批次大小</param>
/// <param name="batchTimeout">批次超时时间</param>
/// <param name="consumeTimeout">消费等待时间</param>
public async Task SubscribeBatchAsync<TValue>(string[] topics,Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100,TimeSpan? batchTimeout = null,TimeSpan? consumeTimeout = null)where TValue : class
{
var consumerKey = typeof(KafkaConsumer<Ignore, TValue>);
var cts = new CancellationTokenSource();
var consumer = _consumerStore.GetOrAdd(consumerKey, _ =>
(
CreateConsumer<Ignore, TValue>(groupId),
cts
)).Consumer as IConsumer<Ignore, TValue>;
consumer!.Subscribe(topics);
var timeout = batchTimeout ?? TimeSpan.FromSeconds(5); // 默认超时时间调整为5秒
_ = Task.Run(async () =>
{
var messages = new List<(TValue Value, TopicPartitionOffset Offset)>();
//var messages = new List<ConsumeResult<TKey, TValue>>();
var startTime = DateTime.UtcNow;
while (!cts.IsCancellationRequested)
{
try
{
// 非阻塞快速累积消息
while (messages.Count < batchSize && (DateTime.UtcNow - startTime) < timeout)
{
var result = consumer.Consume(TimeSpan.Zero); // 非阻塞调用
if (result != null)
{
if (result.IsPartitionEOF)
{
_logger.LogInformation("Kafka消费: {Topic} 分区 {Partition} 已消费完", result.Topic, result.Partition);
await Task.Delay(TimeSpan.FromSeconds(1), cts.Token);
}
else if (result.Message.Value != null)
{
if (_kafkaOptionConfig.EnableFilter)
{
var headersFilter = new HeadersFilter { { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) } };
// 检查 Header 是否符合条件
if (!headersFilter.Match(result.Message.Headers))
{
//consumer.Commit(result); // 提交偏移量
// 跳过消息
continue;
}
}
messages.Add((result.Message.Value, result.TopicPartitionOffset));
//messages.Add(result.Message.Value);
}
}
else
{
// 无消息时短暂等待
await Task.Delay(10, cts.Token);
}
}
// 处理批次
if (messages.Count > 0)
{
bool success = await messageBatchHandler(messages.Select(m => m.Value));
if (success)
{
var offsetsByPartition = new Dictionary<TopicPartition, long>();
foreach (var msg in messages)
{
var tp = msg.Offset.TopicPartition;
var offset = msg.Offset.Offset;
if (!offsetsByPartition.TryGetValue(tp, out var currentMax) || offset > currentMax)
{
offsetsByPartition[tp] = offset;
}
}
var offsetsToCommit = offsetsByPartition
.Select(kv => new TopicPartitionOffset(kv.Key, new Offset(kv.Value + 1)))
.ToList();
consumer.Commit(offsetsToCommit);
}
messages.Clear();
}
startTime = DateTime.UtcNow;
}
catch (ConsumeException ex)
{
_logger.LogError(ex, $"消息消费失败: {ex.Error.Reason}");
}
catch (OperationCanceledException)
{
// 任务取消,正常退出
}
catch (Exception ex)
{
_logger.LogError(ex, "处理批量消息时发生未知错误");
}
}
}, cts.Token);
await Task.CompletedTask;
}
/// <summary>
/// 取消消息订阅
/// </summary>

View File

@ -1,4 +1,5 @@
using System;
using Confluent.Kafka;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
@ -32,6 +33,14 @@ namespace JiShe.CollectBus.Kafka.Consumer
/// <returns></returns>
Task SubscribeAsync<TValue>(string[] topics, Func<TValue, Task<bool>> messageHandler, string? groupId = null) where TValue : class;
Task SubscribeBatchAsync<TKey, TValue>(string[] topics, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class;
Task SubscribeBatchAsync<TKey, TValue>(string topic, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class;
Task SubscribeBatchAsync<TValue>(string topic, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null, TimeSpan? consumeTimeout = null) where TValue : class;
Task SubscribeBatchAsync<TValue>(string[] topics, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null, TimeSpan? consumeTimeout = null) where TValue : class;
void Unsubscribe<TKey, TValue>() where TKey : notnull where TValue : class;
}
}

View File

@ -0,0 +1,30 @@
using Confluent.Kafka;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace JiShe.CollectBus.Kafka
{
/// <summary>
/// Message-header filter: a dictionary of required header name/value pairs
/// that an incoming Kafka message must carry to be accepted.
/// </summary>
public class HeadersFilter : Dictionary<string, byte[]>
{
    /// <summary>
    /// Returns true when every required header is present in
    /// <paramref name="headers"/> with a byte-identical value.
    /// </summary>
    /// <param name="headers">The Kafka message headers to check.</param>
    public bool Match(Headers headers)
    {
        return this.All(required =>
            headers.TryGetLastBytes(required.Key, out var actual)
            && actual.SequenceEqual(required.Value));
    }
}
}

View File

@ -6,6 +6,12 @@ using System.Threading.Tasks;
namespace JiShe.CollectBus.Kafka
{
/// <summary>
/// Kafka订阅者
/// <para>
/// 订阅者需要继承此接口并需要依赖注入,并使用<see cref="KafkaSubscribeAttribute"/>标记
/// </para>
/// </summary>
public interface IKafkaSubscribe
{
}

View File

@ -0,0 +1,21 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace JiShe.CollectBus.Kafka
{
/// <summary>
/// Acknowledgement contract for Kafka subscription results.
/// </summary>
public interface ISubscribeAck
{
    /// <summary>
    /// Whether the message was handled successfully (acknowledgement flag).
    /// </summary>
    bool Ack { get; set; }
    /// <summary>
    /// Optional message describing the handling result.
    /// </summary>
    string? Msg { get; set; }
}
}

View File

@ -0,0 +1,88 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Text.Json;
using Confluent.Kafka;
using System.Text.Json.Serialization;
using System.Text.Encodings.Web;
namespace JiShe.CollectBus.Kafka
{
/// <summary>
/// JSON 序列化器(支持泛型)
/// </summary>
/// <summary>
/// Generic JSON serializer/deserializer for Kafka message payloads,
/// built on System.Text.Json.
/// </summary>
public class JsonSerializer<T> : ISerializer<T>, IDeserializer<T>
{
    // Shared serializer settings, reused across all instances.
    private static readonly JsonSerializerOptions _options = new JsonSerializerOptions
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.Never,
        WriteIndented = false, // compact output
        Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping, // allow non-ASCII / special characters un-escaped
        IgnoreReadOnlyFields = true,
        IgnoreReadOnlyProperties = true,
        NumberHandling = JsonNumberHandling.AllowReadingFromString, // accept numbers written as strings
        AllowTrailingCommas = true, // tolerate trailing commas
        ReadCommentHandling = JsonCommentHandling.Skip, // tolerate comments
        PropertyNameCaseInsensitive = true, // case-insensitive property matching
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase, // camelCase property names on the wire
        Converters = { new DateTimeJsonConverter() } // custom "yyyy-MM-dd HH:mm:ss" DateTime handling
    };
    /// <summary>
    /// Serializes <paramref name="data"/> to UTF-8 JSON bytes.
    /// A null value yields a null (tombstone-style) payload.
    /// </summary>
    /// <exception cref="InvalidOperationException">Wraps any underlying serialization failure.</exception>
    public byte[] Serialize(T data, SerializationContext context)
    {
        if (data == null)
            return null;
        try
        {
            return JsonSerializer.SerializeToUtf8Bytes(data, _options);
        }
        catch (Exception ex)
        {
            throw new InvalidOperationException("Kafka序列化失败", ex);
        }
    }
    /// <summary>
    /// Deserializes UTF-8 JSON bytes back to <typeparamref name="T"/>.
    /// A null payload yields the default value of <typeparamref name="T"/>.
    /// </summary>
    /// <exception cref="InvalidOperationException">Wraps any underlying deserialization failure.</exception>
    public T Deserialize(ReadOnlySpan<byte> data, bool isNull, SerializationContext context)
    {
        if (isNull)
            return default;
        try
        {
            return JsonSerializer.Deserialize<T>(data, _options);
        }
        catch (Exception ex)
        {
            throw new InvalidOperationException("Kafka反序列化失败", ex);
        }
    }
}
/// <summary>
/// JSON converter that writes DateTime values using a fixed .NET format string
/// (default "yyyy-MM-dd HH:mm:ss") and reads them back, preferring the exact
/// configured format and falling back to a general parse for other inputs.
/// </summary>
public class DateTimeJsonConverter : JsonConverter<DateTime>
{
    private readonly string _dateFormatString;

    /// <summary>Creates the converter with the default "yyyy-MM-dd HH:mm:ss" format.</summary>
    public DateTimeJsonConverter()
        : this("yyyy-MM-dd HH:mm:ss")
    {
    }

    /// <summary>Creates the converter with a custom .NET date/time format string.</summary>
    public DateTimeJsonConverter(string dateFormatString)
    {
        _dateFormatString = dateFormatString;
    }

    /// <summary>
    /// Reads a DateTime from a JSON string. Tries the configured format first
    /// (invariant culture), then falls back to DateTime.Parse for compatibility
    /// with payloads produced in other formats.
    /// </summary>
    /// <exception cref="JsonException">The token is null/empty or not a string.</exception>
    public override DateTime Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        var text = reader.GetString();
        if (string.IsNullOrEmpty(text))
            throw new JsonException("Cannot convert a null or empty JSON value to DateTime.");
        if (DateTime.TryParseExact(text, _dateFormatString,
                System.Globalization.CultureInfo.InvariantCulture,
                System.Globalization.DateTimeStyles.None, out var exact))
            return exact;
        // Lenient fallback preserves the original behavior for non-default formats.
        return DateTime.Parse(text);
    }

    /// <summary>Writes the value as a JSON string in the configured format.</summary>
    public override void Write(Utf8JsonWriter writer, DateTime value, JsonSerializerOptions options)
    {
        writer.WriteStringValue(value.ToString(_dateFormatString));
    }
}
}

View File

@ -0,0 +1,63 @@
using Confluent.Kafka;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace JiShe.CollectBus.Kafka
{
/// <summary>
/// Strongly-typed Kafka settings bound from application configuration.
/// </summary>
public class KafkaOptionConfig
{
    /// <summary>Kafka bootstrap server address list.</summary>
    public string BootstrapServers { get; set; } = null!;

    /// <summary>Server tag used as the message routing/filter key.</summary>
    public string ServerTagName { get; set; } = "KafkaFilterKey";

    /// <summary>Replication factor applied when creating topics.</summary>
    public short KafkaReplicationFactor { get; set; }

    /// <summary>Partition count applied when creating topics.</summary>
    public int NumPartitions { get; set; }

    /// <summary>Whether message filtering is enabled.</summary>
    public bool EnableFilter { get; set; } = true;

    /// <summary>Whether SASL authentication is enabled.</summary>
    public bool EnableAuthorization { get; set; } = false;

    /// <summary>Security protocol used when authentication is enabled.</summary>
    public SecurityProtocol SecurityProtocol { get; set; } = SecurityProtocol.SaslPlaintext;

    /// <summary>SASL mechanism used when authentication is enabled.</summary>
    public SaslMechanism SaslMechanism { get; set; } = SaslMechanism.Plain;

    /// <summary>SASL user name (only used when authorization is enabled).</summary>
    public string? SaslUserName { get; set; }

    /// <summary>SASL password (only used when authorization is enabled).</summary>
    public string? SaslPassword { get; set; }
}
}

View File

@ -1,17 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace JiShe.CollectBus.Kafka
{
/// <summary>
/// Kafka options holder carrying raw key/value configuration dictionaries for
/// the producer, consumer and admin clients.
/// NOTE(review): this type appears superseded by KafkaOptionConfig — confirm
/// no remaining callers before removal.
/// </summary>
public class KafkaOptions
{
    // Kafka bootstrap server address list.
    public string BootstrapServers { get; set; }
    // Consumer group id.
    public string GroupId { get; set; }
    // Raw producer settings — presumably passed straight to the client config; verify against callers.
    public Dictionary<string, string> ProducerConfig { get; set; } = new();
    // Raw consumer settings.
    public Dictionary<string, string> ConsumerConfig { get; set; } = new();
    // Raw admin-client settings.
    public Dictionary<string, string> AdminConfig { get; set; } = new();
}
}

View File

@ -1,19 +1,16 @@
using Confluent.Kafka;
using JiShe.CollectBus.Common.Consts;
using JiShe.CollectBus.Common.Extensions;
using JiShe.CollectBus.Kafka.AdminClient;
using JiShe.CollectBus.Kafka.Attributes;
using JiShe.CollectBus.Kafka.Consumer;
using Microsoft.AspNetCore.Builder;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Primitives;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Options;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
using static Confluent.Kafka.ConfigPropertyNames;
namespace JiShe.CollectBus.Kafka
{
@ -24,30 +21,105 @@ namespace JiShe.CollectBus.Kafka
/// </summary>
/// <param name="app"></param>
/// <param name="assembly"></param>
public static void UseKafkaSubscribers(this IServiceProvider provider)
{
    // Purpose: create all known topics, then — once the host has started — scan
    // every DLL next to the entry assembly for IKafkaSubscribe implementations
    // registered in DI and spin up consumer threads for each.
    var lifetime = provider.GetRequiredService<IHostApplicationLifetime>();
    // Ensure every issued/received topic exists before any consumer subscribes.
    var kafkaAdminClient = provider.GetRequiredService<IAdminClientService>();
    var kafkaOptions = provider.GetRequiredService<IOptions<KafkaOptionConfig>>();
    List<string> topics = ProtocolConstExtensions.GetAllTopicNamesByIssued();
    topics.AddRange(ProtocolConstExtensions.GetAllTopicNamesByReceived());
    foreach (var item in topics)
    {
        // NOTE(review): blocks synchronously on async topic creation at startup —
        // acceptable here since this runs before the host serves traffic.
        kafkaAdminClient.CreateTopicAsync(item, kafkaOptions.Value.NumPartitions, kafkaOptions.Value.KafkaReplicationFactor).ConfigureAwait(false).GetAwaiter().GetResult();
    }
    // Defer subscriber discovery until the application has fully started.
    lifetime.ApplicationStarted.Register(() =>
    {
        var logger = provider.GetRequiredService<ILogger<CollectBusKafkaModule>>();
        int threadCount = 0;
        int topicCount = 0;
        var assemblyPath = Path.GetDirectoryName(Assembly.GetEntryAssembly()?.Location);
        if (string.IsNullOrWhiteSpace(assemblyPath))
        {
            logger.LogInformation($"kafka订阅未能找到程序路径");
            return;
        }
        var dllFiles = Directory.GetFiles(assemblyPath, "*.dll");
        foreach (var file in dllFiles)
        {
            // Reuse an already-loaded assembly instead of loading a duplicate copy.
            var assemblyName = AssemblyName.GetAssemblyName(file);
            var existingAssembly = AppDomain.CurrentDomain.GetAssemblies()
                .FirstOrDefault(a => a.GetName().FullName == assemblyName.FullName);
            var assembly = existingAssembly ?? Assembly.LoadFrom(file);
            // Concrete types implementing IKafkaSubscribe.
            var subscribeTypes = assembly.GetTypes().Where(type =>
                typeof(IKafkaSubscribe).IsAssignableFrom(type) &&
                !type.IsAbstract && !type.IsInterface).ToList(); ;
            if (subscribeTypes.Count == 0)
                continue;
            foreach (var subscribeType in subscribeTypes)
            {
                // All DI registrations for this subscriber type (may be several).
                var subscribes = provider.GetServices(subscribeType).ToList();
                subscribes.ForEach(subscribe =>
                {
                    if (subscribe!=null)
                    {
                        // tuple = (threads started, topics subscribed) for this subscriber.
                        Tuple<int, int> tuple = BuildKafkaSubscriber(subscribe, provider, logger, kafkaOptions.Value);
                        threadCount += tuple.Item1;
                        topicCount += tuple.Item2;
                    }
                });
            }
        }
        logger.LogInformation($"kafka订阅主题:{topicCount}数,共启动:{threadCount}线程");
    });
}
public static void UseKafkaSubscribers(this IApplicationBuilder app, Assembly assembly)
{
var provider = app.ApplicationServices;
var lifetime = provider.GetRequiredService<IHostApplicationLifetime>();
//初始化主题信息
var kafkaAdminClient = provider.GetRequiredService<IAdminClientService>();
var kafkaOptions = provider.GetRequiredService<IOptions<KafkaOptionConfig>>();
List<string> topics = ProtocolConstExtensions.GetAllTopicNamesByIssued();
topics.AddRange(ProtocolConstExtensions.GetAllTopicNamesByReceived());
foreach (var item in topics)
{
kafkaAdminClient.CreateTopicAsync(item, kafkaOptions.Value.NumPartitions, kafkaOptions.Value.KafkaReplicationFactor).ConfigureAwait(false).GetAwaiter().GetResult();
}
lifetime.ApplicationStarted.Register(() =>
{
var logger = provider.GetRequiredService<ILogger<CollectBusKafkaModule>>();
int threadCount = 0;
int topicCount = 0;
var subscribeTypes = assembly.GetTypes()
.Where(t => typeof(IKafkaSubscribe).IsAssignableFrom(t))
.ToList();
if (subscribeTypes.Count == 0) return;
var provider = app.ApplicationServices;
var lifetime = provider.GetRequiredService<IHostApplicationLifetime>();
lifetime.ApplicationStarted.Register(() =>
{
foreach (var subscribeType in subscribeTypes)
{
var subscribes = provider.GetServices(subscribeType).ToList();
subscribes.ForEach(subscribe => {
if(subscribe is IKafkaSubscribe)
if (subscribe != null)
{
BuildKafkaSubscriber(subscribe, provider);
Tuple<int, int> tuple = BuildKafkaSubscriber(subscribe, provider, logger, kafkaOptions.Value);
threadCount += tuple.Item1;
topicCount += tuple.Item2;
}
});
}
logger.LogInformation($"kafka订阅主题:{topicCount}数,共启动:{threadCount}线程");
});
}
@ -56,17 +128,29 @@ namespace JiShe.CollectBus.Kafka
/// </summary>
/// <param name="subscribe"></param>
/// <param name="provider"></param>
private static void BuildKafkaSubscriber(object subscribe, IServiceProvider provider)
private static Tuple<int,int> BuildKafkaSubscriber(object subscribe, IServiceProvider provider,ILogger<CollectBusKafkaModule> logger, KafkaOptionConfig kafkaOptionConfig)
{
var subscribedMethods = subscribe.GetType().GetMethods()
.Select(m => new { Method = m, Attribute = m.GetCustomAttribute<KafkaSubscribeAttribute>() })
.Where(x => x.Attribute != null)
.ToArray();
//var configuration = provider.GetRequiredService<IConfiguration>();
int threadCount = 0;
foreach (var sub in subscribedMethods)
{
Task.Run(() => StartConsumerAsync(provider, sub.Attribute!, sub.Method, subscribe));
int partitionCount = kafkaOptionConfig.NumPartitions;
//var adminClientService = provider.GetRequiredService<IAdminClientService>();
//int partitionCount = sub.Attribute!.TaskCount==-1?adminClientService.GetTopicPartitionsNum(sub.Attribute!.Topic) : sub.Attribute!.TaskCount;
if (partitionCount <= 0)
partitionCount = 1;
for (int i = 0; i < partitionCount; i++)
{
Task.Run(() => StartConsumerAsync(provider, sub.Attribute!, sub.Method, subscribe, logger));
threadCount++;
}
}
return Tuple.Create(threadCount, subscribedMethods.Length);
}
/// <summary>
/// 启动后台消费线程
@ -76,11 +160,30 @@ namespace JiShe.CollectBus.Kafka
/// <param name="method"></param>
/// <param name="consumerInstance"></param>
/// <returns></returns>
private static async Task StartConsumerAsync(IServiceProvider provider, KafkaSubscribeAttribute attr,MethodInfo method, object subscribe)
private static async Task StartConsumerAsync(IServiceProvider provider, KafkaSubscribeAttribute attr,MethodInfo method, object subscribe, ILogger<CollectBusKafkaModule> logger)
{
var consumerService = provider.GetRequiredService<IConsumerService>();
var logger = provider.GetRequiredService<ILogger<CollectBusKafkaModule>>();
await consumerService.SubscribeAsync<string>(attr.Topics, async (message) =>
if (attr.EnableBatch)
{
await consumerService.SubscribeBatchAsync<dynamic>(attr.Topic, async (message) =>
{
try
{
// 处理消息
return await ProcessMessageAsync(message, method, subscribe);
}
catch (ConsumeException ex)
{
// 处理消费错误
logger.LogError($"kafka批量消费异常:{ex.Message}");
}
return await Task.FromResult(false);
}, attr.GroupId, attr.BatchSize,attr.BatchTimeout);
}
else
{
await consumerService.SubscribeAsync<dynamic>(attr.Topic, async (message) =>
{
try
{
@ -93,9 +196,12 @@ namespace JiShe.CollectBus.Kafka
logger.LogError($"kafka消费异常:{ex.Message}");
}
return await Task.FromResult(false);
});
}, attr.GroupId);
}
}
/// <summary>
/// 处理消息
/// </summary>
@ -103,28 +209,34 @@ namespace JiShe.CollectBus.Kafka
/// <param name="method"></param>
/// <param name="subscribe"></param>
/// <returns></returns>
private static async Task<bool> ProcessMessageAsync(string message, MethodInfo method, object subscribe)
private static async Task<bool> ProcessMessageAsync(dynamic message, MethodInfo method, object subscribe)
{
var parameters = method.GetParameters();
if (parameters.Length != 1)
return true;
var paramType = parameters[0].ParameterType;
var messageObj = paramType == typeof(string)? message: JsonConvert.DeserializeObject(message, paramType);
if (method.ReturnType == typeof(Task))
bool isGenericTask = method.ReturnType.IsGenericType
&& method.ReturnType.GetGenericTypeDefinition() == typeof(Task<>);
bool existParameters = parameters.Length > 0;
//dynamic? messageObj= null;
//if (existParameters)
//{
// var paramType = parameters[0].ParameterType;
// messageObj = paramType == typeof(string) ? message : message.Deserialize(paramType);
//}
if (isGenericTask)
{
object? result = await (Task<bool>)method.Invoke(subscribe, new[] { messageObj })!;
if (result is bool success)
return success;
object? result = await (Task<ISubscribeAck>)method.Invoke(subscribe, existParameters? new[] { message } :null)!;
if (result is ISubscribeAck ackResult)
{
return ackResult.Ack;
}
}
else
{
object? result = method.Invoke(subscribe, new[] { messageObj });
if (result is bool success)
return success;
object? result = method.Invoke(subscribe, existParameters ? new[] { message } : null);
if (result is ISubscribeAck ackResult)
{
return ackResult.Ack;
}
}
return false;
}

View File

@ -15,6 +15,6 @@ namespace JiShe.CollectBus.Kafka.Producer
Task ProduceAsync<TKey, TValue>(string topic, TKey key, TValue value, int? partition, Action<DeliveryReport<TKey, TValue>>? deliveryHandler = null) where TKey : notnull where TValue : class;
Task ProduceAsync<TValue>(string topic, TValue value, int? partition = null, Action<DeliveryReport<Null, TValue>>? deliveryHandler = null) where TValue : class;
Task ProduceAsync<TValue>(string topic, TValue value, int? partition = null, Action<DeliveryReport<string, TValue>>? deliveryHandler = null) where TValue : class;
}
}

View File

@ -8,7 +8,9 @@ using Confluent.Kafka;
using JiShe.CollectBus.Kafka.Consumer;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Volo.Abp.DependencyInjection;
using YamlDotNet.Serialization;
namespace JiShe.CollectBus.Kafka.Producer
{
@ -16,12 +18,14 @@ namespace JiShe.CollectBus.Kafka.Producer
{
private readonly ILogger<ProducerService> _logger;
private readonly IConfiguration _configuration;
private readonly ConcurrentDictionary<Tuple<Type, Type>, object> _producerCache = new();
public ProducerService(IConfiguration configuration,ILogger<ProducerService> logger)
private readonly ConcurrentDictionary<Type, object> _producerCache = new();
private class KafkaProducer<TKey, TValue> where TKey : notnull where TValue : class { }
private readonly KafkaOptionConfig _kafkaOptionConfig;
public ProducerService(IConfiguration configuration,ILogger<ProducerService> logger, IOptions<KafkaOptionConfig> kafkaOptionConfig)
{
_configuration = configuration;
_logger = logger;
_kafkaOptionConfig = kafkaOptionConfig.Value;
}
#region private
@ -31,14 +35,13 @@ namespace JiShe.CollectBus.Kafka.Producer
/// <typeparam name="TKey"></typeparam>
/// <typeparam name="TValue"></typeparam>
/// <returns></returns>
private IProducer<TKey, TValue> GetProducer<TKey, TValue>()
private IProducer<TKey, TValue> GetProducer<TKey, TValue>(Type typeKey)
{
var typeKey = Tuple.Create(typeof(TKey), typeof(TValue))!;
return (IProducer<TKey, TValue>)_producerCache.GetOrAdd(typeKey, _ =>
{
var config = BuildProducerConfig();
return new ProducerBuilder<TKey, TValue>(config)
.SetValueSerializer(new JsonSerializer<TValue>()) // Value 使用自定义 JSON 序列化
.SetLogHandler((_, msg) => _logger.Log(ConvertLogLevel(msg.Level), msg.Message))
.Build();
});
@ -50,11 +53,9 @@ namespace JiShe.CollectBus.Kafka.Producer
/// <returns></returns>
private ProducerConfig BuildProducerConfig()
{
var enableAuth = bool.Parse(_configuration["Kafka:EnableAuthorization"]!);
var config = new ProducerConfig
{
BootstrapServers = _configuration["Kafka:BootstrapServers"],
BootstrapServers = _kafkaOptionConfig.BootstrapServers,
AllowAutoCreateTopics = true,
QueueBufferingMaxKbytes = 2_097_151, // 修改缓冲区最大为2GB默认为1GB
CompressionType = CompressionType.Lz4, // 配置使用压缩算法LZ4其他gzip/snappy/zstd
@ -62,14 +63,15 @@ namespace JiShe.CollectBus.Kafka.Producer
LingerMs = 20, // 修改等待时间为20ms
Acks = Acks.All, // 表明只有所有副本Broker都收到消息才算提交成功, 可以 Acks.Leader
MessageSendMaxRetries = 50, // 消息发送失败最大重试50次
MessageTimeoutMs = 120000, // 消息发送超时时间为2分钟,设置值MessageTimeoutMs > LingerMs
};
if (enableAuth)
if (_kafkaOptionConfig.EnableAuthorization)
{
config.SecurityProtocol = SecurityProtocol.SaslPlaintext;
config.SaslMechanism = SaslMechanism.Plain;
config.SaslUsername = _configuration["Kafka:SaslUserName"];
config.SaslPassword = _configuration["Kafka:SaslPassword"];
config.SecurityProtocol = _kafkaOptionConfig.SecurityProtocol;
config.SaslMechanism = _kafkaOptionConfig.SaslMechanism;
config.SaslUsername = _kafkaOptionConfig.SaslUserName;
config.SaslPassword = _kafkaOptionConfig.SaslPassword;
}
return config;
@ -101,8 +103,17 @@ namespace JiShe.CollectBus.Kafka.Producer
/// <returns></returns>
public async Task ProduceAsync<TKey, TValue>(string topic, TKey key, TValue value)where TKey : notnull where TValue : class
{
var producer = GetProducer<TKey, TValue>();
await producer.ProduceAsync(topic, new Message<TKey, TValue> { Key = key, Value = value });
var typeKey = typeof(KafkaProducer<TKey, TValue>);
var producer = GetProducer<TKey, TValue>(typeKey);
var message = new Message<TKey, TValue>
{
Key = key,
Value = value,
Headers = new Headers{
{ "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) }
}
};
await producer.ProduceAsync(topic, message);
}
/// <summary>
@ -114,8 +125,16 @@ namespace JiShe.CollectBus.Kafka.Producer
/// <returns></returns>
public async Task ProduceAsync<TValue>(string topic, TValue value) where TValue : class
{
var producer = GetProducer<Null, TValue>();
await producer.ProduceAsync(topic, new Message<Null, TValue> { Value = value });
var typeKey = typeof(KafkaProducer<string, TValue>);
var producer = GetProducer<string, TValue>(typeKey);
var message = new Message<string, TValue>
{
Value = value,
Headers = new Headers{
{ "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) }
}
};
await producer.ProduceAsync(topic, message);
}
/// <summary>
@ -134,9 +153,13 @@ namespace JiShe.CollectBus.Kafka.Producer
var message = new Message<TKey, TValue>
{
Key = key,
Value = value
Value = value,
Headers = new Headers{
{ "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) }
}
};
var producer = GetProducer<TKey, TValue>();
var typeKey = typeof(KafkaProducer<TKey, TValue>);
var producer = GetProducer<TKey, TValue>(typeKey);
if (partition.HasValue)
{
var topicPartition = new TopicPartition(topic, partition.Value);
@ -160,13 +183,17 @@ namespace JiShe.CollectBus.Kafka.Producer
/// <param name="partition"></param>
/// <param name="deliveryHandler"></param>
/// <returns></returns>
public async Task ProduceAsync<TValue>(string topic, TValue value, int? partition=null, Action<DeliveryReport<Null, TValue>>? deliveryHandler = null) where TValue : class
public async Task ProduceAsync<TValue>(string topic, TValue value, int? partition=null, Action<DeliveryReport<string, TValue>>? deliveryHandler = null) where TValue : class
{
var message = new Message<Null, TValue>
var message = new Message<string, TValue>
{
Value = value
Value = value,
Headers = new Headers{
{ "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) }
}
};
var producer = GetProducer<Null, TValue>();
var typeKey = typeof(KafkaProducer<string, TValue>);
var producer = GetProducer<string, TValue>(typeKey);
if (partition.HasValue)
{
var topicPartition = new TopicPartition(topic, partition.Value);

View File

@ -0,0 +1,75 @@
using Confluent.Kafka;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using static System.Runtime.InteropServices.JavaScript.JSType;
namespace JiShe.CollectBus.Kafka
{
/// <summary>
/// Default <see cref="ISubscribeAck"/> implementation carrying a success flag
/// and an optional message; the mutators return the instance for chaining.
/// </summary>
public class SubscribeResult : ISubscribeAck
{
    /// <summary>
    /// Success flag.
    /// </summary>
    public bool Ack { get; set; }

    /// <summary>
    /// Optional result message.
    /// </summary>
    public string? Msg { get; set; }

    /// <summary>
    /// Marks this result as successful.
    /// </summary>
    /// <param name="msg">Optional message.</param>
    /// <returns>This instance, for chaining.</returns>
    public SubscribeResult Success(string? msg = null)
    {
        Msg = msg;
        Ack = true;
        return this;
    }

    /// <summary>
    /// Marks this result as failed.
    /// </summary>
    /// <param name="msg">Optional message.</param>
    /// <returns>This instance, for chaining.</returns>
    public SubscribeResult Fail(string? msg = null)
    {
        Msg = msg;
        Ack = false;
        return this;
    }
}
/// <summary>
/// Factory helpers for building <see cref="ISubscribeAck"/> results.
/// </summary>
public static partial class SubscribeAck
{
    /// <summary>
    /// Creates a successful acknowledgement.
    /// </summary>
    /// <param name="msg">Optional message.</param>
    public static ISubscribeAck Success(string? msg = null) => new SubscribeResult().Success(msg);

    /// <summary>
    /// Creates a failed acknowledgement.
    /// </summary>
    /// <param name="msg">Optional message.</param>
    public static ISubscribeAck Fail(string? msg = null) => new SubscribeResult().Fail(msg);
}
}

View File

@ -1,5 +1,4 @@
using DotNetCore.CAP;
using JiShe.CollectBus.Common.Enums;
using JiShe.CollectBus.Common.Enums;
using JiShe.CollectBus.Common.Extensions;
using JiShe.CollectBus.Common.Models;
using JiShe.CollectBus.Protocol.Contracts.Interfaces;
@ -12,12 +11,16 @@ using Microsoft.Extensions.DependencyInjection;
using JiShe.CollectBus.IotSystems.MessageReceiveds;
using JiShe.CollectBus.IotSystems.Protocols;
using MassTransit;
using DotNetCore.CAP;
using JiShe.CollectBus.Kafka.Producer;
using JiShe.CollectBus.Common.Consts;
namespace JiShe.CollectBus.Protocol.Contracts.Abstracts
{
public abstract class BaseProtocolPlugin : IProtocolPlugin
{
private readonly ICapPublisher _producerBus;
private readonly IProducerService _producerService;
private readonly ILogger<BaseProtocolPlugin> _logger;
private readonly IRepository<ProtocolInfo, Guid> _protocolInfoRepository;
@ -37,6 +40,7 @@ namespace JiShe.CollectBus.Protocol.Contracts.Abstracts
_logger = serviceProvider.GetRequiredService<ILogger<BaseProtocolPlugin>>();
_protocolInfoRepository = serviceProvider.GetRequiredService<IRepository<ProtocolInfo, Guid>>();
_producerService = serviceProvider.GetRequiredService<IProducerService>();
_producerBus = serviceProvider.GetRequiredService<ICapPublisher>();
}
@ -86,8 +90,9 @@ namespace JiShe.CollectBus.Protocol.Contracts.Abstracts
Fn = 1
};
var bytes = Build3761SendData.BuildSendCommandBytes(reqParam);
//await _producerBus.PublishAsync(ProtocolConst.SubscriberLoginIssuedEventName, new IssuedEventMessage { ClientId = messageReceived.ClientId, DeviceNo = messageReceived.DeviceNo, Message = bytes, Type = IssuedEventType.Login, MessageId = messageReceived.MessageId });
await _producerBus.PublishAsync(ProtocolConst.SubscriberLoginIssuedEventName, new IssuedEventMessage { ClientId = messageReceived.ClientId, DeviceNo = messageReceived.DeviceNo, Message = bytes, Type = IssuedEventType.Login, MessageId = messageReceived.MessageId });
await _producerService.ProduceAsync(ProtocolConst.SubscriberLoginIssuedEventName, new IssuedEventMessage { ClientId = messageReceived.ClientId, DeviceNo = messageReceived.DeviceNo, Message = bytes, Type = IssuedEventType.Login, MessageId = messageReceived.MessageId });
//await _producerBus.Publish(new IssuedEventMessage { ClientId = messageReceived.ClientId, DeviceNo = messageReceived.DeviceNo, Message = bytes, Type = IssuedEventType.Login, MessageId = messageReceived.MessageId });
}
@ -126,7 +131,9 @@ namespace JiShe.CollectBus.Protocol.Contracts.Abstracts
Fn = 1
};
var bytes = Build3761SendData.BuildSendCommandBytes(reqParam);
await _producerBus.PublishAsync(ProtocolConst.SubscriberHeartbeatIssuedEventName, new IssuedEventMessage { ClientId = messageReceived.ClientId, DeviceNo = messageReceived.DeviceNo, Message = bytes, Type = IssuedEventType.Heartbeat, MessageId = messageReceived.MessageId });
//await _producerBus.PublishAsync(ProtocolConst.SubscriberHeartbeatIssuedEventName, new IssuedEventMessage { ClientId = messageReceived.ClientId, DeviceNo = messageReceived.DeviceNo, Message = bytes, Type = IssuedEventType.Heartbeat, MessageId = messageReceived.MessageId });
await _producerService.ProduceAsync(ProtocolConst.SubscriberHeartbeatIssuedEventName, new IssuedEventMessage { ClientId = messageReceived.ClientId, DeviceNo = messageReceived.DeviceNo, Message = bytes, Type = IssuedEventType.Heartbeat, MessageId = messageReceived.MessageId });
//await _producerBus.Publish(new IssuedEventMessage { ClientId = messageReceived.ClientId, DeviceNo = messageReceived.DeviceNo, Message = bytes, Type = IssuedEventType.Heartbeat, MessageId = messageReceived.MessageId });
}

View File

@ -17,6 +17,11 @@
<ItemGroup>
<ProjectReference Include="..\JiShe.CollectBus.Common\JiShe.CollectBus.Common.csproj" />
<ProjectReference Include="..\JiShe.CollectBus.Domain\JiShe.CollectBus.Domain.csproj" />
<ProjectReference Include="..\JiShe.CollectBus.KafkaProducer\JiShe.CollectBus.Kafka.csproj" />
</ItemGroup>
<ItemGroup>
<Folder Include="Extensions\" />
</ItemGroup>
</Project>