Merge

commit 01aeaebb0a
@@ -47,6 +47,8 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "2.Services", "2.Services",
 EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "5.Shared", "5.Shared", "{EBF7C01F-9B4F-48E6-8418-2CBFDA51EB0B}"
 EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "JiShe.CollectBus.Kafka.Test", "modules\JiShe.CollectBus.Kafka.Test\JiShe.CollectBus.Kafka.Test.csproj", "{6D6A2A58-7406-9C8C-7B23-3E442CCE3E6B}"
+EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
 		Debug|Any CPU = Debug|Any CPU
@@ -121,6 +123,10 @@ Global
 		{443B4549-0AC0-4493-8F3E-49C83225DD76}.Debug|Any CPU.Build.0 = Debug|Any CPU
 		{443B4549-0AC0-4493-8F3E-49C83225DD76}.Release|Any CPU.ActiveCfg = Release|Any CPU
 		{443B4549-0AC0-4493-8F3E-49C83225DD76}.Release|Any CPU.Build.0 = Release|Any CPU
+		{6D6A2A58-7406-9C8C-7B23-3E442CCE3E6B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{6D6A2A58-7406-9C8C-7B23-3E442CCE3E6B}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{6D6A2A58-7406-9C8C-7B23-3E442CCE3E6B}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{6D6A2A58-7406-9C8C-7B23-3E442CCE3E6B}.Release|Any CPU.Build.0 = Release|Any CPU
 	EndGlobalSection
 	GlobalSection(SolutionProperties) = preSolution
 		HideSolutionNode = FALSE
@@ -143,6 +149,7 @@ Global
 		{A3F3C092-0A25-450B-BF6A-5983163CBEF5} = {2E0FE301-34C3-4561-9CAE-C7A9E65AEE59}
 		{A377955E-7EA1-6F29-8CF7-774569E93925} = {3C3F9DB2-EC97-4464-B49F-BF1A0C2B46DC}
 		{443B4549-0AC0-4493-8F3E-49C83225DD76} = {2E0FE301-34C3-4561-9CAE-C7A9E65AEE59}
+		{6D6A2A58-7406-9C8C-7B23-3E442CCE3E6B} = {2E0FE301-34C3-4561-9CAE-C7A9E65AEE59}
 	EndGlobalSection
 	GlobalSection(ExtensibilityGlobals) = postSolution
 		SolutionGuid = {4324B3B4-B60B-4E3C-91D8-59576B4E26DD}
@@ -3,6 +3,7 @@ using JiShe.CollectBus.Common.Enums;
 using JiShe.CollectBus.Common.Models;
 using JiShe.CollectBus.IotSystems.MessageReceiveds;
 using JiShe.CollectBus.Kafka.Attributes;
+using JiShe.CollectBus.Kafka.Internal;
 using Microsoft.Extensions.DependencyInjection;
 using System;
 using System.Collections.Generic;
@@ -16,53 +17,54 @@ namespace JiShe.CollectBus.Kafka.Test
 {
     public class KafkaSubscribeTest: IKafkaSubscribe
     {
-        [KafkaSubscribe(ProtocolConst.TESTTOPIC, EnableBatch=false,BatchSize=1000)]
+        [KafkaSubscribe(ProtocolConst.TESTTOPIC, EnableBatch = false, BatchSize = 10)]
 
-        public async Task<ISubscribeAck> KafkaSubscribeAsync(object obj)
+        public async Task<ISubscribeAck> KafkaSubscribeAsync(TestTopic obj)
+        //public async Task<ISubscribeAck> KafkaSubscribeAsync(IEnumerable<int> obj)
         {
             Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(obj)}");
             return SubscribeAck.Success();
         }
 
 
-        [KafkaSubscribe(ProtocolConst.SubscriberLoginIssuedEventName)]
-        //[CapSubscribe(ProtocolConst.SubscriberLoginIssuedEventName)]
-        public async Task<ISubscribeAck> LoginIssuedEvent(IssuedEventMessage issuedEventMessage)
-        {
-            Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(issuedEventMessage)}");
-            return SubscribeAck.Success();
-        }
+        //[KafkaSubscribe(ProtocolConst.SubscriberLoginIssuedEventName)]
+        ////[CapSubscribe(ProtocolConst.SubscriberLoginIssuedEventName)]
+        //public async Task<ISubscribeAck> LoginIssuedEvent(IssuedEventMessage issuedEventMessage)
+        //{
+        //    Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(issuedEventMessage)}");
+        //    return SubscribeAck.Success();
+        //}
 
-        [KafkaSubscribe(ProtocolConst.SubscriberHeartbeatIssuedEventName)]
-        //[CapSubscribe(ProtocolConst.SubscriberHeartbeatIssuedEventName)]
-        public async Task<ISubscribeAck> HeartbeatIssuedEvent(IssuedEventMessage issuedEventMessage)
-        {
-            Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(issuedEventMessage)}");
-            return SubscribeAck.Success();
-        }
+        //[KafkaSubscribe(ProtocolConst.SubscriberHeartbeatIssuedEventName)]
+        ////[CapSubscribe(ProtocolConst.SubscriberHeartbeatIssuedEventName)]
+        //public async Task<ISubscribeAck> HeartbeatIssuedEvent(IssuedEventMessage issuedEventMessage)
+        //{
+        //    Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(issuedEventMessage)}");
+        //    return SubscribeAck.Success();
+        //}
 
-        [KafkaSubscribe(ProtocolConst.SubscriberReceivedEventName)]
-        //[CapSubscribe(ProtocolConst.SubscriberReceivedEventName)]
-        public async Task<ISubscribeAck> ReceivedEvent(MessageReceived receivedMessage)
-        {
-            Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(receivedMessage)}");
-            return SubscribeAck.Success();
-        }
+        //[KafkaSubscribe(ProtocolConst.SubscriberReceivedEventName)]
+        ////[CapSubscribe(ProtocolConst.SubscriberReceivedEventName)]
+        //public async Task<ISubscribeAck> ReceivedEvent(MessageReceived receivedMessage)
+        //{
+        //    Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(receivedMessage)}");
+        //    return SubscribeAck.Success();
+        //}
 
-        [KafkaSubscribe(ProtocolConst.SubscriberHeartbeatReceivedEventName)]
-        //[CapSubscribe(ProtocolConst.SubscriberHeartbeatReceivedEventName)]
-        public async Task<ISubscribeAck> ReceivedHeartbeatEvent(MessageReceivedHeartbeat receivedHeartbeatMessage)
-        {
-            Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(receivedHeartbeatMessage)}");
-            return SubscribeAck.Success();
-        }
+        //[KafkaSubscribe(ProtocolConst.SubscriberHeartbeatReceivedEventName)]
+        ////[CapSubscribe(ProtocolConst.SubscriberHeartbeatReceivedEventName)]
+        //public async Task<ISubscribeAck> ReceivedHeartbeatEvent(MessageReceivedHeartbeat receivedHeartbeatMessage)
+        //{
+        //    Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(receivedHeartbeatMessage)}");
+        //    return SubscribeAck.Success();
+        //}
 
-        [KafkaSubscribe(ProtocolConst.SubscriberLoginReceivedEventName)]
-        //[CapSubscribe(ProtocolConst.SubscriberLoginReceivedEventName)]
-        public async Task<ISubscribeAck> ReceivedLoginEvent(MessageReceivedLogin receivedLoginMessage)
-        {
-            Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(receivedLoginMessage)}");
-            return SubscribeAck.Success();
-        }
+        //[KafkaSubscribe(ProtocolConst.SubscriberLoginReceivedEventName)]
+        ////[CapSubscribe(ProtocolConst.SubscriberLoginReceivedEventName)]
+        //public async Task<ISubscribeAck> ReceivedLoginEvent(MessageReceivedLogin receivedLoginMessage)
+        //{
+        //    Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(receivedLoginMessage)}");
+        //    return SubscribeAck.Success();
+        //}
     }
 }
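Editor's note: the hunk above is the handler shape this commit moves to, a class implementing IKafkaSubscribe, a [KafkaSubscribe] attribute carrying the topic plus EnableBatch/BatchSize, and a strongly typed parameter that the dispatcher deserializes into. The sketch below only mirrors the API visible in this diff; the class name and the "demo-topic" literal are illustrative assumptions, not code from the repository.

    using System;
    using System.Text.Json;
    using System.Threading.Tasks;
    using JiShe.CollectBus.Kafka.Attributes;
    using JiShe.CollectBus.Kafka.Internal;

    namespace JiShe.CollectBus.Kafka.Test
    {
        // Illustrative subscriber; "demo-topic" is a placeholder, not a repository constant.
        public class DemoSubscriber : IKafkaSubscribe
        {
            // EnableBatch = false: the handler is invoked once per message and the payload
            // is bound to the declared parameter type (TestTopic) before the call.
            [KafkaSubscribe("demo-topic", EnableBatch = false, BatchSize = 10)]
            public async Task<ISubscribeAck> HandleAsync(TestTopic message)
            {
                Console.WriteLine($"received: {JsonSerializer.Serialize(message)}");
                return SubscribeAck.Success();
            }
        }
    }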
@@ -7,6 +7,7 @@ using JiShe.CollectBus.Common.Consts;
 using JiShe.CollectBus.Kafka;
 using JiShe.CollectBus.Kafka.AdminClient;
 using JiShe.CollectBus.Kafka.Consumer;
+using JiShe.CollectBus.Kafka.Internal;
 using JiShe.CollectBus.Kafka.Producer;
 using JiShe.CollectBus.Kafka.Test;
 using Microsoft.AspNetCore.Builder;
@@ -86,12 +87,13 @@ var logger = loggerFactory.CreateLogger<Program>();
 logger.LogInformation("程序启动");
 var adminClientService = host.Services.GetRequiredService<IAdminClientService>();
 var configuration = host.Services.GetRequiredService<IConfiguration>();
-string topic = "test-topic";
+string topic = ProtocolConst.TESTTOPIC;
 //await adminClientService.DeleteTopicAsync(topic);
 // 创建 topic
 //await adminClientService.CreateTopicAsync(topic, configuration.GetValue<int>(CommonConst.NumPartitions), 3);
 
 var consumerService = host.Services.GetRequiredService<IConsumerService>();
+var producerService = host.Services.GetRequiredService<IProducerService>();
 //var kafkaOptions = host.Services.GetRequiredService<IOptions<KafkaOptionConfig>>();
 //await consumerService.SubscribeAsync<object>(topic, (message) =>
 //{
@@ -113,43 +115,49 @@ var consumerService = host.Services.GetRequiredService<IConsumerService>();
 
 //for (int i = 0; i < 3; i++)
 //{
-//    await consumerService.SubscribeBatchAsync<dynamic>(topic, (message) =>
-//    {
-//        try
-//        {
-//            int index = 0;
-//            logger.LogInformation($"消费消息_{index}消费总数:{message.Count()}:{JsonSerializer.Serialize(message)}");
-//            return Task.FromResult(true);
+//await consumerService.SubscribeBatchAsync<dynamic>(topic, (message) =>
+//{
+//    try
+//    {
+//        int index = 0;
+//        logger.LogInformation($"消费消息_{index}消费总数:{message.Count()}:{JsonSerializer.Serialize(message)}");
+//        return Task.FromResult(true);
 
 //    }
 //    catch (ConsumeException ex)
 //    {
 //        // 处理消费错误
 //        logger.LogError($"kafka消费异常:{ex.Message}");
 //    }
 //    return Task.FromResult(false);
-//    });
+//});
 //}
 //stopwatch.Stop();
 //Console.WriteLine($"耗时: {stopwatch.ElapsedMilliseconds} 毫秒,{stopwatch.ElapsedMilliseconds/1000} 秒");
 
-var producerService = host.Services.GetRequiredService<IProducerService>();
-//int num = 840;
-//while (num <= 900)
+int num = 1;
+while (num <= 6)
+{
+    await producerService.ProduceAsync<TestTopic>(topic, new TestTopic { Topic = topic, Val = num });
+    num++;
+}
+
+//int num = 2;
+//while (num <= 4)
 //{
-//    //await producerService.ProduceAsync(topic, new TestTopic { Topic = topic, Val = i });
 //    await producerService.ProduceAsync<string>(topic, num.ToString());
 //    num++;
 //}
-await Task.Factory.StartNew(async() => {
-    int num = 0;
-    while (true)
-    {
-        //await producerService.ProduceAsync(topic, new TestTopic { Topic = topic, Val = i });
-        await producerService.ProduceAsync<string>(topic, num.ToString());
-        num++;
-    }
-});
+//await Task.Factory.StartNew(async() => {
+//    int num = 0;
+//    while (true)
+//    {
+//        //await producerService.ProduceAsync(topic, new TestTopic { Topic = topic, Val = i });
+//        await producerService.ProduceAsync<string>(topic, num.ToString());
+//        num++;
+//    }
+//});
 Console.WriteLine("\n按Esc键退出");
 while (true)
 {
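Editor's note: the test program now publishes strongly typed TestTopic payloads in a bounded loop instead of the old unbounded string loop. A minimal sketch of that producer call, assuming the host has already been built and the Kafka module registered (setup elided, as in the hunk above):

    // Resolve the producer the same way the test program does.
    var producerService = host.Services.GetRequiredService<IProducerService>();
    string topic = ProtocolConst.TESTTOPIC;

    for (int i = 1; i <= 6; i++)
    {
        // Publishes a typed payload; the subscriber above receives it bound to TestTopic.
        await producerService.ProduceAsync<TestTopic>(topic, new TestTopic { Topic = topic, Val = i });
    }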
@@ -91,8 +91,8 @@
     "SaslUserName": "lixiao",
     "SaslPassword": "lixiao1980",
     "KafkaReplicationFactor": 3,
-    "NumPartitions": 1,
-    "ServerTagName": "JiSheCollectBus2"
+    "NumPartitions": 30,
+    "ServerTagName": "JiSheCollectBus99"
     //"Topic": {
     //  "ReplicationFactor": 3,
     //  "NumPartitions": 1000
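Editor's note: these settings are read into the KafkaOptionConfig options class that this commit extends. A hedged sketch of the binding, assuming a configuration section named "Kafka"; the actual registration code is not part of this diff:

    using Microsoft.Extensions.Configuration;
    using Microsoft.Extensions.DependencyInjection;
    using JiShe.CollectBus.Kafka.Internal;

    public static class KafkaConfigSketch
    {
        // Binds the JSON block above onto KafkaOptionConfig so IOptions<KafkaOptionConfig>
        // exposes NumPartitions, KafkaReplicationFactor, ServerTagName, and the SASL values.
        public static IServiceCollection AddKafkaOptions(this IServiceCollection services, IConfiguration configuration)
        {
            return services.Configure<KafkaOptionConfig>(configuration.GetSection("Kafka"));
        }
    }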
@@ -22,7 +22,7 @@ namespace JiShe.CollectBus.Kafka.Attributes
         /// <summary>
         /// 消费者组
         /// </summary>
-        public string GroupId { get; set; } = "default";
+        public string? GroupId { get; set; } = null;//"default"
 
         /// <summary>
         /// 任务数(默认是多少个分区多少个任务)
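Editor's note: making GroupId nullable pushes the default consumer-group decision down to the consumer factory; the same fallback appears in the ConsumerService hunk further down. A one-line sketch of the resolution rule, with illustrative names:

    using JiShe.CollectBus.Kafka.Internal;

    public static class GroupIdSketch
    {
        // A null attribute GroupId resolves to the configured ServerTagName, so each
        // deployment consumes under its own group unless a subscriber sets one explicitly.
        public static string? Resolve(string? attributeGroupId, KafkaOptionConfig options)
            => attributeGroupId ?? options.ServerTagName;
    }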
@@ -1,6 +1,7 @@
 using Confluent.Kafka;
 using JiShe.CollectBus.Common.Consts;
 using JiShe.CollectBus.Kafka.Consumer;
+using JiShe.CollectBus.Kafka.Internal;
 using JiShe.CollectBus.Kafka.Producer;
 using Microsoft.AspNetCore.Builder;
 using Microsoft.Extensions.Configuration;
@@ -43,10 +44,19 @@ namespace JiShe.CollectBus.Kafka
             //context.Services.AddHostedService<HostedService>();
         }
 
+        /// <summary>
+        /// 在初始化之前,初始化Kafka Topic
+        /// </summary>
+        /// <param name="context"></param>
+        public override void OnPreApplicationInitialization(ApplicationInitializationContext context)
+        {
+            var app = context.GetApplicationBuilder();
+            app.ApplicationServices.UseInitKafkaTopic();
+        }
+
         public override void OnApplicationInitialization(ApplicationInitializationContext context)
         {
             var app = context.GetApplicationBuilder();
 
             // 注册Subscriber
             app.ApplicationServices.UseKafkaSubscribe();
 
@@ -1,5 +1,7 @@
 using Confluent.Kafka;
 using JiShe.CollectBus.Common.Consts;
+using JiShe.CollectBus.Kafka.Internal;
+using JiShe.CollectBus.Kafka.Serialization;
 using Microsoft.Extensions.Configuration;
 using Microsoft.Extensions.Logging;
 using Microsoft.Extensions.Options;
@@ -47,11 +49,11 @@ namespace JiShe.CollectBus.Kafka.Consumer
             var config = new ConsumerConfig
             {
                 BootstrapServers = _kafkaOptionConfig.BootstrapServers,
-                GroupId = groupId ?? "default",
+                GroupId = groupId ?? _kafkaOptionConfig.ServerTagName,
                 AutoOffsetReset = AutoOffsetReset.Earliest,
                 EnableAutoCommit = false, // 禁止AutoCommit
                 EnablePartitionEof = true, // 启用分区末尾标记
-                AllowAutoCreateTopics = true, // 启用自动创建
+                //AllowAutoCreateTopics = true, // 启用自动创建
                 FetchMaxBytes = 1024 * 1024 * 50 // 增加拉取大小(50MB)
             };
 
@@ -252,7 +254,7 @@ namespace JiShe.CollectBus.Kafka.Consumer
         /// <param name="groupId">消费组ID</param>
         /// <param name="batchSize">批次大小</param>
         /// <param name="batchTimeout">批次超时时间</param>
-        public async Task SubscribeBatchAsync<TKey, TValue>(string topic, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class
+        public async Task SubscribeBatchAsync<TKey, TValue>(string topic, Func<List<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class
         {
             await SubscribeBatchAsync<TKey, TValue>(new[] { topic }, messageBatchHandler, groupId, batchSize, batchTimeout);
         }
@@ -267,17 +269,17 @@ namespace JiShe.CollectBus.Kafka.Consumer
         /// <param name="groupId">消费组ID</param>
         /// <param name="batchSize">批次大小</param>
         /// <param name="batchTimeout">批次超时时间</param>
-        public async Task SubscribeBatchAsync<TKey, TValue>(string[] topics,Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null,int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class
+        public async Task SubscribeBatchAsync<TKey, TValue>(string[] topics,Func<List<TValue>, Task<bool>> messageBatchHandler, string? groupId = null,int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class
         {
             var consumerKey = typeof(KafkaConsumer<TKey, TValue>);
             var cts = new CancellationTokenSource();
 
-            var consumer = _consumerStore.GetOrAdd(consumerKey, _ =>
-                (
-                    CreateConsumer<TKey, TValue>(groupId),
-                    cts
-                )).Consumer as IConsumer<TKey, TValue>;
+            //var consumer = _consumerStore.GetOrAdd(consumerKey, _ =>
+            //    (
+            //        CreateConsumer<TKey, TValue>(groupId),
+            //        cts
+            //    )).Consumer as IConsumer<TKey, TValue>;
+            var consumer = CreateConsumer<string, TValue>(groupId);
             consumer!.Subscribe(topics);
 
             var timeout = batchTimeout ?? TimeSpan.FromSeconds(5); // 默认超时时间调整为5秒
@@ -300,8 +302,8 @@ namespace JiShe.CollectBus.Kafka.Consumer
                 {
                     if (result.IsPartitionEOF)
                     {
-                        _logger.LogInformation("Kafka消费: {Topic} 分区 {Partition} 已消费完", result.Topic, result.Partition);
-                        await Task.Delay(TimeSpan.FromSeconds(1), cts.Token);
+                        //_logger.LogInformation("Kafka消费: {Topic} 分区 {Partition} 已消费完", result.Topic, result.Partition);
+                        await Task.Delay(10, cts.Token);
                     }
                     else if (result.Message.Value != null)
                     {
@@ -330,7 +332,7 @@ namespace JiShe.CollectBus.Kafka.Consumer
                     // 处理批次
                     if (messages.Count > 0)
                     {
-                        bool success = await messageBatchHandler(messages.Select(m => m.Value));
+                        bool success = await messageBatchHandler(messages.Select(m => m.Value).ToList());
                         if (success)
                         {
                             var offsetsByPartition = new Dictionary<TopicPartition, long>();
@@ -383,7 +385,7 @@ namespace JiShe.CollectBus.Kafka.Consumer
         /// <param name="batchSize">批次大小</param>
         /// <param name="batchTimeout">批次超时时间</param>
         /// <param name="consumeTimeout">消费等待时间</param>
-        public async Task SubscribeBatchAsync<TValue>(string topic, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null, TimeSpan? consumeTimeout = null) where TValue : class
+        public async Task SubscribeBatchAsync<TValue>(string topic, Func<List<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null, TimeSpan? consumeTimeout = null) where TValue : class
         {
             await SubscribeBatchAsync<TValue>(new[] { topic }, messageBatchHandler, groupId, batchSize, batchTimeout, consumeTimeout);
 
@@ -400,17 +402,18 @@ namespace JiShe.CollectBus.Kafka.Consumer
         /// <param name="batchSize">批次大小</param>
         /// <param name="batchTimeout">批次超时时间</param>
         /// <param name="consumeTimeout">消费等待时间</param>
-        public async Task SubscribeBatchAsync<TValue>(string[] topics,Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100,TimeSpan? batchTimeout = null,TimeSpan? consumeTimeout = null)where TValue : class
+        public async Task SubscribeBatchAsync<TValue>(string[] topics,Func<List<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100,TimeSpan? batchTimeout = null,TimeSpan? consumeTimeout = null)where TValue : class
         {
             var consumerKey = typeof(KafkaConsumer<string, TValue>);
             var cts = new CancellationTokenSource();
 
-            var consumer = _consumerStore.GetOrAdd(consumerKey, _ =>
-                (
-                    CreateConsumer<string, TValue>(groupId),
-                    cts
-                )).Consumer as IConsumer<string, TValue>;
+            //var consumer = _consumerStore.GetOrAdd(consumerKey, _ =>
+            //    (
+            //        CreateConsumer<string, TValue>(groupId),
+            //        cts
+            //    )).Consumer as IConsumer<string, TValue>;
 
+            var consumer= CreateConsumer<string, TValue> (groupId);
             consumer!.Subscribe(topics);
 
             var timeout = batchTimeout ?? TimeSpan.FromSeconds(5); // 默认超时时间调整为5秒
@@ -434,8 +437,8 @@ namespace JiShe.CollectBus.Kafka.Consumer
                 {
                     if (result.IsPartitionEOF)
                     {
-                        _logger.LogInformation("Kafka消费: {Topic} 分区 {Partition} 已消费完", result.Topic, result.Partition);
-                        await Task.Delay(TimeSpan.FromSeconds(1), cts.Token);
+                        //_logger.LogInformation("Kafka消费: {Topic} 分区 {Partition} 已消费完", result.Topic, result.Partition);
+                        await Task.Delay(10, cts.Token);
                     }
                     else if (result.Message.Value != null)
                     {
@@ -464,7 +467,7 @@ namespace JiShe.CollectBus.Kafka.Consumer
                     // 处理批次
                     if (messages.Count > 0)
                     {
-                        bool success = await messageBatchHandler(messages.Select(m => m.Value));
+                        bool success = await messageBatchHandler(messages.Select(m => m.Value).ToList());
                         if (success)
                         {
                             var offsetsByPartition = new Dictionary<TopicPartition, long>();
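Editor's note: the batch handler delegate changed from Func<IEnumerable<TValue>, Task<bool>> to Func<List<TValue>, Task<bool>>, so callers receive a materialized batch. A minimal caller sketch, assuming an IConsumerService resolved from the host as in the test program above; the topic name and payload type are placeholders:

    var consumerService = host.Services.GetRequiredService<IConsumerService>();

    await consumerService.SubscribeBatchAsync<TestTopic>(
        "demo-topic",                              // placeholder topic
        async (List<TestTopic> batch) =>
        {
            // The batch is already a List<T>: Count and indexing are cheap and the
            // sequence is not lazily re-evaluated against the consumer's buffer.
            Console.WriteLine($"consumed {batch.Count} messages");
            await Task.CompletedTask;
            return true;                           // true => the batch's offsets are committed
        },
        groupId: null,                             // null falls back to KafkaOptionConfig.ServerTagName
        batchSize: 100,
        batchTimeout: TimeSpan.FromSeconds(5));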
@@ -33,13 +33,13 @@ namespace JiShe.CollectBus.Kafka.Consumer
         /// <returns></returns>
         Task SubscribeAsync<TValue>(string[] topics, Func<TValue, Task<bool>> messageHandler, string? groupId = null) where TValue : class;
 
-        Task SubscribeBatchAsync<TKey, TValue>(string[] topics, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class;
+        Task SubscribeBatchAsync<TKey, TValue>(string[] topics, Func<List<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class;
 
-        Task SubscribeBatchAsync<TKey, TValue>(string topic, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class;
+        Task SubscribeBatchAsync<TKey, TValue>(string topic, Func<List<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class;
 
-        Task SubscribeBatchAsync<TValue>(string topic, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null, TimeSpan? consumeTimeout = null) where TValue : class;
+        Task SubscribeBatchAsync<TValue>(string topic, Func<List<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null, TimeSpan? consumeTimeout = null) where TValue : class;
 
-        Task SubscribeBatchAsync<TValue>(string[] topics, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null, TimeSpan? consumeTimeout = null) where TValue : class;
+        Task SubscribeBatchAsync<TValue>(string[] topics, Func<List<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null, TimeSpan? consumeTimeout = null) where TValue : class;
 
         void Unsubscribe<TKey, TValue>() where TKey : notnull where TValue : class;
     }
@@ -5,7 +5,7 @@ using System.Linq;
 using System.Text;
 using System.Threading.Tasks;
 
-namespace JiShe.CollectBus.Kafka
+namespace JiShe.CollectBus.Kafka.Internal
 {
     /// <summary>
     /// 消息头过滤器
@@ -4,7 +4,7 @@ using System.Linq;
 using System.Text;
 using System.Threading.Tasks;
 
-namespace JiShe.CollectBus.Kafka
+namespace JiShe.CollectBus.Kafka.Internal
 {
     /// <summary>
     /// Kafka订阅者
@@ -4,7 +4,7 @@ using System.Linq;
 using System.Text;
 using System.Threading.Tasks;
 
-namespace JiShe.CollectBus.Kafka
+namespace JiShe.CollectBus.Kafka.Internal
 {
     public interface ISubscribeAck
     {
@@ -5,7 +5,7 @@ using System.Linq;
 using System.Text;
 using System.Threading.Tasks;
 
-namespace JiShe.CollectBus.Kafka
+namespace JiShe.CollectBus.Kafka.Internal
 {
     public class KafkaOptionConfig
     {
@@ -59,5 +59,10 @@ namespace JiShe.CollectBus.Kafka
         /// </summary>
         public string? SaslPassword { get; set; }
 
+        /// <summary>
+        /// 首次采集时间
+        /// </summary>
+        public DateTime FirstCollectionTime { get; set; }
+
     }
 }
modules/JiShe.CollectBus.Kafka/Internal/ReflectionHelper.cs (new file, 113 lines)
@@ -0,0 +1,113 @@
+using Newtonsoft.Json;
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Linq;
+using System.Reflection;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace JiShe.CollectBus.Kafka.Internal
+{
+    /// <summary>
+    /// 反射辅助类
+    /// </summary>
+    public static class ReflectionHelper
+    {
+        /// <summary>
+        ///集合类型
+        ///Item1:参数类型
+        ///Item2:集合元素类型
+        /// </summary>
+        public static Tuple<Type,Type?> GetParameterTypeInfo(this MethodInfo method, int parameterIndex=0)
+        {
+            // 参数校验
+            if (method == null) throw new ArgumentNullException(nameof(method));
+            var parameters = method.GetParameters();
+            if (parameterIndex < 0 || parameterIndex >= parameters.Length)
+                throw new ArgumentOutOfRangeException(nameof(parameterIndex));
+
+            ParameterInfo param = parameters[parameterIndex];
+            Type paramType = param.ParameterType;
+            Type? elementType = null;
+
+            // 判断是否是集合类型(排除字符串)
+            if (paramType != typeof(string) && IsEnumerableType(paramType))
+            {
+                elementType = GetEnumerableElementType(paramType);
+            }
+
+            return Tuple.Create(paramType, elementType);
+
+        }
+
+        /// <summary>
+        /// 判断是否是集合类型(排除字符串)
+        /// </summary>
+        public static bool IsEnumerableType(this Type type)
+        {
+            return type.IsArray
+                || (type.IsGenericType && type.GetInterfaces()
+                    .Any(t => t.IsGenericType
+                        && t.GetGenericTypeDefinition() == typeof(IEnumerable<>)))
+                || type.GetInterfaces().Any(t => t == typeof(System.Collections.IEnumerable));
+        }
+
+        /// <summary>
+        /// 获取集合元素的类型
+        /// </summary>
+        public static Type? GetEnumerableElementType(this Type type)
+        {
+            // 处理数组类型
+            if (type.IsArray)
+                return type.GetElementType();
+
+            // 处理直接实现IEnumerable<T>的类型(如IEnumerable<int>本身)
+            if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(IEnumerable<>))
+                return type.GetGenericArguments()[0];
+
+            // 处理通过接口实现IEnumerable<T>的泛型集合(如List<T>)
+            var genericEnumerable = type.GetInterfaces()
+                .FirstOrDefault(t => t.IsGenericType
+                    && t.GetGenericTypeDefinition() == typeof(IEnumerable<>));
+            if (genericEnumerable != null)
+                return genericEnumerable.GetGenericArguments()[0];
+
+            // 处理非泛型集合类型(如 ArrayList)
+            if (typeof(IEnumerable).IsAssignableFrom(type) && type == typeof(ArrayList))
+                return typeof(ArrayList);
+            // 返回null表示无法确定元素类型
+            return null;
+        }
+
+
+        // <summary>
+        /// 判断是否使用强转换
+        /// </summary>
+        /// <param name="targetType">目标类型</param>
+        /// <returns></returns>
+        public static bool IsConvertType(this Type targetType)
+        {
+            // 处理可空类型
+            Type underlyingType = Nullable.GetUnderlyingType(targetType) ?? targetType;
+            // 情况1:值类型或基元类型(如 int、DateTime)
+            if (underlyingType.IsValueType || underlyingType.IsPrimitive)
+                return true;
+            // 情况2:字符串类型直接赋值
+            else if (underlyingType == typeof(string))
+                return true;
+
+            // 情况3:枚举类型处理
+            //else if (underlyingType.IsEnum)
+            //{
+            //    if (Enum.IsDefined(underlyingType, msg))
+            //    {
+            //        convertedValue = Enum.Parse(underlyingType, msg.ToString());
+            //        return true;
+            //    }
+            //    return false;
+            //}
+            return false;
+        }
+    }
+}
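Editor's note: ReflectionHelper is what lets the subscriber dispatcher inspect a handler's first parameter and decide between single-object and collection binding. A small usage sketch against the members defined in the new file above; the Handle method here is hypothetical:

    using System;
    using System.Collections.Generic;
    using System.Reflection;
    using System.Threading.Tasks;
    using JiShe.CollectBus.Kafka.Internal;

    public class ReflectionHelperSketch
    {
        // Hypothetical handler used only to exercise the helper.
        public Task Handle(List<int> values) => Task.CompletedTask;

        public static void Demo()
        {
            MethodInfo method = typeof(ReflectionHelperSketch).GetMethod(nameof(Handle))!;

            // Item1 = declared parameter type, Item2 = collection element type (null for non-collections).
            var info = method.GetParameterTypeInfo();

            Console.WriteLine(info.Item1);                    // System.Collections.Generic.List`1[System.Int32]
            Console.WriteLine(info.Item2);                    // System.Int32
            Console.WriteLine(info.Item2!.IsConvertType());   // true: int is a value type, so plain conversion is used
        }
    }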
@@ -6,7 +6,7 @@ using System.Text;
 using System.Threading.Tasks;
 using static System.Runtime.InteropServices.JavaScript.JSType;
 
-namespace JiShe.CollectBus.Kafka
+namespace JiShe.CollectBus.Kafka.Internal
 {
     public class SubscribeResult: ISubscribeAck
     {
@@ -5,30 +5,33 @@ using JiShe.CollectBus.Common.Helpers;
 using JiShe.CollectBus.Kafka.AdminClient;
 using JiShe.CollectBus.Kafka.Attributes;
 using JiShe.CollectBus.Kafka.Consumer;
+using JiShe.CollectBus.Kafka.Internal;
+using JiShe.CollectBus.Kafka.Serialization;
 using Microsoft.AspNetCore.Builder;
+using Microsoft.AspNetCore.Mvc.Abstractions;
 using Microsoft.Extensions.Configuration;
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.Extensions.Hosting;
 using Microsoft.Extensions.Logging;
 using Microsoft.Extensions.Options;
-using Newtonsoft.Json;
+using System;
+using System.Collections;
 using System.Collections.Generic;
+using System.ComponentModel;
+using System.Linq.Expressions;
 using System.Reflection;
+using System.Text.Json;
 using System.Threading.Tasks;
+using YamlDotNet.Core.Tokens;
+using static System.Runtime.InteropServices.JavaScript.JSType;
 
 namespace JiShe.CollectBus.Kafka
 {
     public static class KafkaSubcribesExtensions
     {
-        /// <summary>
-        /// 添加Kafka订阅
-        /// </summary>
-        /// <param name="app"></param>
-        /// <param name="assembly"></param>
-        public static void UseKafkaSubscribe(this IServiceProvider provider)
-        {
-            var lifetime = provider.GetRequiredService<IHostApplicationLifetime>();
-
+        public static void UseInitKafkaTopic(this IServiceProvider provider)
+        {
             //初始化主题信息
             var kafkaAdminClient = provider.GetRequiredService<IAdminClientService>();
             var kafkaOptions = provider.GetRequiredService<IOptions<KafkaOptionConfig>>();
@@ -40,6 +43,17 @@ namespace JiShe.CollectBus.Kafka
             {
                 kafkaAdminClient.CreateTopicAsync(item, kafkaOptions.Value.NumPartitions, kafkaOptions.Value.KafkaReplicationFactor).ConfigureAwait(false).GetAwaiter().GetResult();
             }
+        }
+
+        /// <summary>
+        /// 添加Kafka订阅
+        /// </summary>
+        /// <param name="app"></param>
+        /// <param name="assembly"></param>
+        public static void UseKafkaSubscribe(this IServiceProvider provider)
+        {
+            var lifetime = provider.GetRequiredService<IHostApplicationLifetime>();
+            var kafkaOptions = provider.GetRequiredService<IOptions<KafkaOptionConfig>>();
             lifetime.ApplicationStarted.Register(() =>
             {
                 var logger = provider.GetRequiredService<ILogger<CollectBusKafkaModule>>();
@@ -88,17 +102,7 @@ namespace JiShe.CollectBus.Kafka
         {
             var provider = app.ApplicationServices;
             var lifetime = provider.GetRequiredService<IHostApplicationLifetime>();
-            //初始化主题信息
-            var kafkaAdminClient = provider.GetRequiredService<IAdminClientService>();
             var kafkaOptions = provider.GetRequiredService<IOptions<KafkaOptionConfig>>();
 
-            List<string> topics = ProtocolConstExtensions.GetAllTopicNamesByIssued();
-            topics.AddRange(ProtocolConstExtensions.GetAllTopicNamesByReceived());
-
-            foreach (var item in topics)
-            {
-                kafkaAdminClient.CreateTopicAsync(item, kafkaOptions.Value.NumPartitions, kafkaOptions.Value.KafkaReplicationFactor).ConfigureAwait(false).GetAwaiter().GetResult();
-            }
             lifetime.ApplicationStarted.Register(() =>
             {
                 var logger = provider.GetRequiredService<ILogger<CollectBusKafkaModule>>();
@@ -132,7 +136,7 @@ namespace JiShe.CollectBus.Kafka
         /// </summary>
         /// <param name="subscribe"></param>
         /// <param name="provider"></param>
-        private static Tuple<int,int> BuildKafkaSubscribe(object subscribe, IServiceProvider provider,ILogger<CollectBusKafkaModule> logger, KafkaOptionConfig kafkaOptionConfig)
+        private static Tuple<int, int> BuildKafkaSubscribe(object subscribe, IServiceProvider provider, ILogger<CollectBusKafkaModule> logger, KafkaOptionConfig kafkaOptionConfig)
         {
             var subscribedMethods = subscribe.GetType().GetMethods()
                 .Select(m => new { Method = m, Attribute = m.GetCustomAttribute<KafkaSubscribeAttribute>() })
@@ -147,7 +151,7 @@ namespace JiShe.CollectBus.Kafka
 #if DEBUG
             var adminClientService = provider.GetRequiredService<IAdminClientService>();
             int topicCount = adminClientService.GetTopicPartitionsNum(sub.Attribute!.Topic);
-            partitionCount= partitionCount> topicCount ? topicCount: partitionCount;
+            partitionCount = partitionCount > topicCount ? topicCount : partitionCount;
 #endif
             //int partitionCount = sub.Attribute!.TaskCount==-1?adminClientService.GetTopicPartitionsNum(sub.Attribute!.Topic) : sub.Attribute!.TaskCount;
             if (partitionCount <= 0)
@@ -170,18 +174,18 @@ namespace JiShe.CollectBus.Kafka
         /// <param name="method"></param>
         /// <param name="consumerInstance"></param>
         /// <returns></returns>
-        private static async Task StartConsumerAsync(IServiceProvider provider, KafkaSubscribeAttribute attr,MethodInfo method, object subscribe, ILogger<CollectBusKafkaModule> logger)
+        private static async Task StartConsumerAsync(IServiceProvider provider, KafkaSubscribeAttribute attr, MethodInfo method, object subscribe, ILogger<CollectBusKafkaModule> logger)
         {
             var consumerService = provider.GetRequiredService<IConsumerService>();
 
             if (attr.EnableBatch)
             {
-                await consumerService.SubscribeBatchAsync<object>(attr.Topic, async (message) =>
+                await consumerService.SubscribeBatchAsync<dynamic>(attr.Topic, async (message) =>
                 {
                     try
                     {
 #if DEBUG
-                        logger.LogInformation($"kafka批量消费消息:{message}");
+                        logger.LogInformation($"kafka批量消费消息:{message.Serialize()}");
 #endif
                         // 处理消息
                         return await ProcessMessageAsync(message.ToList(), method, subscribe);
@@ -196,7 +200,7 @@ namespace JiShe.CollectBus.Kafka
             }
             else
             {
-                await consumerService.SubscribeAsync<object>(attr.Topic, async (message) =>
+                await consumerService.SubscribeAsync<dynamic>(attr.Topic, async (message) =>
                 {
                     try
                     {
@@ -225,26 +229,112 @@ namespace JiShe.CollectBus.Kafka
         /// <param name="method"></param>
         /// <param name="subscribe"></param>
         /// <returns></returns>
-        private static async Task<bool> ProcessMessageAsync(List<object> messages, MethodInfo method, object subscribe)
+        private static async Task<bool> ProcessMessageAsync(List<dynamic> messages, MethodInfo method, object subscribe)
         {
             var parameters = method.GetParameters();
             bool isGenericTask = method.ReturnType.IsGenericType
                 && method.ReturnType.GetGenericTypeDefinition() == typeof(Task<>);
             bool existParameters = parameters.Length > 0;
-            List<object>? messageObj = null;
+            object[]? executeParameters = null;
 
             if (existParameters)
             {
-                messageObj = new List<object>();
-                var paramType = parameters[0].ParameterType;
-                foreach (var msg in messages)
+                IList? list = null;
+                Tuple<Type, Type?> tuple = method.GetParameterTypeInfo();
+                bool isEnumerable = false;
+                if (tuple.Item2 != null)
                 {
-                    var data = paramType != typeof(string) ? msg?.ToString()?.Deserialize(paramType) : msg;
-                    if (data != null)
-                        messageObj.Add(data);
+                    Type listType = typeof(List<>).MakeGenericType(tuple.Item2);
+                    list = (IList)Activator.CreateInstance(listType)!;
+                    isEnumerable = tuple.Item2.IsConvertType();
+                }
+                else
+                {
+                    isEnumerable = tuple.Item1.IsConvertType();
+                }
+                #region 暂时
+                //foreach (var msg in messages)
+                //{
+                //    if (tuple.Item2 != null)
+                //    {
+                //        if (isEnumerable)
+                //        {
+                //            var parameterType = parameters[0].ParameterType;
+                //            var data=messages?.Serialize().Deserialize(parameterType);
+                //            messageObj = data!=null? new[] { data }:null;
+                //            break;
+                //        }
+                //        else
+                //        {
+                //            // 集合类型
+                //            var data = msg?.Serialize().Deserialize(tuple.Item2) /*isEnumerable ? Convert.ChangeType(msg, tuple.Item2) : msg?.Serialize().Deserialize(tuple.Item2)*/;
+                //            if (data != null)
+                //                list?.Add(data);
+                //        }
+
+                //    }
+                //    else
+                //    {
+                //        // (dynamic)Convert.ChangeType(msg, tuple.Item1)
+                //        using (var stream = new MemoryStream(msg))
+                //        {
+                //            var data1= System.Text.Json.JsonSerializer.Deserialize(stream, tuple.Item1);
+                //        }
+                //        var data = isEnumerable ? System.Text.Json.JsonSerializer.Deserialize(msg, tuple.Item1): msg?.ToString()?.Deserialize(tuple.Item1);
+                //        if (data != null)
+                //            messageObj = new[] { data };
+                //    }
+                //}
+                //if (tuple.Item2 != null && list != null && list.Count > 0)
+                //{
+                //    messageObj = new[] { list };
+                //}
+                #endregion
+                var parameterDescriptors = method.GetParameters();
+                executeParameters = new object?[parameterDescriptors.Length];
+                for (var i = 0; i < parameterDescriptors.Length; i++)
+                {
+                    foreach (var item in messages)
+                    {
+                        object? tempParameter=null;
+                        var parameterDescriptor = parameterDescriptors[i];
+                        if (KafkaSerialization.IsJsonType(item))
+                        {
+                            tempParameter = KafkaSerialization.Deserialize(item, tuple.Item2 != null? tuple.Item2: parameterDescriptor.ParameterType);
+                        }
+                        else
+                        {
+                            var converter = TypeDescriptor.GetConverter(parameterDescriptor.ParameterType);
+                            if (converter.CanConvertFrom(item.GetType()))
+                            {
+                                tempParameter = converter.ConvertFrom(item);
+                            }
+                            else
+                            {
+                                if (parameterDescriptor.ParameterType.IsInstanceOfType(item))
+                                    tempParameter = item;
+                                else
+                                    tempParameter =Convert.ChangeType(item, parameterDescriptor.ParameterType);
+                            }
+                        }
+                        if (tuple.Item2 == null)
+                        {
+                            executeParameters[i] = tempParameter;
+                        }
+                        else
+                        {
+                            list.Add(tempParameter);
+                        }
+                    }
+                    if(list!=null && list.Count>0)
+                        executeParameters[i] = list;
                 }
             }
 
-            var result = method.Invoke(subscribe, messageObj?.ToArray());
+            var result = method.Invoke(subscribe, executeParameters);
             if (result is Task<ISubscribeAck> genericTask)
             {
                 await genericTask.ConfigureAwait(false);
@@ -262,5 +352,9 @@ namespace JiShe.CollectBus.Kafka
             return false;
         }
+
+
+
     }
+
+
 }
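Editor's note: the rewritten ProcessMessageAsync binds each consumed item to the handler's declared parameter type: JsonElement payloads are deserialized, primitives go through a TypeConverter or Convert.ChangeType, and collection parameters are accumulated into a List<T> built via ReflectionHelper. A condensed, illustrative sketch of that per-item rule (not the repository's code) is below.

    using System;
    using System.ComponentModel;
    using System.Text.Json;

    public static class BindSketch
    {
        // Converts one consumed item into the handler's declared parameter type.
        public static object? BindOne(object item, Type targetType)
        {
            // JSON payloads surface as JsonElement and are deserialized into the declared type.
            if (item is JsonElement json)
                return json.Deserialize(targetType);

            // Primitive/string payloads go through TypeConverter, falling back to Convert.ChangeType.
            var converter = TypeDescriptor.GetConverter(targetType);
            if (converter.CanConvertFrom(item.GetType()))
                return converter.ConvertFrom(item);

            return targetType.IsInstanceOfType(item) ? item : Convert.ChangeType(item, targetType);
        }
    }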
@@ -6,6 +6,8 @@ using System.Text;
 using System.Threading.Tasks;
 using Confluent.Kafka;
 using JiShe.CollectBus.Kafka.Consumer;
+using JiShe.CollectBus.Kafka.Internal;
+using JiShe.CollectBus.Kafka.Serialization;
 using Microsoft.Extensions.Configuration;
 using Microsoft.Extensions.Logging;
 using Microsoft.Extensions.Options;
@@ -56,7 +58,7 @@ namespace JiShe.CollectBus.Kafka.Producer
             var config = new ProducerConfig
             {
                 BootstrapServers = _kafkaOptionConfig.BootstrapServers,
-                AllowAutoCreateTopics = true,
+                //AllowAutoCreateTopics = true,
                 QueueBufferingMaxKbytes = 2_097_151, // 修改缓冲区最大为2GB,默认为1GB
                 CompressionType = CompressionType.Lz4, // 配置使用压缩算法LZ4,其他:gzip/snappy/zstd
                 BatchSize = 32_768, // 修改批次大小为32K
@@ -108,7 +110,7 @@ namespace JiShe.CollectBus.Kafka.Producer
             var message = new Message<TKey, TValue>
             {
                 Key = key,
                 Value = value,
                 Headers = new Headers{
                     { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) }
                 }
@@ -8,7 +8,7 @@ using Confluent.Kafka;
 using System.Text.Json.Serialization;
 using System.Text.Encodings.Web;
 
-namespace JiShe.CollectBus.Kafka
+namespace JiShe.CollectBus.Kafka.Serialization
 {
     /// <summary>
     /// JSON 序列化器(支持泛型)
@@ -49,10 +49,11 @@ namespace JiShe.CollectBus.Kafka
         {
             if (isNull)
                 return default;
 
             try
             {
-                return JsonSerializer.Deserialize<T>(data, _options);
+                if (data.IsEmpty)
+                    return default;
+                return JsonSerializer.Deserialize<T>(data, _options)!;
             }
             catch (Exception ex)
             {
@@ -85,4 +86,40 @@ namespace JiShe.CollectBus.Kafka
             writer.WriteStringValue(value.ToString(_dateFormatString));
         }
     }
+
+
+    public static class KafkaSerialization
+    {
+
+        /// <summary>
+        /// 判断是否是json类型
+        /// </summary>
+        /// <param name="jsonObject"></param>
+        /// <returns></returns>
+        public static bool IsJsonType(this object jsonObject)
+        {
+            return jsonObject is JsonElement;
+        }
+        public static object? Deserialize(object value, Type valueType)
+        {
+            var _jsonSerializerOptions = new JsonSerializerOptions
+            {
+                DefaultIgnoreCondition = JsonIgnoreCondition.Never,
+                WriteIndented = false,// 设置格式化输出
+                Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,// 允许特殊字符
+                IgnoreReadOnlyFields = true,
+                IgnoreReadOnlyProperties = true,
+                NumberHandling = JsonNumberHandling.AllowReadingFromString, // 允许数字字符串
+                AllowTrailingCommas = true, // 忽略尾随逗号
+                ReadCommentHandling = JsonCommentHandling.Skip, // 忽略注释
+                PropertyNameCaseInsensitive = true, // 属性名称大小写不敏感
+                PropertyNamingPolicy = JsonNamingPolicy.CamelCase, // 属性名称使用驼峰命名规则
+                Converters = { new DateTimeJsonConverter() } // 注册你的自定义转换器,
+            };
+
+            if (value is JsonElement jsonElement) return jsonElement.Deserialize(valueType, _jsonSerializerOptions);
+
+            throw new NotSupportedException("Type is not of type JsonElement");
+        }
+    }
 }
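Editor's note: KafkaSerialization is the glue used by the dispatcher above; IsJsonType detects a JsonElement payload and Deserialize re-reads it with the same relaxed options as the serializer (camelCase, lenient numbers, the custom DateTime converter). A usage sketch follows; the DemoDto record is hypothetical.

    using System;
    using System.Text.Json;
    using JiShe.CollectBus.Kafka.Serialization;

    public static class KafkaSerializationSketch
    {
        // Hypothetical DTO used only for this illustration.
        public record DemoDto(string Topic, int Val);

        public static void Demo()
        {
            // A consumed payload typically surfaces as a JsonElement when the consumer is declared as <dynamic>.
            JsonElement payload = JsonDocument.Parse("{\"topic\":\"demo\",\"val\":1}").RootElement;

            Console.WriteLine(payload.IsJsonType());   // true: the boxed value is a JsonElement

            // Routes the element through the shared serializer options into the requested type.
            var typed = KafkaSerialization.Deserialize(payload, typeof(DemoDto));
            Console.WriteLine(typed);
        }
    }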
@@ -1,17 +1,18 @@
-using System.Threading.Tasks;
+using System.Collections.Generic;
+using System.Threading.Tasks;
 using JiShe.CollectBus.Common.Models;
 using JiShe.CollectBus.IotSystems.MessageReceiveds;
-using JiShe.CollectBus.Kafka;
+using JiShe.CollectBus.Kafka.Internal;
 using Volo.Abp.Application.Services;
 
 namespace JiShe.CollectBus.Subscribers
 {
     public interface ISubscriberAppService : IApplicationService
     {
-        Task<ISubscribeAck> LoginIssuedEvent(IssuedEventMessage issuedEventMessage);
-        Task<ISubscribeAck> HeartbeatIssuedEvent(IssuedEventMessage issuedEventMessage);
+        Task<ISubscribeAck> LoginIssuedEvent(List<IssuedEventMessage> issuedEventMessage);
+        Task<ISubscribeAck> HeartbeatIssuedEvent(List<IssuedEventMessage> issuedEventMessage);
         Task<ISubscribeAck> ReceivedEvent(MessageReceived receivedMessage);
-        Task<ISubscribeAck> ReceivedHeartbeatEvent(MessageReceivedHeartbeat receivedHeartbeatMessage);
-        Task<ISubscribeAck> ReceivedLoginEvent(MessageReceivedLogin receivedLoginMessage);
+        Task<ISubscribeAck> ReceivedHeartbeatEvent(List<MessageReceivedHeartbeat> receivedHeartbeatMessage);
+        Task<ISubscribeAck> ReceivedLoginEvent(List<MessageReceivedLogin> receivedLoginMessage);
     }
 }
@@ -1,7 +1,7 @@
 using JiShe.CollectBus.IotSystems.MessageIssueds;
 using JiShe.CollectBus.IotSystems.MessageReceiveds;
 using JiShe.CollectBus.IotSystems.MeterReadingRecords;
-using JiShe.CollectBus.Kafka;
+using JiShe.CollectBus.Kafka.Internal;
 using System.Collections.Generic;
 using System.Threading.Tasks;
 using Volo.Abp.Application.Services;
@@ -25,6 +25,7 @@ using Volo.Abp.BackgroundWorkers.Hangfire;
 using Volo.Abp.EventBus;
 using Volo.Abp.Modularity;
 using Microsoft.Extensions.Options;
+using JiShe.CollectBus.Kafka.Internal;
 using JiShe.CollectBus.Interceptors;
 using JiShe.CollectBus.Common.Attributes;
 
@@ -85,7 +86,7 @@ public class CollectBusApplicationModule : AbpModule
 
         //默认初始化表计信息
         var dbContext = context.ServiceProvider.GetRequiredService<EnergySystemScheduledMeterReadingService>();
-        //await dbContext.InitAmmeterCacheData();
+        await dbContext.InitAmmeterCacheData();
         //await dbContext.InitWatermeterCacheData();
 
         //初始化主题信息
|||||||
@ -20,13 +20,14 @@ using System.Diagnostics.Metrics;
using JiShe.CollectBus.Common.DeviceBalanceControl;
using JiShe.CollectBus.Kafka.Attributes;
using System.Text.Json;
-using JiShe.CollectBus.Kafka;
using JiShe.CollectBus.Application.Contracts;
using JiShe.CollectBus.Common.Models;
using System.Diagnostics;
using JiShe.CollectBus.IoTDB.Context;
using JiShe.CollectBus.IoTDB.Interface;
using JiShe.CollectBus.IoTDB.Options;
+using JiShe.CollectBus.Kafka.Internal;
+using JiShe.CollectBus.Common.Extensions;

namespace JiShe.CollectBus.Samples;
@ -243,6 +244,19 @@ public class SampleAppService : CollectBusAppService, ISampleAppService, IKafkaS
}

+/// <summary>
+/// 下一个采集时间点验证
+/// </summary>
+/// <returns></returns>
+[HttpGet]
+public async Task<DateTime> TestCalculateNextCollectionTime(string time, int timeDensity)
+{
+DateTime nextTaskTime = Convert.ToDateTime(time);
+
+return await Task.FromResult(nextTaskTime.CalculateNextCollectionTime(timeDensity));
+}
+
public Task<SampleDto> GetAsync()
{
return Task.FromResult(
@ -1,4 +1,5 @@
-using JiShe.CollectBus.Ammeters;
+using DnsClient.Protocol;
+using JiShe.CollectBus.Ammeters;
using JiShe.CollectBus.Application.Contracts;
using JiShe.CollectBus.Common.BuildSendDatas;
using JiShe.CollectBus.Common.Consts;
@ -8,10 +9,11 @@ using JiShe.CollectBus.Common.Extensions;
using JiShe.CollectBus.Common.Helpers;
using JiShe.CollectBus.Common.Models;
using JiShe.CollectBus.GatherItem;
+using JiShe.CollectBus.IoTDB.Interface;
using JiShe.CollectBus.IotSystems.MessageIssueds;
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
using JiShe.CollectBus.IotSystems.Watermeter;
-using JiShe.CollectBus.Kafka;
+using JiShe.CollectBus.Kafka.Internal;
using JiShe.CollectBus.Kafka.Producer;
using JiShe.CollectBus.Protocol.Contracts;
using JiShe.CollectBus.RedisDataCache;
@ -25,7 +27,6 @@ using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
-using JiShe.CollectBus.IoTDB.Interface;
using static FreeSql.Internal.GlobalFilter;

namespace JiShe.CollectBus.ScheduledMeterReading
@ -101,6 +102,8 @@ namespace JiShe.CollectBus.ScheduledMeterReading
return;
}

+var currentTime = DateTime.Now;
+
foreach (var item in taskInfos)
{
var tasksToBeIssueModel = await FreeRedisProvider.Instance.GetAsync<TasksToBeIssueModel>(item);
@ -130,70 +133,31 @@ namespace JiShe.CollectBus.ScheduledMeterReading

if (meteryType == MeterTypeEnum.Ammeter.ToString())
{
-var timer = Stopwatch.StartNew();
-
-//获取对应频率中的所有电表信息
-var redisCacheMeterInfoHashKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoHashKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}";
-var redisCacheMeterInfoSetIndexKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoSetIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}";
-var redisCacheMeterInfoZSetScoresIndexKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoZSetScoresIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}";
-
-List<AmmeterInfo> meterInfos = new List<AmmeterInfo>();
-decimal? cursor = null;
-string member = null;
-bool hasNext;
-do
-{
-var page = await _redisDataCacheService.GetAllPagedData<AmmeterInfo>(
-redisCacheMeterInfoHashKeyTemp,
-redisCacheMeterInfoZSetScoresIndexKeyTemp,
-pageSize: 1000,
-lastScore: cursor,
-lastMember: member);
-
-meterInfos.AddRange(page.Items);
-cursor = page.HasNext ? page.NextScore : null;
-member = page.HasNext ? page.NextMember : null;
-hasNext = page.HasNext;
-} while (hasNext);
-
-if (meterInfos == null || meterInfos.Count <= 0)
-{
-timer.Stop();
-_logger.LogError($"{nameof(CreateToBeIssueTasks)} {timeDensity}分钟采集待下发任务创建失败,没有获取到缓存信息,-105");
-return;
-}
-//await AmmerterScheduledMeterReadingIssued(timeDensity, meterInfos);
-
-//处理数据
-//await DeviceGroupBalanceControl.ProcessGenericListAsync(
-//    items: meterInfos,
-//    deviceIdSelector: data => data.FocusAddress,
-//    processor: (data, groupIndex) =>
-//    {
-//        _ = AmmerterCreatePublishTask(timeDensity, data, groupIndex, tasksToBeIssueModel.NextTaskTime.ToString("yyyyMMddHHmmss"));
-//    }
-//);
-
-await DeviceGroupBalanceControl.ProcessWithThrottleAsync(
-items: meterInfos,
-deviceIdSelector: data => data.FocusAddress,
-processor: (data, groupIndex) =>
-{
-AmmerterCreatePublishTask(timeDensity, data, groupIndex, tasksToBeIssueModel.NextTaskTime.ToString("yyyyMMddHHmmss"));
-}
-);
-
-timer.Stop();
-_logger.LogInformation($"{nameof(CreateToBeIssueTasks)} {timeDensity}分钟采集待下发任务创建完成,{timer.ElapsedMilliseconds},总共{meterInfos.Count}表计信息");
+//_ = AmmerterCreatePublishTask(timeDensity, $"{tasksToBeIssueModel.NextTaskTime:yyyyMMddHHmm00}");
+_ = CreateMeterPublishTask<AmmeterInfo>(
+timeDensity: timeDensity,
+taskBatch: $"{tasksToBeIssueModel.NextTaskTime:yyyyMMddHHmm00}",
+meterType: MeterTypeEnum.Ammeter,
+taskCreateAction: (timeDensity, data, groupIndex, taskBatch) =>
+{
+AmmerterCreatePublishTaskAction(timeDensity, data, groupIndex, taskBatch);
+});
}
else if (meteryType == MeterTypeEnum.WaterMeter.ToString())
{
//todo 水表任务创建待处理
//await WatermeterScheduledMeterReadingIssued(timeDensity, meterInfos);

+_ = CreateMeterPublishTask<WatermeterInfo>(
+timeDensity: timeDensity,
+taskBatch: $"{tasksToBeIssueModel.NextTaskTime:yyyyMMddHHmm00}",
+meterType: MeterTypeEnum.Ammeter,
+taskCreateAction: (timeDensity, data, groupIndex, taskBatch) =>
+{
+//AmmerterCreatePublishTaskAction(timeDensity, data, groupIndex, taskBatch);
+});
}
else
{
@ -205,7 +169,7 @@ namespace JiShe.CollectBus.ScheduledMeterReading

//根据当前的采集频率和类型,重新更新下一个任务点,把任务的创建源固定在当前逻辑,避免任务处理的逻辑异常导致任务创建失败。
-tasksToBeIssueModel.NextTaskTime = tasksToBeIssueModel.NextTaskTime.AddMinutes(timeDensity);
+tasksToBeIssueModel.NextTaskTime = tasksToBeIssueModel.NextTaskTime.CalculateNextCollectionTime(timeDensity);
await FreeRedisProvider.Instance.SetAsync(item, tasksToBeIssueModel);
}
}
@ -230,23 +194,7 @@ namespace JiShe.CollectBus.ScheduledMeterReading
public virtual async Task InitAmmeterCacheData(string gatherCode = "")
{
#if DEBUG
-//var timeDensity = "15";
-//string tempCacheMeterInfoKey = $"CollectBus:{"{0}:{1}"}:MeterInfo:{"{2}"}:{"{3}"}";
-////获取缓存中的电表信息
-//var redisKeyList = $"{string.Format(tempCacheMeterInfoKey, SystemType, "JiSheCollectBus", MeterTypeEnum.Ammeter, timeDensity)}*";
-
-//var oneMinutekeyList = await FreeRedisProvider.Instance.KeysAsync(redisKeyList);
-//var tempMeterInfos = await GetMeterRedisCacheListData<AmmeterInfoTemp>(oneMinutekeyList, SystemType, ServerTagName, timeDensity, MeterTypeEnum.Ammeter);
-////List<string> focusAddressDataLista = new List<string>();
-//List<AmmeterInfo> meterInfos = new List<AmmeterInfo>();
-//foreach (var item in tempMeterInfos)
-//{
-//    var tempData = item.Adapt<AmmeterInfo>();
-//    tempData.FocusId = item.FocusID;
-//    tempData.MeterId = item.Id;
-//    meterInfos.Add(tempData);
-//    //focusAddressDataLista.Add(item.FocusAddress);
-//}
+return;
@ -258,23 +206,6 @@ namespace JiShe.CollectBus.ScheduledMeterReading
List<AmmeterInfo> meterInfos = new List<AmmeterInfo>();
List<string> focusAddressDataLista = new List<string>();
var timer1 = Stopwatch.StartNew();
-//decimal? cursor = null;
-//string member = null;
-//bool hasNext;
-//do
-//{
-//    var page = await _redisDataCacheService.GetAllPagedDataOptimized<AmmeterInfo>(
-//        redisCacheMeterInfoHashKeyTemp,
-//        redisCacheMeterInfoZSetScoresIndexKeyTemp,
-//        pageSize: 1000,
-//        lastScore: cursor,
-//        lastMember: member);
-
-//    meterInfos.AddRange(page.Items);
-//    cursor = page.HasNext ? page.NextScore : null;
-//    member = page.HasNext ? page.NextMember : null;
-//    hasNext = page.HasNext;
-//} while (hasNext);
-
var allIds = new HashSet<string>();
decimal? score = null;
@ -306,8 +237,8 @@ namespace JiShe.CollectBus.ScheduledMeterReading

timer1.Stop();
_logger.LogError($"读取数据更花费时间{timer1.ElapsedMilliseconds}毫秒");
-DeviceGroupBalanceControl.InitializeCache(focusAddressDataLista, _kafkaOptions.NumPartitions);
-return;
+//DeviceGroupBalanceControl.InitializeCache(focusAddressDataLista, _kafkaOptions.NumPartitions);
+//return;
#else
var meterInfos = await GetAmmeterInfoList(gatherCode);
#endif
@ -329,6 +260,22 @@ namespace JiShe.CollectBus.ScheduledMeterReading

//根据采集频率分组,获得采集频率分组
var meterInfoGroupByTimeDensity = meterInfos.GroupBy(d => d.TimeDensity);

+//先处理采集频率任务缓存
+foreach (var item in meterInfoGroupByTimeDensity)
+{
+TasksToBeIssueModel nextTask = new TasksToBeIssueModel()
+{
+TimeDensity = item.Key,
+NextTaskTime = _kafkaOptions.FirstCollectionTime.CalculateNextCollectionTime(item.Key),//使用首次采集时间作为下一次采集时间
+};
+
+//todo 首次采集时间节点到目前运行时间中漏采的时间点,可以考虑使用IoTDB的存储,利用时间序列处理。
+
+var taskRedisCacheKey = string.Format(RedisConst.CacheTasksToBeIssuedKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, item.Key);
+await FreeRedisProvider.Instance.SetAsync(taskRedisCacheKey, nextTask);
+}
+
foreach (var itemTimeDensity in meterInfoGroupByTimeDensity)
{
var redisCacheMeterInfoHashKey = $"{string.Format(RedisConst.CacheMeterInfoHashKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, itemTimeDensity.Key)}";
@ -403,25 +350,13 @@ namespace JiShe.CollectBus.ScheduledMeterReading
}

ammeterInfos.Add(ammeter);
-//keyValuePairs.TryAdd($"{ammeter.MeterId}", ammeter);
}
-//await FreeRedisProvider.Instance.HSetAsync(redisCacheKey, keyValuePairs);
}

await _redisDataCacheService.BatchInsertDataAsync<AmmeterInfo>(
redisCacheMeterInfoHashKey,
redisCacheMeterInfoSetIndexKey,
redisCacheMeterInfoZSetScoresIndexKey, ammeterInfos);

-//在缓存表信息数据的时候,新增下一个时间的自动处理任务,1分钟后执行所有的采集频率任务
-TasksToBeIssueModel nextTask = new TasksToBeIssueModel()
-{
-TimeDensity = itemTimeDensity.Key,
-NextTaskTime = DateTime.Now.AddMinutes(1)
-};
-
-var taskRedisCacheKey = string.Format(RedisConst.CacheTasksToBeIssuedKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, itemTimeDensity.Key);
-await FreeRedisProvider.Instance.SetAsync(taskRedisCacheKey, nextTask);
}

//初始化设备组负载控制
@ -446,63 +381,29 @@ namespace JiShe.CollectBus.ScheduledMeterReading
/// <returns></returns>
public virtual async Task AmmeterScheduledMeterOneMinuteReading()
{
//获取缓存中的电表信息
-int timeDensity = 1;
+int timeDensity = 5;
var currentTime = DateTime.Now;

-var redisKeyList = GetTelemetryPacketCacheKeyPrefix(timeDensity, MeterTypeEnum.Ammeter);
-var oneMinutekeyList = await FreeRedisProvider.Instance.KeysAsync(redisKeyList);
-if (oneMinutekeyList == null || oneMinutekeyList.Length <= 0)
-{
-_logger.LogError($"{nameof(AmmeterScheduledMeterOneMinuteReading)} {timeDensity}分钟采集电表数据处理时没有获取到缓存信息,-101");
-return;
-}
-
-//获取下发任务缓存数据
-Dictionary<string, Dictionary<string, MeterReadingRecords>> meterTaskInfos = await GetMeterRedisCacheDictionaryData<MeterReadingRecords>(oneMinutekeyList, SystemType, ServerTagName, timeDensity.ToString(), MeterTypeEnum.Ammeter);
-if (meterTaskInfos == null || meterTaskInfos.Count <= 0)
-{
-_logger.LogError($"{nameof(AmmeterScheduledMeterOneMinuteReading)} {timeDensity}分钟采集电表数据处理时没有获取到缓存信息,-102");
-return;
-}
-
-List<MeterReadingRecords> meterTaskInfosList = new List<MeterReadingRecords>();
-
-//将取出的缓存任务数据发送到Kafka消息队列中
-foreach (var focusItem in meterTaskInfos)
-{
-foreach (var ammerterItem in focusItem.Value)
-{
-var tempMsg = new ScheduledMeterReadingIssuedEventMessage()
-{
-MessageHexString = ammerterItem.Value.IssuedMessageHexString,
-MessageId = ammerterItem.Value.IssuedMessageId,
-FocusAddress = ammerterItem.Value.FocusAddress,
-TimeDensity = timeDensity.ToString(),
-};
-//_ = _producerBus.PublishDelayAsync(TimeSpan.FromMicroseconds(500), ProtocolConst.AmmeterSubscriberWorkerOneMinuteIssuedEventName, tempMsg);
-
-_ = _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerOneMinuteIssuedEventName, tempMsg);
-//_= _producerBus.Publish(tempMsg);
-
-meterTaskInfosList.Add(ammerterItem.Value);
-}
-}
-if (meterTaskInfosList != null && meterTaskInfosList.Count > 0)
-{
-//_dbProvider.SwitchSessionPool(true);
-//await _dbProvider.InsertAsync(meterTaskInfosList);
-
-await _meterReadingRecordRepository.InsertManyAsync(meterTaskInfosList, currentTime);
-}
-
-////删除任务数据
-//await FreeRedisProvider.Instance.DelAsync(oneMinutekeyList);
-//await CacheNextTaskData(timeDensity, MeterTypeEnum.Ammeter);
-
-_logger.LogInformation($"{nameof(AmmeterScheduledMeterOneMinuteReading)} {timeDensity}分钟采集电表数据处理完成");
+// 自动计算最佳并发度
+int recommendedThreads = DeviceGroupBalanceControl.CalculateOptimalThreadCount();
+var options = new ParallelOptions
+{
+MaxDegreeOfParallelism = recommendedThreads,
+};
+var taskBatch = $"{currentTime:yyyyMMddHHmm00}";
+
+Parallel.For(0, _kafkaOptions.NumPartitions, options, async groupIndex =>
+{
+var redisCacheTelemetryPacketInfoHashKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoHashKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity, groupIndex, taskBatch)}";
+var redisCacheTelemetryPacketInfoZSetScoresIndexKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoZSetScoresIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity, groupIndex, taskBatch)}";
+
+_ = CreateMeterKafkaTaskMessage(redisCacheTelemetryPacketInfoHashKey, redisCacheTelemetryPacketInfoZSetScoresIndexKey);
+});
+
+await Task.CompletedTask;
}
@ -516,57 +417,22 @@ namespace JiShe.CollectBus.ScheduledMeterReading
int timeDensity = 5;
var currentTime = DateTime.Now;

-var redisKeyList = GetTelemetryPacketCacheKeyPrefix(timeDensity, MeterTypeEnum.Ammeter);
-var fiveMinutekeyList = await FreeRedisProvider.Instance.KeysAsync(redisKeyList);
-if (fiveMinutekeyList == null || fiveMinutekeyList.Length <= 0)
-{
-_logger.LogError($"{nameof(AmmeterScheduledMeterOneMinuteReading)} {timeDensity}分钟采集电表数据处理时没有获取到缓存信息,-101");
-return;
-}
-
-//获取下发任务缓存数据
-Dictionary<string, Dictionary<string, MeterReadingRecords>> meterTaskInfos = await GetMeterRedisCacheDictionaryData<MeterReadingRecords>(fiveMinutekeyList, SystemType, ServerTagName, timeDensity.ToString(), MeterTypeEnum.Ammeter);
-if (meterTaskInfos == null || meterTaskInfos.Count <= 0)
-{
-_logger.LogError($"{nameof(AmmeterScheduledMeterOneMinuteReading)} {timeDensity}分钟采集电表数据处理时没有获取到缓存信息,-102");
-return;
-}
-
-List<MeterReadingRecords> meterTaskInfosList = new List<MeterReadingRecords>();
-
-//将取出的缓存任务数据发送到Kafka消息队列中
-foreach (var focusItem in meterTaskInfos)
-{
-foreach (var ammerterItem in focusItem.Value)
-{
-var tempMsg = new ScheduledMeterReadingIssuedEventMessage()
-{
-MessageHexString = ammerterItem.Value.IssuedMessageHexString,
-MessageId = ammerterItem.Value.IssuedMessageId,
-FocusAddress = ammerterItem.Value.FocusAddress,
-TimeDensity = timeDensity.ToString(),
-};
-//_ = _producerBus.PublishDelayAsync(TimeSpan.FromMicroseconds(500), ProtocolConst.AmmeterSubscriberWorkerFiveMinuteIssuedEventName, tempMsg);
-
-_ = _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerFiveMinuteIssuedEventName, tempMsg);
-
-//_ = _producerBus.Publish(tempMsg);
-
-meterTaskInfosList.Add(ammerterItem.Value);
-}
-}
-if (meterTaskInfosList != null && meterTaskInfosList.Count > 0)
-{
-await _meterReadingRecordRepository.InsertManyAsync(meterTaskInfosList, currentTime);
-}
-
-////删除任务数据
-//await FreeRedisProvider.Instance.DelAsync(fiveMinutekeyList);
-
-////缓存下一个时间的任务
-//await CacheNextTaskData(timeDensity, MeterTypeEnum.Ammeter);
-
-_logger.LogInformation($"{nameof(AmmeterScheduledMeterFiveMinuteReading)} {timeDensity}分钟采集电表数据处理完成");
+// 自动计算最佳并发度
+int recommendedThreads = DeviceGroupBalanceControl.CalculateOptimalThreadCount();
+var options = new ParallelOptions
+{
+MaxDegreeOfParallelism = recommendedThreads,
+};
+var taskBatch = $"{currentTime:yyyyMMddHHmm00}";
+
+Parallel.For(0, _kafkaOptions.NumPartitions, options, async groupIndex =>
+{
+var redisCacheTelemetryPacketInfoHashKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoHashKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity, groupIndex, taskBatch)}";
+var redisCacheTelemetryPacketInfoZSetScoresIndexKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoZSetScoresIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity, groupIndex, taskBatch)}";
+
+_ = CreateMeterKafkaTaskMessage(redisCacheTelemetryPacketInfoHashKey, redisCacheTelemetryPacketInfoZSetScoresIndexKey);
+});
}

/// <summary>
@ -575,12 +441,9 @@ namespace JiShe.CollectBus.ScheduledMeterReading
/// <returns></returns>
public virtual async Task AmmeterScheduledMeterFifteenMinuteReading()
{
-Stopwatch stopwatch = new Stopwatch();
-stopwatch.Start();
-
//获取缓存中的电表信息
int timeDensity = 15;
-var currentDateTime = DateTime.Now;
+var currentTime = DateTime.Now;

// 自动计算最佳并发度
int recommendedThreads = DeviceGroupBalanceControl.CalculateOptimalThreadCount();
@ -589,107 +452,84 @@ namespace JiShe.CollectBus.ScheduledMeterReading
{
MaxDegreeOfParallelism = recommendedThreads,
};
-string taskBatch = "20250417155016";
+var taskBatch = $"{currentTime:yyyyMMddHHmm00}";

Parallel.For(0, _kafkaOptions.NumPartitions, options, async groupIndex =>
{
-Console.WriteLine($"15分钟采集电表数据:{groupIndex}");
var redisCacheTelemetryPacketInfoHashKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoHashKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity, groupIndex, taskBatch)}";
-var redisCacheTelemetryPacketInfoSetIndexKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoSetIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity, groupIndex, taskBatch)}";
var redisCacheTelemetryPacketInfoZSetScoresIndexKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoZSetScoresIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity, groupIndex, taskBatch)}";

-List<MeterReadingTelemetryPacketInfo> meterInfos = new List<MeterReadingTelemetryPacketInfo>();
-decimal? cursor = null;
-string member = null;
-bool hasNext;
-do
-{
-var page = await _redisDataCacheService.GetAllPagedData<MeterReadingTelemetryPacketInfo>(
-redisCacheTelemetryPacketInfoHashKey,
-redisCacheTelemetryPacketInfoZSetScoresIndexKey,
-pageSize: 1000,
-lastScore: cursor,
-lastMember: member);
-
-meterInfos.AddRange(page.Items);
-cursor = page.HasNext ? page.NextScore : null;
-member = page.HasNext ? page.NextMember : null;
-hasNext = page.HasNext;
-
-await DeviceGroupBalanceControl.ProcessWithThrottleAsync(
-items: meterInfos,
-deviceIdSelector: data => data.FocusAddress,
-processor: (data, groupIndex) =>
-{
-_= KafkaProducerIssuedMessage(ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName,data, groupIndex);
-}
-);
-
-} while (hasNext);
+_ = CreateMeterKafkaTaskMessage(redisCacheTelemetryPacketInfoHashKey, redisCacheTelemetryPacketInfoZSetScoresIndexKey);
});

-//var redisKeyList = GetTelemetryPacketCacheKeyPrefix(timeDensity, MeterTypeEnum.Ammeter);
-//var fifteenMinutekeyList = await FreeRedisProvider.Instance.KeysAsync(redisKeyList);
-//if (fifteenMinutekeyList == null || fifteenMinutekeyList.Length <= 0)
-//{
-//    _logger.LogError($"{nameof(AmmeterScheduledMeterOneMinuteReading)} {timeDensity}分钟采集电表数据处理时没有获取到缓存信息,-101");
-//    return;
-//}
-
-////获取下发任务缓存数据
-//Dictionary<string, Dictionary<string, MeterReadingRecords>> meterTaskInfos = await GetMeterRedisCacheDictionaryData<MeterReadingRecords>(fifteenMinutekeyList, SystemType, ServerTagName, timeDensity.ToString(), MeterTypeEnum.Ammeter);
-//if (meterTaskInfos == null || meterTaskInfos.Count <= 0)
-//{
-//    _logger.LogError($"{nameof(AmmeterScheduledMeterOneMinuteReading)} {timeDensity}分钟采集电表数据处理时没有获取到缓存信息,-102");
-//    return;
-//}
-
-//List<MeterReadingRecords> meterTaskInfosList = new List<MeterReadingRecords>();
-
-////将取出的缓存任务数据发送到Kafka消息队列中
-//foreach (var focusItem in meterTaskInfos)
-//{
-//    foreach (var ammerterItem in focusItem.Value)
-//    {
-//        var tempMsg = new ScheduledMeterReadingIssuedEventMessage()
-//        {
-//            MessageHexString = ammerterItem.Value.IssuedMessageHexString,
-//            MessageId = ammerterItem.Value.IssuedMessageId,
-//            FocusAddress = ammerterItem.Value.FocusAddress,
-//            TimeDensity = timeDensity.ToString(),
-//        };
-//        //_ = _producerBus.PublishDelayAsync(TimeSpan.FromMicroseconds(500), ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName, tempMsg);
-
-//        _ = _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName, tempMsg);
-
-//        //_ = _producerBus.Publish(tempMsg);
-
-//        meterTaskInfosList.Add(ammerterItem.Value);
-//    }
-//}
-//if (meterTaskInfosList != null && meterTaskInfosList.Count > 0)
-//{
-//    await _meterReadingRecordRepository.InsertManyAsync(meterTaskInfosList, currentDateTime);
-//}
-
-//stopwatch.Stop();
-
-//_logger.LogError($"{nameof(AmmeterScheduledMeterFifteenMinuteReading)} {timeDensity}分钟采集电表数据处理完成,共消耗{stopwatch.ElapsedMilliseconds}毫秒。");
}

/// <summary>
-/// 电表创建发布任务
+/// 创建电表待发送的任务数据
/// </summary>
/// <param name="timeDensity">采集频率</param>
-/// <param name="ammeterInfo">集中器号hash分组的集中器集合数据</param>
+/// <param name="taskBatch">时间格式的任务批次名称</param>
+/// <returns></returns>
+private async Task AmmerterCreatePublishTask(int timeDensity, string taskBatch)
+{
+var timer = Stopwatch.StartNew();
+
+//获取对应频率中的所有电表信息
+var redisCacheMeterInfoHashKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoHashKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}";
+var redisCacheMeterInfoSetIndexKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoSetIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}";
+var redisCacheMeterInfoZSetScoresIndexKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoZSetScoresIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}";
+
+List<AmmeterInfo> meterInfos = new List<AmmeterInfo>();
+decimal? cursor = null;
+string member = null;
+bool hasNext;
+do
+{
+var page = await _redisDataCacheService.GetAllPagedData<AmmeterInfo>(
+redisCacheMeterInfoHashKeyTemp,
+redisCacheMeterInfoZSetScoresIndexKeyTemp,
+pageSize: 1000,
+lastScore: cursor,
+lastMember: member);
+
+meterInfos.AddRange(page.Items);
+cursor = page.HasNext ? page.NextScore : null;
+member = page.HasNext ? page.NextMember : null;
+hasNext = page.HasNext;
+} while (hasNext);
+
+if (meterInfos == null || meterInfos.Count <= 0)
+{
+timer.Stop();
+_logger.LogError($"{nameof(AmmerterCreatePublishTaskAction)} {timeDensity}分钟采集待下发任务创建失败,没有获取到缓存信息,-105");
+return;
+}
+
+await DeviceGroupBalanceControl.ProcessWithThrottleAsync(
+items: meterInfos,
+deviceIdSelector: data => data.FocusAddress,
+processor: (data, groupIndex) =>
+{
+AmmerterCreatePublishTaskAction(timeDensity, data, groupIndex, taskBatch);
+}
+);
+
+timer.Stop();
+_logger.LogInformation($"{nameof(AmmerterCreatePublishTaskAction)} {timeDensity}分钟采集待下发任务创建完成,{timer.ElapsedMilliseconds},总共{meterInfos.Count}表计信息");
+}
+
+/// <summary>
+/// 创建电表待发送的任务数据
+/// </summary>
+/// <param name="timeDensity">采集频率</param>
+/// <param name="ammeterInfo">电表信息</param>
/// <param name="groupIndex">集中器所在分组</param>
/// <param name="taskBatch">时间格式的任务批次名称</param>
/// <returns></returns>
-private void AmmerterCreatePublishTask(int timeDensity
+private void AmmerterCreatePublishTaskAction(int timeDensity
, AmmeterInfo ammeterInfo, int groupIndex, string taskBatch)
{
var handlerPacketBuilder = TelemetryPacketBuilder.AFNHandlersDictionary;
@ -781,7 +621,6 @@ namespace JiShe.CollectBus.ScheduledMeterReading
}
}

-//Dictionary<string, MeterReadingRecords> keyValuePairs = new Dictionary<string, MeterReadingRecords>();
List<MeterReadingTelemetryPacketInfo> taskList = new List<MeterReadingTelemetryPacketInfo>();

foreach (var tempItem in tempCodes)
@ -801,11 +640,16 @@ namespace JiShe.CollectBus.ScheduledMeterReading
var aFNStr = itemCodeArr[0];
var aFN = (AFN)aFNStr.HexToDec();
var fn = int.Parse(itemCodeArr[1]);
-byte[] dataInfos = null;
+TelemetryPacketResponse builderResponse = null;
if (ammeterInfo.AutomaticReport.Equals(1) && aFN == AFN.请求实时数据)
{
//实时数据
-dataInfos = Build3761SendData.BuildAmmeterReadRealTimeDataSendCmd(ammeterInfo.FocusAddress, ammeterInfo.MeteringCode, (ATypeOfDataItems)fn);
+builderResponse = TelemetryPacketBuilder.AFN0C_Fn_Send(new TelemetryPacketRequest()
+{
+FocusAddress = ammeterInfo.FocusAddress,
+Fn = fn,
+Pn = ammeterInfo.MeteringCode
+});
}
else
{
@ -814,7 +658,7 @@ namespace JiShe.CollectBus.ScheduledMeterReading
if (handlerPacketBuilder != null && handlerPacketBuilder.TryGetValue(methonCode
, out var handler))
{
-dataInfos = handler(new TelemetryPacketRequest()
+builderResponse = handler(new TelemetryPacketRequest()
{
FocusAddress = ammeterInfo.FocusAddress,
Fn = fn,
@ -829,7 +673,7 @@ namespace JiShe.CollectBus.ScheduledMeterReading
}
//TODO:特殊表

-if (dataInfos == null || dataInfos.Length <= 0)
+if (builderResponse == null || builderResponse.Data.Length <= 0)
{
//_logger.LogWarning($"{nameof(AmmerterCreatePublishTask)} 集中器{ammeterInfo.FocusAddress}的电表{ammeterInfo.Name}采集项{tempItem}未能正确获取报文。");
continue;
@ -850,36 +694,28 @@ namespace JiShe.CollectBus.ScheduledMeterReading
FocusId = ammeterInfo.FocusId,
AFN = aFN,
Fn = fn,
+Seq = builderResponse.Seq,
+MSA = builderResponse.MSA,
ItemCode = tempItem,
-TaskMark = CommonHelper.GetTaskMark((int)aFN, fn, ammeterInfo.MeteringCode),
+TaskMark = CommonHelper.GetTaskMark((int)aFN, fn, ammeterInfo.MeteringCode, builderResponse.MSA),
+IsSend = false,
ManualOrNot = false,
Pn = ammeterInfo.MeteringCode,
IssuedMessageId = GuidGenerator.Create().ToString(),
-IssuedMessageHexString = Convert.ToHexString(dataInfos),
+IssuedMessageHexString = Convert.ToHexString(builderResponse.Data),
+IsReceived = false,
};

-//meterReadingRecords.CreateDataId(GuidGenerator.Create());

taskList.Add(meterReadingRecords);
}
-//TimeSpan timeSpan = TimeSpan.FromMicroseconds(5);
-//await Task.Delay(timeSpan);

-//return keyValuePairs;
-// await FreeRedisProvider.Instance.HSetAsync(redisCacheKey, keyValuePairs);

-//using (var pipe = FreeRedisProvider.Instance.StartPipe())
-//{
-//    pipe.HSet(redisCacheKey, keyValuePairs);
-//    object[] ret = pipe.EndPipe();
-//}
if (taskList == null
|| taskList.Count() <= 0
|| string.IsNullOrWhiteSpace(redisCacheTelemetryPacketInfoHashKey)
|| string.IsNullOrWhiteSpace(redisCacheTelemetryPacketInfoSetIndexKey)
|| string.IsNullOrWhiteSpace(redisCacheTelemetryPacketInfoZSetScoresIndexKey))
{
-_logger.LogError($"{nameof(AmmerterCreatePublishTask)} {ammeterInfo.Name}的写入参数异常,{redisCacheTelemetryPacketInfoHashKey}:{redisCacheTelemetryPacketInfoSetIndexKey}:{redisCacheTelemetryPacketInfoZSetScoresIndexKey},-101");
+_logger.LogError($"{nameof(AmmerterCreatePublishTaskAction)} {ammeterInfo.Name}的写入参数异常,{redisCacheTelemetryPacketInfoHashKey}:{redisCacheTelemetryPacketInfoSetIndexKey}:{redisCacheTelemetryPacketInfoZSetScoresIndexKey},-101");
return;
}
@ -906,76 +742,6 @@ namespace JiShe.CollectBus.ScheduledMeterReading
//    taskList);
}

-/// <summary>
-/// Kafka 推送消息
-/// </summary>
-/// <param name="topicName">主题名称</param>
-/// <param name="taskRecord">任务记录</param>
-/// <param name="partition">对应分区,也就是集中器号所在的分组序号</param>
-/// <returns></returns>
-private async Task KafkaProducerIssuedMessage(string topicName,
-MeterReadingTelemetryPacketInfo taskRecord,int partition)
-{
-if (string.IsNullOrWhiteSpace(topicName) || taskRecord == null)
-{
-throw new Exception($"{nameof(KafkaProducerIssuedMessage)} 推送消息失败,参数异常,-101");
-}
-
-await _producerService.ProduceAsync(topicName, partition, taskRecord);
-}
-
-private async Task AmmerterCreatePublishTask(int timeDensity, MeterTypeEnum meterType)
-{
-var currentDateTime = DateTime.Now;
-
-var redisKeyList = GetTelemetryPacketCacheKeyPrefix(timeDensity, meterType);
-
-//FreeRedisProvider.Instance.key()
-
-var fifteenMinutekeyList = await FreeRedisProvider.Instance.KeysAsync(redisKeyList);
-if (fifteenMinutekeyList == null || fifteenMinutekeyList.Length <= 0)
-{
-_logger.LogError($"{nameof(AmmeterScheduledMeterOneMinuteReading)} {timeDensity}分钟采集电表数据处理时没有获取到缓存信息,-101");
-return;
-}
-
-//获取下发任务缓存数据
-Dictionary<string, Dictionary<string, MeterReadingRecords>> meterTaskInfos = await GetMeterRedisCacheDictionaryData<MeterReadingRecords>(fifteenMinutekeyList, SystemType, ServerTagName, timeDensity.ToString(), meterType);
-if (meterTaskInfos == null || meterTaskInfos.Count <= 0)
-{
-_logger.LogError($"{nameof(AmmeterScheduledMeterOneMinuteReading)} {timeDensity}分钟采集电表数据处理时没有获取到缓存信息,-102");
-return;
-}
-
-List<MeterReadingRecords> meterTaskInfosList = new List<MeterReadingRecords>();
-
-//将取出的缓存任务数据发送到Kafka消息队列中
-foreach (var focusItem in meterTaskInfos)
-{
-foreach (var ammerterItem in focusItem.Value)
-{
-var tempMsg = new ScheduledMeterReadingIssuedEventMessage()
-{
-MessageHexString = ammerterItem.Value.IssuedMessageHexString,
-MessageId = ammerterItem.Value.IssuedMessageId,
-FocusAddress = ammerterItem.Value.FocusAddress,
-TimeDensity = timeDensity.ToString(),
-};
-//_ = _producerBus.PublishDelayAsync(TimeSpan.FromMicroseconds(500), ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName, tempMsg);
-
-_ = _producerService.ProduceAsync(ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName, tempMsg);
-
-//_ = _producerBus.Publish(tempMsg);
-
-meterTaskInfosList.Add(ammerterItem.Value);
-}
-}
-if (meterTaskInfosList != null && meterTaskInfosList.Count > 0)
-{
-await _meterReadingRecordRepository.InsertManyAsync(meterTaskInfosList, currentDateTime);
-}
-}

#endregion
@ -1044,6 +810,8 @@ namespace JiShe.CollectBus.ScheduledMeterReading
var taskRedisCacheKey = string.Format(RedisConst.CacheTasksToBeIssuedKey, SystemType, ServerTagName, MeterTypeEnum.WaterMeter, itemTimeDensity.Key);
await FreeRedisProvider.Instance.SetAsync(taskRedisCacheKey, nextTask);
}
+
+
_logger.LogInformation($"{nameof(InitAmmeterCacheData)} 初始化水表缓存数据完成");
}
@ -1109,12 +877,58 @@ namespace JiShe.CollectBus.ScheduledMeterReading
_logger.LogInformation($"{nameof(WatermeterScheduledMeterAutoReading)} {timeDensity}分钟采集水表数据处理完成");
}

+/// <summary>
+/// 创建水表待发送的任务数据
+/// </summary>
+/// <param name="timeDensity">采集频率</param>
+/// <param name="meterInfo">水表信息</param>
+/// <param name="groupIndex">集中器所在分组</param>
+/// <param name="taskBatch">时间格式的任务批次名称</param>
+/// <returns></returns>
+private void WatermeterCreatePublishTaskAction(int timeDensity
+, WatermeterInfo meterInfo, int groupIndex, string taskBatch)
+{
+var handlerPacketBuilder = TelemetryPacketBuilder.AFNHandlersDictionary;
+
+var currentTime = DateTime.Now;
+var pendingCopyReadTime = currentTime.AddMinutes(timeDensity);
+
+var redisCacheTelemetryPacketInfoHashKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoHashKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity, groupIndex, taskBatch)}";
+var redisCacheTelemetryPacketInfoSetIndexKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoSetIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity, groupIndex, taskBatch)}";
+var redisCacheTelemetryPacketInfoZSetScoresIndexKey = $"{string.Format(RedisConst.CacheTelemetryPacketInfoZSetScoresIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity, groupIndex, taskBatch)}";
+
+var taskInfo = new MeterReadingTelemetryPacketInfo()
+{
+Seq= null,
+};
+//
+Build188SendData.Build188WaterMeterReadingSendDataUnit(meterInfo.Address);
+
+using (var pipe = FreeRedisProvider.Instance.StartPipe())
+{
+// 主数据存储Hash
+pipe.HSet(redisCacheTelemetryPacketInfoHashKey, taskInfo.MemberId, taskInfo.Serialize());
+
+// Set索引缓存
+pipe.SAdd(redisCacheTelemetryPacketInfoSetIndexKey, taskInfo.MemberId);
+
+// ZSET索引缓存Key
+pipe.ZAdd(redisCacheTelemetryPacketInfoZSetScoresIndexKey, taskInfo.ScoreValue, taskInfo.MemberId);
+
+pipe.EndPipe();
+}
+}
+
#endregion

#region 公共处理方法

/// <summary>
/// 判断是否需要生成采集指令
/// </summary>
@ -1131,39 +945,6 @@ namespace JiShe.CollectBus.ScheduledMeterReading
return false;
}

-///// <summary>
-///// 指定时间对比当前时间
-///// </summary>
-///// <param name="lastTime"></param>
-///// <param name="subtrahend"></param>
-///// <returns></returns>
-//private bool IsGennerateCmd(DateTime lastTime, int subtrahend = 0)
-//{
-//    if (DateTime.Now.AddDays(subtrahend) >= lastTime)//当前时间减去一天,大于等于最后在线时间,不再生成该集中器下表生成采集指令
-//        return false;
-//    return true;
-//}
-
-///// <summary>
-///// 缓存下一个时间的任务
-///// </summary>
-///// <param name="timeDensity">采集频率</param>
-///// <param name="meterType">表类型</param>
-///// <returns></returns>
-//private async Task CacheNextTaskData(int timeDensity, MeterTypeEnum meterType)
-//{
-//    //缓存下一个时间的任务
-//    TasksToBeIssueModel nextTask = new TasksToBeIssueModel()
-//    {
-//        TimeDensity = timeDensity,
-//        NextTask = DateTime.Now.AddMinutes(timeDensity)
-//    };
-
-//    var redisCacheKey = string.Format(RedisConst.CacheTasksToBeIssuedKey, SystemType, ServerTagName, meterType, timeDensity);
-//    await FreeRedisProvider.Instance.SetAsync(redisCacheKey, nextTask);
-//}

/// <summary>
/// 获取缓存表计下发指令缓存key前缀
/// </summary>
@ -1175,6 +956,130 @@ namespace JiShe.CollectBus.ScheduledMeterReading
return $"{string.Format(RedisConst.CacheTelemetryPacketInfoHashKey, SystemType, ServerTagName, meterType, timeDensity)}*";
}

+/// <summary>
+/// 创建表的待发送的任务数据
+/// </summary>
+/// <param name="timeDensity">采集频率</param>
+/// <param name="taskBatch">时间格式的任务批次名称</param>
+/// <param name="meterType">表类型</param>
+/// <param name="taskCreateAction">具体的创建任务的委托</param>
+/// <returns></returns>
+private async Task CreateMeterPublishTask<T>(int timeDensity, string taskBatch, MeterTypeEnum meterType, Action<int, T, int, string> taskCreateAction) where T : DeviceCacheBasicModel
+{
+var timer = Stopwatch.StartNew();
+
+//获取对应频率中的所有电表信息
+var redisCacheMeterInfoHashKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoHashKey, SystemType, ServerTagName, meterType, timeDensity)}";
+var redisCacheMeterInfoSetIndexKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoSetIndexKey, SystemType, ServerTagName, meterType, timeDensity)}";
+var redisCacheMeterInfoZSetScoresIndexKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoZSetScoresIndexKey, SystemType, ServerTagName, meterType, timeDensity)}";
+
+List<T> meterInfos = new List<T>();
+decimal? cursor = null;
+string member = null;
+bool hasNext;
+do
+{
+var page = await _redisDataCacheService.GetAllPagedData<T>(
+redisCacheMeterInfoHashKeyTemp,
+redisCacheMeterInfoZSetScoresIndexKeyTemp,
+pageSize: 1000,
+lastScore: cursor,
+lastMember: member);
+
+meterInfos.AddRange(page.Items);
+cursor = page.HasNext ? page.NextScore : null;
+member = page.HasNext ? page.NextMember : null;
+hasNext = page.HasNext;
+} while (hasNext);
+
+if (meterInfos == null || meterInfos.Count <= 0)
+{
+timer.Stop();
+_logger.LogError($"{nameof(CreateMeterPublishTask)} {timeDensity}分钟采集待下发任务创建失败,没有获取到缓存信息,-105");
+return;
+}
+
+await DeviceGroupBalanceControl.ProcessWithThrottleAsync(
+items: meterInfos,
+deviceIdSelector: data => data.FocusAddress,
+processor: (data, groupIndex) =>
+{
+taskCreateAction(timeDensity, data, groupIndex, taskBatch);
+}
+);
+
+timer.Stop();
+_logger.LogInformation($"{nameof(CreateMeterPublishTask)} {meterType} {timeDensity}分钟采集待下发任务创建完成,{timer.ElapsedMilliseconds},总共{meterInfos.Count}表计信息");
+}
+
+/// <summary>
+/// 创建Kafka消息
+/// </summary>
+/// <param name="redisCacheTelemetryPacketInfoHashKey"></param>
+/// <param name="redisCacheTelemetryPacketInfoZSetScoresIndexKey"></param>
+/// <returns></returns>
+private async Task CreateMeterKafkaTaskMessage(
+string redisCacheTelemetryPacketInfoHashKey,
+string redisCacheTelemetryPacketInfoZSetScoresIndexKey)
+{
+if (string.IsNullOrWhiteSpace(redisCacheTelemetryPacketInfoHashKey) || string.IsNullOrWhiteSpace(redisCacheTelemetryPacketInfoHashKey))
+{
+throw new Exception($"{nameof(CreateMeterKafkaTaskMessage)} 创建Kafka消息失败,参数异常,-101");
+}
+
+decimal? cursor = null;
+string member = null;
+bool hasNext;
+var stopwatch = Stopwatch.StartNew();
+do
+{
+var page = await _redisDataCacheService.GetAllPagedData<MeterReadingTelemetryPacketInfo>(
+redisCacheTelemetryPacketInfoHashKey,
+redisCacheTelemetryPacketInfoZSetScoresIndexKey,
+pageSize: 1000,
+lastScore: cursor,
+lastMember: member);
+
+cursor = page.HasNext ? page.NextScore : null;
+member = page.HasNext ? page.NextMember : null;
+hasNext = page.HasNext;
+
+await DeviceGroupBalanceControl.ProcessWithThrottleAsync(
+items: page.Items,
+deviceIdSelector: data => data.FocusAddress,
+processor: (data, groupIndex) =>
+{
+_ = KafkaProducerIssuedMessageAction(ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName, data, groupIndex);
+}
+);
+
+} while (hasNext);
+
+stopwatch.Stop();
+_logger.LogError($"{nameof(CreateMeterKafkaTaskMessage)} {redisCacheTelemetryPacketInfoHashKey}采集推送完成,共消耗{stopwatch.ElapsedMilliseconds}毫秒。");
+}
+
+/// <summary>
+/// Kafka 推送消息
+/// </summary>
+/// <param name="topicName">主题名称</param>
+/// <param name="taskRecord">任务记录</param>
+/// <param name="partition">对应分区,也就是集中器号所在的分组序号</param>
+/// <returns></returns>
+private async Task KafkaProducerIssuedMessageAction(string topicName,
+MeterReadingTelemetryPacketInfo taskRecord, int partition)
+{
+if (string.IsNullOrWhiteSpace(topicName) || taskRecord == null)
+{
+throw new Exception($"{nameof(KafkaProducerIssuedMessageAction)} 推送消息失败,参数异常,-101");
+}
+
+await _producerService.ProduceAsync(topicName, partition, taskRecord);
+}
+
#endregion

}
@ -13,7 +13,7 @@ using JiShe.CollectBus.IotSystems.Devices;
using JiShe.CollectBus.IotSystems.MessageIssueds;
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
using JiShe.CollectBus.IotSystems.Watermeter;
-using JiShe.CollectBus.Kafka;
+using JiShe.CollectBus.Kafka.Internal;
using JiShe.CollectBus.Kafka.Producer;
using JiShe.CollectBus.Repository;
using JiShe.CollectBus.Repository.MeterReadingRecord;
@ -5,7 +5,6 @@ using JiShe.CollectBus.Common.Models;
using JiShe.CollectBus.IotSystems.Devices;
using JiShe.CollectBus.IotSystems.MessageReceiveds;
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
-using JiShe.CollectBus.Kafka;
using JiShe.CollectBus.Kafka.Attributes;
using JiShe.CollectBus.Protocol.Contracts;
using JiShe.CollectBus.Protocol.Contracts.Interfaces;
@ -19,6 +18,8 @@ using System.Threading.Tasks;
using JiShe.CollectBus.IoTDB.Interface;
using TouchSocket.Sockets;
using Volo.Abp.Domain.Repositories;
+using System.Collections.Generic;
+using JiShe.CollectBus.Kafka.Internal;

namespace JiShe.CollectBus.Subscribers
{
@@ -58,67 +59,75 @@ namespace JiShe.CollectBus.Subscribers
             _dbProvider = dbProvider;
         }

-        [KafkaSubscribe(ProtocolConst.SubscriberLoginIssuedEventName)]
+        [KafkaSubscribe(ProtocolConst.SubscriberLoginIssuedEventName, EnableBatch = true)]
         //[CapSubscribe(ProtocolConst.SubscriberLoginIssuedEventName)]
-        public async Task<ISubscribeAck> LoginIssuedEvent(IssuedEventMessage issuedEventMessage)
+        public async Task<ISubscribeAck> LoginIssuedEvent(List<IssuedEventMessage> issuedEventMessages)
         {
             bool isAck = false;
-            switch (issuedEventMessage.Type)
+            foreach (var issuedEventMessage in issuedEventMessages)
             {
-                case IssuedEventType.Heartbeat:
-                    break;
-                case IssuedEventType.Login:
-                    _logger.LogWarning($"集中器地址{issuedEventMessage.ClientId} 登录回复下发内容:{issuedEventMessage.Serialize()}");
-                    var loginEntity = await _messageReceivedLoginEventRepository.GetAsync(a => a.MessageId == issuedEventMessage.MessageId);
-                    loginEntity.AckTime = Clock.Now;
-                    loginEntity.IsAck = true;
-                    await _messageReceivedLoginEventRepository.UpdateAsync(loginEntity);
-                    isAck = true;
-                    break;
-                case IssuedEventType.Data:
-                    break;
-                default:
-                    throw new ArgumentOutOfRangeException();
+                switch (issuedEventMessage.Type)
+                {
+                    case IssuedEventType.Heartbeat:
+                        break;
+                    case IssuedEventType.Login:
+                        _logger.LogWarning($"集中器地址{issuedEventMessage.ClientId} 登录回复下发内容:{issuedEventMessage.Serialize()}");
+                        var loginEntity = await _messageReceivedLoginEventRepository.GetAsync(a => a.MessageId == issuedEventMessage.MessageId);
+                        loginEntity.AckTime = Clock.Now;
+                        loginEntity.IsAck = true;
+                        await _messageReceivedLoginEventRepository.UpdateAsync(loginEntity);
+                        isAck = true;
+                        break;
+                    case IssuedEventType.Data:
+                        break;
+                    default:
+                        throw new ArgumentOutOfRangeException();
+                }
+
+                //var device = await _deviceRepository.FindAsync(a => a.Number == issuedEventMessage.DeviceNo);
+                //if (device != null)
+                //{
+                //    await _tcpService.SendAsync(device.ClientId, issuedEventMessage.Message);
+                //}
+
+                await _tcpService.SendAsync(issuedEventMessage.ClientId, issuedEventMessage.Message);
             }

-            //var device = await _deviceRepository.FindAsync(a => a.Number == issuedEventMessage.DeviceNo);
-            //if (device != null)
-            //{
-            //    await _tcpService.SendAsync(device.ClientId, issuedEventMessage.Message);
-            //}
-
-            await _tcpService.SendAsync(issuedEventMessage.ClientId, issuedEventMessage.Message);
             return isAck? SubscribeAck.Success(): SubscribeAck.Fail();
         }

-        [KafkaSubscribe(ProtocolConst.SubscriberHeartbeatIssuedEventName)]
+        [KafkaSubscribe(ProtocolConst.SubscriberHeartbeatIssuedEventName, EnableBatch = true)]
         //[CapSubscribe(ProtocolConst.SubscriberHeartbeatIssuedEventName)]
-        public async Task<ISubscribeAck> HeartbeatIssuedEvent(IssuedEventMessage issuedEventMessage)
+        public async Task<ISubscribeAck> HeartbeatIssuedEvent(List<IssuedEventMessage> issuedEventMessages)
         {
             bool isAck = false;
-            switch (issuedEventMessage.Type)
+            foreach (var issuedEventMessage in issuedEventMessages)
             {
-                case IssuedEventType.Heartbeat:
-                    _logger.LogWarning($"集中器地址{issuedEventMessage.ClientId} 心跳回复下发内容:{issuedEventMessage.Serialize()}");
-                    var heartbeatEntity = await _messageReceivedHeartbeatEventRepository.GetAsync(a => a.MessageId == issuedEventMessage.MessageId);
-                    heartbeatEntity.AckTime = Clock.Now;
-                    heartbeatEntity.IsAck = true;
-                    await _messageReceivedHeartbeatEventRepository.UpdateAsync(heartbeatEntity);
-                    isAck = true;
-                    break;
-                case IssuedEventType.Data:
-                    break;
-                default:
-                    throw new ArgumentOutOfRangeException();
+                switch (issuedEventMessage.Type)
+                {
+                    case IssuedEventType.Heartbeat:
+                        _logger.LogWarning($"集中器地址{issuedEventMessage.ClientId} 心跳回复下发内容:{issuedEventMessage.Serialize()}");
+                        var heartbeatEntity = await _messageReceivedHeartbeatEventRepository.GetAsync(a => a.MessageId == issuedEventMessage.MessageId);
+                        heartbeatEntity.AckTime = Clock.Now;
+                        heartbeatEntity.IsAck = true;
+                        await _messageReceivedHeartbeatEventRepository.UpdateAsync(heartbeatEntity);
+                        isAck = true;
+                        break;
+                    case IssuedEventType.Data:
+                        break;
+                    default:
+                        throw new ArgumentOutOfRangeException();
+                }
+
+                //var device = await _deviceRepository.FindAsync(a => a.Number == issuedEventMessage.DeviceNo);
+                //if (device != null)
+                //{
+                //    await _tcpService.SendAsync(device.ClientId, issuedEventMessage.Message);
+                //}
+
+                await _tcpService.SendAsync(issuedEventMessage.ClientId, issuedEventMessage.Message);
             }

-            //var device = await _deviceRepository.FindAsync(a => a.Number == issuedEventMessage.DeviceNo);
-            //if (device != null)
-            //{
-            //    await _tcpService.SendAsync(device.ClientId, issuedEventMessage.Message);
-            //}
-
-            await _tcpService.SendAsync(issuedEventMessage.ClientId, issuedEventMessage.Message);
             return isAck ? SubscribeAck.Success() : SubscribeAck.Fail();
         }

@@ -179,37 +188,44 @@ namespace JiShe.CollectBus.Subscribers
             return SubscribeAck.Success();
         }

-        [KafkaSubscribe(ProtocolConst.SubscriberHeartbeatReceivedEventName)]
+        [KafkaSubscribe(ProtocolConst.SubscriberHeartbeatReceivedEventName, EnableBatch = true)]
         //[CapSubscribe(ProtocolConst.SubscriberHeartbeatReceivedEventName)]
-        public async Task<ISubscribeAck> ReceivedHeartbeatEvent(MessageReceivedHeartbeat receivedHeartbeatMessage)
+        public async Task<ISubscribeAck> ReceivedHeartbeatEvent(List<MessageReceivedHeartbeat> receivedHeartbeatMessages)
         {
-            var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
-            if (protocolPlugin == null)
+            foreach (var receivedHeartbeatMessage in receivedHeartbeatMessages)
             {
-                _logger.LogError("协议不存在!");
-            }
-            else
-            {
-                await protocolPlugin.HeartbeatAsync(receivedHeartbeatMessage);
-                await _messageReceivedHeartbeatEventRepository.InsertAsync(receivedHeartbeatMessage);
+                var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
+                if (protocolPlugin == null)
+                {
+                    _logger.LogError("协议不存在!");
+                }
+                else
+                {
+                    await protocolPlugin.HeartbeatAsync(receivedHeartbeatMessage);
+                    await _messageReceivedHeartbeatEventRepository.InsertAsync(receivedHeartbeatMessage);
+                }
             }
             return SubscribeAck.Success();
         }

-        [KafkaSubscribe(ProtocolConst.SubscriberLoginReceivedEventName)]
+        [KafkaSubscribe(ProtocolConst.SubscriberLoginReceivedEventName,EnableBatch =true)]
         //[CapSubscribe(ProtocolConst.SubscriberLoginReceivedEventName)]
-        public async Task<ISubscribeAck> ReceivedLoginEvent(MessageReceivedLogin receivedLoginMessage)
+        public async Task<ISubscribeAck> ReceivedLoginEvent(List<MessageReceivedLogin> receivedLoginMessages)
         {
-            var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
-            if (protocolPlugin == null)
+            foreach (var receivedLoginMessage in receivedLoginMessages)
             {
-                _logger.LogError("协议不存在!");
-            }
-            else
-            {
-                await protocolPlugin.LoginAsync(receivedLoginMessage);
-                await _messageReceivedLoginEventRepository.InsertAsync(receivedLoginMessage);
+                var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
+                if (protocolPlugin == null)
+                {
+                    _logger.LogError("协议不存在!");
+                }
+                else
+                {
+                    await protocolPlugin.LoginAsync(receivedLoginMessage);
+                    await _messageReceivedLoginEventRepository.InsertAsync(receivedLoginMessage);
+                }
             }

             return SubscribeAck.Success();
         }
     }
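Net effect of the hunks above: the issued- and received-event subscribers switch from per-message to batch consumption. `EnableBatch = true` on `[KafkaSubscribe]` makes the handler receive a whole `List<T>` per delivery, and the single `ISubscribeAck` returned at the end acknowledges the entire batch. A minimal sketch of that handler shape, reusing the attribute, message and ack types exactly as they appear in this diff (it compiles only inside this repository; the body is illustrative):

```csharp
using System.Collections.Generic;
using System.Threading.Tasks;
using JiShe.CollectBus.Kafka.Attributes;
using JiShe.CollectBus.Kafka.Internal; // assumed home of ISubscribeAck/SubscribeAck, per the using changes above

public class ExampleBatchSubscriber
{
    [KafkaSubscribe(ProtocolConst.SubscriberLoginIssuedEventName, EnableBatch = true)]
    public async Task<ISubscribeAck> LoginIssuedEvent(List<IssuedEventMessage> messages)
    {
        foreach (var message in messages)
        {
            // handle each message; an unhandled exception here fails the whole batch
            await Task.CompletedTask;
        }

        return SubscribeAck.Success(); // one ack covers every message in the list
    }
}
```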
@@ -8,8 +8,8 @@ using JiShe.CollectBus.IotSystems.Devices;
 using JiShe.CollectBus.IotSystems.MessageIssueds;
 using JiShe.CollectBus.IotSystems.MessageReceiveds;
 using JiShe.CollectBus.IotSystems.MeterReadingRecords;
-using JiShe.CollectBus.Kafka;
 using JiShe.CollectBus.Kafka.Attributes;
+using JiShe.CollectBus.Kafka.Internal;
 using JiShe.CollectBus.Protocol.Contracts;
 using JiShe.CollectBus.Protocol.Contracts.Interfaces;
 using JiShe.CollectBus.Repository.MeterReadingRecord;
@@ -1,4 +1,5 @@
-using System.Threading;
+using System;
+using System.Threading;
 using System.Threading.Tasks;
 using Hangfire;
 using JiShe.CollectBus.Common.Consts;
@@ -27,13 +28,15 @@ namespace JiShe.CollectBus.Workers
         {
             _logger = logger;
             RecurringJobId = nameof(CreateToBeIssueTaskWorker);
-            CronExpression = "* 10 * * * *";
+            CronExpression = "0 0/1 * * * *";
+            TimeZone = TimeZoneInfo.Local;
             this._scheduledMeterReadingService = scheduledMeterReadingService;
         }


         public override async Task DoWorkAsync(CancellationToken cancellationToken = new CancellationToken())
         {
+            _logger.LogError($"{DateTime.Now}");
             // await _scheduledMeterReadingService.CreateToBeIssueTasks();
         }
     }
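All four recurring workers change their schedule the same way, so one example covers them. These are six-field Hangfire cron expressions (seconds come first): the old `* 15 * * * *` matches every second of minute 15, so it fires roughly sixty times once an hour, while the new `0 0/15 * * * *` fires once at second 0 of every 15th minute; the added `TimeZone = TimeZoneInfo.Local` pins the schedule to server-local time. A small check of the two expressions with the Cronos package (the parser Hangfire uses internally; the package reference and timestamps are illustrative):

```csharp
using System;
using Cronos; // NuGet: Cronos (assumed available)

class CronCheck
{
    static void Main()
    {
        var oldExpr = CronExpression.Parse("* 15 * * * *", CronFormat.IncludeSeconds);
        var newExpr = CronExpression.Parse("0 0/15 * * * *", CronFormat.IncludeSeconds);

        var from = new DateTime(2024, 5, 20, 10, 7, 0, DateTimeKind.Utc);

        Console.WriteLine(oldExpr.GetNextOccurrence(from)); // 10:15:00, then 10:15:01, 10:15:02, ...
        Console.WriteLine(newExpr.GetNextOccurrence(from)); // 10:15:00, then 10:30:00, 10:45:00, ...
    }
}
```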
@@ -1,4 +1,5 @@
-using System.Threading;
+using System;
+using System.Threading;
 using System.Threading.Tasks;
 using Hangfire;
 using JiShe.CollectBus.ScheduledMeterReading;
@@ -26,7 +27,8 @@ namespace JiShe.CollectBus.Workers
         {
             _logger = logger;
             RecurringJobId = nameof(SubscriberFifteenMinuteWorker);
-            CronExpression = "* 15 * * * *";
+            CronExpression = "0 0/15 * * * *";
+            TimeZone = TimeZoneInfo.Local;
             this._scheduledMeterReadingService = scheduledMeterReadingService;
         }

@@ -1,4 +1,5 @@
-using System.Threading;
+using System;
+using System.Threading;
 using System.Threading.Tasks;
 using Hangfire;
 using JiShe.CollectBus.ScheduledMeterReading;
@@ -26,7 +27,8 @@ namespace JiShe.CollectBus.Workers
         {
             _logger = logger;
             RecurringJobId = nameof(SubscriberFiveMinuteWorker);
-            CronExpression = "* 5 * * * *";
+            CronExpression = "0 0/5 * * * *";
+            TimeZone = TimeZoneInfo.Local;
             this._scheduledMeterReadingService = scheduledMeterReadingService;
         }

@@ -1,4 +1,5 @@
-using System.Threading;
+using System;
+using System.Threading;
 using System.Threading.Tasks;
 using Hangfire;
 using JiShe.CollectBus.ScheduledMeterReading;
@@ -26,7 +27,8 @@ namespace JiShe.CollectBus.Workers
         {
             _logger = logger;
             RecurringJobId = nameof(SubscriberOneMinuteWorker);
-            CronExpression = "* 1 * * * *";
+            CronExpression = "0 0/1 * * * *";
+            TimeZone = TimeZoneInfo.Local;
             this._scheduledMeterReadingService = scheduledMeterReadingService;
         }

@@ -27,11 +27,6 @@ namespace JiShe.CollectBus.Ammeters
         /// </summary>
         public string Name { get; set; }

-        /// <summary>
-        /// 集中器地址
-        /// </summary>
-        public string FocusAddress { get; set; }
-
         /// <summary>
         /// 集中器地址
         /// </summary>
@@ -34,7 +34,7 @@ namespace JiShe.CollectBus.IotSystems.MeterReadingRecords
         /// <summary>
         /// 任务数据唯一标记
         /// </summary>
-        public string TaskMark { get; set; }
+        public decimal TaskMark { get; set; }

         /// <summary>
         /// 时间戳标记,IoTDB时间列处理,上报通过构建标记获取唯一标记匹配时间戳。
@@ -97,6 +97,20 @@ namespace JiShe.CollectBus.IotSystems.MeterReadingRecords
         /// </summary>
         public string ItemCode { get; set;}

+        /// <summary>
+        /// 帧序列域SEQ
+        /// </summary>
+        public required Seq Seq { get; set; }
+
+        /// <summary>
+        /// 地址域A3的主站地址MSA
+        /// </summary>
+        public int MSA { get; set; }
+
+        /// <summary>
+        /// 是否发送
+        /// </summary>
+        public bool IsSend { get; set; }
+
         /// <summary>
         /// 创建时间
@@ -133,6 +147,11 @@ namespace JiShe.CollectBus.IotSystems.MeterReadingRecords
         /// </summary>
         public string ReceivedRemark { get; set; }

+        /// <summary>
+        /// 是否已上报
+        /// </summary>
+        public bool IsReceived { get; set; }
+
         //public void CreateDataId(Guid Id)
         //{
         //    this.Id = Id;
@@ -1,4 +1,5 @@
-using JiShe.CollectBus.Common.Enums;
+using FreeSql.DataAnnotations;
+using JiShe.CollectBus.Common.Enums;
 using JiShe.CollectBus.Common.Models;
 using System;
 using System.Collections.Generic;
@@ -13,6 +14,18 @@ namespace JiShe.CollectBus.IotSystems.Watermeter
     /// </summary>
     public class WatermeterInfo: DeviceCacheBasicModel
     {
+        /// <summary>
+        /// 关系映射标识,用于ZSet的Member字段和Set的Value字段,具体值可以根据不同业务场景进行定义
+        /// </summary>
+        [Column(IsIgnore = true)]
+        public override string MemberId => $"{FocusId}:{MeterId}";
+
+        /// <summary>
+        /// ZSet排序索引分数值,具体值可以根据不同业务场景进行定义,例如时间戳
+        /// </summary>
+        [Column(IsIgnore = true)]
+        public override long ScoreValue => ((long)FocusId << 32) | (uint)DateTime.Now.Ticks;
+
         /// <summary>
         /// 水表名称
         /// </summary>
@@ -22,11 +35,6 @@ namespace JiShe.CollectBus.IotSystems.Watermeter
         /// </summary>
         public string Password { get; set; }

-        /// <summary>
-        /// 集中器地址
-        /// </summary>
-        public string FocusAddress { get; set; }
-
         /// <summary>
         /// 一个集中器下的[MeteringCode]必须唯一。 PN
         /// </summary>
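The new WatermeterInfo overrides pack the focus/meter relationship into the Redis ZSet entry: MemberId is the `FocusId:MeterId` pair, and ScoreValue puts FocusId in the high 32 bits with the truncated tick count below it. A small stand-alone sketch of that packing and how it splits back apart (the FocusId value is made up):

```csharp
using System;

class ScorePackingDemo
{
    static void Main()
    {
        long focusId = 42; // hypothetical FocusId

        // Same shape as WatermeterInfo.ScoreValue: high 32 bits = FocusId, low 32 bits = truncated Ticks
        long score = (focusId << 32) | (uint)DateTime.Now.Ticks;

        long recoveredFocusId = score >> 32;        // 42 (high 32 bits)
        uint tickLow = (uint)(score & 0xFFFFFFFF);  // low word of Ticks; wraps roughly every 7 minutes

        Console.WriteLine($"{recoveredFocusId} / {tickLow}");
    }
}
```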
@@ -22,7 +22,7 @@ namespace JiShe.CollectBus.Common.BuildSendDatas
         /// <param name="request.FocusAddress"></param>
         /// <param name="request.Fn"></param>
         /// <param name="request.Pn"></param>
-        public delegate byte[] AFNDelegate(TelemetryPacketRequest request);
+        public delegate TelemetryPacketResponse AFNDelegate(TelemetryPacketRequest request);

         /// <summary>
         /// 编码与方法的映射表
@@ -45,7 +45,7 @@ namespace JiShe.CollectBus.Common.BuildSendDatas
         }

         #region AFN_00H 确认∕否认
-        public static byte[] AFN00_Fn_Send(TelemetryPacketRequest request)
+        public static TelemetryPacketResponse AFN00_Fn_Send(TelemetryPacketRequest request)
         {
             var reqParameter = new ReqParameter2()
             {
@@ -64,13 +64,13 @@ namespace JiShe.CollectBus.Common.BuildSendDatas
                 Fn = request.Fn
             };
             var bytes = Build3761SendData.BuildSendCommandBytes(reqParameter);
-            return bytes;
+            return new TelemetryPacketResponse() { Seq = reqParameter.Seq, Data = bytes, MSA = reqParameter.MSA, };
         }
         #endregion


         #region AFN_01H 复位命令
-        public static byte[] AFN01_Fn_Send(TelemetryPacketRequest request)
+        public static TelemetryPacketResponse AFN01_Fn_Send(TelemetryPacketRequest request)
         {
             var reqParameter = new ReqParameter2()
             {
@@ -89,13 +89,13 @@ namespace JiShe.CollectBus.Common.BuildSendDatas
                 Fn = request.Fn
             };
             var bytes = Build3761SendData.BuildSendCommandBytes(reqParameter);
-            return bytes;
+            return new TelemetryPacketResponse() { Seq = reqParameter.Seq, Data = bytes, MSA = reqParameter.MSA, };
         }
         #endregion


         #region AFN_02H 链路接口检测
-        public static byte[] AFN02_Fn_Send(TelemetryPacketRequest request)
+        public static TelemetryPacketResponse AFN02_Fn_Send(TelemetryPacketRequest request)
         {
             var reqParameter = new ReqParameter2()
             {
@@ -114,12 +114,12 @@ namespace JiShe.CollectBus.Common.BuildSendDatas
                 Fn = request.Fn
             };
             var bytes = Build3761SendData.BuildSendCommandBytes(reqParameter);
-            return bytes;
+            return new TelemetryPacketResponse() { Seq = reqParameter.Seq, Data = bytes, MSA = reqParameter.MSA, };
         }
         #endregion

         #region AFN_04H 设置参数
-        public static byte[] AFN04_Fn_Send(TelemetryPacketRequest request)
+        public static TelemetryPacketResponse AFN04_Fn_Send(TelemetryPacketRequest request)
         {
             var reqParameter = new ReqParameter2()
             {
@@ -138,13 +138,13 @@ namespace JiShe.CollectBus.Common.BuildSendDatas
                 Fn = request.Fn
             };
             var bytes = Build3761SendData.BuildSendCommandBytes(reqParameter);
-            return bytes;
+            return new TelemetryPacketResponse() { Seq = reqParameter.Seq, Data = bytes, MSA = reqParameter.MSA, };
         }

         #endregion

         #region AFN_05H 控制命令
-        public static byte[] AFN05_Fn_Send(TelemetryPacketRequest request)
+        public static TelemetryPacketResponse AFN05_Fn_Send(TelemetryPacketRequest request)
         {
             var reqParameter = new ReqParameter2()
             {
@@ -163,12 +163,12 @@ namespace JiShe.CollectBus.Common.BuildSendDatas
                 Fn = request.Fn
             };
             var bytes = Build3761SendData.BuildSendCommandBytes(reqParameter);
-            return bytes;
+            return new TelemetryPacketResponse() { Seq = reqParameter.Seq, Data = bytes, MSA = reqParameter.MSA, };
         }
         #endregion

         #region AFN_09H 请求终端配置及信息
-        public static byte[] AFN09_Fn_Send(TelemetryPacketRequest request)
+        public static TelemetryPacketResponse AFN09_Fn_Send(TelemetryPacketRequest request)
         {
             var reqParameter = new ReqParameter2()
             {
@@ -187,13 +187,13 @@ namespace JiShe.CollectBus.Common.BuildSendDatas
                 Fn = request.Fn
             };
             var bytes = Build3761SendData.BuildSendCommandBytes(reqParameter);
-            return bytes;
+            return new TelemetryPacketResponse() { Seq = reqParameter.Seq, Data = bytes, MSA = reqParameter.MSA, };
         }

         #endregion

         #region AFN_0AH 查询参数
-        public static byte[] AFN0A_Fn_Send(TelemetryPacketRequest request)
+        public static TelemetryPacketResponse AFN0A_Fn_Send(TelemetryPacketRequest request)
         {
             var reqParameter = new ReqParameter2()
             {
@@ -212,12 +212,12 @@ namespace JiShe.CollectBus.Common.BuildSendDatas
                 Fn = request.Fn
             };
             var bytes = Build3761SendData.BuildSendCommandBytes(reqParameter);
-            return bytes;
+            return new TelemetryPacketResponse() { Seq = reqParameter.Seq, Data = bytes, MSA = reqParameter.MSA, };
         }
         #endregion

         #region AFN_0CH 请求一类数据
-        public static byte[] AFN0C_Fn_Send(TelemetryPacketRequest request)
+        public static TelemetryPacketResponse AFN0C_Fn_Send(TelemetryPacketRequest request)
         {
             var reqParameter = new ReqParameter2()
             {
@@ -236,12 +236,12 @@ namespace JiShe.CollectBus.Common.BuildSendDatas
                 Fn = request.Fn
             };
             var bytes = Build3761SendData.BuildSendCommandBytes(reqParameter);
-            return bytes;
+            return new TelemetryPacketResponse() { Seq = reqParameter.Seq, Data = bytes, MSA = reqParameter.MSA, };
         }
         #endregion

         #region AFN_0DH 请求二类数据
-        public static byte[] AFN0D_Fn_Send(TelemetryPacketRequest request)
+        public static TelemetryPacketResponse AFN0D_Fn_Send(TelemetryPacketRequest request)
         {
             var reqParameter = new ReqParameter2()
             {
@@ -260,12 +260,12 @@ namespace JiShe.CollectBus.Common.BuildSendDatas
                 Fn = request.Fn
             };
             var bytes = Build3761SendData.BuildSendCommandBytes(reqParameter);
-            return bytes;
+            return new TelemetryPacketResponse() { Seq = reqParameter.Seq, Data = bytes, MSA = reqParameter.MSA, };
         }
         #endregion

         #region AFN10H 数据转发
-        public static byte[] AFN10_Fn_Send(TelemetryPacketRequest request)
+        public static TelemetryPacketResponse AFN10_Fn_Send(TelemetryPacketRequest request)
         {
             var reqParameter = new ReqParameter2()
             {
@@ -283,8 +283,8 @@ namespace JiShe.CollectBus.Common.BuildSendDatas
                 Pn = request.Pn,
                 Fn = request.Fn
             };
-            var bytes = Build3761SendData.BuildSendCommandBytes(reqParameter,request.DataUnit);
-            return bytes;
+            var bytes = Build3761SendData.BuildSendCommandBytes(reqParameter, request.DataUnit);
+            return new TelemetryPacketResponse() { Seq = reqParameter.Seq, Data = bytes, MSA = reqParameter.MSA, };
         }

         #region SpecialAmmeter 特殊电表转发
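From this point on, every `AFN..._Fn_Send` builder returns a `TelemetryPacketResponse` instead of raw bytes, so the caller gets the frame together with the SEQ and MSA stamped into it (the same fields just added to the meter-reading record). A self-contained sketch of the returned shape, with stand-in types where the real ones come from the new file added in the following hunk:

```csharp
using System;

// Stand-ins so the sketch compiles on its own; the real Seq and TelemetryPacketResponse
// are defined in JiShe.CollectBus.Common.BuildSendDatas (see the next hunk).
public class Seq { public int Value { get; set; } }

public class TelemetryPacketResponse
{
    public required Seq Seq { get; set; }
    public int MSA { get; set; }
    public required byte[] Data { get; set; }
}

class Demo
{
    static void Main()
    {
        // What a builder now hands back: frame bytes plus the SEQ/MSA used to build them.
        var packet = new TelemetryPacketResponse
        {
            Seq = new Seq { Value = 3 },     // hypothetical frame sequence number
            MSA = 1,                         // hypothetical master-station address
            Data = new byte[] { 0x68, 0x16 } // placeholder frame bytes
        };

        Console.WriteLine($"SEQ={packet.Seq.Value} MSA={packet.MSA} len={packet.Data.Length}");
    }
}
```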
@@ -0,0 +1,30 @@
+using JiShe.CollectBus.Common.Models;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace JiShe.CollectBus.Common.BuildSendDatas
+{
+    /// <summary>
+    /// 报文构建返回结果
+    /// </summary>
+    public class TelemetryPacketResponse
+    {
+        /// <summary>
+        /// 帧序列域SEQ
+        /// </summary>
+        public required Seq Seq { get; set; }
+
+        /// <summary>
+        /// 地址域A3的主站地址MSA
+        /// </summary>
+        public int MSA { get; set; }
+
+        /// <summary>
+        /// 报文体
+        /// </summary>
+        public required byte[] Data { get; set; }
+    }
+}
@@ -11,25 +11,31 @@ namespace JiShe.CollectBus.Common.Consts
     /// </summary>
     public class CommonConst
     {
-        /// <summary>
-        /// 服务器标识
-        /// </summary>
-        public const string ServerTagName = "ServerTagName";
-
         /// <summary>
         /// Kafka
         /// </summary>
         public const string Kafka = "Kafka";

+        /// <summary>
+        /// 服务器标识
+        /// </summary>
+        public const string ServerTagName = $"{Kafka}:ServerTagName";
+
         /// <summary>
         /// Kafka副本数量
         /// </summary>
-        public const string KafkaReplicationFactor = "KafkaReplicationFactor";
+        public const string KafkaReplicationFactor = $"{Kafka}:KafkaReplicationFactor";

         /// <summary>
         /// Kafka主题分区数量
         /// </summary>
-        public const string NumPartitions = "NumPartitions";
+        public const string NumPartitions = $"{Kafka}:NumPartitions";

+        /// <summary>
+        /// 首次采集时间
+        /// </summary>
+        public const string FirstCollectionTime = "FirstCollectionTime";
+
     }
 }
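CommonConst now builds its keys with constant interpolation (legal for `const string` since C# 10), so ServerTagName, KafkaReplicationFactor and NumPartitions all resolve under a `Kafka:` configuration path. A small sketch of how such a key reads from configuration; the package reference and value are placeholders:

```csharp
using System;
using System.Collections.Generic;
using Microsoft.Extensions.Configuration; // NuGet: Microsoft.Extensions.Configuration (assumed)

class ConfigKeyDemo
{
    const string Kafka = "Kafka";
    const string NumPartitions = $"{Kafka}:NumPartitions"; // same pattern as CommonConst

    static void Main()
    {
        var config = new ConfigurationBuilder()
            .AddInMemoryCollection(new Dictionary<string, string?>
            {
                ["Kafka:NumPartitions"] = "6" // placeholder value
            })
            .Build();

        Console.WriteLine(config[NumPartitions]); // prints 6
    }
}
```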
@@ -200,5 +200,38 @@ namespace JiShe.CollectBus.Common.Extensions
         {
             return DateTimeOffset.FromUnixTimeMilliseconds(millis).DateTime;
         }
+
+        /// <summary>
+        /// 采集时间节点计算
+        /// </summary>
+        /// <param name="referenceTime">待采集时间</param>
+        /// <param name="interval"></param>
+        /// <returns></returns>
+        public static DateTime CalculateNextCollectionTime(this DateTime referenceTime, int interval)
+        {
+            // 计算精确到分钟的基准时间
+            var baseTime = new DateTime(
+                referenceTime.Year,
+                referenceTime.Month,
+                referenceTime.Day,
+                referenceTime.Hour,
+                referenceTime.Minute,
+                0);
+
+            // 计算总分钟数和下一个间隔点
+            int totalMinutes = baseTime.Hour * 60 + baseTime.Minute;
+            int nextTotalMinutes = ((totalMinutes / interval) + 1) * interval;
+
+            // 处理跨天情况
+            int daysToAdd = nextTotalMinutes / (24 * 60);
+            int remainingMinutes = nextTotalMinutes % (24 * 60);
+            int hours = remainingMinutes / 60;
+            int minutes = remainingMinutes % 60;
+
+            return baseTime.Date
+                .AddDays(daysToAdd)
+                .AddHours(hours)
+                .AddMinutes(minutes);
+        }
     }
 }
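A quick worked example of the CalculateNextCollectionTime extension added above, assuming the extension method is in scope: it drops seconds, rounds up to the next interval boundary, and rolls into the next day when the boundary would pass midnight.

```csharp
using System;
// assumes: using JiShe.CollectBus.Common.Extensions;

class NextCollectionDemo
{
    static void Main()
    {
        // 10:07:45 with a 15-minute interval -> 10:15:00 (seconds are dropped before rounding up)
        var a = new DateTime(2024, 5, 20, 10, 7, 45).CalculateNextCollectionTime(15);
        Console.WriteLine(a); // 2024-05-20 10:15:00

        // 23:59 with a 5-minute interval crosses midnight -> 00:00:00 of the next day
        var b = new DateTime(2024, 5, 20, 23, 59, 0).CalculateNextCollectionTime(5);
        Console.WriteLine(b); // 2024-05-21 00:00:00

        // An exact boundary moves to the *next* boundary: 10:15:00 -> 10:30:00
        var c = new DateTime(2024, 5, 20, 10, 15, 0).CalculateNextCollectionTime(15);
        Console.WriteLine(c); // 2024-05-20 10:30:00
    }
}
```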
@@ -767,10 +767,13 @@ namespace JiShe.CollectBus.Common.Helpers
         /// <param name="afn"></param>
         /// <param name="fn"></param>
         /// <param name="pn"></param>
+        /// <param name="msa"></param>
         /// <returns></returns>
-        public static string GetTaskMark(int afn,int fn,int pn)
+        public static decimal GetTaskMark(int afn, int fn, int pn, int msa)
         {
-            return $"{afn.ToString().PadLeft(2,'0')}{fn}{pn}";
+            var makstr = $"{afn.ToString().PadLeft(2, '0')}{fn.ToString().PadLeft(2, '0')}{pn.ToString().PadLeft(2, '0')}";
+
+            return Convert.ToInt32(makstr) << 32 | msa;
         }
     }
 }
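GetTaskMark now folds the MSA into the mark and returns a decimal. One caveat when reusing this pattern: C# masks an `int` shift count to 0–31, so `Convert.ToInt32(makstr) << 32` leaves the value unchanged and the expression effectively reduces to `mark | msa`; putting the AFN/FN/PN digits into the high 32 bits requires widening to `long` first. A hedged sketch of that 64-bit packing (values are illustrative, not taken from the code above):

```csharp
using System;

class TaskMarkDemo
{
    static void Main()
    {
        int afn = 0x0C, fn = 1, pn = 2, msa = 5;

        var makstr = $"{afn.ToString().PadLeft(2, '0')}{fn.ToString().PadLeft(2, '0')}{pn.ToString().PadLeft(2, '0')}";
        // makstr == "120102"

        // Widen to long before shifting, otherwise the 32-bit shift count is masked to 0.
        long packed = ((long)Convert.ToInt32(makstr) << 32) | (uint)msa;

        Console.WriteLine(packed >> 32);         // 120102 (AFN/FN/PN digits)
        Console.WriteLine(packed & 0xFFFFFFFF);  // 5 (MSA)
    }
}
```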
@@ -30,5 +30,15 @@ namespace JiShe.CollectBus.Common.Models
         /// ZSet排序索引分数值,具体值可以根据不同业务场景进行定义,例如时间戳
         /// </summary>
         public virtual long ScoreValue=> ((long)FocusId << 32) | (uint)MeterId;
+
+        /// <summary>
+        /// 是否已处理
+        /// </summary>
+        public virtual bool IsHandle { get; set; } = false;
+
+        /// <summary>
+        /// 集中器地址
+        /// </summary>
+        public string FocusAddress { get; set;}
     }
 }
@@ -43,7 +43,7 @@
     "Configuration": "192.168.1.9:6380,password=1q2w3e!@#,syncTimeout=30000,abortConnect=false,connectTimeout=30000,allowAdmin=true",
     "MaxPoolSize": "50",
     "DefaultDB": "14",
-    "HangfireDB": "15"
+    "HangfireDB": "13"
   },
   "Jwt": {
     "Audience": "JiShe.CollectBus",