Compare commits
No commits in common. "a7602cde86ac091a432c6916c5d9caa71d6cb2e0" and "de1654327fb1c1a62406f5350a8237eef8015aa5" have entirely different histories.
a7602cde86
...
de1654327f
4
.gitignore
vendored
4
.gitignore
vendored
@ -400,6 +400,4 @@ FodyWeavers.xsd
|
||||
|
||||
# ABP Studio
|
||||
**/.abpstudio/
|
||||
/web/JiShe.CollectBus.Host/Plugins/*.dll
|
||||
/web/JiShe.CollectBus.Host/Plugins/JiShe.CollectBus.Protocol.dll
|
||||
/web/JiShe.CollectBus.Host/Plugins/JiShe.CollectBus.Protocol.Test.dll
|
||||
/src/JiShe.CollectBus.Host/Plugins/*.dll
|
||||
|
||||
@ -3,49 +3,31 @@ Microsoft Visual Studio Solution File, Format Version 12.00
|
||||
# Visual Studio Version 17
|
||||
VisualStudioVersion = 17.9.34728.123
|
||||
MinimumVisualStudioVersion = 10.0.40219.1
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Domain.Shared", "shared\JiShe.CollectBus.Domain.Shared\JiShe.CollectBus.Domain.Shared.csproj", "{D64C1577-4929-4B60-939E-96DE1534891A}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Domain.Shared", "src\JiShe.CollectBus.Domain.Shared\JiShe.CollectBus.Domain.Shared.csproj", "{D64C1577-4929-4B60-939E-96DE1534891A}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Domain", "services\JiShe.CollectBus.Domain\JiShe.CollectBus.Domain.csproj", "{F2840BC7-0188-4606-9126-DADD0F5ABF7A}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Domain", "src\JiShe.CollectBus.Domain\JiShe.CollectBus.Domain.csproj", "{F2840BC7-0188-4606-9126-DADD0F5ABF7A}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Application.Contracts", "services\JiShe.CollectBus.Application.Contracts\JiShe.CollectBus.Application.Contracts.csproj", "{BD65D04F-08D5-40C1-8C24-77CA0BACB877}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Application.Contracts", "src\JiShe.CollectBus.Application.Contracts\JiShe.CollectBus.Application.Contracts.csproj", "{BD65D04F-08D5-40C1-8C24-77CA0BACB877}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Application", "services\JiShe.CollectBus.Application\JiShe.CollectBus.Application.csproj", "{78040F9E-3501-4A40-82DF-00A597710F35}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Application", "src\JiShe.CollectBus.Application\JiShe.CollectBus.Application.csproj", "{78040F9E-3501-4A40-82DF-00A597710F35}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.MongoDB", "modules\JiShe.CollectBus.MongoDB\JiShe.CollectBus.MongoDB.csproj", "{F1C58097-4C08-4D88-8976-6B3389391481}"
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{649A3FFA-182F-4E56-9717-E6A9A2BEC545}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.HttpApi", "web\JiShe.CollectBus.HttpApi\JiShe.CollectBus.HttpApi.csproj", "{077AA5F8-8B61-420C-A6B5-0150A66FDB34}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.MongoDB", "src\JiShe.CollectBus.MongoDB\JiShe.CollectBus.MongoDB.csproj", "{F1C58097-4C08-4D88-8976-6B3389391481}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Host", "web\JiShe.CollectBus.Host\JiShe.CollectBus.Host.csproj", "{35829A15-4127-4F69-8BDE-9405DEAACA9A}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.HttpApi", "src\JiShe.CollectBus.HttpApi\JiShe.CollectBus.HttpApi.csproj", "{077AA5F8-8B61-420C-A6B5-0150A66FDB34}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Common", "shared\JiShe.CollectBus.Common\JiShe.CollectBus.Common.csproj", "{AD2F1928-4411-4511-B564-5FB996EC08B9}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Host", "src\JiShe.CollectBus.Host\JiShe.CollectBus.Host.csproj", "{35829A15-4127-4F69-8BDE-9405DEAACA9A}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Protocol", "protocols\JiShe.CollectBus.Protocol\JiShe.CollectBus.Protocol.csproj", "{C62EFF95-5C32-435F-BD78-6977E828F894}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Common", "src\JiShe.CollectBus.Common\JiShe.CollectBus.Common.csproj", "{AD2F1928-4411-4511-B564-5FB996EC08B9}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Protocol.Contracts", "protocols\JiShe.CollectBus.Protocol.Contracts\JiShe.CollectBus.Protocol.Contracts.csproj", "{38C1808B-009A-418B-B17B-AB3626341B5D}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Protocol", "src\JiShe.CollectBus.Protocol\JiShe.CollectBus.Protocol.csproj", "{C62EFF95-5C32-435F-BD78-6977E828F894}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.DbMigrator", "services\JiShe.CollectBus.DbMigrator\JiShe.CollectBus.DbMigrator.csproj", "{8BA01C3D-297D-42DF-BD63-EF07202A0A67}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Protocol.Contracts", "src\JiShe.CollectBus.Protocol.Contracts\JiShe.CollectBus.Protocol.Contracts.csproj", "{38C1808B-009A-418B-B17B-AB3626341B5D}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.FreeSql", "modules\JiShe.CollectBus.FreeSql\JiShe.CollectBus.FreeSql.csproj", "{FE0457D9-4038-4A17-8808-DCAD06CFC0A0}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.DbMigrator", "src\JiShe.CollectBus.DbMigrator\JiShe.CollectBus.DbMigrator.csproj", "{8BA01C3D-297D-42DF-BD63-EF07202A0A67}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.FreeRedis", "modules\JiShe.CollectBus.FreeRedis\JiShe.CollectBus.FreeRedis.csproj", "{C06C4082-638F-2996-5FED-7784475766C1}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Kafka", "modules\JiShe.CollectBus.Kafka\JiShe.CollectBus.Kafka.csproj", "{F0288175-F0EC-48BD-945F-CF1512850943}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.IoTDB", "modules\JiShe.CollectBus.IoTDB\JiShe.CollectBus.IoTDB.csproj", "{A3F3C092-0A25-450B-BF6A-5983163CBEF5}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Protocol.Test", "protocols\JiShe.CollectBus.Protocol.Test\JiShe.CollectBus.Protocol.Test.csproj", "{A377955E-7EA1-6F29-8CF7-774569E93925}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.Cassandra", "modules\JiShe.CollectBus.Cassandra\JiShe.CollectBus.Cassandra.csproj", "{443B4549-0AC0-4493-8F3E-49C83225DD76}"
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "1.Web", "1.Web", "{A02F7D8A-04DC-44D6-94D4-3F65712D6B94}"
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "4.Modules", "4.Modules", "{2E0FE301-34C3-4561-9CAE-C7A9E65AEE59}"
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "3.Protocols", "3.Protocols", "{3C3F9DB2-EC97-4464-B49F-BF1A0C2B46DC}"
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "2.Services", "2.Services", "{BA4DA3E7-9AD0-47AD-A0E6-A0BB6700DA23}"
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "5.Shared", "5.Shared", "{EBF7C01F-9B4F-48E6-8418-2CBFDA51EB0B}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JiShe.CollectBus.FreeSql", "src\JiShe.CollectBus.FreeSql\JiShe.CollectBus.FreeSql.csproj", "{FE0457D9-4038-4A17-8808-DCAD06CFC0A0}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
@ -101,48 +83,23 @@ Global
|
||||
{FE0457D9-4038-4A17-8808-DCAD06CFC0A0}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{FE0457D9-4038-4A17-8808-DCAD06CFC0A0}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{FE0457D9-4038-4A17-8808-DCAD06CFC0A0}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{C06C4082-638F-2996-5FED-7784475766C1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{C06C4082-638F-2996-5FED-7784475766C1}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{C06C4082-638F-2996-5FED-7784475766C1}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{C06C4082-638F-2996-5FED-7784475766C1}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{F0288175-F0EC-48BD-945F-CF1512850943}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{F0288175-F0EC-48BD-945F-CF1512850943}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{F0288175-F0EC-48BD-945F-CF1512850943}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{F0288175-F0EC-48BD-945F-CF1512850943}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{A3F3C092-0A25-450B-BF6A-5983163CBEF5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{A3F3C092-0A25-450B-BF6A-5983163CBEF5}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{A3F3C092-0A25-450B-BF6A-5983163CBEF5}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{A3F3C092-0A25-450B-BF6A-5983163CBEF5}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{A377955E-7EA1-6F29-8CF7-774569E93925}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{A377955E-7EA1-6F29-8CF7-774569E93925}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{A377955E-7EA1-6F29-8CF7-774569E93925}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{A377955E-7EA1-6F29-8CF7-774569E93925}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{443B4549-0AC0-4493-8F3E-49C83225DD76}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{443B4549-0AC0-4493-8F3E-49C83225DD76}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{443B4549-0AC0-4493-8F3E-49C83225DD76}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{443B4549-0AC0-4493-8F3E-49C83225DD76}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
EndGlobalSection
|
||||
GlobalSection(NestedProjects) = preSolution
|
||||
{D64C1577-4929-4B60-939E-96DE1534891A} = {EBF7C01F-9B4F-48E6-8418-2CBFDA51EB0B}
|
||||
{F2840BC7-0188-4606-9126-DADD0F5ABF7A} = {BA4DA3E7-9AD0-47AD-A0E6-A0BB6700DA23}
|
||||
{BD65D04F-08D5-40C1-8C24-77CA0BACB877} = {BA4DA3E7-9AD0-47AD-A0E6-A0BB6700DA23}
|
||||
{78040F9E-3501-4A40-82DF-00A597710F35} = {BA4DA3E7-9AD0-47AD-A0E6-A0BB6700DA23}
|
||||
{F1C58097-4C08-4D88-8976-6B3389391481} = {2E0FE301-34C3-4561-9CAE-C7A9E65AEE59}
|
||||
{077AA5F8-8B61-420C-A6B5-0150A66FDB34} = {A02F7D8A-04DC-44D6-94D4-3F65712D6B94}
|
||||
{35829A15-4127-4F69-8BDE-9405DEAACA9A} = {A02F7D8A-04DC-44D6-94D4-3F65712D6B94}
|
||||
{AD2F1928-4411-4511-B564-5FB996EC08B9} = {EBF7C01F-9B4F-48E6-8418-2CBFDA51EB0B}
|
||||
{C62EFF95-5C32-435F-BD78-6977E828F894} = {3C3F9DB2-EC97-4464-B49F-BF1A0C2B46DC}
|
||||
{38C1808B-009A-418B-B17B-AB3626341B5D} = {3C3F9DB2-EC97-4464-B49F-BF1A0C2B46DC}
|
||||
{8BA01C3D-297D-42DF-BD63-EF07202A0A67} = {BA4DA3E7-9AD0-47AD-A0E6-A0BB6700DA23}
|
||||
{FE0457D9-4038-4A17-8808-DCAD06CFC0A0} = {2E0FE301-34C3-4561-9CAE-C7A9E65AEE59}
|
||||
{C06C4082-638F-2996-5FED-7784475766C1} = {2E0FE301-34C3-4561-9CAE-C7A9E65AEE59}
|
||||
{F0288175-F0EC-48BD-945F-CF1512850943} = {2E0FE301-34C3-4561-9CAE-C7A9E65AEE59}
|
||||
{A3F3C092-0A25-450B-BF6A-5983163CBEF5} = {2E0FE301-34C3-4561-9CAE-C7A9E65AEE59}
|
||||
{A377955E-7EA1-6F29-8CF7-774569E93925} = {3C3F9DB2-EC97-4464-B49F-BF1A0C2B46DC}
|
||||
{443B4549-0AC0-4493-8F3E-49C83225DD76} = {2E0FE301-34C3-4561-9CAE-C7A9E65AEE59}
|
||||
{D64C1577-4929-4B60-939E-96DE1534891A} = {649A3FFA-182F-4E56-9717-E6A9A2BEC545}
|
||||
{F2840BC7-0188-4606-9126-DADD0F5ABF7A} = {649A3FFA-182F-4E56-9717-E6A9A2BEC545}
|
||||
{BD65D04F-08D5-40C1-8C24-77CA0BACB877} = {649A3FFA-182F-4E56-9717-E6A9A2BEC545}
|
||||
{78040F9E-3501-4A40-82DF-00A597710F35} = {649A3FFA-182F-4E56-9717-E6A9A2BEC545}
|
||||
{F1C58097-4C08-4D88-8976-6B3389391481} = {649A3FFA-182F-4E56-9717-E6A9A2BEC545}
|
||||
{077AA5F8-8B61-420C-A6B5-0150A66FDB34} = {649A3FFA-182F-4E56-9717-E6A9A2BEC545}
|
||||
{35829A15-4127-4F69-8BDE-9405DEAACA9A} = {649A3FFA-182F-4E56-9717-E6A9A2BEC545}
|
||||
{AD2F1928-4411-4511-B564-5FB996EC08B9} = {649A3FFA-182F-4E56-9717-E6A9A2BEC545}
|
||||
{C62EFF95-5C32-435F-BD78-6977E828F894} = {649A3FFA-182F-4E56-9717-E6A9A2BEC545}
|
||||
{38C1808B-009A-418B-B17B-AB3626341B5D} = {649A3FFA-182F-4E56-9717-E6A9A2BEC545}
|
||||
{8BA01C3D-297D-42DF-BD63-EF07202A0A67} = {649A3FFA-182F-4E56-9717-E6A9A2BEC545}
|
||||
{FE0457D9-4038-4A17-8808-DCAD06CFC0A0} = {649A3FFA-182F-4E56-9717-E6A9A2BEC545}
|
||||
EndGlobalSection
|
||||
GlobalSection(ExtensibilityGlobals) = postSolution
|
||||
SolutionGuid = {4324B3B4-B60B-4E3C-91D8-59576B4E26DD}
|
||||
|
||||
@ -1,64 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Cassandra
|
||||
{
|
||||
public class CassandraConfig
|
||||
{
|
||||
public Node[] Nodes { get; set; }
|
||||
public string Username { get; set; }
|
||||
public string Password { get; set; }
|
||||
public string Keyspace { get; set; }
|
||||
public string ConsistencyLevel { get; set; }
|
||||
public Pooling PoolingOptions { get; set; }
|
||||
public Socket SocketOptions { get; set; }
|
||||
public Query QueryOptions { get; set; }
|
||||
|
||||
public ReplicationStrategy ReplicationStrategy { get; set; }
|
||||
}
|
||||
|
||||
public class Pooling
|
||||
{
|
||||
public int CoreConnectionsPerHost { get; set; }
|
||||
public int MaxConnectionsPerHost { get; set; }
|
||||
public int MaxRequestsPerConnection { get; set; }
|
||||
}
|
||||
|
||||
public class Socket
|
||||
{
|
||||
public int ConnectTimeoutMillis { get; set; }
|
||||
public int ReadTimeoutMillis { get; set; }
|
||||
}
|
||||
|
||||
public class Query
|
||||
{
|
||||
public string ConsistencyLevel { get; set; }
|
||||
public string SerialConsistencyLevel { get; set; }
|
||||
public bool DefaultIdempotence { get; set; }
|
||||
}
|
||||
|
||||
public class ReplicationStrategy
|
||||
{
|
||||
public string Class { get; set; }
|
||||
public DataCenter[] DataCenters { get; set; }
|
||||
}
|
||||
|
||||
public class DataCenter
|
||||
{
|
||||
public string Name { get; set; }
|
||||
public int ReplicationFactor { get; set; }
|
||||
public string Strategy { get; set; }
|
||||
}
|
||||
|
||||
public class Node
|
||||
{
|
||||
public string Host { get; set; }
|
||||
public int Port { get; set; }
|
||||
public string DataCenter { get; set; }
|
||||
public string Rack { get; set; }
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,154 +0,0 @@
|
||||
using System;
|
||||
using System.Linq;
|
||||
using System.Reflection;
|
||||
using System.Text;
|
||||
using Cassandra;
|
||||
using Cassandra.Mapping;
|
||||
using Cassandra.Data.Linq;
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.Diagnostics;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Volo.Abp.DependencyInjection;
|
||||
using JiShe.CollectBus.Common.Attributes;
|
||||
|
||||
namespace JiShe.CollectBus.Cassandra
|
||||
{
|
||||
public class CassandraProvider : IDisposable, ICassandraProvider, ISingletonDependency
|
||||
{
|
||||
private readonly ILogger<CassandraProvider> _logger;
|
||||
|
||||
public Cluster Instance { get; set; }
|
||||
|
||||
public ISession Session { get; set; }
|
||||
|
||||
public CassandraConfig CassandraConfig { get; set; }
|
||||
/// <summary>
|
||||
///
|
||||
/// </summary>
|
||||
/// <param name="options"></param>
|
||||
/// <param name="logger"></param>
|
||||
public CassandraProvider(
|
||||
IOptions<CassandraConfig> options,
|
||||
ILogger<CassandraProvider> logger)
|
||||
{
|
||||
CassandraConfig = options.Value;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
public Task InitClusterAndSessionAsync()
|
||||
{
|
||||
InitClusterAndSession();
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public void InitClusterAndSession()
|
||||
{
|
||||
GetCluster((keyspace) =>
|
||||
{
|
||||
GetSession(keyspace);
|
||||
});
|
||||
}
|
||||
|
||||
public Cluster GetCluster(Action<string?>? callback=null)
|
||||
{
|
||||
var clusterBuilder = Cluster.Builder();
|
||||
|
||||
// 添加多个节点
|
||||
foreach (var node in CassandraConfig.Nodes)
|
||||
{
|
||||
clusterBuilder.AddContactPoint(node.Host)
|
||||
.WithPort(node.Port);
|
||||
}
|
||||
|
||||
clusterBuilder.WithCredentials(CassandraConfig.Username, CassandraConfig.Password);
|
||||
|
||||
// 优化连接池配置
|
||||
var poolingOptions = new PoolingOptions()
|
||||
.SetCoreConnectionsPerHost(HostDistance.Local, CassandraConfig.PoolingOptions.CoreConnectionsPerHost)
|
||||
.SetMaxConnectionsPerHost(HostDistance.Local, CassandraConfig.PoolingOptions.MaxConnectionsPerHost)
|
||||
.SetMaxRequestsPerConnection(CassandraConfig.PoolingOptions.MaxRequestsPerConnection)
|
||||
.SetHeartBeatInterval(30000); // 30秒心跳
|
||||
|
||||
clusterBuilder.WithPoolingOptions(poolingOptions);
|
||||
|
||||
// 优化Socket配置
|
||||
var socketOptions = new SocketOptions()
|
||||
.SetConnectTimeoutMillis(CassandraConfig.SocketOptions.ConnectTimeoutMillis)
|
||||
.SetReadTimeoutMillis(CassandraConfig.SocketOptions.ReadTimeoutMillis)
|
||||
.SetTcpNoDelay(true) // 启用Nagle算法
|
||||
.SetKeepAlive(true) // 启用TCP保活
|
||||
.SetReceiveBufferSize(32768) // 32KB接收缓冲区
|
||||
.SetSendBufferSize(32768); // 32KB发送缓冲区
|
||||
|
||||
clusterBuilder.WithSocketOptions(socketOptions);
|
||||
|
||||
// 优化查询选项
|
||||
var queryOptions = new QueryOptions()
|
||||
.SetConsistencyLevel((ConsistencyLevel)Enum.Parse(typeof(ConsistencyLevel), CassandraConfig.QueryOptions.ConsistencyLevel))
|
||||
.SetSerialConsistencyLevel((ConsistencyLevel)Enum.Parse(typeof(ConsistencyLevel), CassandraConfig.QueryOptions.SerialConsistencyLevel))
|
||||
.SetDefaultIdempotence(CassandraConfig.QueryOptions.DefaultIdempotence)
|
||||
.SetPageSize(5000); // 增加页面大小
|
||||
|
||||
clusterBuilder.WithQueryOptions(queryOptions);
|
||||
|
||||
// 启用压缩
|
||||
clusterBuilder.WithCompression(CompressionType.LZ4);
|
||||
|
||||
// 配置重连策略
|
||||
clusterBuilder.WithReconnectionPolicy(new ExponentialReconnectionPolicy(1000, 10 * 60 * 1000));
|
||||
Instance = clusterBuilder.Build();
|
||||
callback?.Invoke(null);
|
||||
return Instance;
|
||||
}
|
||||
|
||||
public ISession GetSession(string? keyspace = null)
|
||||
{
|
||||
if (string.IsNullOrEmpty(keyspace))
|
||||
{
|
||||
keyspace = CassandraConfig.Keyspace;
|
||||
}
|
||||
Session = Instance.Connect();
|
||||
var replication = GetReplicationStrategy();
|
||||
Session.CreateKeyspaceIfNotExists(keyspace, replication);
|
||||
Session.ChangeKeyspace(keyspace);
|
||||
return Session;
|
||||
}
|
||||
|
||||
private Dictionary<string, string> GetReplicationStrategy()
|
||||
{
|
||||
var strategy = CassandraConfig.ReplicationStrategy.Class;
|
||||
var dataCenters = CassandraConfig.ReplicationStrategy.DataCenters;
|
||||
|
||||
switch (strategy)
|
||||
{
|
||||
case "NetworkTopologyStrategy":
|
||||
var networkDic = new Dictionary<string, string> { { "class", "NetworkTopologyStrategy" } };
|
||||
foreach (var dataCenter in dataCenters)
|
||||
{
|
||||
networkDic.Add(dataCenter.Name, dataCenter.ReplicationFactor.ToString());
|
||||
}
|
||||
return networkDic;
|
||||
case "SimpleStrategy":
|
||||
var dic = new Dictionary<string, string> { { "class", "SimpleStrategy" } };
|
||||
if (dataCenters.Length >= 1)
|
||||
{
|
||||
dic.Add("replication_factor", dataCenters[0].ReplicationFactor.ToString());
|
||||
}
|
||||
else
|
||||
{
|
||||
_logger.LogError("SimpleStrategy 不支持多个数据中心!");
|
||||
}
|
||||
return dic;
|
||||
default:
|
||||
throw new ArgumentNullException($"Strategy", "Strategy配置错误!");
|
||||
}
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
Instance.Dispose();
|
||||
Session.Dispose();
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,156 +0,0 @@
|
||||
using System.Collections.Concurrent;
|
||||
using Cassandra;
|
||||
using Cassandra.Mapping;
|
||||
using Microsoft.Extensions.Caching.Memory;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace JiShe.CollectBus.Cassandra
|
||||
{
|
||||
public class CassandraQueryOptimizer
|
||||
{
|
||||
private readonly ISession _session;
|
||||
private readonly ILogger<CassandraQueryOptimizer> _logger;
|
||||
private readonly IMemoryCache _cache;
|
||||
private readonly ConcurrentDictionary<string, PreparedStatement> _preparedStatements;
|
||||
private readonly int _batchSize;
|
||||
private readonly TimeSpan _cacheExpiration;
|
||||
|
||||
public CassandraQueryOptimizer(
|
||||
ISession session,
|
||||
ILogger<CassandraQueryOptimizer> logger,
|
||||
IMemoryCache cache,
|
||||
int batchSize = 100,
|
||||
TimeSpan? cacheExpiration = null)
|
||||
{
|
||||
_session = session;
|
||||
_logger = logger;
|
||||
_cache = cache;
|
||||
_preparedStatements = new ConcurrentDictionary<string, PreparedStatement>();
|
||||
_batchSize = batchSize;
|
||||
_cacheExpiration = cacheExpiration ?? TimeSpan.FromMinutes(5);
|
||||
}
|
||||
|
||||
public async Task<PreparedStatement> GetOrPrepareStatementAsync(string cql)
|
||||
{
|
||||
return _preparedStatements.GetOrAdd(cql, key =>
|
||||
{
|
||||
try
|
||||
{
|
||||
var statement = _session.Prepare(key);
|
||||
_logger.LogDebug($"Prepared statement for CQL: {key}");
|
||||
return statement;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, $"Failed to prepare statement for CQL: {key}");
|
||||
throw;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public async Task ExecuteBatchAsync(IEnumerable<BoundStatement> statements)
|
||||
{
|
||||
var batch = new BatchStatement();
|
||||
var count = 0;
|
||||
|
||||
foreach (var statement in statements)
|
||||
{
|
||||
batch.Add(statement);
|
||||
count++;
|
||||
|
||||
if (count >= _batchSize)
|
||||
{
|
||||
await ExecuteBatchAsync(batch);
|
||||
batch = new BatchStatement();
|
||||
count = 0;
|
||||
}
|
||||
}
|
||||
|
||||
if (count > 0)
|
||||
{
|
||||
await ExecuteBatchAsync(batch);
|
||||
}
|
||||
}
|
||||
|
||||
private async Task ExecuteBatchAsync(BatchStatement batch)
|
||||
{
|
||||
try
|
||||
{
|
||||
await _session.ExecuteAsync(batch);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to execute batch statement");
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
public async Task<T> GetOrSetFromCacheAsync<T>(string cacheKey, Func<Task<T>> getData)
|
||||
{
|
||||
if (_cache.TryGetValue(cacheKey, out T cachedValue))
|
||||
{
|
||||
_logger.LogDebug($"Cache hit for key: {cacheKey}");
|
||||
return cachedValue;
|
||||
}
|
||||
|
||||
var data = await getData();
|
||||
_cache.Set(cacheKey, data, _cacheExpiration);
|
||||
_logger.LogDebug($"Cache miss for key: {cacheKey}, data cached");
|
||||
return data;
|
||||
}
|
||||
|
||||
public async Task<IEnumerable<T>> ExecutePagedQueryAsync<T>(
|
||||
string cql,
|
||||
object[] parameters,
|
||||
int pageSize = 100,
|
||||
string pagingState = null) where T : class
|
||||
{
|
||||
var statement = await GetOrPrepareStatementAsync(cql);
|
||||
var boundStatement = statement.Bind(parameters);
|
||||
|
||||
if (!string.IsNullOrEmpty(pagingState))
|
||||
{
|
||||
boundStatement.SetPagingState(Convert.FromBase64String(pagingState));
|
||||
}
|
||||
|
||||
boundStatement.SetPageSize(pageSize);
|
||||
|
||||
try
|
||||
{
|
||||
var result = await _session.ExecuteAsync(boundStatement);
|
||||
//TODO: RETURN OBJECT
|
||||
throw new NotImplementedException();
|
||||
//result.GetRows()
|
||||
//return result.Select(row => row);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, $"Failed to execute paged query: {cql}");
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
public async Task BulkInsertAsync<T>(IEnumerable<T> items, string tableName)
|
||||
{
|
||||
var mapper = new Mapper(_session);
|
||||
var batch = new List<BoundStatement>();
|
||||
var cql = $"INSERT INTO {tableName} ({{0}}) VALUES ({{1}})";
|
||||
|
||||
foreach (var chunk in items.Chunk(_batchSize))
|
||||
{
|
||||
var statements = chunk.Select(item =>
|
||||
{
|
||||
var props = typeof(T).GetProperties();
|
||||
var columns = string.Join(", ", props.Select(p => p.Name));
|
||||
var values = string.Join(", ", props.Select(p => "?"));
|
||||
var statement = _session.Prepare(string.Format(cql, columns, values));
|
||||
return statement.Bind(props.Select(p => p.GetValue(item)).ToArray());
|
||||
});
|
||||
|
||||
batch.AddRange(statements);
|
||||
}
|
||||
|
||||
await ExecuteBatchAsync(batch);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,75 +0,0 @@
|
||||
using Cassandra;
|
||||
using Cassandra.Data.Linq;
|
||||
using Cassandra.Mapping;
|
||||
using JiShe.CollectBus.Cassandra.Extensions;
|
||||
using JiShe.CollectBus.Common.Attributes;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using System.Reflection;
|
||||
using Thrift.Protocol.Entities;
|
||||
using Volo.Abp.Domain.Entities;
|
||||
using Volo.Abp.Domain.Repositories;
|
||||
|
||||
namespace JiShe.CollectBus.Cassandra
|
||||
{
|
||||
public class CassandraRepository<TEntity, TKey>
|
||||
: ICassandraRepository<TEntity, TKey>
|
||||
where TEntity : class
|
||||
{
|
||||
private readonly ICassandraProvider _cassandraProvider;
|
||||
public CassandraRepository(ICassandraProvider cassandraProvider, MappingConfiguration mappingConfig)
|
||||
{
|
||||
_cassandraProvider = cassandraProvider;
|
||||
Mapper = new Mapper(cassandraProvider.Session, mappingConfig);
|
||||
cassandraProvider.Session.CreateTable<TEntity>(cassandraProvider.CassandraConfig.Keyspace);
|
||||
}
|
||||
|
||||
public readonly IMapper Mapper;
|
||||
|
||||
public virtual async Task<TEntity> GetAsync(TKey id)
|
||||
{
|
||||
return await Mapper.SingleOrDefaultAsync<TEntity>("WHERE id = ?", id);
|
||||
}
|
||||
|
||||
public virtual async Task<List<TEntity>> GetListAsync()
|
||||
{
|
||||
return (await Mapper.FetchAsync<TEntity>()).ToList();
|
||||
}
|
||||
|
||||
public virtual async Task<TEntity> InsertAsync(TEntity entity)
|
||||
{
|
||||
await Mapper.InsertAsync(entity);
|
||||
return entity;
|
||||
}
|
||||
|
||||
public virtual async Task<TEntity> UpdateAsync(TEntity entity)
|
||||
{
|
||||
await Mapper.UpdateAsync(entity);
|
||||
return entity;
|
||||
}
|
||||
|
||||
public virtual async Task DeleteAsync(TEntity entity)
|
||||
{
|
||||
await Mapper.DeleteAsync(entity);
|
||||
}
|
||||
|
||||
public virtual async Task DeleteAsync(TKey id)
|
||||
{
|
||||
await Mapper.DeleteAsync<TEntity>("WHERE id = ?", id);
|
||||
}
|
||||
|
||||
public virtual async Task<List<TEntity>> GetPagedListAsync(
|
||||
int skipCount,
|
||||
int maxResultCount,
|
||||
string sorting)
|
||||
{
|
||||
var cql = $"SELECT * FROM {typeof(TEntity).Name.ToLower()}";
|
||||
if (!string.IsNullOrWhiteSpace(sorting))
|
||||
{
|
||||
cql += $" ORDER BY {sorting}";
|
||||
}
|
||||
cql += $" LIMIT {maxResultCount} OFFSET {skipCount}";
|
||||
|
||||
return (await Mapper.FetchAsync<TEntity>(cql)).ToList();
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,28 +0,0 @@
|
||||
using Cassandra;
|
||||
using Cassandra.Mapping;
|
||||
using JiShe.CollectBus.Cassandra.Mappers;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Volo.Abp;
|
||||
using Volo.Abp.Autofac;
|
||||
using Volo.Abp.Modularity;
|
||||
|
||||
namespace JiShe.CollectBus.Cassandra
|
||||
{
|
||||
[DependsOn(
|
||||
typeof(AbpAutofacModule)
|
||||
)]
|
||||
public class CollectBusCassandraModule : AbpModule
|
||||
{
|
||||
public override Task ConfigureServicesAsync(ServiceConfigurationContext context)
|
||||
{
|
||||
Configure<CassandraConfig>(context.Services.GetConfiguration().GetSection("Cassandra"));
|
||||
context.AddCassandra();
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public override async Task OnApplicationInitializationAsync(ApplicationInitializationContext context)
|
||||
{
|
||||
await context.UseCassandra();
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,33 +0,0 @@
|
||||
using Autofac.Core;
|
||||
using Cassandra;
|
||||
using Cassandra.Mapping;
|
||||
using JiShe.CollectBus.Cassandra;
|
||||
using JiShe.CollectBus.Cassandra.Mappers;
|
||||
using Microsoft.Extensions.Options;
|
||||
using System.Reflection;
|
||||
using Volo.Abp;
|
||||
using Volo.Abp.Modularity;
|
||||
|
||||
// ReSharper disable once CheckNamespace
|
||||
namespace Microsoft.Extensions.DependencyInjection
|
||||
{
|
||||
public static class ApplicationInitializationContextExtensions
|
||||
{
|
||||
public static async Task UseCassandra(this ApplicationInitializationContext context)
|
||||
{
|
||||
var service = context.ServiceProvider;
|
||||
var cassandraProvider = service.GetRequiredService<ICassandraProvider>();
|
||||
await cassandraProvider.InitClusterAndSessionAsync();
|
||||
}
|
||||
}
|
||||
|
||||
public static class ServiceCollectionExtensions
|
||||
{
|
||||
public static void AddCassandra(this ServiceConfigurationContext context)
|
||||
{
|
||||
context.Services.AddTransient(typeof(ICassandraRepository<,>), typeof(CassandraRepository<,>));
|
||||
context.Services.AddSingleton(new MappingConfiguration()
|
||||
.Define(new CollectBusMapping()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,88 +0,0 @@
|
||||
using System.Reflection;
|
||||
using System.Text;
|
||||
using Cassandra;
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using JiShe.CollectBus.Common.Attributes;
|
||||
using Cassandra.Mapping;
|
||||
using Cassandra.Data.Linq;
|
||||
using Thrift.Protocol.Entities;
|
||||
|
||||
namespace JiShe.CollectBus.Cassandra.Extensions
|
||||
{
|
||||
public static class SessionExtension
|
||||
{
|
||||
public static void CreateTable<TEntity>(this ISession session,string? defaultKeyspace=null) where TEntity : class
|
||||
{
|
||||
var type = typeof(TEntity);
|
||||
var tableAttribute = type.GetCustomAttribute<CassandraTableAttribute>();
|
||||
var tableName = tableAttribute?.Name ?? type.Name.ToLower();
|
||||
//var tableKeyspace = tableAttribute?.Keyspace ?? defaultKeyspace;
|
||||
var tableKeyspace = session.Keyspace;
|
||||
|
||||
var properties = type.GetProperties();
|
||||
var primaryKey = properties.FirstOrDefault(p => p.GetCustomAttribute<KeyAttribute>() != null);
|
||||
|
||||
if (primaryKey == null)
|
||||
{
|
||||
throw new InvalidOperationException($"No primary key defined for type {type.Name}");
|
||||
}
|
||||
|
||||
var cql = new StringBuilder();
|
||||
cql.Append($"CREATE TABLE IF NOT EXISTS {tableKeyspace}.{tableName} (");
|
||||
|
||||
foreach (var prop in properties)
|
||||
{
|
||||
var ignoreAttribute = prop.GetCustomAttribute<CassandraIgnoreAttribute>();
|
||||
if (ignoreAttribute != null) continue;
|
||||
var columnName = prop.Name.ToLower();
|
||||
var cqlType = GetCassandraType(prop.PropertyType);
|
||||
|
||||
cql.Append($"{columnName} {cqlType}, ");
|
||||
}
|
||||
cql.Length -= 2; // Remove last comma and space
|
||||
cql.Append($", PRIMARY KEY ({primaryKey.Name.ToLower()}))");
|
||||
|
||||
session.Execute(cql.ToString());
|
||||
}
|
||||
|
||||
private static string GetCassandraType(Type type)
|
||||
{
|
||||
// 基础类型处理
|
||||
switch (Type.GetTypeCode(type))
|
||||
{
|
||||
case TypeCode.String: return "text";
|
||||
case TypeCode.Int32: return "int";
|
||||
case TypeCode.Int64: return "bigint";
|
||||
case TypeCode.Boolean: return "boolean";
|
||||
case TypeCode.DateTime: return "timestamp";
|
||||
case TypeCode.Byte: return "tinyint";
|
||||
}
|
||||
|
||||
if (type == typeof(Guid)) return "uuid";
|
||||
if (type == typeof(DateTimeOffset)) return "timestamp";
|
||||
if (type == typeof(Byte[])) return "blob";
|
||||
|
||||
// 处理集合类型
|
||||
if (type.IsGenericType)
|
||||
{
|
||||
var genericType = type.GetGenericTypeDefinition();
|
||||
var elementType = type.GetGenericArguments()[0];
|
||||
|
||||
if (genericType == typeof(List<>))
|
||||
return $"list<{GetCassandraType(elementType)}>";
|
||||
if (genericType == typeof(HashSet<>))
|
||||
return $"set<{GetCassandraType(elementType)}>";
|
||||
if (genericType == typeof(Dictionary<,>))
|
||||
{
|
||||
var keyType = type.GetGenericArguments()[0];
|
||||
var valueType = type.GetGenericArguments()[1];
|
||||
return $"map<{GetCassandraType(keyType)}, {GetCassandraType(valueType)}>";
|
||||
}
|
||||
}
|
||||
|
||||
throw new NotSupportedException($"不支持的类型: {type.Name}");
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
@ -1,26 +0,0 @@
|
||||
using Cassandra;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Cassandra
{
    /// <summary>
    /// Provides access to the Cassandra cluster, session and configuration.
    /// </summary>
    public interface ICassandraProvider
    {
        /// <summary>Underlying Cassandra cluster instance.</summary>
        Cluster Instance { get;}

        /// <summary>Current session. NOTE(review): presumably bound to the configured default keyspace — confirm with the implementation.</summary>
        ISession Session { get;}

        /// <summary>Configuration used to build the cluster and sessions.</summary>
        CassandraConfig CassandraConfig { get; }

        /// <summary>Gets a session, optionally bound to the given keyspace; null means the default keyspace.</summary>
        ISession GetSession(string? keyspace = null);

        /// <summary>Gets the cluster. NOTE(review): the callback's string argument semantics are not visible here — confirm with the implementation.</summary>
        Cluster GetCluster(Action<string?>? callback = null);

        /// <summary>Synchronously initializes the cluster and session.</summary>
        void InitClusterAndSession();

        /// <summary>Asynchronously initializes the cluster and session.</summary>
        Task InitClusterAndSessionAsync();
    }
}
|
||||
@ -1,20 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using Volo.Abp.Domain.Entities;
|
||||
|
||||
namespace JiShe.CollectBus.Cassandra
{
    /// <summary>
    /// Generic async CRUD repository abstraction over a Cassandra table.
    /// </summary>
    /// <typeparam name="TEntity">Entity type stored in the table.</typeparam>
    /// <typeparam name="TKey">Primary-key type of the entity.</typeparam>
    public interface ICassandraRepository<TEntity, TKey> where TEntity : class
    {
        /// <summary>Gets a single entity by its primary key.</summary>
        Task<TEntity> GetAsync(TKey id);
        /// <summary>Gets all entities. NOTE(review): unbounded full-table read — may be expensive on large tables.</summary>
        Task<List<TEntity>> GetListAsync();
        /// <summary>Inserts a new entity and returns it.</summary>
        Task<TEntity> InsertAsync(TEntity entity);
        /// <summary>Updates an existing entity and returns it.</summary>
        Task<TEntity> UpdateAsync(TEntity entity);
        /// <summary>Deletes the given entity.</summary>
        Task DeleteAsync(TEntity entity);
        /// <summary>Deletes the entity with the given primary key.</summary>
        Task DeleteAsync(TKey id);
        /// <summary>Gets a page of entities. NOTE(review): the format of <paramref name="sorting"/> is not visible here — confirm with the implementation.</summary>
        Task<List<TEntity>> GetPagedListAsync(int skipCount, int maxResultCount, string sorting);
    }
}
|
||||
@ -1,22 +0,0 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="CassandraCSharpDriver" Version="3.22.0" />
|
||||
<PackageReference Include="Microsoft.Extensions.Options" Version="8.0.2" />
|
||||
<PackageReference Include="Volo.Abp.Autofac" Version="8.3.3" />
|
||||
<PackageReference Include="Volo.Abp.Core" Version="8.3.3" />
|
||||
<PackageReference Include="Volo.Abp.Ddd.Domain" Version="8.3.3" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\services\JiShe.CollectBus.Domain\JiShe.CollectBus.Domain.csproj" />
|
||||
<ProjectReference Include="..\..\shared\JiShe.CollectBus.Common\JiShe.CollectBus.Common.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@ -1,20 +0,0 @@
|
||||
using Cassandra.Mapping;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using JiShe.CollectBus.IotSystems.MessageIssueds;
|
||||
using static Cassandra.QueryTrace;
|
||||
|
||||
namespace JiShe.CollectBus.Cassandra.Mappers
{
    /// <summary>
    /// Cassandra driver mapping definitions for CollectBus entities.
    /// </summary>
    public class CollectBusMapping: Mappings
    {
        /// <summary>
        /// Registers the entity-to-table mappings.
        /// </summary>
        public CollectBusMapping()
        {
            // MessageIssued.Type is stored in the "type" column as a CQL int.
            For<MessageIssued>()
                .Column(e => e.Type, cm => cm.WithName("type").WithDbType<int>());
        }
    }
}
|
||||
@ -1,25 +0,0 @@
|
||||
using JiShe.CollectBus.FreeRedis.Options;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Volo.Abp.Modularity;
|
||||
|
||||
namespace JiShe.CollectBus.FreeRedis
{
    /// <summary>
    /// ABP module that binds FreeRedis options from the "Redis" configuration section.
    /// </summary>
    public class CollectBusFreeRedisModule : AbpModule
    {
        public override void ConfigureServices(ServiceConfigurationContext context)
        {
            var configuration = context.Services.GetConfiguration();

            // Bind the "Redis" section onto FreeRedisOptions (consumed by FreeRedisProvider).
            Configure<FreeRedisOptions>(options =>
            {
                configuration.GetSection("Redis").Bind(options);
            });

        }
    }
}
|
||||
|
||||
|
||||
|
||||
@ -1,502 +0,0 @@
|
||||
using System.Diagnostics;
|
||||
using FreeRedis;
|
||||
using JiShe.CollectBus.Common.Helpers;
|
||||
using JiShe.CollectBus.FreeRedis.Options;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Volo.Abp.DependencyInjection;
|
||||
|
||||
namespace JiShe.CollectBus.FreeRedis
{
    /// <summary>
    /// Singleton provider that owns the shared FreeRedis client used across the application.
    /// </summary>
    public class FreeRedisProvider : IFreeRedisProvider, ISingletonDependency
    {
        private readonly FreeRedisOptions _option;

        /// <summary>
        /// Creates the provider and eagerly builds the Redis client from the bound options.
        /// </summary>
        public FreeRedisProvider(IOptions<FreeRedisOptions> options)
        {
            _option = options.Value;
            // Build the real client up-front; Instance is assigned inside GetInstance().
            GetInstance();
        }

        /// <summary>
        /// The shared FreeRedis client. Always assigned by <see cref="GetInstance"/> in the
        /// constructor, so consumers never observe it unset. (The previous initializer
        /// allocated a throwaway RedisClient with an empty connection string that was
        /// immediately replaced — removed as a dead allocation.)
        /// </summary>
        public RedisClient Instance { get; set; } = default!;

        /// <summary>
        /// Builds a FreeRedis client from the configured connection string, default
        /// database and pool size, stores it in <see cref="Instance"/>, and returns it.
        /// </summary>
        /// <returns>The newly created client.</returns>
        public IRedisClient GetInstance()
        {
            var connectionString = $"{_option.Configuration},defaultdatabase={_option.DefaultDB},MaxPoolSize={_option.MaxPoolSize}";
            Instance = new RedisClient(connectionString);
            // Use the project's JSON serializer for value (de)serialization.
            Instance.Serialize = obj => BusJsonSerializer.Serialize(obj);
            Instance.Deserialize = (json, type) => BusJsonSerializer.Deserialize(json, type);
            // Surface client notices in trace output for diagnostics.
            Instance.Notice += (s, e) => Trace.WriteLine(e.Log);
            return Instance;
        }
    }
}
|
||||
@ -1,14 +0,0 @@
|
||||
using FreeRedis;
|
||||
|
||||
namespace JiShe.CollectBus.FreeRedis
{
    /// <summary>
    /// Exposes the shared FreeRedis client.
    /// </summary>
    public interface IFreeRedisProvider
    {
        /// <summary>
        /// The shared Redis client instance.
        /// </summary>
        RedisClient Instance { get; set; }
    }
}
|
||||
|
||||
@ -1,15 +0,0 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="FreeRedis" Version="1.3.6" />
|
||||
<PackageReference Include="Volo.Abp" Version="8.3.3" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\shared\JiShe.CollectBus.Common\JiShe.CollectBus.Common.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@ -1,26 +0,0 @@
|
||||
namespace JiShe.CollectBus.FreeRedis.Options
{
    /// <summary>
    /// FreeRedis connection options (bound from the "Redis" configuration section).
    /// </summary>
    public class FreeRedisOptions
    {
        /// <summary>
        /// Connection string.
        /// </summary>
        public string? Configuration { get; set; }

        /// <summary>
        /// Maximum pool size. NOTE(review): kept as a string because it is spliced
        /// verbatim into the connection string — consider int with validation.
        /// </summary>
        public string? MaxPoolSize { get; set; }

        /// <summary>
        /// Default database index.
        /// </summary>
        public string? DefaultDB { get; set; }

        /// <summary>
        /// Database index used by Hangfire.
        /// </summary>
        public string? HangfireDB { get; set; }
    }
}
|
||||
|
||||
@ -1,10 +0,0 @@
|
||||
namespace JiShe.CollectBus.IoTDB.Attribute
{
    /// <summary>
    /// Marks a property as an IoTDB ATTRIBUTE column (metadata/attribute field).
    /// </summary>
    [AttributeUsage(AttributeTargets.Property)]
    public class ATTRIBUTEColumnAttribute : System.Attribute
    {
    }
}
|
||||
@ -1,10 +0,0 @@
|
||||
namespace JiShe.CollectBus.IoTDB.Attribute
{
    /// <summary>
    /// Marks a property as an IoTDB FIELD column (measurement/data field).
    /// </summary>
    [AttributeUsage(AttributeTargets.Property)]
    public class FIELDColumnAttribute : System.Attribute
    {
    }
}
|
||||
@ -1,16 +0,0 @@
|
||||
namespace JiShe.CollectBus.IoTDB.Attribute
{
    /// <summary>
    /// Marks an entity as single-measuring-point mode: the entity has exactly one
    /// FIELD-style member of type Tuple&lt;string, object&gt;, where Item1 is the
    /// measuring-point name and Item2 its value.
    /// </summary>
    [AttributeUsage(AttributeTargets.Property)]
    public class SingleMeasuringAttribute : System.Attribute
    {
        /// <summary>Name of the single measurement field.</summary>
        public string FieldName { get; set;}

        /// <summary>Creates the attribute with the measurement field name.</summary>
        public SingleMeasuringAttribute(string fieldName)
        {
            FieldName = fieldName;
        }
    }
}
|
||||
@ -1,10 +0,0 @@
|
||||
namespace JiShe.CollectBus.IoTDB.Attribute
{
    /// <summary>
    /// Marks a property as an IoTDB TAG column (tag/identifier field).
    /// </summary>
    [AttributeUsage(AttributeTargets.Property)]
    public class TAGColumnAttribute : System.Attribute
    {
    }
}
|
||||
@ -1,33 +0,0 @@
|
||||
using JiShe.CollectBus.IoTDB.Context;
|
||||
using JiShe.CollectBus.IoTDB.Interface;
|
||||
using JiShe.CollectBus.IoTDB.Options;
|
||||
using JiShe.CollectBus.IoTDB.Provider;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Volo.Abp.Modularity;
|
||||
|
||||
namespace JiShe.CollectBus.IoTDB
{
    /// <summary>
    /// ABP module wiring up IoTDB options, runtime context, session factory and provider.
    /// </summary>
    public class CollectBusIoTDBModule : AbpModule
    {
        public override void ConfigureServices(ServiceConfigurationContext context)
        {

            var configuration = context.Services.GetConfiguration();
            // Bind the "IoTDBOptions" configuration section.
            Configure<IoTDBOptions>(options =>
            {
                configuration.GetSection(nameof(IoTDBOptions)).Bind(options);
            });

            // Runtime context is scoped: each scope can switch the session-pool model independently.
            context.Services.AddScoped<IoTDBRuntimeContext>();

            // Session factory is a singleton so the pools are shared process-wide.
            context.Services.AddSingleton<IIoTDBSessionFactory, IoTDBSessionFactory>();

            // Provider is scoped; it consumes the scoped runtime context.
            context.Services.AddScoped<IIoTDBProvider, IoTDBProvider>();

        }
    }
}
|
||||
@ -1,32 +0,0 @@
|
||||
using JiShe.CollectBus.IoTDB.Options;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace JiShe.CollectBus.IoTDB.Context
{
    /// <summary>
    /// IoTDB SessionPool runtime context: tracks, per scope, whether the table-model
    /// session pool is in use, with a default taken from configuration.
    /// </summary>
    public class IoTDBRuntimeContext
    {
        // Configured default, used to restore the flag in ResetToDefault().
        private readonly bool _configuredDefault;

        public IoTDBRuntimeContext(IOptions<IoTDBOptions> options)
        {
            var useTableByDefault = options.Value.UseTableSessionPoolByDefault;
            _configuredDefault = useTableByDefault;
            UseTableSessionPool = useTableByDefault;
        }

        /// <summary>
        /// Whether to store via the table model; false (the usual default) means the tree model.
        /// </summary>
        public bool UseTableSessionPool { get; set; }

        /// <summary>
        /// Restores <see cref="UseTableSessionPool"/> to the configured default.
        /// </summary>
        public void ResetToDefault() => UseTableSessionPool = _configuredDefault;
    }
}
|
||||
@ -1,51 +0,0 @@
|
||||
using JiShe.CollectBus.Common.Models;
|
||||
using JiShe.CollectBus.IoTDB.Options;
|
||||
using JiShe.CollectBus.IoTDB.Provider;
|
||||
|
||||
namespace JiShe.CollectBus.IoTDB.Interface
{
    /// <summary>
    /// IoTDB data source. The database can host multiple time-series models at once,
    /// but their data is fully isolated — queries cannot cross models; the model is
    /// selected via the connection configuration.
    /// </summary>
    public interface IIoTDBProvider
    {
        ///// <summary>
        ///// Switch the SessionPool (table vs. tree model).
        ///// </summary>
        ///// <param name="useTableSession">true to use the table model</param>
        //void SwitchSessionPool(bool useTableSession);

        /// <summary>
        /// Inserts a single entity.
        /// </summary>
        /// <typeparam name="T">Entity type.</typeparam>
        /// <param name="entity">Entity to insert.</param>
        /// <returns></returns>
        Task InsertAsync<T>(T entity) where T : IoTEntity;

        /// <summary>
        /// Inserts a batch of entities.
        /// </summary>
        /// <typeparam name="T">Entity type.</typeparam>
        /// <param name="entities">Entities to insert.</param>
        /// <returns></returns>
        Task BatchInsertAsync<T>(IEnumerable<T> entities) where T : IoTEntity;


        /// <summary>
        /// Deletes data matching the query options.
        /// NOTE(review): returns object — confirm the concrete result type with the implementation.
        /// </summary>
        /// <typeparam name="T">Entity type.</typeparam>
        /// <param name="options">Filter describing what to delete.</param>
        /// <returns></returns>
        Task<object> DeleteAsync<T>(QueryOptions options) where T : IoTEntity;

        /// <summary>
        /// Queries data with paging.
        /// </summary>
        /// <typeparam name="T">Entity type.</typeparam>
        /// <param name="options">Paging and filter conditions.</param>
        /// <returns></returns>
        Task<BusPagedResult<T>> QueryAsync<T>(QueryOptions options) where T : IoTEntity, new();
    }
}
|
||||
@ -1,10 +0,0 @@
|
||||
namespace JiShe.CollectBus.IoTDB.Interface
{
    /// <summary>
    /// Factory for obtaining IoTDB session pools (table or tree model).
    /// </summary>
    public interface IIoTDBSessionFactory:IDisposable
    {
        /// <summary>Gets a session pool; true selects the table-model pool, false the tree-model pool.</summary>
        IIoTDBSessionPool GetSessionPool(bool useTableSession);
    }
}
|
||||
@ -1,30 +0,0 @@
|
||||
using Apache.IoTDB.DataStructure;
|
||||
|
||||
namespace JiShe.CollectBus.IoTDB.Interface
{
    /// <summary>
    /// IoTDB session connection pool.
    /// </summary>
    public interface IIoTDBSessionPool : IDisposable
    {
        /// <summary>
        /// Opens the connection pool.
        /// </summary>
        /// <returns></returns>
        Task OpenAsync();

        /// <summary>
        /// Inserts a tablet of data.
        /// </summary>
        /// <param name="tablet">Tablet to insert.</param>
        /// <returns>NOTE(review): presumably a driver status code — confirm with the implementation.</returns>
        Task<int> InsertAsync(Tablet tablet);

        /// <summary>
        /// Executes a query statement.
        /// </summary>
        /// <param name="sql">Query to execute.</param>
        /// <returns>The result data set.</returns>
        Task<SessionDataSet> ExecuteQueryStatementAsync(string sql);
    }
}
|
||||
@ -1,16 +0,0 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<!--<PackageReference Include="Apache.IoTDB" Version="1.3.3.1" />-->
|
||||
<PackageReference Include="Apache.IoTDB" Version="2.0.2" />
|
||||
<PackageReference Include="Volo.Abp" Version="8.3.3" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\shared\JiShe.CollectBus.Common\JiShe.CollectBus.Common.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@ -1,46 +0,0 @@
|
||||
namespace JiShe.CollectBus.IoTDB.Options
{
    /// <summary>
    /// IoTDB configuration options.
    /// </summary>
    public class IoTDBOptions
    {
        /// <summary>
        /// Database name; only used by the table model, empty for the tree model.
        /// </summary>
        public string DataBaseName { get; set; }

        /// <summary>
        /// Cluster node list.
        /// </summary>
        public List<string> ClusterList { get; set; }
        /// <summary>
        /// User name.
        /// </summary>
        public string UserName { get; set; }
        /// <summary>
        /// Password.
        /// </summary>
        public string Password { get; set; }

        /// <summary>
        /// Connection pool size.
        /// </summary>
        public int PoolSize { get; set; } = 2;

        /// <summary>
        /// Fetch size per query round-trip; defaults to 1024.
        /// </summary>
        public int FetchSize { get; set; } = 1024;

        /// <summary>
        /// Whether to enable debug mode. Keep this off in production: due to the
        /// underlying implementation it can cause continuous memory growth.
        /// </summary>
        public bool OpenDebugMode { get; set;}

        /// <summary>
        /// Whether to store via the table model; false (default) means the tree model.
        /// </summary>
        public bool UseTableSessionPoolByDefault { get; set; } = false;
    }
}
|
||||
@ -1,21 +0,0 @@
|
||||
namespace JiShe.CollectBus.IoTDB.Options
{
    /// <summary>
    /// A single query filter condition (field, operator, value).
    /// </summary>
    public class QueryCondition
    {
        /// <summary>
        /// Field name to filter on.
        /// </summary>
        public string Field { get; set; }
        /// <summary>
        /// Comparison operator. NOTE(review): expected values (e.g. "=", "&gt;") are not visible here — confirm with the consumer.
        /// </summary>
        public string Operator { get; set; }
        /// <summary>
        /// Value to compare against.
        /// </summary>
        public object Value { get; set; }
    }
}
|
||||
@ -1,28 +0,0 @@
|
||||
namespace JiShe.CollectBus.IoTDB.Options
{
    /// <summary>
    /// Query options: target, paging and filter conditions.
    /// </summary>
    public class QueryOptions
    {
        /// <summary>
        /// Table name (table model) or device path (tree model).
        /// </summary>
        public required string TableNameOrTreePath { get; set; }

        /// <summary>
        /// Page number.
        /// </summary>
        public int Page { get; set; }

        /// <summary>
        /// Page size.
        /// </summary>
        public int PageSize { get; set; }

        /// <summary>
        /// Filter conditions; get-only so callers mutate the shared list in place.
        /// </summary>
        public List<QueryCondition> Conditions { get; } = new();
    }
}
|
||||
@ -1,30 +0,0 @@
|
||||
using Apache.IoTDB;
|
||||
|
||||
namespace JiShe.CollectBus.IoTDB.Provider
{
    /// <summary>
    /// Device metadata used when building an IoTDB Tablet.
    /// </summary>
    public class DeviceMetadata
    {
        /// <summary>
        /// Whether the entity is in single-measuring-point mode.
        /// </summary>
        public bool IsSingleMeasuring { get; set; }

        /// <summary>
        /// Measurement names; becomes the Tablet's columnNames argument.
        /// </summary>
        public List<string> ColumnNames { get; set; } = new();

        /// <summary>
        /// Column categories; becomes the Tablet's columnCategories argument.
        /// </summary>
        public List<ColumnCategory> ColumnCategories { get; } = new();

        /// <summary>
        /// Value data types; becomes the Tablet's dataTypes argument.
        /// </summary>
        public List<TSDataType> DataTypes { get; } = new();
    }
}
|
||||
@ -1,33 +0,0 @@
|
||||
namespace JiShe.CollectBus.IoTDB.Provider
{
    /// <summary>
    /// Builds tree-model device paths and table-model table names for entities.
    /// </summary>
    public static class DevicePathBuilder
    {
        /// <summary>
        /// Builds the tree-model device path for an entity. Segments that may be
        /// purely numeric are wrapped in backticks, because IoTDB path levels
        /// must not be bare numeric literals.
        /// </summary>
        /// <typeparam name="T">Entity type.</typeparam>
        /// <param name="entity">Entity supplying the path segments.</param>
        /// <returns>Path of the form root.&lt;system&gt;.`project`.`device`.</returns>
        public static string GetDevicePath<T>(T entity) where T : IoTEntity =>
            $"root.{entity.SystemName.ToLower()}.`{entity.ProjectCode}`.`{entity.DeviceId}`";

        /// <summary>
        /// Gets the table name for an entity type: the lower-cased type name.
        /// </summary>
        /// <typeparam name="T">Entity type.</typeparam>
        /// <returns>Lower-cased name of <typeparamref name="T"/>.</returns>
        public static string GetTableName<T>() where T : IoTEntity =>
            typeof(T).Name.ToLower();
    }
}
|
||||
@ -1,615 +0,0 @@
|
||||
using System.Collections.Concurrent;
|
||||
using System.Reflection;
|
||||
using System.Text;
|
||||
using Apache.IoTDB;
|
||||
using Apache.IoTDB.DataStructure;
|
||||
using JiShe.CollectBus.Common.Models;
|
||||
using JiShe.CollectBus.IoTDB.Attribute;
|
||||
using JiShe.CollectBus.IoTDB.Context;
|
||||
using JiShe.CollectBus.IoTDB.Interface;
|
||||
using JiShe.CollectBus.IoTDB.Options;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace JiShe.CollectBus.IoTDB.Provider
{
    /// <summary>
    /// IoTDB data provider. Builds Tablets for inserts and SQL text for
    /// queries/deletes, targeting either the tree model or the table model as
    /// selected by the runtime context of the current call.
    /// </summary>
    public class IoTDBProvider : IIoTDBProvider
    {
        // Process-wide, per-entity-type metadata cache shared by all provider instances.
        private static readonly ConcurrentDictionary<Type, DeviceMetadata> _metadataCache = new();
        private readonly ILogger<IoTDBProvider> _logger;
        private readonly IIoTDBSessionFactory _sessionFactory;
        private readonly IoTDBRuntimeContext _runtimeContext;

        // Session pool matching the model (tree/table) chosen by the runtime context.
        private IIoTDBSessionPool CurrentSession =>
            _sessionFactory.GetSessionPool(_runtimeContext.UseTableSessionPool);

        public IoTDBProvider(
            ILogger<IoTDBProvider> logger,
            IIoTDBSessionFactory sessionFactory,
            IoTDBRuntimeContext runtimeContext)
        {
            _logger = logger;
            _sessionFactory = sessionFactory;
            _runtimeContext = runtimeContext;

        }

        /// <summary>
        /// Inserts a single entity as a one-row Tablet.
        /// </summary>
        /// <typeparam name="T">Entity type.</typeparam>
        /// <param name="entity">Entity to insert.</param>
        public async Task InsertAsync<T>(T entity) where T : IoTEntity
        {
            var metadata = GetMetadata<T>();

            var tablet = BuildTablet(new[] { entity }, metadata);

            await CurrentSession.InsertAsync(tablet);
        }

        /// <summary>
        /// Inserts entities in chunks of 1000 rows, one Tablet per chunk.
        /// </summary>
        /// <typeparam name="T">Entity type.</typeparam>
        /// <param name="entities">Entities to insert; all rows of a chunk must map to the same device path.</param>
        public async Task BatchInsertAsync<T>(IEnumerable<T> entities) where T : IoTEntity
        {
            var metadata = GetMetadata<T>();

            var batchSize = 1000;
            var batches = entities.Chunk(batchSize);

            foreach (var batch in batches)
            {
                var tablet = BuildTablet(batch, metadata);
                await CurrentSession.InsertAsync(tablet);
            }
        }


        /// <summary>
        /// Deletes data matching <paramref name="options"/> and returns the
        /// first value of the single result row (the server-reported count).
        /// NOTE(review): the DELETE/DROP text is executed through
        /// ExecuteQueryStatementAsync — confirm the driver accepts non-SELECT
        /// statements on that call.
        /// </summary>
        /// <typeparam name="T">Entity type.</typeparam>
        /// <param name="options">Target and conditions for the delete.</param>
        /// <returns>The affected-row value, or 0 when no result row came back.</returns>
        public async Task<object> DeleteAsync<T>(QueryOptions options) where T : IoTEntity
        {
            var query = BuildDeleteSQL<T>(options);
            var sessionDataSet = await CurrentSession.ExecuteQueryStatementAsync(query);

            if (!sessionDataSet.HasNext())
            {
                _logger.LogWarning($"{typeof(T).Name} 删除数据时,没有返回受影响记录数量。");
                return 0;
            }

            // Take the single result row.
            var row = sessionDataSet.Next();
            return row.Values[0];
        }

        /// <summary>
        /// Runs a paged query: one statement for the page rows and a separate
        /// COUNT(*) statement for the total.
        /// </summary>
        /// <typeparam name="T">Entity type; must have a parameterless constructor.</typeparam>
        /// <param name="options">Target, paging and conditions.</param>
        /// <returns>A paged result with total count and materialized items.</returns>
        public async Task<BusPagedResult<T>> QueryAsync<T>(QueryOptions options) where T : IoTEntity, new()
        {
            var query = BuildQuerySQL<T>(options);
            var sessionDataSet = await CurrentSession.ExecuteQueryStatementAsync(query);

            var result = new BusPagedResult<T>
            {
                TotalCount = await GetTotalCount<T>(options),
                Items = ParseResults<T>(sessionDataSet, options.PageSize)
            };

            return result;
        }

        /// <summary>
        /// Builds a Tablet from a batch of entities using cached metadata.
        /// All entities of one batch must resolve to the same device path
        /// (tree model) or table name (table model).
        /// </summary>
        /// <typeparam name="T">Entity type.</typeparam>
        /// <param name="entities">Entities that become Tablet rows.</param>
        /// <param name="metadata">Column metadata for <typeparamref name="T"/>.</param>
        private Tablet BuildTablet<T>(IEnumerable<T> entities, DeviceMetadata metadata) where T : IoTEntity
        {
            var timestamps = new List<long>();
            var values = new List<List<object>>();
            var devicePaths = new HashSet<string>();
            // Snapshot of the column names: single-measuring mode rewrites entries
            // of metadata.ColumnNames below while this copy is being iterated.
            List<string> tempColumnNames = new List<string>();
            tempColumnNames.AddRange(metadata.ColumnNames);

            foreach (var entity in entities)
            {
                timestamps.Add(entity.Timestamps);
                var rowValues = new List<object>();
                foreach (var measurement in tempColumnNames)
                {

                    PropertyInfo propertyInfo = typeof(T).GetProperty(measurement);
                    if (propertyInfo == null)
                    {
                        throw new Exception($"{nameof(BuildTablet)} 构建表模型{typeof(T).Name}时,没有找到{measurement}属性,属于异常情况,-101。");
                    }

                    var value = propertyInfo.GetValue(entity);
                    // Single-measuring mode: the property is a Tuple<string, T>
                    // where Item1 is the measurement name and Item2 the value.
                    if (propertyInfo.IsDefined(typeof(SingleMeasuringAttribute), false) && value != null)
                    {
                        Type tupleType = value.GetType();
                        Type[] tupleArgs = tupleType.GetGenericArguments();
                        Type item2Type = tupleArgs[1]; // actual runtime type of the measurement value
                        var item1 = tupleType.GetProperty("Item1")!.GetValue(value);
                        var item2 = tupleType.GetProperty("Item2")!.GetValue(value);
                        if (item1 == null || item2 == null)
                        {
                            throw new Exception($"{nameof(BuildTablet)} 构建表模型{typeof(T).Name}时,单测点模式构建失败,没有获取测点名称或者测点值,-102。");
                        }

                        // NOTE(review): this rewrites an entry of the shared,
                        // statically cached metadata.ColumnNames in place — not
                        // thread-safe, and the last entity's measurement name
                        // stays in the cache. Confirm this is intended.
                        var indexOf = metadata.ColumnNames.IndexOf(measurement);
                        metadata.ColumnNames[indexOf] = (string)item1!;

                        rowValues.Add(item2);

                    }
                    else
                    {
                        if (value != null)
                        {
                            rowValues.Add(value);
                        }
                        else
                        {
                            // Null property: fall back to the type's default value.
                            DataTypeDefaultValueMap.TryGetValue(propertyInfo.PropertyType.Name, out object defaultValue);

                            rowValues.Add(defaultValue);
                        }
                    }

                }

                values.Add(rowValues);

                if (!_runtimeContext.UseTableSessionPool) // tree model
                {
                    devicePaths.Add(DevicePathBuilder.GetDevicePath(entity));
                }
                else
                {
                    devicePaths.Add(DevicePathBuilder.GetTableName<T>());
                }
            }

            if (devicePaths.Count > 1)
            {
                throw new Exception($"{nameof(BuildTablet)} 构建Tablet《{typeof(T).Name}》时,批量插入的设备路径不一致。");
            }

            return _runtimeContext.UseTableSessionPool
                ? BuildTableSessionTablet(metadata, devicePaths.First(), values, timestamps)
                : BuildSessionTablet(metadata, devicePaths.First(), values, timestamps);
        }

        /// <summary>
        /// Builds a tree-model Tablet (no column categories).
        /// </summary>
        /// <param name="metadata">Column metadata.</param>
        /// <param name="devicePath">Target device path.</param>
        /// <param name="values">Row values.</param>
        /// <param name="timestamps">Row timestamps.</param>
        private Tablet BuildSessionTablet(DeviceMetadata metadata, string devicePath,
            List<List<object>> values, List<long> timestamps)
        {
            return new Tablet(
                devicePath,
                metadata.ColumnNames,
                metadata.DataTypes,
                values,
                timestamps
            );
        }

        /// <summary>
        /// Builds a table-model Tablet (includes column categories).
        /// </summary>
        /// <param name="metadata">Column metadata.</param>
        /// <param name="devicePath">Target table name.</param>
        /// <param name="values">Row values.</param>
        /// <param name="timestamps">Row timestamps.</param>
        private Tablet BuildTableSessionTablet(DeviceMetadata metadata, string devicePath,
            List<List<object>> values, List<long> timestamps)
        {
            var tablet = new Tablet(
                devicePath,
                metadata.ColumnNames,
                metadata.ColumnCategories,
                metadata.DataTypes,
                values,
                timestamps
            );

            return tablet;
        }

        /// <summary>
        /// Builds the paged SELECT statement for <paramref name="options"/>.
        /// </summary>
        /// <typeparam name="T">Entity type.</typeparam>
        /// <param name="options">Target, paging and conditions.</param>
        private string BuildQuerySQL<T>(QueryOptions options) where T : IoTEntity
        {
            var metadata = GetMetadata<T>();
            var sb = new StringBuilder("SELECT ");
            sb.AppendJoin(", ", metadata.ColumnNames);
            sb.Append($" FROM {options.TableNameOrTreePath}");

            if (options.Conditions.Any())
            {
                sb.Append(" WHERE ");
                sb.AppendJoin(" AND ", options.Conditions.Select(TranslateCondition));
            }

            sb.Append($" LIMIT {options.PageSize} OFFSET {options.Page * options.PageSize}");
            return sb.ToString();
        }

        /// <summary>
        /// Builds the DELETE (tree model) or DROP (table model) statement.
        /// NOTE(review): the column-name list is appended immediately after the
        /// FROM clause with no keyword or separator — verify the generated text
        /// is valid IoTDB SQL.
        /// </summary>
        /// <typeparam name="T">Entity type.</typeparam>
        /// <param name="options">Target and conditions.</param>
        private string BuildDeleteSQL<T>(QueryOptions options) where T : IoTEntity
        {
            var metadata = GetMetadata<T>();
            var sb = new StringBuilder();

            if (!_runtimeContext.UseTableSessionPool)
            {
                sb.Append("DELETE ");
            }
            else
            {
                sb.Append("DROP ");
            }

            sb.Append($" FROM {options.TableNameOrTreePath}");

            sb.AppendJoin(", ", metadata.ColumnNames);

            if (options.Conditions.Any())
            {
                sb.Append(" WHERE ");
                sb.AppendJoin(" AND ", options.Conditions.Select(TranslateCondition));
            }

            return sb.ToString();
        }

        /// <summary>
        /// Renders one condition as SQL text.
        /// NOTE(review): field and value are interpolated directly into the SQL
        /// (no escaping or parameterization) — unsafe if conditions can carry
        /// untrusted input.
        /// </summary>
        /// <param name="condition">Condition to render.</param>
        /// <exception cref="NotSupportedException">Unknown operator.</exception>
        private string TranslateCondition(QueryCondition condition)
        {
            return condition.Operator switch
            {
                ">" => $"{condition.Field} > {condition.Value}",
                "<" => $"{condition.Field} < {condition.Value}",
                "=" => $"{condition.Field} = '{condition.Value}'",
                _ => throw new NotSupportedException($"Operator {condition.Operator} not supported")
            };
        }

        /// <summary>
        /// Runs a COUNT(*) with the same conditions to get the total row count.
        /// </summary>
        /// <typeparam name="T">Entity type.</typeparam>
        /// <param name="options">Target and conditions.</param>
        private async Task<int> GetTotalCount<T>(QueryOptions options) where T : IoTEntity
        {
            var countQuery = $"SELECT COUNT(*) FROM {options.TableNameOrTreePath}";
            if (options.Conditions.Any())
            {
                countQuery += " WHERE " + string.Join(" AND ", options.Conditions.Select(TranslateCondition));
            }

            var result = await CurrentSession.ExecuteQueryStatementAsync(countQuery);
            return result.HasNext() ? Convert.ToInt32(result.Next().Values[0]) : 0;
        }

        /// <summary>
        /// Materializes up to <paramref name="pageSize"/> rows into entities,
        /// matching columns to properties case-insensitively.
        /// </summary>
        /// <typeparam name="T">Entity type; must have a parameterless constructor.</typeparam>
        /// <param name="dataSet">Open result set to read from.</param>
        /// <param name="pageSize">Maximum number of rows to read.</param>
        private IEnumerable<T> ParseResults<T>(SessionDataSet dataSet, int pageSize) where T : IoTEntity, new()
        {
            var results = new List<T>();
            var metadata = GetMetadata<T>();

            var properties = typeof(T).GetProperties();

            while (dataSet.HasNext() && results.Count < pageSize)
            {
                var record = dataSet.Next();
                var entity = new T
                {
                    Timestamps = record.Timestamps
                };


                foreach (var measurement in metadata.ColumnNames)
                {
                    // NOTE(review): `value` is the row's ENTIRE Values collection,
                    // not this column's single value — SetValue below will likely
                    // fail or store wrong data; an indexing step appears missing.
                    var value = record.Values;

                    var prop = properties.FirstOrDefault(p =>
                        p.Name.Equals(measurement, StringComparison.OrdinalIgnoreCase));
                    if (prop != null)
                    {
                        typeof(T).GetProperty(measurement)?.SetValue(entity, value);
                    }

                }

                results.Add(entity);
            }
            return results;
        }

        /// <summary>
        /// Gets the cached column metadata for <typeparamref name="T"/>,
        /// rebuilding it on every call (AddOrUpdate) because single-measuring
        /// columns can rename entries of ColumnNames between calls.
        /// </summary>
        /// <typeparam name="T">Entity type.</typeparam>
        private DeviceMetadata GetMetadata<T>() where T : IoTEntity
        {

            var columns = CollectColumnMetadata(typeof(T));
            var metadata = BuildDeviceMetadata(columns);

            return _metadataCache.AddOrUpdate(
                typeof(T),
                addValueFactory: t => metadata, // added when the key is absent
                updateValueFactory: (t, existingValue) =>
                {
                    // NOTE(review): these locals reuse the names `columns` /
                    // `metadata` from the enclosing method scope — verify this
                    // compiles (CS0136 name-conflict rule).
                    var columns = CollectColumnMetadata(t);
                    var metadata = BuildDeviceMetadata(columns);

                    // Refresh the column names of the existing entry and keep it.
                    existingValue.ColumnNames = metadata.ColumnNames;
                    return existingValue;
                }
            );
        }

        /// <summary>
        /// Collects column metadata from a type's TAG/ATTRIBUTE/FIELD column
        /// attributes, plus the optional single-measuring attribute.
        /// </summary>
        /// <param name="type">Entity type to reflect over.</param>
        private List<ColumnInfo> CollectColumnMetadata(Type type)
        {
            var columns = new List<ColumnInfo>();

            foreach (var prop in type.GetProperties())
            {
                // First map the TAG / ATTRIBUTE / FIELD column attributes.
                ColumnInfo? column = prop.GetCustomAttribute<TAGColumnAttribute>() is not null ? new ColumnInfo(
                    name: prop.Name,
                    category: ColumnCategory.TAG,
                    dataType: GetDataTypeFromTypeName(prop.PropertyType.Name),
                    false
                ) : prop.GetCustomAttribute<ATTRIBUTEColumnAttribute>() is not null ? new ColumnInfo(
                    prop.Name,
                    ColumnCategory.ATTRIBUTE,
                    GetDataTypeFromTypeName(prop.PropertyType.Name),
                    false
                ) : prop.GetCustomAttribute<FIELDColumnAttribute>() is not null ? new ColumnInfo(
                    prop.Name,
                    ColumnCategory.FIELD,
                    GetDataTypeFromTypeName(prop.PropertyType.Name),
                    false)
                    : null;

                // Then check for single-measuring mode.
                SingleMeasuringAttribute singleMeasuringAttribute = prop.GetCustomAttribute<SingleMeasuringAttribute>();

                if (singleMeasuringAttribute != null && column == null)
                {
                    // Single-measuring notes:
                    // - the entity property is a Tuple<string, T>: Item1 is the
                    //   measurement name, Item2 the (generic) measurement value;
                    // - exactly one FIELD column is produced;
                    // - its name defaults to SingleMeasuringAttribute.FieldName so it
                    //   can be replaced by the Item1 value when the value is read.

                    Type tupleType = prop.PropertyType;
                    Type[] tupleArgs = tupleType.GetGenericArguments();

                    column = new ColumnInfo(
                        singleMeasuringAttribute.FieldName,
                        ColumnCategory.FIELD,
                        GetDataTypeFromTypeName(tupleArgs[1].Name),
                        true
                    );
                }

                if (column.HasValue)
                {
                    columns.Add(column.Value);
                }
            }
            return columns;
        }

        /// <summary>
        /// Assembles DeviceMetadata from collected columns, grouped and emitted
        /// in business order: TAG, then ATTRIBUTE, then FIELD.
        /// </summary>
        /// <param name="columns">Collected column descriptors.</param>
        private DeviceMetadata BuildDeviceMetadata(List<ColumnInfo> columns)
        {
            var metadata = new DeviceMetadata();

            // Any single-measuring column marks the whole entity as single-measuring.
            if (columns.Any(c => c.IsSingleMeasuring))
            {
                metadata.IsSingleMeasuring = true;
            }

            // Emit in business order (TAG -> ATTRIBUTE -> FIELD).
            var groupedColumns = columns
                .GroupBy(c => c.Category)
                .ToDictionary(g => g.Key, g => g.ToList());

            ProcessCategory(groupedColumns, ColumnCategory.TAG, metadata);
            ProcessCategory(groupedColumns, ColumnCategory.ATTRIBUTE, metadata);
            ProcessCategory(groupedColumns, ColumnCategory.FIELD, metadata);

            return metadata;
        }

        /// <summary>
        /// Appends all columns of one category to the metadata lists.
        /// </summary>
        /// <param name="groupedColumns">Columns grouped by category.</param>
        /// <param name="category">Category to append.</param>
        /// <param name="metadata">Metadata being assembled.</param>
        private void ProcessCategory(IReadOnlyDictionary<ColumnCategory, List<ColumnInfo>> groupedColumns, ColumnCategory category, DeviceMetadata metadata)
        {
            if (groupedColumns.TryGetValue(category, out var cols))
            {
                metadata.ColumnNames.AddRange(cols.Select(c => c.Name));
                metadata.ColumnCategories.AddRange(cols.Select(c => c.Category));
                metadata.DataTypes.AddRange(cols.Select(c => c.DataType));
            }
        }

        /// <summary>
        /// Immutable descriptor of one column.
        /// </summary>
        private readonly struct ColumnInfo
        {
            /// <summary>
            /// Column name.
            /// </summary>
            public string Name { get; }

            /// <summary>
            /// True when this column was produced by single-measuring mode.
            /// </summary>
            public bool IsSingleMeasuring { get; }

            /// <summary>
            /// Column category (TAG / ATTRIBUTE / FIELD).
            /// </summary>
            public ColumnCategory Category { get; }

            /// <summary>
            /// IoTDB data type of the column.
            /// </summary>
            public TSDataType DataType { get; }

            public ColumnInfo(string name, ColumnCategory category, TSDataType dataType, bool isSingleMeasuring)
            {
                Name = name;
                Category = category;
                DataType = dataType;
                IsSingleMeasuring = isSingleMeasuring;
            }
        }

        /// <summary>
        /// Maps a CLR/logical type name to the IoTDB data type.
        /// </summary>
        /// <param name="typeName">Type name (case-insensitive).</param>
        /// <returns>The matching TSDataType; TSDataType.STRING when unknown or blank.</returns>
        private TSDataType GetDataTypeFromTypeName(string typeName)
        {
            if (string.IsNullOrWhiteSpace(typeName))
                return TSDataType.STRING;

            return DataTypeMap.TryGetValue(typeName.Trim(), out var dataType)
                ? dataType
                : TSDataType.STRING;
        }

        /// <summary>
        /// Type-name → IoTDB data type lookup (case-insensitive).
        /// </summary>
        private readonly IReadOnlyDictionary<string, TSDataType> DataTypeMap =
            new Dictionary<string, TSDataType>(StringComparer.OrdinalIgnoreCase)
            {
                ["BOOLEAN"] = TSDataType.BOOLEAN,
                ["INT32"] = TSDataType.INT32,
                ["INT64"] = TSDataType.INT64,
                ["FLOAT"] = TSDataType.FLOAT,
                ["DOUBLE"] = TSDataType.DOUBLE,
                ["TEXT"] = TSDataType.TEXT,
                ["NULLTYPE"] = TSDataType.NONE,
                ["TIMESTAMP"] = TSDataType.TIMESTAMP,
                ["DATE"] = TSDataType.DATE,
                ["BLOB"] = TSDataType.BLOB,
                ["DECIMAL"] = TSDataType.STRING,
                ["STRING"] = TSDataType.STRING
            };

        /// <summary>
        /// Type-name → default value used when an entity property is null
        /// (case-insensitive).
        /// </summary>
        private readonly IReadOnlyDictionary<string, object> DataTypeDefaultValueMap =
            new Dictionary<string, object>(StringComparer.OrdinalIgnoreCase)
            {
                ["BOOLEAN"] = false,
                ["INT32"] = 0,
                ["INT64"] = 0,
                ["FLOAT"] = 0.0f,
                ["DOUBLE"] = 0.0d,
                ["TEXT"] = string.Empty,
                ["NULLTYPE"] = null,
                ["TIMESTAMP"] = null,
                ["DATE"] = null,
                ["BLOB"] = null,
                ["DECIMAL"] = "0.0",
                ["STRING"] = string.Empty
            };
    }
}
|
||||
@ -1,48 +0,0 @@
|
||||
using System.Collections.Concurrent;
|
||||
using JiShe.CollectBus.IoTDB.Interface;
|
||||
using JiShe.CollectBus.IoTDB.Options;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace JiShe.CollectBus.IoTDB.Provider
{

    /// <summary>
    /// Session factory that caches one session pool per model kind
    /// (key: true = table model, false = tree model). Pools are created and
    /// opened lazily on first request.
    /// </summary>
    public class IoTDBSessionFactory : IIoTDBSessionFactory
    {
        private readonly IoTDBOptions _options;
        private readonly ConcurrentDictionary<bool, IIoTDBSessionPool> _pools = new();
        private bool _disposed;

        public IoTDBSessionFactory(IOptions<IoTDBOptions> options)
        {
            _options = options.Value;
        }

        /// <summary>
        /// Gets (or lazily creates and opens) the pool for the requested model.
        /// </summary>
        /// <param name="useTableSession">True for the table-model pool, false for the tree-model pool.</param>
        /// <exception cref="ObjectDisposedException">The factory has been disposed.</exception>
        public IIoTDBSessionPool GetSessionPool(bool useTableSession)
        {
            if (_disposed) throw new ObjectDisposedException(nameof(IoTDBSessionFactory));

            return _pools.GetOrAdd(useTableSession, key =>
            {
                var pool = key
                    ? (IIoTDBSessionPool)new TableSessionPoolAdapter(_options)
                    : new SessionPoolAdapter(_options);

                // Sync-over-async is deliberate: GetOrAdd cannot await, and the
                // open happens at most once per pool. (Fixed: removed a stray
                // empty statement that followed this call.)
                pool.OpenAsync().ConfigureAwait(false).GetAwaiter().GetResult();
                return pool;
            });
        }

        /// <summary>
        /// Disposes all cached pools. Idempotent: repeated calls are no-ops.
        /// _disposed is set first so concurrent GetSessionPool calls fail fast
        /// instead of handing out a pool that is being torn down.
        /// </summary>
        public void Dispose()
        {
            if (_disposed)
            {
                return;
            }

            _disposed = true;
            foreach (var pool in _pools.Values)
            {
                pool.Dispose();
            }
            _pools.Clear();
        }
    }
}
|
||||
@ -1,39 +0,0 @@
|
||||
using JiShe.CollectBus.IoTDB.Attribute;
|
||||
|
||||
namespace JiShe.CollectBus.IoTDB.Provider
{
    /// <summary>
    /// Base class for IoT time-series entities. TAG-annotated properties
    /// identify the series; Timestamps carries the row time.
    /// </summary>
    public abstract class IoTEntity
    {
        /// <summary>
        /// System name (lower-cased by DevicePathBuilder as the first path segment under root).
        /// </summary>
        [TAGColumn]
        public string SystemName { get; set; }

        /// <summary>
        /// Project code.
        /// </summary>
        [TAGColumn]
        public string ProjectCode { get; set; }

        /// <summary>
        /// Device type: concentrator, electricity meter, water meter, flow meter, sensor, etc.
        /// </summary>
        [TAGColumn]
        public string DeviceType { get; set; }

        /// <summary>
        /// Device identifier.
        /// </summary>
        [TAGColumn]
        public string DeviceId { get; set; }

        /// <summary>
        /// Row timestamp in milliseconds since the Unix epoch (UTC); defaults to the creation time.
        /// </summary>
        public long Timestamps { get; set; } = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds();
    }
}
|
||||
@ -1,76 +0,0 @@
|
||||
using Apache.IoTDB;
|
||||
using Apache.IoTDB.DataStructure;
|
||||
using JiShe.CollectBus.IoTDB.Interface;
|
||||
using JiShe.CollectBus.IoTDB.Options;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace JiShe.CollectBus.IoTDB.Provider
{
    /// <summary>
    /// Tree-model connection pool adapter wrapping <see cref="SessionPool"/>.
    /// </summary>
    public class SessionPoolAdapter : IIoTDBSessionPool
    {
        private readonly SessionPool _sessionPool;
        private readonly IoTDBOptions _options;

        public SessionPoolAdapter(IoTDBOptions options)
        {
            _options = options;
            _sessionPool = new SessionPool.Builder()
                .SetNodeUrl(options.ClusterList)
                .SetUsername(options.UserName)
                .SetPassword(options.Password)
                .SetFetchSize(options.FetchSize)
                .SetPoolSize(options.PoolSize)
                .Build();
        }

        /// <summary>
        /// Opens the pool; enables console debug logging when configured.
        /// Debug mode can grow memory over time — keep it off in production.
        /// </summary>
        public async Task OpenAsync()
        {
            await _sessionPool.Open(false);
            if (_options.OpenDebugMode)
            {
                _sessionPool.OpenDebugMode(builder =>
                {
                    builder.AddConsole();
                });
            }
        }

        /// <summary>
        /// Inserts an aligned-timeseries tablet.
        /// </summary>
        /// <param name="tablet">Tablet to insert.</param>
        /// <returns>The driver status code (0 on success).</returns>
        /// <exception cref="Exception">The driver reported a non-zero status.</exception>
        public async Task<int> InsertAsync(Tablet tablet)
        {
            var result = await _sessionPool.InsertAlignedTabletAsync(tablet);
            if (result != 0)
            {
                // Fixed: the message previously named TableSessionPoolAdapter
                // (copy-paste) and carried no diagnostic detail.
                throw new Exception($"{nameof(SessionPoolAdapter)} InsertAlignedTabletAsync failed with status code {result}.");
            }

            return result;
        }

        /// <summary>
        /// Executes a query statement and returns the open result set.
        /// </summary>
        /// <param name="sql">SQL text to execute.</param>
        public async Task<SessionDataSet> ExecuteQueryStatementAsync(string sql)
        {
            return await _sessionPool.ExecuteQueryStatementAsync(sql);
        }

        public void Dispose()
        {
            // Synchronous wait is acceptable during teardown.
            _sessionPool?.Close().ConfigureAwait(false).GetAwaiter().GetResult();
        }
    }
}
|
||||
@ -1,74 +0,0 @@
|
||||
using Apache.IoTDB;
|
||||
using Apache.IoTDB.DataStructure;
|
||||
using JiShe.CollectBus.IoTDB.Interface;
|
||||
using JiShe.CollectBus.IoTDB.Options;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace JiShe.CollectBus.IoTDB.Provider
{
    /// <summary>
    /// Table-model session pool adapter wrapping <see cref="TableSessionPool"/>.
    /// </summary>
    public class TableSessionPoolAdapter : IIoTDBSessionPool
    {
        private readonly TableSessionPool _sessionPool;
        private readonly IoTDBOptions _options;

        public TableSessionPoolAdapter(IoTDBOptions options)
        {
            _options = options;
            _sessionPool = new TableSessionPool.Builder()
                .SetNodeUrls(options.ClusterList)
                .SetUsername(options.UserName)
                .SetPassword(options.Password)
                .SetFetchSize(options.FetchSize)
                .SetPoolSize(options.PoolSize)
                .SetDatabase(options.DataBaseName)
                .Build();
        }

        /// <summary>
        /// Opens the pool; enables console debug logging when configured.
        /// Debug mode can grow memory over time — keep it off in production.
        /// </summary>
        public async Task OpenAsync()
        {
            await _sessionPool.Open(false);
            if (_options.OpenDebugMode)
            {
                _sessionPool.OpenDebugMode(builder => builder.AddConsole());
            }
        }

        /// <summary>
        /// Inserts a tablet into the table model.
        /// </summary>
        /// <param name="tablet">Tablet to insert.</param>
        /// <returns>The driver status code (0 on success).</returns>
        /// <exception cref="Exception">The driver reported a non-zero status.</exception>
        public async Task<int> InsertAsync(Tablet tablet)
        {
            var result = await _sessionPool.InsertAsync(tablet);
            if (result != 0)
            {
                // Fixed: the message previously ended after the class name with
                // no diagnostic detail.
                throw new Exception($"{nameof(TableSessionPoolAdapter)} InsertAsync failed with status code {result}.");
            }

            return result;
        }

        /// <summary>
        /// Executes a query statement and returns the open result set.
        /// </summary>
        /// <param name="sql">SQL text to execute.</param>
        public async Task<SessionDataSet> ExecuteQueryStatementAsync(string sql)
        {
            return await _sessionPool.ExecuteQueryStatementAsync(sql);
        }

        public void Dispose()
        {
            // Synchronous wait is acceptable during teardown.
            _sessionPool?.Close().ConfigureAwait(false).GetAwaiter().GetResult();
        }
    }
}
||||
@ -1,72 +0,0 @@
|
||||
using Microsoft.AspNetCore.Builder;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Http.Features;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka.Test
|
||||
{
|
||||
/// <summary>
/// Minimal IApplicationBuilder for console hosts: collects middleware
/// components and composes them into a single RequestDelegate pipeline.
/// </summary>
public class ConsoleApplicationBuilder : IApplicationBuilder
{
    /// <summary>Root service provider for the application.</summary>
    public IServiceProvider ApplicationServices { get; set; }

    /// <summary>Shared property bag for builder extensions.</summary>
    public IDictionary<string, object> Properties { get; set; } = new Dictionary<string, object>();

    /// <summary>Not supported in a console host.</summary>
    public IFeatureCollection ServerFeatures => throw new NotImplementedException();

    private readonly List<Func<RequestDelegate, RequestDelegate>> _middlewares = new();

    /// <summary>Appends a middleware component to the pipeline.</summary>
    public IApplicationBuilder Use(Func<RequestDelegate, RequestDelegate> middleware)
    {
        _middlewares.Add(middleware);
        return this;
    }

    /// <summary>
    /// Builds the pipeline. Components are composed in REVERSE registration
    /// order so the first middleware registered with Use() is the outermost
    /// and runs first, matching ASP.NET Core semantics. (Fixed: the previous
    /// forward fold inverted the execution order.)
    /// </summary>
    public RequestDelegate Build()
    {
        RequestDelegate app = context => Task.CompletedTask;
        for (var i = _middlewares.Count - 1; i >= 0; i--)
        {
            app = _middlewares[i](app);
        }
        return app;
    }

    /// <summary>
    /// Creates a new builder sharing the same services, with a copied property
    /// bag and an empty middleware list.
    /// </summary>
    public IApplicationBuilder New()
    {
        return new ConsoleApplicationBuilder
        {
            ApplicationServices = this.ApplicationServices,
            Properties = new Dictionary<string, object>(this.Properties)
        };
    }
}
|
||||
|
||||
|
||||
/// <summary>
/// IHostBuilder extensions for hosting an IApplicationBuilder in a console app.
/// </summary>
public static class HostBuilderExtensions
{
    /// <summary>
    /// Registers a singleton ConsoleApplicationBuilder in the DI container;
    /// the supplied delegate configures it when the service is first resolved.
    /// </summary>
    public static IHostBuilder ConfigureConsoleAppBuilder(
        this IHostBuilder hostBuilder,
        Action<IApplicationBuilder> configure)
    {
        hostBuilder.ConfigureServices((context, services) =>
        {
            // Factory run once by the container when IApplicationBuilder is resolved.
            IApplicationBuilder CreateBuilder(IServiceProvider provider)
            {
                var appBuilder = new ConsoleApplicationBuilder
                {
                    ApplicationServices = provider // hand the container to the builder
                };
                configure(appBuilder); // apply caller configuration
                return appBuilder;
            }

            services.AddSingleton<IApplicationBuilder>(CreateBuilder);
        });
        return hostBuilder;
    }
}
|
||||
}
|
||||
@ -1,40 +0,0 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<OutputType>Exe</OutputType>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Content Include="appsettings.json">
|
||||
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
|
||||
<ExcludeFromSingleFile>true</ExcludeFromSingleFile>
|
||||
<CopyToPublishDirectory>PreserveNewest</CopyToPublishDirectory>
|
||||
</Content>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="BenchmarkDotNet" Version="0.14.0" />
|
||||
<PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="10.0.0-preview.3.25171.5" />
|
||||
<PackageReference Include="Serilog" Version="4.2.0" />
|
||||
<PackageReference Include="Serilog.Extensions.Logging" Version="9.0.1" />
|
||||
<PackageReference Include="Serilog.Settings.Configuration" Version="9.0.0" />
|
||||
<PackageReference Include="Serilog.Sinks.Console" Version="6.0.0" />
|
||||
<PackageReference Include="Serilog.Sinks.File" Version="6.0.0" />
|
||||
<PackageReference Include="Volo.Abp.Core" Version="8.3.3" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\services\JiShe.CollectBus.Domain\JiShe.CollectBus.Domain.csproj" />
|
||||
<ProjectReference Include="..\JiShe.CollectBus.Kafka\JiShe.CollectBus.Kafka.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<!--<ItemGroup>
|
||||
<Reference Include="JiShe.CollectBus.Kafka">
|
||||
<HintPath>Lib\JiShe.CollectBus.Kafka.dll</HintPath>
|
||||
</Reference>
|
||||
</ItemGroup>-->
|
||||
|
||||
</Project>
|
||||
@ -1,108 +0,0 @@
|
||||
using BenchmarkDotNet.Attributes;
|
||||
using BenchmarkDotNet.Jobs;
|
||||
using Confluent.Kafka;
|
||||
using JiShe.CollectBus.Kafka.AdminClient;
|
||||
using JiShe.CollectBus.Kafka.Consumer;
|
||||
using JiShe.CollectBus.Kafka.Producer;
|
||||
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Serilog;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka.Test
|
||||
{
|
||||
[SimpleJob(RuntimeMoniker.Net80)]
|
||||
//[SimpleJob(RuntimeMoniker.NativeAot80)]
|
||||
[RPlotExporter]
|
||||
public class KafkaProduceBenchmark
|
||||
{
|
||||
|
||||
// 每批消息数量
|
||||
[Params(1000, 10000, 100000)]
|
||||
public int N;
|
||||
public ServiceProvider _serviceProvider;
|
||||
public IConsumerService _consumerService;
|
||||
public IProducerService _producerService;
|
||||
public string topic = "test-topic1";
|
||||
|
||||
[GlobalSetup]
|
||||
public void Setup()
|
||||
{
|
||||
// 构建配置
|
||||
var config = new ConfigurationBuilder()
|
||||
.SetBasePath(Directory.GetCurrentDirectory())
|
||||
.AddJsonFile("appsettings.json")
|
||||
.Build();
|
||||
// 直接读取配置项
|
||||
var greeting = config["ServerTagName"];
|
||||
Console.WriteLine(greeting); // 输出: Hello, World!
|
||||
// 创建服务容器
|
||||
var services = new ServiceCollection();
|
||||
// 注册 IConfiguration 实例
|
||||
services.AddSingleton<IConfiguration>(config);
|
||||
|
||||
// 初始化日志
|
||||
Log.Logger = new LoggerConfiguration()
|
||||
.ReadFrom.Configuration(config) // 从 appsettings.json 读取配置
|
||||
.CreateLogger();
|
||||
|
||||
// 配置日志系统
|
||||
services.AddLogging(logging =>
|
||||
{
|
||||
logging.ClearProviders();
|
||||
logging.AddSerilog();
|
||||
});
|
||||
services.AddSingleton<IAdminClientService, AdminClientService>();
|
||||
services.AddSingleton<IProducerService, ProducerService>();
|
||||
services.AddSingleton<IConsumerService, ConsumerService>();
|
||||
|
||||
// 构建ServiceProvider
|
||||
_serviceProvider = services.BuildServiceProvider();
|
||||
|
||||
// 获取日志记录器工厂
|
||||
var loggerFactory = _serviceProvider.GetRequiredService<ILoggerFactory>();
|
||||
var logger = loggerFactory.CreateLogger<Program>();
|
||||
logger.LogInformation("程序启动");
|
||||
|
||||
var adminClientService = _serviceProvider.GetRequiredService<IAdminClientService>();
|
||||
|
||||
|
||||
//await adminClientService.DeleteTopicAsync(topic);
|
||||
// 创建 topic
|
||||
adminClientService.CreateTopicAsync(topic, 3, 3).ConfigureAwait(false).GetAwaiter();
|
||||
|
||||
_consumerService = _serviceProvider.GetRequiredService<IConsumerService>();
|
||||
|
||||
_producerService = _serviceProvider.GetRequiredService<IProducerService>();
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public async Task UseAsync()
|
||||
{
|
||||
List<Task> tasks = new();
|
||||
for (int i = 0; i < N; ++i)
|
||||
{
|
||||
var task = _producerService.ProduceAsync<string>(topic, i.ToString());
|
||||
tasks.Add(task);
|
||||
}
|
||||
await Task.WhenAll(tasks);
|
||||
}
|
||||
|
||||
[Benchmark]
|
||||
public async Task UseLibrd()
|
||||
{
|
||||
List<Task> tasks = new();
|
||||
for (int i = 0; i < N; ++i)
|
||||
{
|
||||
var task = _producerService.ProduceAsync<string>(topic, i.ToString(),null);
|
||||
}
|
||||
await Task.WhenAll(tasks);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,68 +0,0 @@
|
||||
using JiShe.CollectBus.Common.Consts;
|
||||
using JiShe.CollectBus.Common.Enums;
|
||||
using JiShe.CollectBus.Common.Models;
|
||||
using JiShe.CollectBus.IotSystems.MessageReceiveds;
|
||||
using JiShe.CollectBus.Kafka.Attributes;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Threading.Tasks;
|
||||
using Volo.Abp.Timing;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka.Test
|
||||
{
|
||||
public class KafkaSubscribeTest: IKafkaSubscribe
|
||||
{
|
||||
[KafkaSubscribe(ProtocolConst.TESTTOPIC, EnableBatch=false,BatchSize=1000)]
|
||||
|
||||
public async Task<ISubscribeAck> KafkaSubscribeAsync(object obj)
|
||||
{
|
||||
Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(obj)}");
|
||||
return SubscribeAck.Success();
|
||||
}
|
||||
|
||||
|
||||
[KafkaSubscribe(ProtocolConst.SubscriberLoginIssuedEventName)]
|
||||
//[CapSubscribe(ProtocolConst.SubscriberLoginIssuedEventName)]
|
||||
public async Task<ISubscribeAck> LoginIssuedEvent(IssuedEventMessage issuedEventMessage)
|
||||
{
|
||||
Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(issuedEventMessage)}");
|
||||
return SubscribeAck.Success();
|
||||
}
|
||||
|
||||
[KafkaSubscribe(ProtocolConst.SubscriberHeartbeatIssuedEventName)]
|
||||
//[CapSubscribe(ProtocolConst.SubscriberHeartbeatIssuedEventName)]
|
||||
public async Task<ISubscribeAck> HeartbeatIssuedEvent(IssuedEventMessage issuedEventMessage)
|
||||
{
|
||||
Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(issuedEventMessage)}");
|
||||
return SubscribeAck.Success();
|
||||
}
|
||||
|
||||
[KafkaSubscribe(ProtocolConst.SubscriberReceivedEventName)]
|
||||
//[CapSubscribe(ProtocolConst.SubscriberReceivedEventName)]
|
||||
public async Task<ISubscribeAck> ReceivedEvent(MessageReceived receivedMessage)
|
||||
{
|
||||
Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(receivedMessage)}");
|
||||
return SubscribeAck.Success();
|
||||
}
|
||||
|
||||
[KafkaSubscribe(ProtocolConst.SubscriberHeartbeatReceivedEventName)]
|
||||
//[CapSubscribe(ProtocolConst.SubscriberHeartbeatReceivedEventName)]
|
||||
public async Task<ISubscribeAck> ReceivedHeartbeatEvent(MessageReceivedHeartbeat receivedHeartbeatMessage)
|
||||
{
|
||||
Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(receivedHeartbeatMessage)}");
|
||||
return SubscribeAck.Success();
|
||||
}
|
||||
|
||||
[KafkaSubscribe(ProtocolConst.SubscriberLoginReceivedEventName)]
|
||||
//[CapSubscribe(ProtocolConst.SubscriberLoginReceivedEventName)]
|
||||
public async Task<ISubscribeAck> ReceivedLoginEvent(MessageReceivedLogin receivedLoginMessage)
|
||||
{
|
||||
Console.WriteLine($"收到订阅消息: {JsonSerializer.Serialize(receivedLoginMessage)}");
|
||||
return SubscribeAck.Success();
|
||||
}
|
||||
}
|
||||
}
|
||||
Binary file not shown.
@ -1,172 +0,0 @@
|
||||
// See https://aka.ms/new-console-template for more information
|
||||
using BenchmarkDotNet.Configs;
|
||||
using BenchmarkDotNet.Running;
|
||||
using Confluent.Kafka;
|
||||
using DeviceDetectorNET.Parser.Device;
|
||||
using JiShe.CollectBus.Common.Consts;
|
||||
using JiShe.CollectBus.Kafka;
|
||||
using JiShe.CollectBus.Kafka.AdminClient;
|
||||
using JiShe.CollectBus.Kafka.Consumer;
|
||||
using JiShe.CollectBus.Kafka.Producer;
|
||||
using JiShe.CollectBus.Kafka.Test;
|
||||
using Microsoft.AspNetCore.Builder;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Serilog;
|
||||
using System.Diagnostics;
|
||||
using System.Reflection;
|
||||
using System.Reflection.PortableExecutable;
|
||||
using System.Text.Json;
|
||||
|
||||
#region 基准测试
|
||||
//var summary = BenchmarkRunner.Run<KafkaProduceBenchmark>();
|
||||
//Console.WriteLine("压测完成");
|
||||
//return;
|
||||
#endregion 基准测试
|
||||
|
||||
|
||||
var host = Host.CreateDefaultBuilder(args)
|
||||
.ConfigureServices(services =>
|
||||
{
|
||||
// 构建配置
|
||||
var config = new ConfigurationBuilder()
|
||||
.SetBasePath(Directory.GetCurrentDirectory())
|
||||
.AddJsonFile("appsettings.json")
|
||||
.Build();
|
||||
// 直接读取配置项
|
||||
var greeting = config["Kafka:ServerTagName"];
|
||||
Console.WriteLine(greeting); // 输出: Hello, World!
|
||||
|
||||
|
||||
// 创建服务容器
|
||||
//var services = new ServiceCollection();
|
||||
// 注册 IConfiguration 实例
|
||||
services.AddSingleton<IConfiguration>(config);
|
||||
|
||||
// 初始化日志
|
||||
Log.Logger = new LoggerConfiguration()
|
||||
.ReadFrom.Configuration(config) // 从 appsettings.json 读取配置
|
||||
.CreateLogger();
|
||||
|
||||
// 配置日志系统
|
||||
services.AddLogging(logging =>
|
||||
{
|
||||
logging.ClearProviders();
|
||||
logging.AddSerilog();
|
||||
});
|
||||
services.Configure<KafkaOptionConfig>(config.GetSection("Kafka"));
|
||||
|
||||
services.AddSingleton<IAdminClientService, AdminClientService>();
|
||||
services.AddSingleton<IProducerService, ProducerService>();
|
||||
services.AddSingleton<IConsumerService, ConsumerService>();
|
||||
services.AddTransient<KafkaSubscribeTest>();
|
||||
|
||||
})
|
||||
.ConfigureConsoleAppBuilder(appBuilder =>
|
||||
{
|
||||
|
||||
})
|
||||
.Build();
|
||||
|
||||
|
||||
await host.StartAsync();
|
||||
var appBuilder = host.Services.GetRequiredService<IApplicationBuilder>();
|
||||
appBuilder.ApplicationServices.UseKafkaSubscribe();
|
||||
|
||||
|
||||
// 构建ServiceProvider
|
||||
//var serviceProvider = services.BuildServiceProvider();
|
||||
|
||||
// 获取日志记录器工厂
|
||||
var loggerFactory = host.Services.GetRequiredService<ILoggerFactory>();
|
||||
var logger = loggerFactory.CreateLogger<Program>();
|
||||
logger.LogInformation("程序启动");
|
||||
var adminClientService = host.Services.GetRequiredService<IAdminClientService>();
|
||||
var configuration = host.Services.GetRequiredService<IConfiguration>();
|
||||
string topic = "test-topic";
|
||||
//await adminClientService.DeleteTopicAsync(topic);
|
||||
// 创建 topic
|
||||
//await adminClientService.CreateTopicAsync(topic, configuration.GetValue<int>(CommonConst.NumPartitions), 3);
|
||||
|
||||
var consumerService = host.Services.GetRequiredService<IConsumerService>();
|
||||
//var kafkaOptions = host.Services.GetRequiredService<IOptions<KafkaOptionConfig>>();
|
||||
//await consumerService.SubscribeAsync<object>(topic, (message) =>
|
||||
//{
|
||||
// try
|
||||
// {
|
||||
// logger.LogInformation($"消费消息:{message}");
|
||||
// return Task.FromResult(true);
|
||||
|
||||
// }
|
||||
// catch (ConsumeException ex)
|
||||
// {
|
||||
// // 处理消费错误
|
||||
// logger.LogError($"kafka消费异常:{ex.Message}");
|
||||
// }
|
||||
// return Task.FromResult(false);
|
||||
//}, "default");
|
||||
|
||||
//Stopwatch stopwatch = Stopwatch.StartNew();
|
||||
|
||||
//for (int i = 0; i < 3; i++)
|
||||
//{
|
||||
// await consumerService.SubscribeBatchAsync<dynamic>(topic, (message) =>
|
||||
// {
|
||||
// try
|
||||
// {
|
||||
// int index = 0;
|
||||
// logger.LogInformation($"消费消息_{index}消费总数:{message.Count()}:{JsonSerializer.Serialize(message)}");
|
||||
// return Task.FromResult(true);
|
||||
|
||||
// }
|
||||
// catch (ConsumeException ex)
|
||||
// {
|
||||
// // 处理消费错误
|
||||
// logger.LogError($"kafka消费异常:{ex.Message}");
|
||||
// }
|
||||
// return Task.FromResult(false);
|
||||
// });
|
||||
//}
|
||||
//stopwatch.Stop();
|
||||
//Console.WriteLine($"耗时: {stopwatch.ElapsedMilliseconds} 毫秒,{stopwatch.ElapsedMilliseconds/1000} 秒");
|
||||
|
||||
var producerService = host.Services.GetRequiredService<IProducerService>();
|
||||
//int num = 840;
|
||||
//while (num <= 900)
|
||||
//{
|
||||
// //await producerService.ProduceAsync(topic, new TestTopic { Topic = topic, Val = i });
|
||||
// await producerService.ProduceAsync<string>(topic, num.ToString());
|
||||
// num++;
|
||||
//}
|
||||
await Task.Factory.StartNew(async() => {
|
||||
int num = 0;
|
||||
while (true)
|
||||
{
|
||||
//await producerService.ProduceAsync(topic, new TestTopic { Topic = topic, Val = i });
|
||||
await producerService.ProduceAsync<string>(topic, num.ToString());
|
||||
num++;
|
||||
}
|
||||
});
|
||||
Console.WriteLine("\n按Esc键退出");
|
||||
while (true)
|
||||
{
|
||||
var key = Console.ReadKey(intercept: true); // intercept:true 隐藏按键显示
|
||||
|
||||
if (key.Key == ConsoleKey.Escape)
|
||||
{
|
||||
await host.StopAsync();
|
||||
Console.WriteLine("\n程序已退出");
|
||||
break;
|
||||
}
|
||||
}
|
||||
(host.Services as IDisposable)?.Dispose();
|
||||
|
||||
|
||||
public class TestTopic
|
||||
{
|
||||
public string Topic { get; set; }
|
||||
public int Val { get; set; }
|
||||
}
|
||||
@ -1,180 +0,0 @@
|
||||
{
|
||||
"Serilog": {
|
||||
"Using": [
|
||||
"Serilog.Sinks.Console",
|
||||
"Serilog.Sinks.File"
|
||||
],
|
||||
"MinimumLevel": {
|
||||
"Default": "Information",
|
||||
"Override": {
|
||||
"Microsoft": "Warning",
|
||||
"Volo.Abp": "Warning",
|
||||
"Hangfire": "Warning",
|
||||
"DotNetCore.CAP": "Warning",
|
||||
"Serilog.AspNetCore": "Information",
|
||||
"Microsoft.EntityFrameworkCore": "Warning",
|
||||
"Microsoft.AspNetCore": "Warning"
|
||||
}
|
||||
},
|
||||
"WriteTo": [
|
||||
{
|
||||
"Name": "Console"
|
||||
},
|
||||
{
|
||||
"Name": "File",
|
||||
"Args": {
|
||||
"path": "logs/logs-.txt",
|
||||
"rollingInterval": "Day"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"App": {
|
||||
"SelfUrl": "http://localhost:44315",
|
||||
"CorsOrigins": "http://localhost:4200,http://localhost:3100"
|
||||
},
|
||||
"ConnectionStrings": {
|
||||
"Default": "mongodb://admin:admin02023@118.190.144.92:37117,118.190.144.92:37119,118.190.144.92:37120/JiSheCollectBus?authSource=admin&maxPoolSize=400&minPoolSize=10&waitQueueTimeoutMS=5000",
|
||||
"Kafka": "192.168.1.9:29092,192.168.1.9:39092,192.168.1.9:49092",
|
||||
"PrepayDB": "server=118.190.144.92;database=jishe.sysdb;uid=sa;pwd=admin@2023;Encrypt=False;Trust Server Certificate=False",
|
||||
"EnergyDB": "server=118.190.144.92;database=db_energy;uid=sa;pwd=admin@2023;Encrypt=False;Trust Server Certificate=False"
|
||||
},
|
||||
"Redis": {
|
||||
"Configuration": "192.168.1.9:6380,password=1q2w3e!@#,syncTimeout=30000,abortConnect=false,connectTimeout=30000,allowAdmin=true",
|
||||
"MaxPoolSize": "50",
|
||||
"DefaultDB": "14",
|
||||
"HangfireDB": "15"
|
||||
},
|
||||
"Jwt": {
|
||||
"Audience": "JiShe.CollectBus",
|
||||
"SecurityKey": "dzehzRz9a8asdfasfdadfasdfasdfafsdadfasbasdf=",
|
||||
"Issuer": "JiShe.CollectBus",
|
||||
"ExpirationTime": 2
|
||||
},
|
||||
"HealthCheck": {
|
||||
"IsEnable": true,
|
||||
"MySql": {
|
||||
"IsEnable": true
|
||||
},
|
||||
"Pings": {
|
||||
"IsEnable": true,
|
||||
"Host": "https://www.baidu.com/",
|
||||
"TimeOut": 5000
|
||||
}
|
||||
},
|
||||
"SwaggerConfig": [
|
||||
{
|
||||
"GroupName": "Basic",
|
||||
"Title": "【后台管理】基础模块",
|
||||
"Version": "V1"
|
||||
},
|
||||
{
|
||||
"GroupName": "Business",
|
||||
"Title": "【后台管理】业务模块",
|
||||
"Version": "V1"
|
||||
}
|
||||
],
|
||||
"Cap": {
|
||||
"RabbitMq": {
|
||||
"HostName": "118.190.144.92",
|
||||
"UserName": "collectbus",
|
||||
"Password": "123456",
|
||||
"Port": 5672
|
||||
}
|
||||
},
|
||||
"Kafka": {
|
||||
"BootstrapServers": "192.168.1.9:29092,192.168.1.9:39092,192.168.1.9:49092",
|
||||
"EnableFilter": true,
|
||||
"EnableAuthorization": false,
|
||||
"SecurityProtocol": "SaslPlaintext",
|
||||
"SaslMechanism": "Plain",
|
||||
"SaslUserName": "lixiao",
|
||||
"SaslPassword": "lixiao1980",
|
||||
"KafkaReplicationFactor": 3,
|
||||
"NumPartitions": 1,
|
||||
"ServerTagName": "JiSheCollectBus2"
|
||||
//"Topic": {
|
||||
// "ReplicationFactor": 3,
|
||||
// "NumPartitions": 1000
|
||||
//}
|
||||
},
|
||||
//"Kafka": {
|
||||
// "Connections": {
|
||||
// "Default": {
|
||||
// "BootstrapServers": "192.168.1.9:29092,192.168.1.9:39092,192.168.1.9:49092"
|
||||
// // "SecurityProtocol": "SASL_PLAINTEXT",
|
||||
// // "SaslMechanism": "PLAIN",
|
||||
// // "SaslUserName": "lixiao",
|
||||
// // "SaslPassword": "lixiao1980",
|
||||
// }
|
||||
// },
|
||||
// "Consumer": {
|
||||
// "GroupId": "JiShe.CollectBus"
|
||||
// },
|
||||
// "Producer": {
|
||||
// "MessageTimeoutMs": 6000,
|
||||
// "Acks": -1
|
||||
// },
|
||||
// "Topic": {
|
||||
// "ReplicationFactor": 3,
|
||||
// "NumPartitions": 1000
|
||||
// },
|
||||
// "EventBus": {
|
||||
// "GroupId": "JiShe.CollectBus",
|
||||
// "TopicName": "DefaultTopicName"
|
||||
// }
|
||||
//},
|
||||
"IoTDBOptions": {
|
||||
"UserName": "root",
|
||||
"Password": "root",
|
||||
"ClusterList": [ "192.168.1.9:6667" ],
|
||||
"PoolSize": 2,
|
||||
"DataBaseName": "energy",
|
||||
"OpenDebugMode": true,
|
||||
"UseTableSessionPoolByDefault": false
|
||||
},
|
||||
"ServerTagName": "JiSheCollectBus3",
|
||||
"Cassandra": {
|
||||
"ReplicationStrategy": {
|
||||
"Class": "NetworkTopologyStrategy", //策略为NetworkTopologyStrategy时才会有多个数据中心,SimpleStrategy用在只有一个数据中心的情况下
|
||||
"DataCenters": [
|
||||
{
|
||||
"Name": "dc1",
|
||||
"ReplicationFactor": 3
|
||||
}
|
||||
]
|
||||
},
|
||||
"Nodes": [
|
||||
{
|
||||
"Host": "192.168.1.9",
|
||||
"Port": 9042,
|
||||
"DataCenter": "dc1",
|
||||
"Rack": "RAC1"
|
||||
},
|
||||
{
|
||||
"Host": "192.168.1.9",
|
||||
"Port": 9043,
|
||||
"DataCenter": "dc1",
|
||||
"Rack": "RAC2"
|
||||
}
|
||||
],
|
||||
"Username": "admin",
|
||||
"Password": "lixiao1980",
|
||||
"Keyspace": "jishecollectbus",
|
||||
"ConsistencyLevel": "Quorum",
|
||||
"PoolingOptions": {
|
||||
"CoreConnectionsPerHost": 4,
|
||||
"MaxConnectionsPerHost": 8,
|
||||
"MaxRequestsPerConnection": 2000
|
||||
},
|
||||
"SocketOptions": {
|
||||
"ConnectTimeoutMillis": 10000,
|
||||
"ReadTimeoutMillis": 20000
|
||||
},
|
||||
"QueryOptions": {
|
||||
"ConsistencyLevel": "Quorum",
|
||||
"SerialConsistencyLevel": "Serial",
|
||||
"DefaultIdempotence": true
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,204 +0,0 @@
|
||||
using Confluent.Kafka;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using Confluent.Kafka.Admin;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Volo.Abp.DependencyInjection;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka.AdminClient
|
||||
{
|
||||
public class AdminClientService : IAdminClientService, IDisposable,ISingletonDependency
|
||||
{
|
||||
|
||||
private readonly ILogger<AdminClientService> _logger;
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new instance of the <see cref="AdminClientService"/> class.
|
||||
/// </summary>
|
||||
/// <param name="configuration">The configuration.</param>
|
||||
/// <param name="logger">The logger.</param>
|
||||
public AdminClientService(IConfiguration configuration, ILogger<AdminClientService> logger)
|
||||
{
|
||||
_logger = logger;
|
||||
GetInstance(configuration);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the instance.
|
||||
/// </summary>
|
||||
/// <value>
|
||||
/// The instance.
|
||||
/// </value>
|
||||
public IAdminClient Instance { get; set; } = default;
|
||||
|
||||
/// <summary>
|
||||
/// Gets the instance.
|
||||
/// </summary>
|
||||
/// <param name="configuration">The configuration.</param>
|
||||
/// <returns></returns>
|
||||
public IAdminClient GetInstance(IConfiguration configuration)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNullOrWhiteSpace(configuration["Kafka:EnableAuthorization"]);
|
||||
var enableAuthorization = bool.Parse(configuration["Kafka:EnableAuthorization"]!);
|
||||
var adminClientConfig = new AdminClientConfig()
|
||||
{
|
||||
BootstrapServers = configuration["Kafka:BootstrapServers"],
|
||||
};
|
||||
if (enableAuthorization)
|
||||
{
|
||||
adminClientConfig.SecurityProtocol = SecurityProtocol.SaslPlaintext;
|
||||
adminClientConfig.SaslMechanism = SaslMechanism.Plain;
|
||||
adminClientConfig.SaslUsername = configuration["Kafka:SaslUserName"];
|
||||
adminClientConfig.SaslPassword = configuration["Kafka:SaslPassword"];
|
||||
}
|
||||
Instance = new AdminClientBuilder(adminClientConfig).Build();
|
||||
return Instance;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Checks the topic asynchronous.
|
||||
/// </summary>
|
||||
/// <param name="topic">The topic.</param>
|
||||
/// <returns></returns>
|
||||
public async Task<bool> CheckTopicAsync(string topic)
|
||||
{
|
||||
var metadata = Instance.GetMetadata(TimeSpan.FromSeconds(5));
|
||||
return await Task.FromResult(metadata.Topics.Exists(a => a.Topic == topic));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// 判断Kafka主题是否存在
|
||||
/// </summary>
|
||||
/// <param name="topic">主题名称</param>
|
||||
/// <param name="numPartitions">副本数量,不能高于Brokers数量</param>
|
||||
/// <returns></returns>
|
||||
public async Task<bool> CheckTopicAsync(string topic,int numPartitions)
|
||||
{
|
||||
var metadata = Instance.GetMetadata(TimeSpan.FromSeconds(5));
|
||||
if(numPartitions > metadata.Brokers.Count)
|
||||
{
|
||||
throw new Exception($"{nameof(CheckTopicAsync)} 主题检查时,副本数量大于了节点数量。") ;
|
||||
}
|
||||
|
||||
return await Task.FromResult(metadata.Topics.Exists(a => a.Topic == topic));
|
||||
}
|
||||
|
||||
//// <summary>
|
||||
/// 创建Kafka主题
|
||||
/// </summary>
|
||||
/// <param name="topic">主题名称</param>
|
||||
/// <param name="numPartitions">主题分区数量</param>
|
||||
/// <param name="replicationFactor">副本数量,不能高于Brokers数量</param>
|
||||
/// <returns></returns>
|
||||
public async Task CreateTopicAsync(string topic, int numPartitions, short replicationFactor)
|
||||
{
|
||||
|
||||
try
|
||||
{
|
||||
if (await CheckTopicAsync(topic)) return;
|
||||
|
||||
|
||||
await Instance.CreateTopicsAsync(new[]
|
||||
{
|
||||
new TopicSpecification
|
||||
{
|
||||
Name = topic,
|
||||
NumPartitions = numPartitions,
|
||||
ReplicationFactor = replicationFactor
|
||||
}
|
||||
});
|
||||
}
|
||||
catch (CreateTopicsException e)
|
||||
{
|
||||
if (e.Results[0].Error.Code != ErrorCode.TopicAlreadyExists)
|
||||
{
|
||||
throw;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// 删除Kafka主题
|
||||
/// </summary>
|
||||
/// <param name="topic"></param>
|
||||
/// <returns></returns>
|
||||
public async Task DeleteTopicAsync(string topic)
|
||||
{
|
||||
await Instance.DeleteTopicsAsync(new[] { topic });
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// 获取Kafka主题列表
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
public async Task<List<string>> ListTopicsAsync()
|
||||
{
|
||||
var metadata = Instance.GetMetadata(TimeSpan.FromSeconds(10));
|
||||
return new List<string>(metadata.Topics.Select(t => t.Topic));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// 判断Kafka主题是否存在
|
||||
/// </summary>
|
||||
/// <param name="topic"></param>
|
||||
/// <returns></returns>
|
||||
public async Task<bool> TopicExistsAsync(string topic)
|
||||
{
|
||||
var metadata = Instance.GetMetadata(TimeSpan.FromSeconds(10));
|
||||
return metadata.Topics.Any(t => t.Topic == topic);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// 检测分区是否存在
|
||||
/// </summary>
|
||||
/// <param name="topic"></param>
|
||||
/// <param name="partitions"></param>
|
||||
/// <returns></returns>
|
||||
public Dictionary<int, bool> CheckPartitionsExists(string topic, int[] partitions)
|
||||
{
|
||||
var result = new Dictionary<int, bool>();
|
||||
var metadata = Instance.GetMetadata(topic, TimeSpan.FromSeconds(10));
|
||||
if (metadata.Topics.Count == 0)
|
||||
return partitions.ToDictionary(p => p, p => false);
|
||||
var existingPartitions = metadata.Topics[0].Partitions.Select(p => p.PartitionId).ToHashSet();
|
||||
return partitions.ToDictionary(p => p, p => existingPartitions.Contains(p));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// 检测分区是否存在
|
||||
/// </summary>
|
||||
/// <param name="topic"></param>
|
||||
/// <param name="targetPartition"></param>
|
||||
/// <returns></returns>
|
||||
public bool CheckPartitionsExist(string topic, int targetPartition)
|
||||
{
|
||||
var metadata = Instance.GetMetadata(topic, TimeSpan.FromSeconds(10));
|
||||
if (metadata.Topics.Count == 0)
|
||||
return false;
|
||||
var partitions = metadata.Topics[0].Partitions;
|
||||
return partitions.Any(p => p.PartitionId == targetPartition);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// 获取主题的分区数量
|
||||
/// </summary>
|
||||
/// <param name="topic"></param>
|
||||
/// <returns></returns>
|
||||
public int GetTopicPartitionsNum(string topic)
|
||||
{
|
||||
var metadata = Instance.GetMetadata(topic, TimeSpan.FromSeconds(10));
|
||||
if (metadata.Topics.Count == 0)
|
||||
return 0;
|
||||
return metadata.Topics[0].Partitions.Count;
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
Instance?.Dispose();
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,63 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka.AdminClient
|
||||
{
|
||||
public interface IAdminClientService
|
||||
{
|
||||
/// <summary>
|
||||
/// 创建Kafka主题
|
||||
/// </summary>
|
||||
/// <param name="topic">主题名称</param>
|
||||
/// <param name="numPartitions">主题分区数量</param>
|
||||
/// <param name="replicationFactor">副本数量,不能高于Brokers数量</param>
|
||||
/// <returns></returns>
|
||||
Task CreateTopicAsync(string topic, int numPartitions, short replicationFactor);
|
||||
|
||||
/// <summary>
|
||||
/// 删除Kafka主题
|
||||
/// </summary>
|
||||
/// <param name="topic"></param>
|
||||
/// <returns></returns>
|
||||
Task DeleteTopicAsync(string topic);
|
||||
|
||||
/// <summary>
|
||||
/// 获取Kafka主题列表
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
Task<List<string>> ListTopicsAsync();
|
||||
|
||||
/// <summary>
|
||||
/// 判断Kafka主题是否存在
|
||||
/// </summary>
|
||||
/// <param name="topic"></param>
|
||||
/// <returns></returns>
|
||||
Task<bool> TopicExistsAsync(string topic);
|
||||
|
||||
/// <summary>
|
||||
/// 检测分区是否存在
|
||||
/// </summary>
|
||||
/// <param name="topic"></param>
|
||||
/// <param name="partitions"></param>
|
||||
/// <returns></returns>
|
||||
Dictionary<int, bool> CheckPartitionsExists(string topic, int[] partitions);
|
||||
|
||||
/// <summary>
|
||||
/// 检测分区是否存在
|
||||
/// </summary>
|
||||
/// <param name="topic"></param>
|
||||
/// <param name="targetPartition"></param>
|
||||
/// <returns></returns>
|
||||
bool CheckPartitionsExist(string topic, int targetPartition);
|
||||
|
||||
/// <summary>
|
||||
/// 获取主题的分区数量
|
||||
/// </summary>
|
||||
/// <param name="topic"></param>
|
||||
/// <returns></returns>
|
||||
int GetTopicPartitionsNum(string topic);
|
||||
}
|
||||
}
|
||||
@ -1,68 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka.Attributes
|
||||
{
|
||||
[AttributeUsage(AttributeTargets.Method)]
|
||||
public class KafkaSubscribeAttribute : Attribute
|
||||
{
|
||||
/// <summary>
|
||||
/// 订阅的主题
|
||||
/// </summary>
|
||||
public string Topic { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// 分区
|
||||
/// </summary>
|
||||
public int Partition { get; set; } = -1;
|
||||
|
||||
/// <summary>
|
||||
/// 消费者组
|
||||
/// </summary>
|
||||
public string GroupId { get; set; } = "default";
|
||||
|
||||
/// <summary>
|
||||
/// 任务数(默认是多少个分区多少个任务)
|
||||
/// 如设置订阅指定Partition则任务数始终为1
|
||||
/// </summary>
|
||||
public int TaskCount { get; set; } = -1;
|
||||
|
||||
/// <summary>
|
||||
/// 批量处理数量
|
||||
/// </summary>
|
||||
public int BatchSize { get; set; } = 100;
|
||||
|
||||
/// <summary>
|
||||
/// 是否启用批量处理
|
||||
/// </summary>
|
||||
public bool EnableBatch { get; set; } = false;
|
||||
|
||||
/// <summary>
|
||||
/// 批次超时时间
|
||||
/// 格式:("00:05:00")
|
||||
/// </summary>
|
||||
public TimeSpan? BatchTimeout { get; set; }=null;
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// 订阅主题
|
||||
/// </summary>
|
||||
/// <param name="batchTimeout"></param>
|
||||
public KafkaSubscribeAttribute(string topic)
|
||||
{
|
||||
this.Topic = topic;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// 订阅主题
|
||||
/// </summary>
|
||||
public KafkaSubscribeAttribute(string topic, int partition)
|
||||
{
|
||||
this.Topic = topic;
|
||||
this.Partition = partition;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,29 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka.Attributes
|
||||
{
|
||||
[AttributeUsage(AttributeTargets.Class, Inherited = false)]
|
||||
public class TopicAttribute: Attribute
|
||||
{
|
||||
/// <summary>
|
||||
/// Initializes a new instance of the <see cref="TopicAttribute"/> class.
|
||||
/// </summary>
|
||||
/// <param name="name">The name.</param>
|
||||
public TopicAttribute(string name = "Default")
|
||||
{
|
||||
Name = name;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the name.
|
||||
/// </summary>
|
||||
/// <value>
|
||||
/// The name.
|
||||
/// </value>
|
||||
public string Name { get; set; }
|
||||
}
|
||||
}
|
||||
@ -1,57 +0,0 @@
|
||||
using Confluent.Kafka;
|
||||
using JiShe.CollectBus.Common.Consts;
|
||||
using JiShe.CollectBus.Kafka.Consumer;
|
||||
using JiShe.CollectBus.Kafka.Producer;
|
||||
using Microsoft.AspNetCore.Builder;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using System.Reflection;
|
||||
using Volo.Abp;
|
||||
using Volo.Abp.DependencyInjection;
|
||||
using Volo.Abp.Modularity;
|
||||
using static Confluent.Kafka.ConfigPropertyNames;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka
|
||||
{
|
||||
public class CollectBusKafkaModule : AbpModule
|
||||
{
|
||||
public override void ConfigureServices(ServiceConfigurationContext context)
|
||||
{
|
||||
var configuration = context.Services.GetConfiguration();
|
||||
//var kafkaSection = configuration.GetSection(CommonConst.Kafka);
|
||||
//KafkaOptionConfig kafkaOptionConfig = new KafkaOptionConfig ();
|
||||
//kafkaSection.Bind(kafkaOptionConfig);
|
||||
//if (configuration[CommonConst.ServerTagName] != null)
|
||||
//{
|
||||
// kafkaOptionConfig.ServerTagName = configuration[CommonConst.ServerTagName]!;
|
||||
//}
|
||||
//context.Services.AddSingleton(kafkaOptionConfig);
|
||||
|
||||
//context.Services.Configure<KafkaOptionConfig>(context.Services.GetConfiguration().GetSection(CommonConst.Kafka));
|
||||
|
||||
Configure<KafkaOptionConfig>(options =>
|
||||
{
|
||||
configuration.GetSection(CommonConst.Kafka).Bind(options);
|
||||
});
|
||||
|
||||
|
||||
// 注册Producer
|
||||
context.Services.AddSingleton<IProducerService, ProducerService>();
|
||||
// 注册Consumer
|
||||
context.Services.AddSingleton<IConsumerService, ConsumerService>();
|
||||
|
||||
//context.Services.AddHostedService<HostedService>();
|
||||
}
|
||||
|
||||
public override void OnApplicationInitialization(ApplicationInitializationContext context)
|
||||
{
|
||||
var app = context.GetApplicationBuilder();
|
||||
|
||||
// 注册Subscriber
|
||||
app.ApplicationServices.UseKafkaSubscribe();
|
||||
|
||||
// 获取程序集
|
||||
//app.UseKafkaSubscribers(Assembly.Load("JiShe.CollectBus.Application"));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,540 +0,0 @@
|
||||
using Confluent.Kafka;
|
||||
using JiShe.CollectBus.Common.Consts;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Text;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka.Consumer
|
||||
{
|
||||
public class ConsumerService : IConsumerService, IDisposable
|
||||
{
|
||||
private readonly ILogger<ConsumerService> _logger;
|
||||
private readonly IConfiguration _configuration;
|
||||
private readonly ConcurrentDictionary<Type, (object Consumer, CancellationTokenSource CTS)>
|
||||
_consumerStore = new();
|
||||
private readonly KafkaOptionConfig _kafkaOptionConfig;
|
||||
private class KafkaConsumer<TKey, TValue> where TKey : notnull where TValue : class { }
|
||||
|
||||
public ConsumerService(IConfiguration configuration, ILogger<ConsumerService> logger, IOptions<KafkaOptionConfig> kafkaOptionConfig)
|
||||
{
|
||||
_configuration = configuration;
|
||||
_logger = logger;
|
||||
_kafkaOptionConfig = kafkaOptionConfig.Value;
|
||||
}
|
||||
|
||||
#region private 私有方法
|
||||
|
||||
/// <summary>
/// Builds a Confluent consumer with JSON value deserialization and log/error
/// handlers routed to the service logger.
/// </summary>
/// <typeparam name="TKey">Message key type.</typeparam>
/// <typeparam name="TValue">Message value type (JSON-deserialized).</typeparam>
/// <param name="groupId">Optional consumer-group id; defaults to "default" in BuildConsumerConfig.</param>
/// <returns>A ready-to-use <see cref="IConsumer{TKey, TValue}"/>.</returns>
private IConsumer<TKey, TValue> CreateConsumer<TKey, TValue>(string? groupId = null) where TKey : notnull where TValue : class
{
    var config = BuildConsumerConfig(groupId);
    return new ConsumerBuilder<TKey, TValue>(config)
        .SetValueDeserializer(new JsonSerializer<TValue>())
        .SetLogHandler((_, log) => _logger.LogInformation($"消费者Log: {log.Message}"))
        .SetErrorHandler((_, e) => _logger.LogError($"消费者错误: {e.Reason}"))
        .Build();
}
|
||||
|
||||
/// <summary>
/// Builds the ConsumerConfig shared by all consumers: earliest offset reset,
/// manual commits, partition-EOF events and optional SASL authentication.
/// </summary>
/// <param name="groupId">Consumer-group id; "default" when null.</param>
private ConsumerConfig BuildConsumerConfig(string? groupId = null)
{
    var config = new ConsumerConfig
    {
        BootstrapServers = _kafkaOptionConfig.BootstrapServers,
        GroupId = groupId ?? "default",
        AutoOffsetReset = AutoOffsetReset.Earliest,
        EnableAutoCommit = false, // commits are issued manually after successful handling
        EnablePartitionEof = true, // emit an event when a partition is fully consumed
        AllowAutoCreateTopics = true, // auto-create missing topics
        FetchMaxBytes = 1024 * 1024 * 50 // enlarge fetch size to 50MB
    };

    if (_kafkaOptionConfig.EnableAuthorization)
    {
        // SASL credentials are only applied when authorization is switched on.
        config.SecurityProtocol = _kafkaOptionConfig.SecurityProtocol;
        config.SaslMechanism = _kafkaOptionConfig.SaslMechanism;
        config.SaslUsername = _kafkaOptionConfig.SaslUserName;
        config.SaslPassword = _kafkaOptionConfig.SaslPassword;
    }

    return config;
}
|
||||
#endregion
|
||||
|
||||
/// <summary>
/// Subscribes to a single topic with a keyed handler; convenience wrapper over the
/// multi-topic overload.
/// </summary>
/// <typeparam name="TKey">Message key type.</typeparam>
/// <typeparam name="TValue">Message value type.</typeparam>
/// <param name="topic">Topic to subscribe to.</param>
/// <param name="messageHandler">Handler invoked per message; returns true to commit.</param>
/// <param name="groupId">Optional consumer-group id.</param>
public async Task SubscribeAsync<TKey, TValue>(string topic, Func<TKey, TValue, Task<bool>> messageHandler, string? groupId = null) where TKey : notnull where TValue : class
{
    string[] singleTopic = { topic };
    await SubscribeAsync<TKey, TValue>(singleTopic, messageHandler, groupId);
}
|
||||
|
||||
|
||||
/// <summary>
/// Subscribes to a single topic with a value-only handler; convenience wrapper over
/// the multi-topic overload.
/// </summary>
/// <typeparam name="TValue">Message value type.</typeparam>
/// <param name="topic">Topic to subscribe to.</param>
/// <param name="messageHandler">Handler invoked per message; returns true to commit.</param>
/// <param name="groupId">Optional consumer-group id.</param>
public async Task SubscribeAsync<TValue>(string topic, Func<TValue, Task<bool>> messageHandler, string? groupId = null) where TValue : class
{
    string[] singleTopic = { topic };
    await SubscribeAsync<TValue>(singleTopic, messageHandler, groupId);
}
|
||||
|
||||
/// <summary>
/// Subscribes to multiple topics and dispatches each message (key + value) to
/// <paramref name="messageHandler"/>. The consume loop runs on a background task;
/// the offset is committed manually only when the handler returns true.
/// </summary>
/// <typeparam name="TKey">Message key type.</typeparam>
/// <typeparam name="TValue">Message value type.</typeparam>
/// <param name="topics">Topics to subscribe to.</param>
/// <param name="messageHandler">Handler invoked per message; returns true to commit.</param>
/// <param name="groupId">Optional consumer-group id.</param>
public async Task SubscribeAsync<TKey, TValue>(string[] topics, Func<TKey, TValue, Task<bool>> messageHandler, string? groupId = null) where TKey : notnull where TValue : class
{
    var consumerKey = typeof(KafkaConsumer<TKey, TValue>);
    var cts = new CancellationTokenSource();
    var consumer = CreateConsumer<TKey, TValue>(groupId);

    // Register the consumer so Unsubscribe/Dispose can cancel and dispose it;
    // previously neither the consumer nor the CTS was tracked, making the loop unstoppable.
    _consumerStore[consumerKey] = (consumer, cts);

    consumer.Subscribe(topics);

    // Fire-and-forget: the previous implementation awaited this infinite loop, which
    // never completes until cancellation and therefore blocked the caller forever
    // (the TValue overload already used fire-and-forget).
    _ = Task.Run(async () =>
    {
        try
        {
            while (!cts.IsCancellationRequested)
            {
                try
                {
                    var result = consumer.Consume(cts.Token);
                    if (result == null || result.Message == null || result.Message.Value == null)
                        continue;

                    if (result.IsPartitionEOF)
                    {
                        _logger.LogInformation("Kafka消费: {Topic} 分区 {Partition} 已消费完", result.Topic, result.Partition);
                        await Task.Delay(TimeSpan.FromSeconds(1), cts.Token);
                        continue;
                    }

                    if (_kafkaOptionConfig.EnableFilter)
                    {
                        // Only handle messages whose route-key header matches this server's tag.
                        var headersFilter = new HeadersFilter { { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) } };
                        if (!headersFilter.Match(result.Message.Headers))
                        {
                            // Skip messages addressed to other servers (offset intentionally not committed).
                            continue;
                        }
                    }

                    bool success = await messageHandler(result.Message.Key, result.Message.Value);
                    if (success)
                    {
                        consumer.Commit(result); // manual commit after successful handling
                    }
                }
                catch (ConsumeException ex)
                {
                    _logger.LogError(ex, $"{string.Join("、", topics)}消息消费失败: {ex.Error.Reason}");
                }
            }
        }
        catch (OperationCanceledException)
        {
            // Cancellation requested via Unsubscribe/Dispose — normal shutdown.
        }
    }, cts.Token);

    await Task.CompletedTask;
}
|
||||
|
||||
|
||||
|
||||
/// <summary>
/// Subscribes to multiple topics (key ignored) and dispatches each message value to
/// <paramref name="messageHandler"/>. The consume loop runs fire-and-forget on a
/// background task; offsets are committed manually when the handler returns true.
/// </summary>
/// <typeparam name="TValue">Message value type.</typeparam>
/// <param name="topics">Topics to subscribe to.</param>
/// <param name="messageHandler">Handler invoked per message; returns true to commit.</param>
/// <param name="groupId">Consumer-group id (no default in this overload).</param>
public async Task SubscribeAsync<TValue>(string[] topics, Func<TValue, Task<bool>> messageHandler, string? groupId) where TValue : class
{
    try {
        // NOTE(review): consumerKey is computed but never used — the consumer is not
        // registered in _consumerStore, so Unsubscribe/Dispose cannot stop this loop.
        var consumerKey = typeof(KafkaConsumer<Ignore, TValue>);
        var cts = new CancellationTokenSource();
        //if (topics.Contains(ProtocolConst.SubscriberLoginReceivedEventName))
        //{
        //    string ssss = "";
        //}
        //var consumer = _consumerStore.GetOrAdd(consumerKey, _ =>
        //(
        //    CreateConsumer<string, TValue>(groupId),
        //    cts
        //)).Consumer as IConsumer<string, TValue>;

        var consumer = CreateConsumer<Ignore, TValue>(groupId);
        consumer!.Subscribe(topics);

        // Fire-and-forget consume loop; SubscribeAsync itself returns immediately.
        _ = Task.Run(async () =>
        {
            // count only tracks loop iterations (used by the commented-out trace log).
            int count = 0;
            while (!cts.IsCancellationRequested)
            {
                try
                {
                    //_logger.LogInformation($"Kafka消费: {string.Join("", topics)}_{count} 开始拉取消息....");
                    count++;
                    var result = consumer.Consume(cts.Token);
                    if (result == null || result.Message == null || result.Message.Value == null)
                    {
                        // Nothing usable pulled — back off briefly before polling again.
                        await Task.Delay(500, cts.Token);
                        continue;
                    }

                    if (result.IsPartitionEOF)
                    {
                        _logger.LogInformation("Kafka消费: {Topic} 分区 {Partition} 已消费完", result.Topic, result.Partition);
                        await Task.Delay(100, cts.Token);
                        continue;
                    }
                    if (_kafkaOptionConfig.EnableFilter)
                    {
                        var headersFilter = new HeadersFilter { { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) } };
                        // Only handle messages whose route-key header matches this server's tag.
                        if (!headersFilter.Match(result.Message.Headers))
                        {
                            await Task.Delay(500, cts.Token);
                            //consumer.Commit(result); // commit offset
                            // Skip messages addressed to other servers.
                            continue;
                        }
                    }
                    bool sucess = await messageHandler(result.Message.Value);
                    if (sucess)
                        consumer.Commit(result); // manual commit after successful handling
                    else
                        // NOTE(review): StoreOffset on failure stores the offset locally;
                        // with EnableAutoCommit=false nothing appears to commit it later — verify intent.
                        consumer.StoreOffset(result);
                }
                catch (ConsumeException ex)
                {
                    _logger.LogError(ex, $"{string.Join("、", topics)}消息消费失败: {ex.Error.Reason}");
                }
            }
        });
    } catch (Exception ex)
    {
        // Setup failures (e.g. broker unreachable) are logged and swallowed.
        _logger.LogWarning($"Kafka消费异常: {ex.Message}");

    }

    await Task.CompletedTask;
}
|
||||
|
||||
|
||||
/// <summary>
/// Batch-subscribes to a single topic; convenience wrapper over the multi-topic overload.
/// </summary>
/// <typeparam name="TKey">Message key type.</typeparam>
/// <typeparam name="TValue">Message value type.</typeparam>
/// <param name="topic">Topic to subscribe to.</param>
/// <param name="messageBatchHandler">Handler invoked per accumulated batch; returns true to commit.</param>
/// <param name="groupId">Optional consumer-group id.</param>
/// <param name="batchSize">Maximum messages per batch.</param>
/// <param name="batchTimeout">Maximum time to wait while filling a batch.</param>
public async Task SubscribeBatchAsync<TKey, TValue>(string topic, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class
{
    string[] singleTopic = { topic };
    await SubscribeBatchAsync<TKey, TValue>(singleTopic, messageBatchHandler, groupId, batchSize, batchTimeout);
}
|
||||
|
||||
/// <summary>
/// Batch-subscribes to multiple topics: accumulates up to <paramref name="batchSize"/>
/// message values (or until <paramref name="batchTimeout"/> elapses), hands the batch
/// to <paramref name="messageBatchHandler"/>, and on success commits the highest
/// offset + 1 per partition.
/// </summary>
/// <typeparam name="TKey">Message key type.</typeparam>
/// <typeparam name="TValue">Message value type.</typeparam>
/// <param name="topics">Topics to subscribe to.</param>
/// <param name="messageBatchHandler">Handler invoked per batch; returns true to commit.</param>
/// <param name="groupId">Optional consumer-group id.</param>
/// <param name="batchSize">Maximum messages per batch.</param>
/// <param name="batchTimeout">Maximum time to wait while filling a batch (default 5s).</param>
public async Task SubscribeBatchAsync<TKey, TValue>(string[] topics,Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null,int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class
{
    var consumerKey = typeof(KafkaConsumer<TKey, TValue>);
    var cts = new CancellationTokenSource();

    // Reuse an existing consumer for this (TKey, TValue) pair, or create and track one.
    var consumer = _consumerStore.GetOrAdd(consumerKey, _ =>
    (
        CreateConsumer<TKey, TValue>(groupId),
        cts
    )).Consumer as IConsumer<TKey, TValue>;

    consumer!.Subscribe(topics);

    var timeout = batchTimeout ?? TimeSpan.FromSeconds(5); // default batch timeout: 5 seconds

    _ = Task.Run(async () =>
    {
        // Each entry keeps the value plus its offset so commits can be computed per partition.
        var messages = new List<(TValue Value, TopicPartitionOffset Offset)>();
        var startTime = DateTime.UtcNow;

        while (!cts.IsCancellationRequested)
        {
            try
            {
                // Non-blocking accumulation until the batch is full or the timeout elapses.
                while (messages.Count < batchSize && (DateTime.UtcNow - startTime) < timeout)
                {
                    var result = consumer.Consume(TimeSpan.Zero); // non-blocking poll

                    if (result != null)
                    {
                        if (result.IsPartitionEOF)
                        {
                            _logger.LogInformation("Kafka消费: {Topic} 分区 {Partition} 已消费完", result.Topic, result.Partition);
                            await Task.Delay(TimeSpan.FromSeconds(1), cts.Token);
                        }
                        else if (result.Message.Value != null)
                        {
                            if (_kafkaOptionConfig.EnableFilter)
                            {
                                var headersFilter = new HeadersFilter { { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) } };
                                // Only handle messages whose route-key header matches this server's tag.
                                if (!headersFilter.Match(result.Message.Headers))
                                {
                                    //consumer.Commit(result); // commit offset
                                    // Skip messages addressed to other servers.
                                    continue;
                                }
                            }
                            messages.Add((result.Message.Value, result.TopicPartitionOffset));
                            //messages.Add(result.Message.Value);
                        }
                    }
                    else
                    {
                        // No message available — short pause before polling again.
                        await Task.Delay(10, cts.Token);
                    }
                }

                // Dispatch the accumulated batch, if any.
                if (messages.Count > 0)
                {
                    bool success = await messageBatchHandler(messages.Select(m => m.Value));
                    if (success)
                    {
                        // Track the highest observed offset per partition.
                        var offsetsByPartition = new Dictionary<TopicPartition, long>();
                        foreach (var msg in messages)
                        {
                            var tp = msg.Offset.TopicPartition;
                            var offset = msg.Offset.Offset;
                            if (!offsetsByPartition.TryGetValue(tp, out var currentMax) || offset > currentMax)
                            {
                                offsetsByPartition[tp] = offset;
                            }
                        }

                        // Kafka commits the NEXT offset to read, hence +1.
                        var offsetsToCommit = offsetsByPartition
                            .Select(kv => new TopicPartitionOffset(kv.Key, new Offset(kv.Value + 1)))
                            .ToList();
                        consumer.Commit(offsetsToCommit);
                    }
                    messages.Clear();
                }

                // Restart the batch window.
                startTime = DateTime.UtcNow;
            }
            catch (ConsumeException ex)
            {
                _logger.LogError(ex, $"{string.Join("、", topics)} 消息消费失败: {ex.Error.Reason}");
            }
            catch (OperationCanceledException)
            {
                // Cancellation requested — normal shutdown.
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "处理批量消息时发生未知错误");
            }
        }
    }, cts.Token);

    await Task.CompletedTask;
}
|
||||
|
||||
|
||||
/// <summary>
/// Batch-subscribes to a single topic (string-keyed consumer); convenience wrapper
/// over the multi-topic overload.
/// </summary>
/// <typeparam name="TValue">Message value type.</typeparam>
/// <param name="topic">Topic to subscribe to.</param>
/// <param name="messageBatchHandler">Handler invoked per accumulated batch; returns true to commit.</param>
/// <param name="groupId">Optional consumer-group id.</param>
/// <param name="batchSize">Maximum messages per batch.</param>
/// <param name="batchTimeout">Maximum time to wait while filling a batch.</param>
/// <param name="consumeTimeout">Per-poll wait time.</param>
public async Task SubscribeBatchAsync<TValue>(string topic, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null, TimeSpan? consumeTimeout = null) where TValue : class
{
    string[] singleTopic = { topic };
    await SubscribeBatchAsync<TValue>(singleTopic, messageBatchHandler, groupId, batchSize, batchTimeout, consumeTimeout);
}
|
||||
|
||||
|
||||
/// <summary>
/// Batch-subscribes to multiple topics with a string-keyed consumer: accumulates up
/// to <paramref name="batchSize"/> message values (or until <paramref name="batchTimeout"/>
/// elapses), hands the batch to <paramref name="messageBatchHandler"/>, and on success
/// commits the highest offset + 1 per partition.
/// </summary>
/// <typeparam name="TValue">Message value type.</typeparam>
/// <param name="topics">Topics to subscribe to.</param>
/// <param name="messageBatchHandler">Handler invoked per batch; returns true to commit.</param>
/// <param name="groupId">Optional consumer-group id.</param>
/// <param name="batchSize">Maximum messages per batch.</param>
/// <param name="batchTimeout">Maximum time to wait while filling a batch (default 5s).</param>
/// <param name="consumeTimeout">Per-poll wait time — NOTE(review): not used in this body.</param>
public async Task SubscribeBatchAsync<TValue>(string[] topics,Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100,TimeSpan? batchTimeout = null,TimeSpan? consumeTimeout = null)where TValue : class
{
    var consumerKey = typeof(KafkaConsumer<string, TValue>);
    var cts = new CancellationTokenSource();

    // Reuse an existing consumer for this value type, or create and track one.
    var consumer = _consumerStore.GetOrAdd(consumerKey, _ =>
    (
        CreateConsumer<string, TValue>(groupId),
        cts
    )).Consumer as IConsumer<string, TValue>;

    consumer!.Subscribe(topics);

    var timeout = batchTimeout ?? TimeSpan.FromSeconds(5); // default batch timeout: 5 seconds

    _ = Task.Run(async () =>
    {
        // Each entry keeps the value plus its offset so commits can be computed per partition.
        var messages = new List<(TValue Value, TopicPartitionOffset Offset)>();
        //var messages = new List<ConsumeResult<TKey, TValue>>();
        var startTime = DateTime.UtcNow;

        while (!cts.IsCancellationRequested)
        {
            try
            {
                // Non-blocking accumulation until the batch is full or the timeout elapses.
                while (messages.Count < batchSize && (DateTime.UtcNow - startTime) < timeout)
                {
                    var result = consumer.Consume(TimeSpan.Zero); // non-blocking poll

                    if (result != null)
                    {
                        if (result.IsPartitionEOF)
                        {
                            _logger.LogInformation("Kafka消费: {Topic} 分区 {Partition} 已消费完", result.Topic, result.Partition);
                            await Task.Delay(TimeSpan.FromSeconds(1), cts.Token);
                        }
                        else if (result.Message.Value != null)
                        {
                            if (_kafkaOptionConfig.EnableFilter)
                            {
                                var headersFilter = new HeadersFilter { { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) } };
                                // Only handle messages whose route-key header matches this server's tag.
                                if (!headersFilter.Match(result.Message.Headers))
                                {
                                    //consumer.Commit(result); // commit offset
                                    // Skip messages addressed to other servers.
                                    continue;
                                }
                            }
                            messages.Add((result.Message.Value, result.TopicPartitionOffset));
                            //messages.Add(result.Message.Value);
                        }
                    }
                    else
                    {
                        // No message available — short pause before polling again.
                        await Task.Delay(10, cts.Token);
                    }
                }

                // Dispatch the accumulated batch, if any.
                if (messages.Count > 0)
                {
                    bool success = await messageBatchHandler(messages.Select(m => m.Value));
                    if (success)
                    {
                        // Track the highest observed offset per partition.
                        var offsetsByPartition = new Dictionary<TopicPartition, long>();
                        foreach (var msg in messages)
                        {
                            var tp = msg.Offset.TopicPartition;
                            var offset = msg.Offset.Offset;
                            if (!offsetsByPartition.TryGetValue(tp, out var currentMax) || offset > currentMax)
                            {
                                offsetsByPartition[tp] = offset;
                            }
                        }

                        // Kafka commits the NEXT offset to read, hence +1.
                        var offsetsToCommit = offsetsByPartition
                            .Select(kv => new TopicPartitionOffset(kv.Key, new Offset(kv.Value + 1)))
                            .ToList();
                        consumer.Commit(offsetsToCommit);
                    }
                    messages.Clear();
                }

                // Restart the batch window.
                startTime = DateTime.UtcNow;
            }
            catch (ConsumeException ex)
            {
                _logger.LogError(ex, $"消息消费失败: {ex.Error.Reason}");
            }
            catch (OperationCanceledException)
            {
                // Cancellation requested — normal shutdown.
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "处理批量消息时发生未知错误");
            }
        }
    }, cts.Token);

    await Task.CompletedTask;
}
|
||||
|
||||
|
||||
/// <summary>
/// Cancels and disposes the tracked consumer for the given key/value pair.
/// </summary>
/// <typeparam name="TKey">Message key type of the subscription to cancel.</typeparam>
/// <typeparam name="TValue">Message value type of the subscription to cancel.</typeparam>
public void Unsubscribe<TKey, TValue>() where TKey : notnull where TValue : class
{
    // Must use the same key under which subscriptions store their consumers
    // (typeof(KafkaConsumer<TKey, TValue>), see SubscribeBatchAsync). The previous
    // typeof((TKey, TValue)) tuple key never matched any stored entry, so
    // Unsubscribe silently did nothing.
    var consumerKey = typeof(KafkaConsumer<TKey, TValue>);
    if (_consumerStore.TryRemove(consumerKey, out var entry))
    {
        entry.CTS.Cancel();
        (entry.Consumer as IDisposable)?.Dispose();
        entry.CTS.Dispose();
    }
}
|
||||
|
||||
/// <summary>
/// Cancels every running consume loop, disposes all tracked consumers and their
/// cancellation sources, and empties the store.
/// </summary>
public void Dispose()
{
    foreach (var (consumerObject, tokenSource) in _consumerStore.Values)
    {
        tokenSource.Cancel();
        if (consumerObject is IDisposable disposableConsumer)
        {
            disposableConsumer.Dispose();
        }
        tokenSource.Dispose();
    }
    _consumerStore.Clear();
}
|
||||
}
|
||||
}
|
||||
@ -1,46 +0,0 @@
|
||||
using Confluent.Kafka;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka.Consumer
|
||||
{
|
||||
/// <summary>
/// Kafka consumer abstraction: single and batched subscriptions with manual,
/// handler-driven offset commits.
/// </summary>
public interface IConsumerService
{
    /// <summary>
    /// Subscribes to a single topic; the handler receives key and value and returns
    /// true to commit the offset.
    /// </summary>
    Task SubscribeAsync<TKey, TValue>(string topic, Func<TKey, TValue, Task<bool>> messageHandler, string? groupId=null) where TKey : notnull where TValue : class;

    /// <summary>
    /// Subscribes to a single topic; the handler receives the value only and returns
    /// true to commit the offset.
    /// </summary>
    /// <typeparam name="TValue">Message value type.</typeparam>
    /// <param name="topic">Topic to subscribe to.</param>
    /// <param name="messageHandler">Per-message handler; true commits the offset.</param>
    Task SubscribeAsync<TValue>(string topic, Func<TValue, Task<bool>> messageHandler, string? groupId = null) where TValue : class;

    /// <summary>
    /// Subscribes to multiple topics with a keyed handler. NOTE(review): groupId has
    /// no default here, unlike the other overloads — confirm whether intentional.
    /// </summary>
    Task SubscribeAsync<TKey, TValue>(string[] topics, Func<TKey, TValue, Task<bool>> messageHandler, string? groupId) where TKey : notnull where TValue : class;


    /// <summary>
    /// Subscribes to multiple topics; the handler receives the value only and returns
    /// true to commit the offset.
    /// </summary>
    /// <typeparam name="TValue">Message value type.</typeparam>
    /// <param name="topics">Topics to subscribe to.</param>
    /// <param name="messageHandler">Per-message handler; true commits the offset.</param>
    Task SubscribeAsync<TValue>(string[] topics, Func<TValue, Task<bool>> messageHandler, string? groupId = null) where TValue : class;

    /// <summary>
    /// Batch-subscribes to multiple topics; batches of up to batchSize values are
    /// handed to the handler, which returns true to commit.
    /// </summary>
    Task SubscribeBatchAsync<TKey, TValue>(string[] topics, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class;

    /// <summary>
    /// Batch-subscribes to a single topic with a keyed consumer.
    /// </summary>
    Task SubscribeBatchAsync<TKey, TValue>(string topic, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null) where TKey : notnull where TValue : class;

    /// <summary>
    /// Batch-subscribes to a single topic (value-only consumer).
    /// </summary>
    Task SubscribeBatchAsync<TValue>(string topic, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null, TimeSpan? consumeTimeout = null) where TValue : class;

    /// <summary>
    /// Batch-subscribes to multiple topics (value-only consumer).
    /// </summary>
    Task SubscribeBatchAsync<TValue>(string[] topics, Func<IEnumerable<TValue>, Task<bool>> messageBatchHandler, string? groupId = null, int batchSize = 100, TimeSpan? batchTimeout = null, TimeSpan? consumeTimeout = null) where TValue : class;

    /// <summary>
    /// Cancels and disposes the subscription registered for the given key/value pair.
    /// </summary>
    void Unsubscribe<TKey, TValue>() where TKey : notnull where TValue : class;
}
|
||||
}
|
||||
@ -1,30 +0,0 @@
|
||||
using Confluent.Kafka;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka
|
||||
{
|
||||
/// <summary>
/// Message-header filter: a dictionary of required header name/value pairs.
/// </summary>
public class HeadersFilter : Dictionary<string, byte[]>
{
    /// <summary>
    /// Returns true only when every required header is present in
    /// <paramref name="headers"/> with a byte-identical (last) value.
    /// </summary>
    /// <param name="headers">Kafka message headers to check.</param>
    /// <returns>True when all required headers match.</returns>
    public bool Match(Headers headers)
    {
        // Every required entry must exist with exactly the expected bytes.
        return this.All(required =>
            headers.TryGetLastBytes(required.Key, out var actual) &&
            actual.SequenceEqual(required.Value));
    }
}
|
||||
}
|
||||
@ -1,43 +0,0 @@
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka
|
||||
{
|
||||
/// <summary>
/// Hosted service that kicks off Kafka subscriber registration in the background
/// at application start.
/// </summary>
public class HostedService : IHostedService, IDisposable
{
    private readonly ILogger _logger;
    private readonly IServiceProvider _provider;

    public HostedService(ILogger<HostedService> logger, IServiceProvider provider)
    {
        _logger = logger;
        _provider = provider;
    }

    /// <summary>
    /// Starts the background Kafka-subscribe task and returns immediately.
    /// </summary>
    public Task StartAsync(CancellationToken cancellationToken)
    {
        _logger.LogInformation("程序启动");
        // Fire-and-forget: exceptions thrown inside an unobserved Task.Run would be
        // silently lost, so catch and log them explicitly.
        _ = Task.Run(() =>
        {
            try
            {
                _provider.UseKafkaSubscribe();
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "UseKafkaSubscribe failed during startup");
            }
        }, cancellationToken);
        return Task.CompletedTask;
    }

    /// <summary>
    /// No-op shutdown hook; only logs.
    /// </summary>
    public Task StopAsync(CancellationToken cancellationToken)
    {
        _logger.LogInformation("结束");
        return Task.CompletedTask;
    }

    /// <summary>
    /// Nothing to release; present to satisfy IDisposable.
    /// </summary>
    public void Dispose()
    {

    }
}
|
||||
}
|
||||
@ -1,18 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka
|
||||
{
|
||||
/// <summary>
/// Kafka subscriber marker interface.
/// <para>
/// Subscribers must implement this interface, be registered for dependency
/// injection, and mark their handler methods with <see cref="KafkaSubscribeAttribute"/>.
/// </para>
/// </summary>
public interface IKafkaSubscribe
{
}
|
||||
}
|
||||
@ -1,21 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka
|
||||
{
|
||||
/// <summary>
/// Acknowledgement contract for subscription handlers: success flag plus an
/// optional message.
/// </summary>
public interface ISubscribeAck
{
    /// <summary>
    /// Whether the message was handled successfully.
    /// </summary>
    bool Ack { get; set; }

    /// <summary>
    /// Optional descriptive message accompanying the acknowledgement.
    /// </summary>
    string? Msg { get; set; }
}
|
||||
}
|
||||
@ -1,19 +0,0 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Confluent.Kafka" Version="2.9.0" />
|
||||
<PackageReference Include="Volo.Abp.AspNetCore" Version="8.3.3" />
|
||||
<PackageReference Include="Volo.Abp.Core" Version="8.3.3" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\shared\JiShe.CollectBus.Common\JiShe.CollectBus.Common.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@ -1,88 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using System.Text.Json;
|
||||
using Confluent.Kafka;
|
||||
using System.Text.Json.Serialization;
|
||||
using System.Text.Encodings.Web;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka
|
||||
{
|
||||
/// <summary>
/// Generic JSON serializer/deserializer for Kafka message values, built on
/// System.Text.Json with lenient, camel-cased options.
/// </summary>
public class JsonSerializer<T> : ISerializer<T>, IDeserializer<T>
{
    // Shared, immutable options; the unqualified "JsonSerializer" calls below bind to
    // System.Text.Json.JsonSerializer (arity differs from this generic class).
    private static readonly JsonSerializerOptions _options = new JsonSerializerOptions
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.Never,
        WriteIndented = false,// compact output
        Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,// allow special characters unescaped
        IgnoreReadOnlyFields = true,
        IgnoreReadOnlyProperties = true,
        NumberHandling = JsonNumberHandling.AllowReadingFromString, // accept numbers given as strings
        AllowTrailingCommas = true, // tolerate trailing commas
        ReadCommentHandling = JsonCommentHandling.Skip, // skip comments
        PropertyNameCaseInsensitive = true, // case-insensitive property matching
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase, // camelCase property names
        Converters = { new DateTimeJsonConverter() } // custom "yyyy-MM-dd HH:mm:ss" DateTime handling
    };

    /// <summary>
    /// Serializes <paramref name="data"/> to UTF-8 JSON bytes; null data yields a
    /// null payload (presumably a Kafka tombstone — verify against producers).
    /// </summary>
    /// <exception cref="InvalidOperationException">Wraps any serialization failure.</exception>
    public byte[] Serialize(T data, SerializationContext context)
    {
        if (data == null)
            return null;

        try
        {
            return JsonSerializer.SerializeToUtf8Bytes(data, _options);
        }
        catch (Exception ex)
        {
            throw new InvalidOperationException("Kafka序列化失败", ex);
        }
    }

    /// <summary>
    /// Deserializes UTF-8 JSON bytes into <typeparamref name="T"/>; a null payload
    /// returns default(T).
    /// </summary>
    /// <exception cref="InvalidOperationException">Wraps any deserialization failure.</exception>
    public T Deserialize(ReadOnlySpan<byte> data, bool isNull, SerializationContext context)
    {
        if (isNull)
            return default;

        try
        {
            return JsonSerializer.Deserialize<T>(data, _options);
        }
        catch (Exception ex)
        {
            throw new InvalidOperationException("Kafka反序列化失败", ex);
        }
    }
}
|
||||
|
||||
|
||||
/// <summary>
/// System.Text.Json converter that writes DateTime values using a fixed format
/// string (default "yyyy-MM-dd HH:mm:ss") and reads them back.
/// </summary>
public class DateTimeJsonConverter : JsonConverter<DateTime>
{
    private readonly string _dateFormatString;

    /// <summary>Uses the default "yyyy-MM-dd HH:mm:ss" format.</summary>
    public DateTimeJsonConverter()
    {
        _dateFormatString = "yyyy-MM-dd HH:mm:ss";
    }

    /// <summary>Uses a caller-supplied format string.</summary>
    public DateTimeJsonConverter(string dateFormatString)
    {
        _dateFormatString = dateFormatString;
    }

    /// <summary>
    /// Reads a DateTime from a JSON string. Tries the configured format first
    /// (invariant culture, round-trips Write exactly), then falls back to general
    /// invariant parsing. The previous DateTime.Parse(reader.GetString()) was
    /// culture-sensitive and threw ArgumentNullException on a null token.
    /// </summary>
    /// <exception cref="JsonException">The token is null/empty.</exception>
    public override DateTime Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        var text = reader.GetString();
        if (string.IsNullOrEmpty(text))
            throw new JsonException("Expected a non-empty date/time string.");

        if (DateTime.TryParseExact(text, _dateFormatString,
                System.Globalization.CultureInfo.InvariantCulture,
                System.Globalization.DateTimeStyles.None, out var exact))
            return exact;

        return DateTime.Parse(text, System.Globalization.CultureInfo.InvariantCulture);
    }

    /// <summary>
    /// Writes the DateTime as a JSON string in the configured format.
    /// </summary>
    public override void Write(Utf8JsonWriter writer, DateTime value, JsonSerializerOptions options)
    {
        writer.WriteStringValue(value.ToString(_dateFormatString));
    }
}
|
||||
}
|
||||
@ -1,63 +0,0 @@
|
||||
using Confluent.Kafka;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka
|
||||
{
|
||||
/// <summary>
/// Kafka connection, topic and authentication options bound from configuration.
/// </summary>
public class KafkaOptionConfig
{
    /// <summary>
    /// Kafka bootstrap server address(es).
    /// </summary>
    public string BootstrapServers { get; set; } = null!;

    /// <summary>
    /// Server tag; used as the "route-key" header value when header filtering is on.
    /// </summary>
    public string ServerTagName { get; set; }= "KafkaFilterKey";

    /// <summary>
    /// Replication factor used when creating topics.
    /// </summary>
    public short KafkaReplicationFactor { get; set; }

    /// <summary>
    /// Partition count used when creating topics.
    /// </summary>
    public int NumPartitions { get; set; }

    /// <summary>
    /// Whether the route-key header filter is enabled (default true).
    /// </summary>
    public bool EnableFilter { get; set; }= true;

    /// <summary>
    /// Whether SASL authentication is enabled (default false).
    /// </summary>
    public bool EnableAuthorization { get; set; } = false;

    /// <summary>
    /// Security protocol used when authentication is enabled.
    /// </summary>
    public SecurityProtocol SecurityProtocol { get; set; } = SecurityProtocol.SaslPlaintext;

    /// <summary>
    /// SASL mechanism used when authentication is enabled.
    /// </summary>
    public SaslMechanism SaslMechanism { get; set; }= SaslMechanism.Plain;

    /// <summary>
    /// SASL user name.
    /// </summary>
    public string? SaslUserName { get; set; }

    /// <summary>
    /// SASL password.
    /// </summary>
    public string? SaslPassword { get; set; }

}
|
||||
}
|
||||
@ -1,266 +0,0 @@
|
||||
using Confluent.Kafka;
|
||||
using JiShe.CollectBus.Common.Consts;
|
||||
using JiShe.CollectBus.Common.Extensions;
|
||||
using JiShe.CollectBus.Common.Helpers;
|
||||
using JiShe.CollectBus.Kafka.AdminClient;
|
||||
using JiShe.CollectBus.Kafka.Attributes;
|
||||
using JiShe.CollectBus.Kafka.Consumer;
|
||||
using Microsoft.AspNetCore.Builder;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Newtonsoft.Json;
|
||||
using System.Collections.Generic;
|
||||
using System.Reflection;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka
|
||||
{
|
||||
public static class KafkaSubcribesExtensions
|
||||
{
|
||||
/// <summary>
/// Registers Kafka subscriptions: creates all known topics up front, then — once the
/// application has started — scans every DLL next to the entry assembly for
/// <see cref="IKafkaSubscribe"/> implementations and wires up their attributed handlers.
/// </summary>
/// <param name="provider">Root service provider used to resolve services and subscribers.</param>
public static void UseKafkaSubscribe(this IServiceProvider provider)
{
    var lifetime = provider.GetRequiredService<IHostApplicationLifetime>();

    // Initialize topic metadata.
    var kafkaAdminClient = provider.GetRequiredService<IAdminClientService>();
    var kafkaOptions = provider.GetRequiredService<IOptions<KafkaOptionConfig>>();

    List<string> topics = ProtocolConstExtensions.GetAllTopicNamesByIssued();
    topics.AddRange(ProtocolConstExtensions.GetAllTopicNamesByReceived());

    // Synchronously ensure every topic exists before subscriptions start.
    foreach (var item in topics)
    {
        kafkaAdminClient.CreateTopicAsync(item, kafkaOptions.Value.NumPartitions, kafkaOptions.Value.KafkaReplicationFactor).ConfigureAwait(false).GetAwaiter().GetResult();
    }
    lifetime.ApplicationStarted.Register(() =>
    {
        var logger = provider.GetRequiredService<ILogger<CollectBusKafkaModule>>();
        int threadCount = 0;
        int topicCount = 0;
        var assemblyPath = Path.GetDirectoryName(Assembly.GetEntryAssembly()?.Location);
        if (string.IsNullOrWhiteSpace(assemblyPath))
        {
            logger.LogInformation($"kafka订阅未能找到程序路径");
            return;
        }
        var dllFiles = Directory.GetFiles(assemblyPath, "*.dll");
        foreach (var file in dllFiles)
        {
            // Reuse an already-loaded assembly when possible; otherwise load from disk.
            var assemblyName = AssemblyName.GetAssemblyName(file);
            var existingAssembly = AppDomain.CurrentDomain.GetAssemblies()
                .FirstOrDefault(a => a.GetName().FullName == assemblyName.FullName);
            var assembly = existingAssembly ?? Assembly.LoadFrom(file);
            // Concrete types implementing IKafkaSubscribe. (NOTE(review): stray empty
            // statement ';' after ToList() is preserved verbatim.)
            var subscribeTypes = assembly.GetTypes().Where(type =>
                typeof(IKafkaSubscribe).IsAssignableFrom(type) &&
                !type.IsAbstract && !type.IsInterface).ToList(); ;
            if (subscribeTypes.Count == 0)
                continue;

            foreach (var subscribeType in subscribeTypes)
            {
                var subscribes = provider.GetServices(subscribeType).ToList();
                subscribes.ForEach(subscribe =>
                {
                    if (subscribe != null)
                    {
                        // Item1 = threads started, Item2 = topics subscribed.
                        Tuple<int, int> tuple = BuildKafkaSubscribe(subscribe, provider, logger, kafkaOptions.Value);
                        threadCount += tuple.Item1;
                        topicCount += tuple.Item2;
                    }
                });
            }
        }
        logger.LogInformation($"kafka订阅主题:{topicCount}数,共启动:{threadCount}线程");
    });
}
|
||||
|
||||
/// <summary>
/// Registers Kafka subscriptions from a single explicit assembly: creates all known
/// topics, then wires up <see cref="IKafkaSubscribe"/> implementations found in
/// <paramref name="assembly"/> once the application has started.
/// NOTE(review): despite the "Async" suffix this method is synchronous and returns void.
/// </summary>
/// <param name="app">Application builder whose service provider is used for resolution.</param>
/// <param name="assembly">Assembly to scan for subscriber types.</param>
public static void UseKafkaSubscribersAsync(this IApplicationBuilder app, Assembly assembly)
{
    var provider = app.ApplicationServices;
    var lifetime = provider.GetRequiredService<IHostApplicationLifetime>();
    // Initialize topic metadata.
    var kafkaAdminClient = provider.GetRequiredService<IAdminClientService>();
    var kafkaOptions = provider.GetRequiredService<IOptions<KafkaOptionConfig>>();

    List<string> topics = ProtocolConstExtensions.GetAllTopicNamesByIssued();
    topics.AddRange(ProtocolConstExtensions.GetAllTopicNamesByReceived());

    // Synchronously ensure every topic exists before subscriptions start.
    foreach (var item in topics)
    {
        kafkaAdminClient.CreateTopicAsync(item, kafkaOptions.Value.NumPartitions, kafkaOptions.Value.KafkaReplicationFactor).ConfigureAwait(false).GetAwaiter().GetResult();
    }
    lifetime.ApplicationStarted.Register(() =>
    {
        var logger = provider.GetRequiredService<ILogger<CollectBusKafkaModule>>();
        int threadCount = 0;
        int topicCount = 0;
        // Unlike UseKafkaSubscribe, abstract/interface types are NOT filtered out here —
        // NOTE(review): confirm whether that difference is intentional.
        var subscribeTypes = assembly.GetTypes()
            .Where(t => typeof(IKafkaSubscribe).IsAssignableFrom(t))
            .ToList();

        if (subscribeTypes.Count == 0) return;
        foreach (var subscribeType in subscribeTypes)
        {
            var subscribes = provider.GetServices(subscribeType).ToList();
            subscribes.ForEach(subscribe =>
            {

                if (subscribe != null)
                {
                    // Item1 = threads started, Item2 = topics subscribed.
                    Tuple<int, int> tuple = BuildKafkaSubscribe(subscribe, provider, logger, kafkaOptions.Value);
                    threadCount += tuple.Item1;
                    topicCount += tuple.Item2;
                }
            });
        }
        logger.LogInformation($"kafka订阅主题:{topicCount}数,共启动:{threadCount}线程");
    });
}
|
||||
|
||||
/// <summary>
/// Builds Kafka subscriptions for one subscriber instance: scans its public methods
/// for <see cref="KafkaSubscribeAttribute"/> and starts one background consumer task
/// per partition slot for each decorated method.
/// </summary>
/// <param name="subscribe">Subscriber instance whose methods are scanned.</param>
/// <param name="provider">Root service provider used to resolve Kafka services.</param>
/// <param name="logger">Module logger.</param>
/// <param name="kafkaOptionConfig">Kafka configuration (currently unused here; see hard-coded count below).</param>
/// <returns>Tuple of (consumer tasks started, subscribed methods found).</returns>
private static Tuple<int,int> BuildKafkaSubscribe(object subscribe, IServiceProvider provider,ILogger<CollectBusKafkaModule> logger, KafkaOptionConfig kafkaOptionConfig)
{
    // Pair each method with its subscription attribute; keep only decorated methods.
    var subscribedMethods = subscribe.GetType().GetMethods()
        .Select(m => new { Method = m, Attribute = m.GetCustomAttribute<KafkaSubscribeAttribute>() })
        .Where(x => x.Attribute != null)
        .ToArray();
    //var configuration = provider.GetRequiredService<IConfiguration>();
    int threadCount = 0;

    foreach (var sub in subscribedMethods)
    {
        // Hard-coded consumer count per topic; kafkaOptionConfig.NumPartitions was
        // deliberately commented out — TODO confirm before restoring it.
        int partitionCount = 3;// kafkaOptionConfig.NumPartitions;
#if DEBUG
        // In debug builds, cap the consumer count at the topic's actual partition count.
        var adminClientService = provider.GetRequiredService<IAdminClientService>();
        int topicCount = adminClientService.GetTopicPartitionsNum(sub.Attribute!.Topic);
        partitionCount= partitionCount> topicCount ? topicCount: partitionCount;
#endif
        //int partitionCount = sub.Attribute!.TaskCount==-1?adminClientService.GetTopicPartitionsNum(sub.Attribute!.Topic) : sub.Attribute!.TaskCount;
        if (partitionCount <= 0)
            partitionCount = 1;
        for (int i = 0; i < partitionCount; i++)
        {
            //if (sub.Attribute!.Topic == ProtocolConst.SubscriberLoginReceivedEventName)
            // NOTE(review): fire-and-forget — the returned Task is dropped, so faults in
            // the consume loop are unobserved here; confirm StartConsumerAsync logs them.
            Task.Run(() => StartConsumerAsync(provider, sub.Attribute!, sub.Method, subscribe, logger));
            threadCount++;
        }
    }
    return Tuple.Create(threadCount, subscribedMethods.Length);
}
|
||||
|
||||
/// <summary>
/// Runs one background consume loop for a subscribed handler method, in either
/// batch or single-message mode depending on the attribute.
/// </summary>
/// <param name="provider">Root service provider used to resolve the consumer service.</param>
/// <param name="attr">Subscription metadata (topic, group id, batching settings).</param>
/// <param name="method">Handler method invoked per message/batch.</param>
/// <param name="subscribe">Instance the handler is invoked on.</param>
/// <param name="logger">Module logger.</param>
private static async Task StartConsumerAsync(IServiceProvider provider, KafkaSubscribeAttribute attr,MethodInfo method, object subscribe, ILogger<CollectBusKafkaModule> logger)
{
    var consumerService = provider.GetRequiredService<IConsumerService>();

    if (attr.EnableBatch)
    {
        await consumerService.SubscribeBatchAsync<object>(attr.Topic, async (message) =>
        {
            try
            {
#if DEBUG
                logger.LogInformation($"kafka批量消费消息:{message}");
#endif
                // Dispatch the whole batch to the handler; its bool result is the ack.
                return await ProcessMessageAsync(message.ToList(), method, subscribe);
            }
            catch (ConsumeException ex)
            {
                // NOTE(review): only ConsumeException is handled — exceptions thrown by
                // the handler itself (reflection/deserialization) propagate to the
                // consumer service. Confirm that is the intended contract.
                logger.LogError($"kafka批量消费异常:{ex.Message}");
            }
            // On failure, do not acknowledge. (Was `return await Task.FromResult(false)`;
            // the wrapper task was a pointless allocation.)
            return false;
        }, attr.GroupId, attr.BatchSize, attr.BatchTimeout);
    }
    else
    {
        await consumerService.SubscribeAsync<object>(attr.Topic, async (message) =>
        {
            try
            {
#if DEBUG
                logger.LogInformation($"kafka消费消息:{message}");
#endif
                // Single message is wrapped in a one-element list for the shared dispatcher.
                return await ProcessMessageAsync(new List<object>() { message }, method, subscribe);
            }
            catch (ConsumeException ex)
            {
                logger.LogError($"kafka消费异常:{ex.Message}");
            }
            return false;
        }, attr.GroupId);
    }
}
|
||||
|
||||
|
||||
/// <summary>
/// Dispatches consumed payloads to the handler method via reflection and converts
/// its result (Task&lt;ISubscribeAck&gt;, plain Task, or ISubscribeAck) into an ack flag.
/// </summary>
/// <param name="messages">Raw payloads consumed from Kafka.</param>
/// <param name="method">Handler method to invoke.</param>
/// <param name="subscribe">Instance the handler is invoked on.</param>
/// <returns>true to acknowledge the message(s); false otherwise.</returns>
private static async Task<bool> ProcessMessageAsync(List<object> messages, MethodInfo method, object subscribe)
{
    var parameters = method.GetParameters();
    bool existParameters = parameters.Length > 0;
    List<object>? messageObj = null;
    if (existParameters)
    {
        messageObj = new List<object>();
        // All arguments are converted to the FIRST parameter's type.
        var paramType = parameters[0].ParameterType;
        foreach (var msg in messages)
        {
            // Non-string parameters are JSON-deserialized from the raw payload text.
            var data = paramType != typeof(string) ? msg?.ToString()?.Deserialize(paramType) : msg;
            if (data != null)
                messageObj.Add(data);
        }
    }

    // NOTE(review): each converted message becomes one positional argument. A batch
    // whose size differs from the method's parameter count will throw
    // TargetParameterCountException — confirm batch handlers accept a matching shape.
    // (Removed dead local `isGenericTask`: it was computed but never read.)
    var result = method.Invoke(subscribe, messageObj?.ToArray());
    if (result is Task<ISubscribeAck> genericTask)
    {
        await genericTask.ConfigureAwait(false);
        return genericTask.Result.Ack;
    }
    else if (result is Task nonGenericTask)
    {
        // A plain Task that completes without throwing counts as acknowledged.
        await nonGenericTask.ConfigureAwait(false);
        return true;
    }
    else if (result is ISubscribeAck ackResult)
    {
        return ackResult.Ack;
    }
    // Unrecognized return type: do not acknowledge.
    return false;
}
|
||||
|
||||
}
|
||||
}
|
||||
@ -1,20 +0,0 @@
|
||||
using Confluent.Kafka;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka.Producer
|
||||
{
|
||||
/// <summary>
/// Typed Kafka message publisher. Implementations serialize values as JSON and
/// attach a "route-key" header (see ProducerService).
/// </summary>
public interface IProducerService
{
    /// <summary>Publishes a keyed message to <paramref name="topic"/>.</summary>
    Task ProduceAsync<TKey, TValue>(string topic, TKey key, TValue value) where TKey : notnull where TValue : class;

    /// <summary>Publishes an un-keyed message to <paramref name="topic"/>.</summary>
    Task ProduceAsync<TValue>(string topic, TValue value) where TValue : class;

    /// <summary>
    /// Publishes a keyed message, optionally to a specific partition, with an
    /// optional delivery-report callback.
    /// </summary>
    /// <param name="partition">Target partition; null lets the partitioner decide.</param>
    // BUGFIX: `partition` now defaults to null, matching the ProducerService
    // implementation; callers could previously omit it on the class but not when
    // coding against this interface.
    Task ProduceAsync<TKey, TValue>(string topic, TKey key, TValue value, int? partition = null, Action<DeliveryReport<TKey, TValue>>? deliveryHandler = null) where TKey : notnull where TValue : class;

    /// <summary>
    /// Publishes an un-keyed message, optionally to a specific partition, with an
    /// optional delivery-report callback.
    /// </summary>
    /// <param name="partition">Target partition; null lets the partitioner decide.</param>
    Task ProduceAsync<TValue>(string topic, TValue value, int? partition = null, Action<DeliveryReport<Null, TValue>>? deliveryHandler = null) where TValue : class;
}
|
||||
}
|
||||
@ -1,220 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using Confluent.Kafka;
|
||||
using JiShe.CollectBus.Kafka.Consumer;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Volo.Abp.DependencyInjection;
|
||||
using YamlDotNet.Serialization;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka.Producer
|
||||
{
|
||||
/// <summary>
/// Kafka producer wrapper: caches one producer per (TKey, TValue) combination,
/// serializes values as JSON, and stamps every message with a "route-key" header
/// carrying the configured server tag.
/// </summary>
public class ProducerService: IProducerService, IDisposable
{
    private readonly ILogger<ProducerService> _logger;
    private readonly IConfiguration _configuration;
    // Producers are expensive and thread-safe, so one is cached per key/value type pair.
    private readonly ConcurrentDictionary<Type, object> _producerCache = new();
    // Marker type used solely as a unique cache key for a (TKey, TValue) pair.
    private class KafkaProducer<TKey, TValue> where TKey : notnull where TValue : class { }
    private readonly KafkaOptionConfig _kafkaOptionConfig;

    public ProducerService(IConfiguration configuration,ILogger<ProducerService> logger, IOptions<KafkaOptionConfig> kafkaOptionConfig)
    {
        _configuration = configuration;
        _logger = logger;
        _kafkaOptionConfig = kafkaOptionConfig.Value;
    }

    #region private helpers
    /// <summary>
    /// Gets (or lazily creates and caches) the producer instance for the given
    /// key/value type combination.
    /// </summary>
    /// <typeparam name="TKey">Message key type.</typeparam>
    /// <typeparam name="TValue">Message value type (JSON-serialized).</typeparam>
    /// <param name="typeKey">Cache key; must uniquely identify (TKey, TValue).</param>
    private IProducer<TKey, TValue> GetProducer<TKey, TValue>(Type typeKey)
    {
        return (IProducer<TKey, TValue>)_producerCache.GetOrAdd(typeKey, _ =>
        {
            var config = BuildProducerConfig();
            // NOTE(review): only the value serializer is customized; keys rely on
            // Confluent's built-in serializers (string/Null/etc.) — confirm before
            // using other key types.
            return new ProducerBuilder<TKey, TValue>(config)
                .SetValueSerializer(new JsonSerializer<TValue>()) // values serialized as JSON
                .SetLogHandler((_, msg) => _logger.Log(ConvertLogLevel(msg.Level), msg.Message))
                .Build();
        });
    }

    /// <summary>
    /// Builds the shared producer configuration from <see cref="KafkaOptionConfig"/>.
    /// </summary>
    private ProducerConfig BuildProducerConfig()
    {
        var config = new ProducerConfig
        {
            BootstrapServers = _kafkaOptionConfig.BootstrapServers,
            AllowAutoCreateTopics = true,
            QueueBufferingMaxKbytes = 2_097_151, // raise buffer cap to ~2GB (default ~1GB)
            CompressionType = CompressionType.Lz4, // LZ4 compression (alternatives: gzip/snappy/zstd)
            BatchSize = 32_768, // 32K batches
            LingerMs = 20, // wait up to 20ms to fill a batch
            Acks = Acks.All, // commit only when all replicas have the message (Acks.Leader is laxer)
            MessageSendMaxRetries = 50, // retry failed sends up to 50 times
            MessageTimeoutMs = 120000, // 2-minute send timeout; must exceed LingerMs
        };

        if (_kafkaOptionConfig.EnableAuthorization)
        {
            config.SecurityProtocol = _kafkaOptionConfig.SecurityProtocol;
            config.SaslMechanism = _kafkaOptionConfig.SaslMechanism;
            config.SaslUsername = _kafkaOptionConfig.SaslUserName;
            config.SaslPassword = _kafkaOptionConfig.SaslPassword;
        }

        return config;
    }

    // Maps librdkafka syslog levels onto Microsoft.Extensions.Logging levels.
    private static LogLevel ConvertLogLevel(SyslogLevel level) => level switch
    {
        SyslogLevel.Emergency => LogLevel.Critical,
        SyslogLevel.Alert => LogLevel.Critical,
        SyslogLevel.Critical => LogLevel.Critical,
        SyslogLevel.Error => LogLevel.Error,
        SyslogLevel.Warning => LogLevel.Warning,
        SyslogLevel.Notice => LogLevel.Information,
        SyslogLevel.Info => LogLevel.Information,
        SyslogLevel.Debug => LogLevel.Debug,
        _ => LogLevel.None
    };

    #endregion

    /// <summary>
    /// Publishes a keyed message and awaits the delivery result.
    /// </summary>
    public async Task ProduceAsync<TKey, TValue>(string topic, TKey key, TValue value)where TKey : notnull where TValue : class
    {
        var typeKey = typeof(KafkaProducer<TKey, TValue>);
        var producer = GetProducer<TKey, TValue>(typeKey);
        var message = new Message<TKey, TValue>
        {
            Key = key,
            Value = value,
            Headers = new Headers{
                { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) }
            }
        };
        await producer.ProduceAsync(topic, message);
    }

    /// <summary>
    /// Publishes an un-keyed message and awaits the delivery result.
    /// </summary>
    public async Task ProduceAsync<TValue>(string topic, TValue value) where TValue : class
    {
        // BUGFIX: the cache key previously was KafkaProducer<string, TValue> while the
        // producer built here is Null-keyed; a later call to the keyed overload with
        // TKey == string would then cast the cached IProducer<Null, TValue> to
        // IProducer<string, TValue> and throw InvalidCastException. Keying the cache
        // by <Null, TValue> also shares the producer with the partitioned overload.
        var typeKey = typeof(KafkaProducer<Null, TValue>);
        var producer = GetProducer<Null, TValue>(typeKey);
        var message = new Message<Null, TValue>
        {
            Value = value,
            Headers = new Headers{
                { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) }
            }
        };
        await producer.ProduceAsync(topic, message);
    }

    /// <summary>
    /// Publishes a keyed message, optionally to a specific partition; delivery is
    /// reported asynchronously via <paramref name="deliveryHandler"/> (fire-and-forget
    /// from the caller's perspective).
    /// </summary>
    /// <param name="partition">Target partition; null lets the partitioner decide.</param>
    /// <param name="deliveryHandler">Optional per-message delivery callback.</param>
    public async Task ProduceAsync<TKey, TValue>(string topic,TKey key,TValue value,int? partition=null, Action<DeliveryReport<TKey, TValue>>? deliveryHandler = null)where TKey : notnull where TValue : class
    {
        var message = new Message<TKey, TValue>
        {
            Key = key,
            Value = value,
            Headers = new Headers{
                { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) }
            }
        };
        var typeKey = typeof(KafkaProducer<TKey, TValue>);
        var producer = GetProducer<TKey, TValue>(typeKey);
        if (partition.HasValue)
        {
            var topicPartition = new TopicPartition(topic, partition.Value);
            producer.Produce(topicPartition, message, deliveryHandler);
        }
        else
        {
            producer.Produce(topic, message, deliveryHandler);
        }
        await Task.CompletedTask;
    }

    /// <summary>
    /// Publishes an un-keyed message, optionally to a specific partition; delivery is
    /// reported asynchronously via <paramref name="deliveryHandler"/>.
    /// </summary>
    /// <param name="partition">Target partition; null lets the partitioner decide.</param>
    /// <param name="deliveryHandler">Optional per-message delivery callback.</param>
    public async Task ProduceAsync<TValue>(string topic, TValue value, int? partition=null, Action<DeliveryReport<Null, TValue>>? deliveryHandler = null) where TValue : class
    {
        var message = new Message<Null, TValue>
        {
            Value = value,
            Headers = new Headers{
                { "route-key", Encoding.UTF8.GetBytes(_kafkaOptionConfig.ServerTagName) }
            }
        };
        var typeKey = typeof(KafkaProducer<Null, TValue>);
        var producer = GetProducer<Null, TValue>(typeKey);
        if (partition.HasValue)
        {
            var topicPartition = new TopicPartition(topic, partition.Value);
            producer.Produce(topicPartition, message, deliveryHandler);
        }
        else
        {
            producer.Produce(topic, message, deliveryHandler);
        }
        await Task.CompletedTask;
    }

    /// <summary>
    /// Disposes every cached producer (flushing per Confluent semantics) and clears the cache.
    /// </summary>
    public void Dispose()
    {
        foreach (var producer in _producerCache.Values.OfType<IDisposable>())
        {
            producer.Dispose();
        }
        _producerCache.Clear();
    }
}
|
||||
}
|
||||
@ -1,75 +0,0 @@
|
||||
using Confluent.Kafka;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using static System.Runtime.InteropServices.JavaScript.JSType;
|
||||
|
||||
namespace JiShe.CollectBus.Kafka
|
||||
{
|
||||
/// <summary>
/// Mutable acknowledgement result returned by Kafka subscribe handlers.
/// </summary>
public class SubscribeResult: ISubscribeAck
{
    /// <summary>
    /// Whether the message was handled successfully (true = acknowledge/commit).
    /// </summary>
    public bool Ack { get; set; }

    /// <summary>
    /// Optional human-readable detail message.
    /// </summary>
    public string? Msg { get; set; }

    /// <summary>
    /// Marks this result as successful.
    /// </summary>
    /// <param name="msg">Optional detail message.</param>
    /// <returns>This instance, for fluent chaining.</returns>
    public SubscribeResult Success(string? msg = null)
    {
        Ack = true;
        Msg = msg;
        return this;
    }

    /// <summary>
    /// Marks this result as failed.
    /// </summary>
    /// <param name="msg">Optional detail message.</param>
    /// <returns>This instance, for fluent chaining.</returns>
    // BUGFIX(docs): removed stale <param name="code"> / <param name="data"> entries
    // that referred to parameters this method does not have.
    public SubscribeResult Fail(string? msg = null)
    {
        Msg = msg;
        Ack = false;
        return this;
    }
}
|
||||
|
||||
/// <summary>
/// Factory helpers for creating <see cref="ISubscribeAck"/> results without
/// touching <see cref="SubscribeResult"/> directly.
/// </summary>
public static partial class SubscribeAck
{
    /// <summary>Creates a successful acknowledgement.</summary>
    /// <param name="msg">Optional detail message.</param>
    public static ISubscribeAck Success(string? msg = null) => new SubscribeResult().Success(msg);

    /// <summary>Creates a failed acknowledgement.</summary>
    /// <param name="msg">Optional detail message.</param>
    public static ISubscribeAck Fail(string? msg = null) => new SubscribeResult().Fail(msg);
}
|
||||
|
||||
}
|
||||
@ -1,94 +0,0 @@
|
||||
using JiShe.CollectBus.IotSystems.Devices;
|
||||
using JiShe.CollectBus.IotSystems.MessageReceiveds;
|
||||
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
|
||||
using JiShe.CollectBus.IotSystems.Protocols;
|
||||
using JiShe.CollectBus.ShardingStrategy;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Bson.Serialization;
|
||||
using MongoDB.Driver;
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using JiShe.CollectBus.IotSystems.MessageIssueds;
|
||||
using Volo.Abp.Data;
|
||||
using Volo.Abp.MongoDB;
|
||||
using Volo.Abp.MultiTenancy;
|
||||
|
||||
namespace JiShe.CollectBus.MongoDB;
|
||||
|
||||
/// <summary>
/// MongoDB context for the CollectBus module; exposes the module's collections and
/// applies the CollectBus entity mapping on top of the base ABP model.
/// </summary>
[IgnoreMultiTenancy]
[ConnectionStringName(CollectBusDbProperties.MongoDbConnectionStringName)]
public class CollectBusMongoDbContext : AbpMongoDbContext, ICollectBusMongoDbContext
{
    /* Add mongo collections here. Example:
     * public IMongoCollection<Question> Questions => Collection<Question>();
     */

    public IMongoCollection<MessageReceived> MessageReceiveds => Collection<MessageReceived>();
    public IMongoCollection<MessageReceivedLogin> MessageReceivedLogins => Collection<MessageReceivedLogin>();
    public IMongoCollection<MessageReceivedHeartbeat> MessageReceivedHeartbeats => Collection<MessageReceivedHeartbeat>();
    public IMongoCollection<Device> Devices => Collection<Device>();
    public IMongoCollection<ProtocolInfo> ProtocolInfos => Collection<ProtocolInfo>();
    public IMongoCollection<MessageIssued> MessageIssueds => Collection<MessageIssued>();

    /// <summary>
    /// Builds the Mongo model: base ABP configuration plus the CollectBus mappings.
    /// (A large block of commented-out index-creation experiments was removed here;
    /// recover it from version control if index setup is revisited.)
    /// </summary>
    protected override void CreateModel(IMongoModelBuilder modelBuilder)
    {
        base.CreateModel(modelBuilder);
        modelBuilder.ConfigureCollectBus();
    }
}
|
||||
@ -1,60 +0,0 @@
|
||||
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
|
||||
using MongoDB.Driver;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Volo.Abp.Domain.Repositories;
|
||||
|
||||
namespace JiShe.CollectBus.Repository.MeterReadingRecord
|
||||
{
|
||||
/// <summary>
/// Repository for meter-reading records stored in time-sharded MongoDB collections.
/// The optional <c>DateTime?</c> parameters select the shard; null means the
/// current-time shard.
/// </summary>
public interface IMeterReadingRecordRepository : IRepository<MeterReadingRecords, Guid>
{
    /// <summary>
    /// Bulk insert into the shard for <paramref name="dateTime"/> (current shard when null).
    /// </summary>
    /// <param name="entities">Records to insert.</param>
    /// <param name="dateTime">Shard-selection timestamp; null selects the current shard.</param>
    Task InsertManyAsync(List<MeterReadingRecords> entities,
        DateTime? dateTime);

    /// <summary>
    /// Single insert into the shard for <paramref name="dateTime"/> (current shard when null).
    /// </summary>
    /// <param name="entity">Record to insert.</param>
    /// <param name="dateTime">Shard-selection timestamp; null selects the current shard.</param>
    Task<MeterReadingRecords> InsertAsync(MeterReadingRecords entity, DateTime? dateTime);

    /// <summary>
    /// Single update in the shard derived from the entity's creation time.
    /// </summary>
    /// <param name="filter">Filter, e.g. <c>Builders&lt;MeterReadingRecords&gt;.Filter.Eq(x =&gt; x.Id, filter.Id)</c>.</param>
    /// <param name="update">Update definition, e.g. <c>Builders&lt;MeterReadingRecords&gt;.Update.Set(x =&gt; x.Processed, true).Set(x =&gt; x.ProcessedTime, Clock.Now)</c>.</param>
    /// <param name="entity">Entity whose creation time selects the shard.</param>
    Task<MeterReadingRecords> UpdateOneAsync(FilterDefinition<MeterReadingRecords> filter, UpdateDefinition<MeterReadingRecords> update, MeterReadingRecords entity);

    /// <summary>
    /// Fetches the first matching record from the shard for <paramref name="dateTime"/>.
    /// NOTE(review): "FirOrDefaultAsync" is a typo of "FirstOrDefaultAsync", kept
    /// unchanged because renaming would break the implementation and all callers.
    /// </summary>
    /// <param name="entity">Template entity supplying the match criteria.</param>
    /// <param name="dateTime">Shard-selection timestamp.</param>
    Task<MeterReadingRecords> FirOrDefaultAsync(MeterReadingRecords entity, DateTime? dateTime);

    /// <summary>
    /// Queries all shards whose period intersects [startTime, endTime] in parallel
    /// and merges the results.
    /// </summary>
    /// <param name="startTime">Inclusive range start.</param>
    /// <param name="endTime">Inclusive range end.</param>
    Task<List<MeterReadingRecords>> ParallelQueryAsync(DateTime startTime, DateTime endTime);
}
|
||||
}
|
||||
@ -1,173 +0,0 @@
|
||||
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
|
||||
using JiShe.CollectBus.MongoDB;
|
||||
using JiShe.CollectBus.ShardingStrategy;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Driver;
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Volo.Abp.Domain.Entities;
|
||||
using Volo.Abp.Domain.Repositories.MongoDB;
|
||||
using Volo.Abp.MongoDB;
|
||||
using Volo.Abp.MongoDB.DistributedEvents;
|
||||
using Volo.Abp.Timing;
|
||||
using static System.Net.Mime.MediaTypeNames;
|
||||
|
||||
namespace JiShe.CollectBus.Repository.MeterReadingRecord
|
||||
{
|
||||
/// <summary>
/// Sharded repository for meter-reading records: every read/write is routed to a
/// per-period MongoDB collection chosen by the sharding strategy.
/// NOTE: the driver is called directly, so the ABP unit of work and the
/// <c>autoSave</c> flags on the overridden members are bypassed.
/// </summary>
public class MeterReadingRecordRepository : MongoDbRepository<CollectBusMongoDbContext, MeterReadingRecords, Guid>, IMeterReadingRecordRepository
{
    private readonly IShardingStrategy<MeterReadingRecords> _shardingStrategy;
    private readonly IMongoDbContextProvider<CollectBusMongoDbContext> _dbContextProvider;

    public MeterReadingRecordRepository(
        IMongoDbContextProvider<CollectBusMongoDbContext> dbContextProvider,
        IShardingStrategy<MeterReadingRecords> shardingStrategy
        )
        : base(dbContextProvider)
    {
        _dbContextProvider = dbContextProvider;
        _shardingStrategy = shardingStrategy;
    }

    /// <summary>
    /// Bulk insert into the current-time shard. <paramref name="autoSave"/> is ignored.
    /// </summary>
    public override async Task<IEnumerable<MeterReadingRecords>> InsertManyAsync(IEnumerable<MeterReadingRecords> entities, bool autoSave = false, CancellationToken cancellationToken = default(CancellationToken))
    {
        var collection = await GetShardedCollection(DateTime.Now);
        // BUGFIX: the cancellation token was previously dropped.
        await collection.InsertManyAsync(entities, cancellationToken: cancellationToken);

        return entities;
    }

    /// <summary>
    /// Bulk insert into the shard for <paramref name="dateTime"/> (current shard when null).
    /// </summary>
    public async Task InsertManyAsync(List<MeterReadingRecords> entities, DateTime? dateTime)
    {
        var collection = await GetShardedCollection(dateTime);
        await collection.InsertManyAsync(entities);
    }

    /// <summary>
    /// Single insert into the current-time shard. <paramref name="autoSave"/> is ignored.
    /// </summary>
    public override async Task<MeterReadingRecords> InsertAsync(MeterReadingRecords entity, bool autoSave = false, CancellationToken cancellationToken = default(CancellationToken))
    {
        var collection = await GetShardedCollection(DateTime.Now);
        // BUGFIX: the cancellation token was previously dropped.
        await collection.InsertOneAsync(entity, cancellationToken: cancellationToken);
        return entity;
    }

    /// <summary>
    /// Single insert into the shard for <paramref name="dateTime"/> (current shard when null).
    /// </summary>
    public async Task<MeterReadingRecords> InsertAsync(MeterReadingRecords entity, DateTime? dateTime)
    {
        var collection = await GetShardedCollection(dateTime);
        await collection.InsertOneAsync(entity);
        return entity;
    }

    /// <summary>
    /// Single update in the shard derived from the entity's creation time.
    /// </summary>
    /// <param name="filter">Filter, e.g. <c>Builders&lt;MeterReadingRecords&gt;.Filter.Eq(x =&gt; x.Id, filter.Id)</c>.</param>
    /// <param name="update">Update definition with the fields to set.</param>
    /// <param name="entity">Entity whose creation time selects the shard.</param>
    public async Task<MeterReadingRecords> UpdateOneAsync(FilterDefinition<MeterReadingRecords> filter, UpdateDefinition<MeterReadingRecords> update, MeterReadingRecords entity)
    {
        var collection = await GetShardedCollection(entity.CreationTime);

        await collection.UpdateOneAsync(filter, update);
        return entity;
    }

    /// <summary>
    /// Fetches the first record matching CreationTime/AFN/Fn/FocusAddress from the
    /// shard for <paramref name="dateTime"/>. (Name typo kept to match the interface.)
    /// </summary>
    public async Task<MeterReadingRecords> FirOrDefaultAsync(MeterReadingRecords entity, DateTime? dateTime)
    {
        // BUGFIX: the filter below dereferences dateTime.Value, so a null argument
        // previously crashed with InvalidOperationException inside the query. Fail fast
        // with a clear exception instead.
        if (dateTime == null)
            throw new ArgumentNullException(nameof(dateTime));
        var collection = await GetShardedCollection(dateTime);
        var query = await collection.FindAsync(d => d.CreationTime == dateTime.Value && d.AFN == entity.AFN && d.Fn == entity.Fn && d.FocusAddress == entity.FocusAddress);
        return await query.FirstOrDefaultAsync();
    }

    /// <summary>
    /// Queries every shard intersecting [startTime, endTime] in parallel and merges
    /// the results into one list.
    /// </summary>
    public async Task<List<MeterReadingRecords>> ParallelQueryAsync(DateTime startTime, DateTime endTime)
    {
        var collectionNames = _shardingStrategy.GetQueryCollectionNames(startTime, endTime);
        var database = await GetDatabaseAsync();

        var tasks = collectionNames.Select(async name =>
        {
            var collection = database.GetCollection<MeterReadingRecords>(name);
            var filter = Builders<MeterReadingRecords>.Filter.And(
                Builders<MeterReadingRecords>.Filter.Gte(x => x.CreationTime, startTime),
                Builders<MeterReadingRecords>.Filter.Lte(x => x.CreationTime, endTime)
            );
            return await collection.Find(filter).ToListAsync();
        });

        var results = await Task.WhenAll(tasks);
        return results.SelectMany(r => r).ToList();
    }

    /// <summary>
    /// Resolves the sharded collection for <paramref name="dateTime"/>; null selects
    /// the strategy's current collection.
    /// </summary>
    private async Task<IMongoCollection<MeterReadingRecords>> GetShardedCollection(DateTime? dateTime)
    {
        var database = await GetDatabaseAsync();
        string collectionName = dateTime != null
            ? _shardingStrategy.GetCollectionName(dateTime.Value)
            : _shardingStrategy.GetCurrentCollectionName();

        return database.GetCollection<MeterReadingRecords>(collectionName);
    }
}
|
||||
}
|
||||
@ -1,60 +0,0 @@
|
||||
using JiShe.CollectBus.Common.Extensions;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using Volo.Abp.DependencyInjection;
|
||||
|
||||
namespace JiShe.CollectBus.ShardingStrategy
|
||||
{
|
||||
/// <summary>
/// Time-based sharding strategy: collection name = entity name + a period suffix
/// produced by <c>GetDataTableShardingStrategy()</c>.
/// NOTE(review): the class name says "day" sharding but
/// <see cref="GetQueryCollectionNames"/> advances month-by-month; the real
/// granularity comes from the extension method — confirm the two agree, otherwise
/// range queries can skip shards.
/// </summary>
/// <typeparam name="TEntity">Entity type whose name prefixes the collection.</typeparam>
public class DayShardingStrategy<TEntity> : IShardingStrategy<TEntity>
{
    /// <summary>
    /// Collection name for the shard containing <paramref name="dateTime"/>.
    /// </summary>
    public string GetCollectionName(DateTime dateTime)
    {
        var baseName = typeof(TEntity).Name;
        return $"{baseName}_{dateTime.GetDataTableShardingStrategy()}";
    }

    /// <summary>
    /// Collection name for the shard containing the current time.
    /// </summary>
    public string GetCurrentCollectionName()
    {
        var baseName = typeof(TEntity).Name;
        return $"{baseName}_{DateTime.Now.GetDataTableShardingStrategy()}";
    }

    /// <summary>
    /// Enumerates the distinct candidate collection names covering
    /// [startTime, endTime]; null bounds default to DateTime.MinValue/MaxValue.
    /// </summary>
    public IEnumerable<string> GetQueryCollectionNames(DateTime? startTime, DateTime? endTime)
    {
        var months = new List<string>();
        var current = startTime ?? DateTime.MinValue;
        var end = endTime ?? DateTime.MaxValue;
        var baseName = typeof(TEntity).Name;

        while (current <= end)
        {
            months.Add($"{baseName}_{current.GetDataTableShardingStrategy()}");
            // BUGFIX: DateTime.AddMonths throws ArgumentOutOfRangeException once the
            // result would exceed DateTime.MaxValue, so the open-ended case
            // (endTime == null) previously crashed at the final iteration.
            if (current > DateTime.MaxValue.AddMonths(-1))
                break;
            current = current.AddMonths(1);
        }

        return months.Distinct();
    }
}
|
||||
}
|
||||
@ -1,36 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.ShardingStrategy
|
||||
{
|
||||
/// <summary>
/// Strategy for routing entities to time-sharded storage collections.
/// </summary>
/// <typeparam name="TEntity">Entity type the strategy shards.</typeparam>
public interface IShardingStrategy<TEntity>
{
    /// <summary>
    /// Gets the collection name for the shard containing <paramref name="dateTime"/>.
    /// </summary>
    /// <param name="dateTime">Timestamp that selects the shard.</param>
    /// <returns>The sharded collection name.</returns>
    string GetCollectionName(DateTime dateTime);

    /// <summary>
    /// Gets the collection name for the shard containing the current time.
    /// </summary>
    /// <returns>The current sharded collection name.</returns>
    string GetCurrentCollectionName();

    /// <summary>
    /// Enumerates the candidate collection names for a range query; null bounds
    /// mean "unbounded" on that side.
    /// </summary>
    /// <param name="startTime">Inclusive range start, or null for no lower bound.</param>
    /// <param name="endTime">Inclusive range end, or null for no upper bound.</param>
    /// <returns>The collection names whose period intersects the range.</returns>
    IEnumerable<string> GetQueryCollectionNames(DateTime? startTime = null,
        DateTime? endTime = null);
}
|
||||
}
|
||||
@ -1,21 +0,0 @@
|
||||
using JiShe.CollectBus.Protocol.Contracts.Interfaces;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Volo.Abp;
|
||||
using Volo.Abp.Modularity;
|
||||
|
||||
namespace JiShe.CollectBus.Protocol.Test
|
||||
{
|
||||
public class JiSheCollectBusProtocolModule : AbpModule
|
||||
{
|
||||
public override void ConfigureServices(ServiceConfigurationContext context)
|
||||
{
|
||||
context.Services.AddKeyedSingleton<IProtocolPlugin, TestProtocolPlugin>(nameof(TestProtocolPlugin));
|
||||
}
|
||||
|
||||
public override void OnApplicationInitialization(ApplicationInitializationContext context)
|
||||
{
|
||||
var protocol = context.ServiceProvider.GetRequiredKeyedService<IProtocolPlugin>(nameof(TestProtocolPlugin));
|
||||
protocol.AddAsync();
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,155 +0,0 @@
|
||||
using JiShe.CollectBus.Common.Enums;
|
||||
using JiShe.CollectBus.Common.Extensions;
|
||||
using JiShe.CollectBus.Common.Models;
|
||||
using JiShe.CollectBus.IotSystems.MessageReceiveds;
|
||||
using JiShe.CollectBus.IotSystems.Protocols;
|
||||
using JiShe.CollectBus.Protocol.Contracts.Abstracts;
|
||||
|
||||
namespace JiShe.CollectBus.Protocol.Test
|
||||
{
|
||||
public class TestProtocolPlugin : BaseProtocolPlugin
|
||||
{
|
||||
/// <summary>
|
||||
/// Initializes a new instance of the <see cref="TestProtocolPlugin"/> class.
|
||||
/// </summary>
|
||||
/// <param name="serviceProvider">The service provider.</param>
|
||||
public TestProtocolPlugin(IServiceProvider serviceProvider) : base(serviceProvider)
|
||||
{
|
||||
}
|
||||
|
||||
public sealed override ProtocolInfo Info => new(nameof(TestProtocolPlugin), "Test", "TCP", "Test协议", "DTS1980-Test");
|
||||
|
||||
public override async Task<T> AnalyzeAsync<T>(MessageReceived messageReceived, Action<byte[]>? sendAction = null)
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
#region 上行命令
|
||||
|
||||
//68
|
||||
//32 00
|
||||
//32 00
|
||||
//68
|
||||
//C9 1100'1001. 控制域C。
|
||||
// D7=1, (终端发送)上行方向。
|
||||
// D6=1, 此帧来自启动站。
|
||||
// D5=0, (上行方向)要求访问位。表示终端无事件数据等待访问。
|
||||
// D4=0, 保留
|
||||
// D3~D0=9, 功能码。链路测试
|
||||
|
||||
//20 32 行政区划码
|
||||
//90 26 终端地址
|
||||
//00 主站地址和组地址标志。终端为单地址。 //3220 09 87 2
|
||||
// 终端启动的发送帧的 MSA 应为 0, 其主站响应帧的 MSA 也应为 0.
|
||||
//02 应用层功能码。AFN=2, 链路接口检测
|
||||
//70 0111'0000. 帧序列域。无时间标签、单帧、需要确认。
|
||||
//00 00 信息点。DA1和DA2全为“0”时,表示终端信息点。
|
||||
//01 00 信息类。F1, 登录。
|
||||
//44 帧尾,包含用户区数据校验和
|
||||
//16 帧结束标志
|
||||
|
||||
/// <summary>
|
||||
/// 解析上行命令
|
||||
/// </summary>
|
||||
/// <param name="cmd"></param>
|
||||
/// <returns></returns>
|
||||
public CommandReulst? AnalysisCmd(string cmd)
|
||||
{
|
||||
CommandReulst? commandReulst = null;
|
||||
var hexStringList = cmd.StringToPairs();
|
||||
|
||||
if (hexStringList.Count < hearderLen)
|
||||
{
|
||||
return commandReulst;
|
||||
}
|
||||
//验证起始字符
|
||||
if (!hexStringList[0].IsStartStr() || !hexStringList[5].IsStartStr())
|
||||
{
|
||||
return commandReulst;
|
||||
}
|
||||
|
||||
var lenHexStr = $"{hexStringList[2]}{hexStringList[1]}";
|
||||
var lenBin = lenHexStr.HexToBin();
|
||||
var len = lenBin.Remove(lenBin.Length - 2).BinToDec();
|
||||
//验证长度
|
||||
if (hexStringList.Count - 2 != hearderLen + len)
|
||||
return commandReulst;
|
||||
|
||||
var userDataIndex = hearderLen;
|
||||
var c = hexStringList[userDataIndex];//控制域 1字节
|
||||
userDataIndex += 1;
|
||||
|
||||
var aHexList = hexStringList.Skip(userDataIndex).Take(5).ToList();//地址域 5字节
|
||||
var a = AnalysisA(aHexList);
|
||||
var a3Bin = aHexList[4].HexToBin().PadLeft(8, '0');
|
||||
var mSA = a3Bin.Substring(0, 7).BinToDec();
|
||||
userDataIndex += 5;
|
||||
|
||||
var aFN = (AFN)hexStringList[userDataIndex].HexToDec();//1字节
|
||||
userDataIndex += 1;
|
||||
|
||||
var seq = hexStringList[userDataIndex].HexToBin().PadLeft(8, '0');
|
||||
var tpV = (TpV)Convert.ToInt32(seq.Substring(0, 1));
|
||||
var fIRFIN = (FIRFIN)Convert.ToInt32(seq.Substring(1, 2));
|
||||
var cON = (CON)Convert.ToInt32(seq.Substring(3, 1));
|
||||
var prseqBin = seq.Substring(4, 4);
|
||||
userDataIndex += 1;
|
||||
|
||||
// (DA2 - 1) * 8 + DA1 = pn
|
||||
var da1Bin = hexStringList[userDataIndex].HexToBin();
|
||||
var da1 = da1Bin == "0" ? 0 : da1Bin.Length;
|
||||
userDataIndex += 1;
|
||||
var da2 = hexStringList[userDataIndex].HexToDec();
|
||||
var pn = da2 == 0 ? 0 : (da2 - 1) * 8 + da1;
|
||||
userDataIndex += 1;
|
||||
//(DT2*8)+DT1=fn
|
||||
var dt1Bin = hexStringList[userDataIndex].HexToBin();
|
||||
var dt1 = dt1Bin != "0" ? dt1Bin.Length : 0;
|
||||
userDataIndex += 1;
|
||||
var dt2 = hexStringList[userDataIndex].HexToDec();
|
||||
var fn = dt2 * 8 + dt1;
|
||||
userDataIndex += 1;
|
||||
|
||||
//数据单元
|
||||
var datas = hexStringList.Skip(userDataIndex).Take(len + hearderLen - userDataIndex).ToList();
|
||||
|
||||
//EC
|
||||
//Tp
|
||||
commandReulst = new CommandReulst()
|
||||
{
|
||||
A = a,
|
||||
MSA = mSA,
|
||||
AFN = aFN,
|
||||
Seq = new Seq()
|
||||
{
|
||||
TpV = tpV,
|
||||
FIRFIN = fIRFIN,
|
||||
CON = cON,
|
||||
PRSEQ = prseqBin.BinToDec(),
|
||||
},
|
||||
CmdLength = len,
|
||||
Pn = pn,
|
||||
Fn = fn,
|
||||
HexDatas = datas
|
||||
};
|
||||
|
||||
return commandReulst;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// 解析地址
|
||||
/// </summary>
|
||||
/// <param name="aHexList"></param>
|
||||
/// <returns></returns>
|
||||
private string AnalysisA(List<string> aHexList)
|
||||
{
|
||||
var a1 = aHexList[1] + aHexList[0];
|
||||
var a2 = aHexList[3] + aHexList[2];
|
||||
var a2Dec = a2.HexToDec();
|
||||
var a3 = aHexList[4];
|
||||
var a = $"{a1}{a2Dec.ToString().PadLeft(5, '0')}";
|
||||
return a;
|
||||
}
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@ -1,28 +0,0 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<BaseOutputPath></BaseOutputPath>
|
||||
<LangVersion>preview</LangVersion>
|
||||
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="DotNetCore.CAP" Version="8.3.1" />
|
||||
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="8.0.2" />
|
||||
<PackageReference Include="Volo.Abp.Core" Version="8.3.3" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\services\JiShe.CollectBus.Domain\JiShe.CollectBus.Domain.csproj" />
|
||||
<ProjectReference Include="..\..\shared\JiShe.CollectBus.Common\JiShe.CollectBus.Common.csproj" />
|
||||
<ProjectReference Include="..\..\protocols\JiShe.CollectBus.Protocol.Contracts\JiShe.CollectBus.Protocol.Contracts.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<Target Name="PostBuild" AfterTargets="PostBuildEvent">
|
||||
<Exec Command="copy $(TargetDir)JiShe.CollectBus.Protocol.dll $(ProjectDir)..\..\web\JiShe.CollectBus.Host\Plugins\" />
|
||||
</Target>
|
||||
|
||||
</Project>
|
||||
@ -17,7 +17,7 @@
|
||||
- V4 → V5核心升级:
|
||||
- 微服务化架构改造
|
||||
- 统一配置管理中心
|
||||
- 支持Kafka引擎
|
||||
- 支持Kafka/RabbitMQ双引擎
|
||||
- 新增边缘计算能力
|
||||
- 资源利用率提升40%
|
||||
|
||||
@ -82,7 +82,7 @@ Body:
|
||||
|----------------|------------|--------------|
|
||||
| 最大连接数 | 10,000 | 线性扩展 |
|
||||
| 数据处理延迟 | <50ms(p99) | - |
|
||||
| 吞吐量 | 20,000 TPS | 十万级TPS |
|
||||
| 吞吐量 | 20,000 TPS | 百万级TPS |
|
||||
| CPU利用率 | ≤70%@峰值 | 自动负载均衡 |
|
||||
|
||||
## 6. 高可用设计
|
||||
|
||||
@ -1,20 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.DataMigration
|
||||
{
|
||||
/// <summary>
|
||||
/// 数据迁移服务
|
||||
/// </summary>
|
||||
public interface IDataMigrationService
|
||||
{
|
||||
/// <summary>
|
||||
/// 开始迁移
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
Task StartMigrationAsync();
|
||||
}
|
||||
}
|
||||
@ -1,39 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.DataMigration.Options
|
||||
{
|
||||
/// <summary>
|
||||
/// 数据迁移配置
|
||||
/// </summary>
|
||||
public class DataMigrationOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// MongoDb每批处理量
|
||||
/// </summary>
|
||||
public int MongoDbDataBatchSize { get; set; } = 1000;
|
||||
|
||||
/// <summary>
|
||||
/// 批量处理通道容量
|
||||
/// </summary>
|
||||
public int ChannelCapacity { get; set; } = 100;
|
||||
|
||||
/// <summary>
|
||||
/// 数据库 每批处理量
|
||||
/// </summary>
|
||||
public int SqlBulkBatchSize { get; set; } = 1000;
|
||||
|
||||
/// <summary>
|
||||
/// 数据库 每批处理超时时间
|
||||
/// </summary>
|
||||
public int SqlBulkTimeout { get; set; } = 60;
|
||||
|
||||
/// <summary>
|
||||
/// 处理器数量
|
||||
/// </summary>
|
||||
public int ProcessorsCount { get; set; } = 4;
|
||||
}
|
||||
}
|
||||
@ -1,13 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using Volo.Abp.Application.Services;
|
||||
|
||||
namespace JiShe.CollectBus.EnergySystem
|
||||
{
|
||||
public interface ICacheAppService : IApplicationService
|
||||
{
|
||||
}
|
||||
}
|
||||
@ -1,176 +0,0 @@
|
||||
using JiShe.CollectBus.Common.Models;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace JiShe.CollectBus.Application.Contracts
|
||||
{
|
||||
/// <summary>
|
||||
/// 数据缓存服务接口
|
||||
/// </summary>
|
||||
public interface IRedisDataCacheService
|
||||
{
|
||||
/// <summary>
|
||||
/// 单个添加数据
|
||||
/// </summary>
|
||||
/// <typeparam name="T"></typeparam>
|
||||
/// <param name="redisHashCacheKey">主数据存储Hash缓存Key</param>
|
||||
/// <param name="redisSetIndexCacheKey">Set索引缓存Key</param>
|
||||
/// <param name="redisZSetScoresIndexCacheKey">ZSET索引缓存Key</param>
|
||||
/// <param name="data">待缓存数据</param>
|
||||
/// <returns></returns>
|
||||
Task InsertDataAsync<T>(
|
||||
string redisHashCacheKey,
|
||||
string redisSetIndexCacheKey,
|
||||
string redisZSetScoresIndexCacheKey,
|
||||
T data) where T : DeviceCacheBasicModel;
|
||||
|
||||
/// <summary>
|
||||
/// 批量添加数据
|
||||
/// </summary>
|
||||
/// <typeparam name="T"></typeparam>
|
||||
/// <param name="redisHashCacheKey">主数据存储Hash缓存Key</param>
|
||||
/// <param name="redisSetIndexCacheKey">Set索引缓存Key</param>
|
||||
/// <param name="redisZSetScoresIndexCacheKey">ZSET索引缓存Key</param>
|
||||
/// <param name="items">待缓存数据集合</param>
|
||||
/// <returns></returns>
|
||||
Task BatchInsertDataAsync<T>(
|
||||
string redisHashCacheKey,
|
||||
string redisSetIndexCacheKey,
|
||||
string redisZSetScoresIndexCacheKey,
|
||||
IEnumerable<T> items) where T : DeviceCacheBasicModel;
|
||||
|
||||
/// <summary>
|
||||
/// 删除缓存信息
|
||||
/// </summary>
|
||||
/// <typeparam name="T"></typeparam>
|
||||
/// <param name="redisHashCacheKey">主数据存储Hash缓存Key</param>
|
||||
/// <param name="redisSetIndexCacheKey">Set索引缓存Key</param>
|
||||
/// <param name="redisZSetScoresIndexCacheKey">ZSET索引缓存Key</param>
|
||||
/// <param name="data">已缓存数据</param>
|
||||
/// <returns></returns>
|
||||
Task RemoveCacheDataAsync<T>(
|
||||
string redisHashCacheKey,
|
||||
string redisSetIndexCacheKey,
|
||||
string redisZSetScoresIndexCacheKey,
|
||||
T data) where T : DeviceCacheBasicModel;
|
||||
|
||||
/// <summary>
|
||||
/// 修改缓存信息,映射关系未发生改变
|
||||
/// </summary>
|
||||
/// <typeparam name="T"></typeparam>
|
||||
/// <param name="redisHashCacheKey">主数据存储Hash缓存Key</param>
|
||||
/// <param name="redisSetIndexCacheKey">Set索引缓存Key</param>
|
||||
/// <param name="redisZSetScoresIndexCacheKey">ZSET索引缓存Key</param>
|
||||
/// <param name="newData">待修改缓存数据</param>
|
||||
/// <returns></returns>
|
||||
Task ModifyDataAsync<T>(
|
||||
string redisHashCacheKey,
|
||||
string redisSetIndexCacheKey,
|
||||
string redisZSetScoresIndexCacheKey,
|
||||
T newData) where T : DeviceCacheBasicModel;
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// 修改缓存信息,映射关系已改变
|
||||
/// </summary>
|
||||
/// <typeparam name="T"></typeparam>
|
||||
/// <param name="redisHashCacheKey">主数据存储Hash缓存Key</param>
|
||||
/// <param name="redisSetIndexCacheKey">Set索引缓存Key</param>
|
||||
/// <param name="oldMemberId">旧的映射关系</param>
|
||||
/// <param name="redisZSetScoresIndexCacheKey">ZSET索引缓存Key</param>
|
||||
/// <param name="newData">待修改缓存数据</param>
|
||||
/// <returns></returns>
|
||||
Task ModifyDataAsync<T>(
|
||||
string redisHashCacheKey,
|
||||
string redisSetIndexCacheKey,
|
||||
string oldMemberId,
|
||||
string redisZSetScoresIndexCacheKey,
|
||||
T newData) where T : DeviceCacheBasicModel;
|
||||
|
||||
///// <summary>
|
||||
///// 通过集中器与表计信息排序索引获取数据
|
||||
///// </summary>
|
||||
///// <typeparam name="T"></typeparam>
|
||||
///// <param name="redisHashCacheKey">主数据存储Hash缓存Key</param>
|
||||
///// <param name="redisZSetScoresIndexCacheKey">ZSET索引缓存Key</param>
|
||||
///// <param name="pageSize">分页尺寸</param>
|
||||
///// <param name="lastScore">最后一个索引</param>
|
||||
///// <param name="lastMember">最后一个唯一标识</param>
|
||||
///// <param name="descending">排序方式</param>
|
||||
///// <returns></returns>
|
||||
//Task<BusCacheGlobalPagedResult<T>> GetPagedData<T>(
|
||||
//string redisHashCacheKey,
|
||||
//string redisZSetScoresIndexCacheKey,
|
||||
//IEnumerable<int> focusIds,
|
||||
//int pageSize = 10,
|
||||
//decimal? lastScore = null,
|
||||
//string lastMember = null,
|
||||
//bool descending = true)
|
||||
//where T : DeviceCacheBasicModel;
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// 通过ZSET索引获取数据,支持10万级别数据处理,控制在13秒以内。
|
||||
/// </summary>
|
||||
/// <typeparam name="T"></typeparam>
|
||||
/// <param name="redisHashCacheKey">主数据存储Hash缓存Key</param>
|
||||
/// <param name="redisZSetScoresIndexCacheKey">ZSET索引缓存Key</param>
|
||||
/// <param name="pageSize">分页尺寸</param>
|
||||
/// <param name="lastScore">最后一个索引</param>
|
||||
/// <param name="lastMember">最后一个唯一标识</param>
|
||||
/// <param name="descending">排序方式</param>
|
||||
/// <returns></returns>
|
||||
Task<BusCacheGlobalPagedResult<T>> GetAllPagedData<T>(
|
||||
string redisHashCacheKey,
|
||||
string redisZSetScoresIndexCacheKey,
|
||||
int pageSize = 1000,
|
||||
decimal? lastScore = null,
|
||||
string lastMember = null,
|
||||
bool descending = true)
|
||||
where T : DeviceCacheBasicModel;
|
||||
|
||||
|
||||
///// <summary>
|
||||
///// 游标分页查询
|
||||
///// </summary>
|
||||
///// <param name="redisZSetScoresIndexCacheKey">排序索引ZSET缓存Key</param>
|
||||
///// <param name="pageSize">分页数量</param>
|
||||
///// <param name="startScore">开始索引</param>
|
||||
///// <param name="excludeMember">开始唯一标识</param>
|
||||
///// <param name="descending">排序方式</param>
|
||||
///// <returns></returns>
|
||||
//Task<(List<string> Members, bool HasNext)> GetPagedMembers(
|
||||
// string redisZSetScoresIndexCacheKey,
|
||||
// int pageSize,
|
||||
// decimal? startScore,
|
||||
// string excludeMember,
|
||||
// bool descending);
|
||||
|
||||
///// <summary>
|
||||
///// 批量获取指定分页的数据
|
||||
///// </summary>
|
||||
///// <typeparam name="T"></typeparam>
|
||||
///// <param name="redisHashCacheKey">Hash表缓存key</param>
|
||||
///// <param name="members">Hash表字段集合</param>
|
||||
///// <returns></returns>
|
||||
//Task<Dictionary<string, T>> BatchGetData<T>(
|
||||
// string redisHashCacheKey,
|
||||
// IEnumerable<string> members)
|
||||
// where T : DeviceCacheBasicModel;
|
||||
|
||||
///// <summary>
|
||||
///// 获取下一页游标
|
||||
///// </summary>
|
||||
///// <param name="redisZSetScoresIndexCacheKey">排序索引ZSET缓存Key</param>
|
||||
///// <param name="lastMember">最后一个唯一标识</param>
|
||||
///// <param name="descending">排序方式</param>
|
||||
///// <returns></returns>
|
||||
//Task<decimal?> GetNextScore(
|
||||
// string redisZSetScoresIndexCacheKey,
|
||||
// string lastMember,
|
||||
// bool descending);
|
||||
}
|
||||
}
|
||||
@ -1,18 +0,0 @@
|
||||
using JiShe.CollectBus.Common.Attributes;
|
||||
using Volo.Abp.EventBus;
|
||||
|
||||
namespace JiShe.CollectBus.Samples;
|
||||
|
||||
[EventName("Sample.Kafka.Test")]
|
||||
[TopicName("Test1")]
|
||||
public class SampleDto
|
||||
{
|
||||
public int Value { get; set; }
|
||||
}
|
||||
|
||||
[EventName("Sample.Kafka.Test2")]
|
||||
[TopicName("Test2")]
|
||||
public class SampleDto2
|
||||
{
|
||||
public int Value { get; set; }
|
||||
}
|
||||
@ -1,89 +0,0 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
using JiShe.CollectBus.Ammeters;
|
||||
using JiShe.CollectBus.GatherItem;
|
||||
using JiShe.CollectBus.IotSystems.Watermeter;
|
||||
using Volo.Abp.Application.Services;
|
||||
|
||||
namespace JiShe.CollectBus.ScheduledMeterReading
|
||||
{
|
||||
/// <summary>
|
||||
/// 定时任务基础约束
|
||||
/// </summary>
|
||||
public interface IScheduledMeterReadingService : IApplicationService
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// 获取采集项列表
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
Task<List<GatherItemInfo>> GetGatherItemByDataTypes();
|
||||
|
||||
/// <summary>
|
||||
/// 构建待处理的下发指令任务处理
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
Task CreateToBeIssueTasks();
|
||||
|
||||
#region 电表采集处理
|
||||
/// <summary>
|
||||
/// 获取电表信息
|
||||
/// </summary>
|
||||
/// <param name="gatherCode">采集端Code</param>
|
||||
/// <returns></returns>
|
||||
Task<List<AmmeterInfo>> GetAmmeterInfoList(string gatherCode = "");
|
||||
|
||||
/// <summary>
|
||||
/// 初始化电表缓存数据
|
||||
/// </summary>
|
||||
/// <param name="gatherCode">采集端Code</param>
|
||||
/// <returns></returns>
|
||||
Task InitAmmeterCacheData(string gatherCode = "");
|
||||
|
||||
/// <summary>
|
||||
/// 1分钟采集电表数据,只获取任务数据下发,不构建任务
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
Task AmmeterScheduledMeterOneMinuteReading();
|
||||
|
||||
/// <summary>
|
||||
/// 5分钟采集电表数据,只获取任务数据下发,不构建任务
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
Task AmmeterScheduledMeterFiveMinuteReading();
|
||||
|
||||
/// <summary>
|
||||
/// 15分钟采集电表数据,只获取任务数据下发,不构建任务
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
Task AmmeterScheduledMeterFifteenMinuteReading();
|
||||
|
||||
#endregion
|
||||
|
||||
|
||||
#region 水表采集处理
|
||||
/// <summary>
|
||||
/// 获取水表信息
|
||||
/// </summary>
|
||||
/// <param name="gatherCode">采集端Code</param>
|
||||
/// <returns></returns>
|
||||
Task<List<WatermeterInfo>> GetWatermeterInfoList(string gatherCode = "");
|
||||
|
||||
/// <summary>
|
||||
/// 初始化水表缓存数据,只获取任务数据下发,不构建任务
|
||||
/// </summary>
|
||||
/// <param name="gatherCode">采集端Code</param>
|
||||
/// <returns></returns>
|
||||
Task InitWatermeterCacheData(string gatherCode = "");
|
||||
|
||||
/// <summary>
|
||||
/// 水表数据采集
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
Task WatermeterScheduledMeterAutoReading();
|
||||
|
||||
#endregion
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
@ -1,17 +0,0 @@
|
||||
using System.Threading.Tasks;
|
||||
using JiShe.CollectBus.Common.Models;
|
||||
using JiShe.CollectBus.IotSystems.MessageReceiveds;
|
||||
using JiShe.CollectBus.Kafka;
|
||||
using Volo.Abp.Application.Services;
|
||||
|
||||
namespace JiShe.CollectBus.Subscribers
|
||||
{
|
||||
public interface ISubscriberAppService : IApplicationService
|
||||
{
|
||||
Task<ISubscribeAck> LoginIssuedEvent(IssuedEventMessage issuedEventMessage);
|
||||
Task<ISubscribeAck> HeartbeatIssuedEvent(IssuedEventMessage issuedEventMessage);
|
||||
Task<ISubscribeAck> ReceivedEvent(MessageReceived receivedMessage);
|
||||
Task<ISubscribeAck> ReceivedHeartbeatEvent(MessageReceivedHeartbeat receivedHeartbeatMessage);
|
||||
Task<ISubscribeAck> ReceivedLoginEvent(MessageReceivedLogin receivedLoginMessage);
|
||||
}
|
||||
}
|
||||
@ -1,47 +0,0 @@
|
||||
using JiShe.CollectBus.IotSystems.MessageIssueds;
|
||||
using JiShe.CollectBus.IotSystems.MessageReceiveds;
|
||||
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
|
||||
using JiShe.CollectBus.Kafka;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
using Volo.Abp.Application.Services;
|
||||
|
||||
namespace JiShe.CollectBus.Subscribers
|
||||
{
|
||||
/// <summary>
|
||||
/// 定时抄读任务消息订阅
|
||||
/// </summary>
|
||||
public interface IWorkerSubscriberAppService : IApplicationService
|
||||
{
|
||||
|
||||
#region 电表消息采集
|
||||
|
||||
/// <summary>
|
||||
/// 1分钟采集电表数据下行消息消费订阅
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
Task<ISubscribeAck> AmmeterScheduledMeterOneMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage issuedEventMessage);
|
||||
|
||||
/// <summary>
|
||||
/// 5分钟采集电表数据下行消息消费订阅
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
Task<ISubscribeAck> AmmeterScheduledMeterFiveMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage issuedEventMessage);
|
||||
|
||||
/// <summary>
|
||||
/// 15分钟采集电表数据下行消息消费订阅
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
Task<ISubscribeAck> AmmeterScheduledMeterFifteenMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage issuedEventMessage);
|
||||
#endregion
|
||||
|
||||
#region 水表消息采集
|
||||
/// <summary>
|
||||
/// 1分钟采集水表数据下行消息消费订阅
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
Task<ISubscribeAck> WatermeterSubscriberWorkerAutoReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage issuedEventMessage);
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@ -1,224 +0,0 @@
|
||||
using JiShe.CollectBus.Common.Consts;
|
||||
using JiShe.CollectBus.Common.Enums;
|
||||
using JiShe.CollectBus.Common.Extensions;
|
||||
using JiShe.CollectBus.Common.Helpers;
|
||||
using JiShe.CollectBus.FreeSql;
|
||||
using JiShe.CollectBus.Localization;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using JiShe.CollectBus.FreeRedis;
|
||||
using Volo.Abp.Application.Services;
|
||||
|
||||
namespace JiShe.CollectBus;
|
||||
|
||||
[ApiExplorerSettings(GroupName = CollectBusDomainSharedConsts.Business)]
|
||||
public abstract class CollectBusAppService : ApplicationService
|
||||
{
|
||||
public IFreeSqlProvider SqlProvider => LazyServiceProvider.LazyGetRequiredService<IFreeSqlProvider>();
|
||||
protected IFreeRedisProvider FreeRedisProvider => LazyServiceProvider.LazyGetService<IFreeRedisProvider>()!;
|
||||
|
||||
|
||||
protected CollectBusAppService()
|
||||
{
|
||||
LocalizationResource = typeof(CollectBusResource);
|
||||
ObjectMapperContext = typeof(CollectBusApplicationModule);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Lua脚本批量获取缓存的表计信息
|
||||
/// </summary>
|
||||
/// <typeparam name="T">表信息数据对象</typeparam>
|
||||
/// <param name="redisKeys">采集频率对应的缓存Key集合</param>
|
||||
/// <param name="systemType"><see cref="SystemTypeConst"/> 系统类型</param>
|
||||
/// <param name="serverTagName">服务器标识</param>
|
||||
/// <param name="timeDensity">采集频率,1分钟、5分钟、15分钟</param>
|
||||
/// <param name="meterType"><see cref="MeterTypeEnum"/> 表计类型</param>
|
||||
/// <returns></returns>
|
||||
protected async Task<Dictionary<string, Dictionary<string, T>>> GetMeterRedisCacheDictionaryData<T>(string[] redisKeys, string systemType, string serverTagName, string timeDensity, MeterTypeEnum meterType) where T : class
|
||||
{
|
||||
if (redisKeys == null || redisKeys.Length <= 0 || string.IsNullOrWhiteSpace(systemType) || string.IsNullOrWhiteSpace(serverTagName) || string.IsNullOrWhiteSpace(timeDensity))
|
||||
{
|
||||
throw new Exception($"{nameof(GetMeterRedisCacheDictionaryData)} 获取缓存的表计信息失败,参数异常,-101");
|
||||
}
|
||||
|
||||
var meterInfos = new Dictionary<string, Dictionary<string, T>>();
|
||||
var luaScript = @"
|
||||
local results = {}
|
||||
for i, key in ipairs(KEYS) do
|
||||
local data = redis.call('HGETALL', key)
|
||||
results[i] = {key, data}
|
||||
end
|
||||
return results";
|
||||
|
||||
// 分页参数:每页处理10000个键
|
||||
int pageSize = 10000;
|
||||
int totalPages = (int)Math.Ceiling(redisKeys.Length / (double)pageSize);
|
||||
|
||||
for (int page = 0; page < totalPages; page++)
|
||||
{
|
||||
// 分页获取当前批次的键
|
||||
var batchKeys = redisKeys
|
||||
.Skip(page * pageSize)
|
||||
.Take(pageSize)
|
||||
.ToArray();
|
||||
|
||||
// 执行Lua脚本获取当前批次数据
|
||||
var merterResult = await FreeRedisProvider.Instance.EvalAsync(luaScript, batchKeys);
|
||||
if (merterResult == null)
|
||||
{
|
||||
throw new Exception($"{nameof(GetMeterRedisCacheDictionaryData)} 获取缓存的表计信息失败,第 {page + 1} 页数据未返回,-102");
|
||||
}
|
||||
|
||||
// 解析当前批次的结果
|
||||
if (merterResult is object[] arr)
|
||||
{
|
||||
foreach (object[] item in arr)
|
||||
{
|
||||
string key = (string)item[0];
|
||||
object[] fieldsAndValues = (object[])item[1];
|
||||
var redisCacheKey = $"{string.Format(RedisConst.CacheMeterInfoHashKey, systemType, serverTagName, meterType, timeDensity)}";
|
||||
string focusAddress = key.Replace(redisCacheKey, "");
|
||||
|
||||
var meterHashs = new Dictionary<string, T>();
|
||||
for (int i = 0; i < fieldsAndValues.Length; i += 2)
|
||||
{
|
||||
string meterId = (string)fieldsAndValues[i];
|
||||
string meterStr = (string)fieldsAndValues[i + 1];
|
||||
|
||||
T meterInfo = default!;
|
||||
if (!string.IsNullOrWhiteSpace(meterStr))
|
||||
{
|
||||
meterInfo = meterStr.Deserialize<T>()!;
|
||||
}
|
||||
if (meterInfo != null)
|
||||
{
|
||||
meterHashs[meterId] = meterInfo;
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new Exception($"{nameof(GetMeterRedisCacheDictionaryData)} 缓存表计数据异常,集中器 {key} 的表计 {meterId} 解析失败,-103");
|
||||
}
|
||||
}
|
||||
|
||||
// 合并到总结果,若存在重复key则覆盖
|
||||
if (meterInfos.ContainsKey(focusAddress))
|
||||
{
|
||||
foreach (var kvp in meterHashs)
|
||||
{
|
||||
meterInfos[focusAddress][kvp.Key] = kvp.Value;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
meterInfos[focusAddress] = meterHashs;
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new Exception($"{nameof(GetMeterRedisCacheDictionaryData)} 第 {page + 1} 页数据解析失败,返回类型不符,-104");
|
||||
}
|
||||
}
|
||||
|
||||
return meterInfos;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Lua脚本批量获取缓存的表计信息
|
||||
/// </summary>
|
||||
/// <typeparam name="T">表信息数据对象</typeparam>
|
||||
/// <param name="redisKeys">采集频率对应的缓存Key集合</param>
|
||||
/// <param name="systemType"><see cref="SystemTypeConst"/> 系统类型</param>
|
||||
/// <param name="serverTagName">服务器标识</param>
|
||||
/// <param name="timeDensity">采集频率,1分钟、5分钟、15分钟</param>
|
||||
/// <param name="meterType"><see cref="MeterTypeEnum"/> 表计类型</param>
|
||||
/// <returns></returns>
|
||||
protected async Task<List<T>> GetMeterRedisCacheListData<T>(string[] redisKeys, string systemType, string serverTagName, string timeDensity, MeterTypeEnum meterType) where T : class
|
||||
{
|
||||
if (redisKeys == null || redisKeys.Length <= 0 ||
|
||||
string.IsNullOrWhiteSpace(systemType) ||
|
||||
string.IsNullOrWhiteSpace(serverTagName) ||
|
||||
string.IsNullOrWhiteSpace(timeDensity))
|
||||
{
|
||||
throw new Exception($"{nameof(GetMeterRedisCacheListData)} 参数异常,-101");
|
||||
}
|
||||
|
||||
var meterInfos = new List<T>();
|
||||
var luaScript = @"
|
||||
local results = {}
|
||||
for i, key in ipairs(KEYS) do
|
||||
local data = redis.call('HGETALL', key)
|
||||
results[i] = {key, data}
|
||||
end
|
||||
return results";
|
||||
|
||||
// 分页参数:每页10000个键
|
||||
int pageSize = 10000;
|
||||
int totalPages = (int)Math.Ceiling(redisKeys.Length / (double)pageSize);
|
||||
|
||||
for (int page = 0; page < totalPages; page++)
|
||||
{
|
||||
// 分页获取当前批次键
|
||||
var batchKeys = redisKeys
|
||||
.Skip(page * pageSize)
|
||||
.Take(pageSize)
|
||||
.ToArray();
|
||||
|
||||
// 执行Lua脚本获取当前页数据
|
||||
var merterResult = await FreeRedisProvider.Instance.EvalAsync(luaScript, batchKeys);
|
||||
if (merterResult == null)
|
||||
{
|
||||
throw new Exception($"{nameof(GetMeterRedisCacheListData)} 第 {page + 1} 页数据未返回,-102");
|
||||
}
|
||||
|
||||
// 解析当前页结果
|
||||
if (merterResult is object[] arr)
|
||||
{
|
||||
foreach (object[] item in arr)
|
||||
{
|
||||
string key = (string)item[0];
|
||||
object[] fieldsAndValues = (object[])item[1];
|
||||
var redisCacheKey = string.Format(
|
||||
RedisConst.CacheMeterInfoHashKey,
|
||||
systemType,
|
||||
serverTagName,
|
||||
meterType,
|
||||
timeDensity
|
||||
);
|
||||
string focusAddress = key.Replace(redisCacheKey, "");
|
||||
|
||||
for (int i = 0; i < fieldsAndValues.Length; i += 2)
|
||||
{
|
||||
string meterId = (string)fieldsAndValues[i];
|
||||
string meterStr = (string)fieldsAndValues[i + 1];
|
||||
|
||||
T meterInfo = default!;
|
||||
if (!string.IsNullOrWhiteSpace(meterStr))
|
||||
{
|
||||
meterInfo = meterStr.Deserialize<T>()!;
|
||||
}
|
||||
if (meterInfo != null)
|
||||
{
|
||||
meterInfos.Add(meterInfo);
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new Exception(
|
||||
$"{nameof(GetMeterRedisCacheListData)} 表计 {meterId} 解析失败(页 {page + 1}),-103"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new Exception($"{nameof(GetMeterRedisCacheListData)} 第 {page + 1} 页数据格式错误,-104");
|
||||
}
|
||||
}
|
||||
|
||||
return meterInfos;
|
||||
}
|
||||
}
|
||||
@ -1,84 +0,0 @@
|
||||
using JiShe.CollectBus.Common.Consts;
|
||||
using JiShe.CollectBus.Common.Extensions;
|
||||
using JiShe.CollectBus.FreeSql;
|
||||
using JiShe.CollectBus.Kafka;
|
||||
using JiShe.CollectBus.Kafka.AdminClient;
|
||||
using JiShe.CollectBus.Protocol.Contracts;
|
||||
using JiShe.CollectBus.ScheduledMeterReading;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Reflection;
|
||||
using System.Threading.Tasks;
|
||||
using JiShe.CollectBus.Cassandra;
|
||||
using JiShe.CollectBus.FreeRedis;
|
||||
using JiShe.CollectBus.IoTDB;
|
||||
using Volo.Abp;
|
||||
using Volo.Abp.Application;
|
||||
using Volo.Abp.Autofac;
|
||||
using Volo.Abp.AutoMapper;
|
||||
using Volo.Abp.BackgroundWorkers;
|
||||
using Volo.Abp.BackgroundWorkers.Hangfire;
|
||||
using Volo.Abp.EventBus;
|
||||
using Volo.Abp.Modularity;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace JiShe.CollectBus;
|
||||
|
||||
[DependsOn(
    typeof(CollectBusDomainModule),
    typeof(CollectBusApplicationContractsModule),
    typeof(AbpDddApplicationModule),
    typeof(AbpAutoMapperModule),
    typeof(AbpAutofacModule),
    typeof(AbpBackgroundWorkersHangfireModule),
    typeof(CollectBusFreeRedisModule),
    typeof(CollectBusFreeSqlModule),
    typeof(CollectBusKafkaModule),
    typeof(CollectBusIoTDBModule),
    typeof(CollectBusCassandraModule)
)]
public class CollectBusApplicationModule : AbpModule
{
    /// <summary>
    /// Registers the AutoMapper-based object mapper for this assembly and
    /// validates every mapping profile it declares.
    /// </summary>
    public override void ConfigureServices(ServiceConfigurationContext context)
    {
        context.Services.AddAutoMapperObjectMapper<CollectBusApplicationModule>();
        Configure<AbpAutoMapperOptions>(options =>
        {
            // validate: true fails fast at startup if any mapping profile is incomplete.
            options.AddMaps<CollectBusApplicationModule>(validate: true);
        });
    }

    /// <summary>
    /// On application startup: registers every concrete <c>ICollectWorker</c>
    /// implementation found in this assembly as a background worker, then
    /// pre-creates all Kafka topics used for issued/received messages so that
    /// consumers never race topic creation.
    /// </summary>
    public override async Task OnApplicationInitializationAsync(
        ApplicationInitializationContext context)
    {
        // Fix: also exclude abstract classes — previously only interfaces were
        // filtered out, so an abstract ICollectWorker base would be registered
        // (and fail to instantiate) as a background worker.
        var assembly = Assembly.GetExecutingAssembly();
        var types = assembly.GetTypes()
            .Where(t => typeof(ICollectWorker).IsAssignableFrom(t) && !t.IsInterface && !t.IsAbstract)
            .ToList();
        foreach (var type in types)
        {
            await context.AddBackgroundWorkerAsync(type);
        }

        // Resolve the scheduled meter-reading service up front; the cache
        // initialization calls below are intentionally disabled for now.
        var dbContext = context.ServiceProvider.GetRequiredService<EnergySystemScheduledMeterReadingService>();
        //await dbContext.InitAmmeterCacheData();
        //await dbContext.InitWatermeterCacheData();

        // Pre-create every issued and received topic.
        // (Removed two locals that resolved IConfiguration without using it.)
        var kafkaAdminClient = context.ServiceProvider.GetRequiredService<IAdminClientService>();
        var kafkaOptions = context.ServiceProvider.GetRequiredService<IOptions<KafkaOptionConfig>>();

        List<string> topics = ProtocolConstExtensions.GetAllTopicNamesByIssued();
        topics.AddRange(ProtocolConstExtensions.GetAllTopicNamesByReceived());

        foreach (var item in topics)
        {
            await kafkaAdminClient.CreateTopicAsync(item, kafkaOptions.Value.NumPartitions, kafkaOptions.Value.KafkaReplicationFactor);
        }
    }
}
|
||||
@ -1,40 +0,0 @@
|
||||
using System;
|
||||
using System.Threading.Tasks;
|
||||
using JiShe.CollectBus.Common.Enums;
|
||||
using JiShe.CollectBus.Common.Models;
|
||||
using JiShe.CollectBus.IotSystems.MessageIssueds;
|
||||
using MassTransit;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using TouchSocket.Sockets;
|
||||
using Volo.Abp.Domain.Repositories;
|
||||
|
||||
namespace JiShe.CollectBus.Consumers
{
    /// <summary>
    /// Consumer for scheduled meter-reading commands: forwards the issued hex
    /// frame to the concentrator identified by its focus address over TCP.
    /// </summary>
    public class ScheduledMeterReadingConsumer : IConsumer<ScheduledMeterReadingIssuedEventMessage>
    {
        private readonly ILogger<ScheduledMeterReadingConsumer> _logger;
        private readonly ITcpService _tcpService;

        /// <summary>
        /// Creates the consumer.
        /// </summary>
        /// <param name="logger">Logger for consumption diagnostics.</param>
        /// <param name="tcpService">TCP service used to reach the concentrator session.</param>
        public ScheduledMeterReadingConsumer(ILogger<ScheduledMeterReadingConsumer> logger,
            ITcpService tcpService)
        {
            _logger = logger;
            _tcpService = tcpService;
        }

        /// <summary>
        /// Sends the message's hex string to the TCP session whose id equals the
        /// concentrator (focus) address.
        /// </summary>
        public async Task Consume(ConsumeContext<ScheduledMeterReadingIssuedEventMessage> context)
        {
            // Fix: routine message consumption is not an error condition;
            // this was previously logged with LogError.
            _logger.LogInformation($"{nameof(ScheduledMeterReadingConsumer)} 集中器的消息消费{context.Message.FocusAddress}");
            await _tcpService.SendAsync(context.Message.FocusAddress, context.Message.MessageHexString);
        }
    }
}
|
||||
@ -1,153 +0,0 @@
|
||||
using JiShe.CollectBus.DataMigration.Options;
|
||||
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
|
||||
using LiteDB;
|
||||
using Microsoft.Extensions.Options;
|
||||
using System;
|
||||
using System.Data;
|
||||
using System.Linq;
|
||||
using System.Threading.Channels;
|
||||
using System.Threading.Tasks;
|
||||
using Volo.Abp.Domain.Repositories;
|
||||
|
||||
namespace JiShe.CollectBus.DataMigration
{
    /// <summary>
    /// Data migration service: a producer → processors → consumer pipeline built
    /// on two bounded channels (raw records, then cleaned DataTables). The read,
    /// clean and bulk-insert stages are currently stubbed out (commented).
    /// </summary>
    public class DataMigrationService : CollectBusAppService, IDataMigrationService
    {
        private readonly IRepository<MeterReadingRecords, Guid> _meterReadingRecordsRepository;
        private readonly DataMigrationOptions _options;

        public DataMigrationService(IOptions<DataMigrationOptions> options,
            IRepository<MeterReadingRecords, Guid> meterReadingRecordsRepository)
        {
            _options = options.Value;
            _meterReadingRecordsRepository = meterReadingRecordsRepository;
        }

        /// <summary>
        /// Starts the migration pipeline: one producer task,
        /// <c>_options.ProcessorsCount</c> processor tasks and one consumer task,
        /// and waits for all of them to finish.
        /// </summary>
        public async Task StartMigrationAsync()
        {
            var rawDataChannel = Channel.CreateBounded<MeterReadingRecords[]>(new BoundedChannelOptions(_options.ChannelCapacity)
            {
                SingleWriter = false,
                SingleReader = false,
                FullMode = BoundedChannelFullMode.Wait
            });

            var cleanDataChannel = Channel.CreateBounded<DataTable>(new BoundedChannelOptions(_options.ChannelCapacity)
            {
                SingleWriter = false,
                SingleReader = false,
                FullMode = BoundedChannelFullMode.Wait
            });

            // Start producer, processors and consumer.
            var producer = Task.Run(() => ProduceDataAsync(rawDataChannel.Writer));

            var processors = Enumerable.Range(0, _options.ProcessorsCount)
                .Select(_ => Task.Run(() => ProcessDataAsync(rawDataChannel.Reader, cleanDataChannel.Writer)))
                .ToArray();

            var consumer = Task.Run(() => ConsumeDataAsync(cleanDataChannel.Reader));

            await Task.WhenAll(new[] { producer }.Union(processors).Union(new[] { consumer }));
        }

        /// <summary>
        /// Producer: reads batches of not-yet-migrated records (MongoDB repository)
        /// and pushes them into the raw-data channel. The real loop is disabled.
        /// </summary>
        /// <param name="writer">Writer side of the raw-data channel.</param>
        private async Task ProduceDataAsync(ChannelWriter<MeterReadingRecords[]> writer)
        {
            //while (true)
            //{
            //    var queryable = await _meterReadingRecordsRepository.GetQueryableAsync();
            //    var batchRecords = queryable.Where(d => d.MigrationStatus == Common.Enums.RecordsDataMigrationStatusEnum.NotStarted)
            //        .Take(_options.MongoDbDataBatchSize)
            //        .ToArray();

            //    if (batchRecords == null || batchRecords.Length == 0)
            //    {
            //        writer.Complete();
            //        break;
            //    }

            //    await writer.WriteAsync(batchRecords);
            //}

            // Fix: while the read loop above is disabled the writer was never
            // completed, so the processors' ReadAllAsync never finished and
            // StartMigrationAsync hung forever. Complete the channel so the
            // stubbed pipeline terminates cleanly.
            writer.Complete();
            await Task.CompletedTask;
        }

        /// <summary>
        /// Processor: cleans raw record batches into DataTables and marks the
        /// source records as in-progress. Cleaning logic is currently disabled;
        /// the loop only drains the channel, then completes the clean-data writer.
        /// </summary>
        /// <param name="reader">Reader side of the raw-data channel.</param>
        /// <param name="writer">Writer side of the clean-data channel.</param>
        private async Task ProcessDataAsync(ChannelReader<MeterReadingRecords[]> reader, ChannelWriter<DataTable> writer)
        {
            await foreach (var batch in reader.ReadAllAsync())
            {
                //var dataTable = new DataTable();
                //dataTable.Columns.Add("Id", typeof(string));
                //dataTable.Columns.Add("CleanName", typeof(string));
                //dataTable.Columns.Add("ProcessedTime", typeof(DateTime));

                //foreach (var doc in batch)
                //{
                //    // business cleaning logic
                //    var cleanName = doc["name"].AsString.Trim().ToUpper();
                //    dataTable.Rows.Add(
                //        doc["_id"].ToString(),
                //        cleanName,
                //        DateTime.UtcNow);
                //}

                //await writer.WriteAsync(dataTable);

                // batch status update
                //var ids = batch.Select(d => d.Id).ToArray();
                //foreach (var item in batch)
                //{
                //    item.MigrationStatus = Common.Enums.RecordsDataMigrationStatusEnum.InProgress;
                //    item.MigrationTime = DateTime.Now;
                //}

                //await _meterReadingRecordsRepository.UpdateManyAsync(batch);
            }
            writer.Complete();
        }

        /// <summary>
        /// Consumer: bulk-inserts cleaned DataTables into the target store.
        /// The SqlBulkCopy implementation is currently disabled; this stub just
        /// returns so the pipeline can complete.
        /// </summary>
        /// <param name="reader">Reader side of the clean-data channel.</param>
        private async Task ConsumeDataAsync(ChannelReader<DataTable> reader)
        {
            //await using var connection = new SqlConnection(_sqlConnectionString);
            //await connection.OpenAsync();

            //await foreach (var dataTable in reader.ReadAllAsync())
            //{
            //    using var bulkCopy = new SqlBulkCopy(connection)
            //    {
            //        DestinationTableName = "CleanData",
            //        BatchSize = 5000,
            //        BulkCopyTimeout = 300
            //    };

            //    bulkCopy.ColumnMappings.Add("Id", "Id");
            //    bulkCopy.ColumnMappings.Add("CleanName", "CleanName");
            //    bulkCopy.ColumnMappings.Add("ProcessedTime", "ProcessedTime");

            //    await bulkCopy.WriteToServerAsync(dataTable);
            //}

            // Fix: the method was async with no await (CS1998); make the stub explicit.
            await Task.CompletedTask;
        }
    }
}
|
||||
@ -1,33 +0,0 @@
|
||||
using JiShe.CollectBus.IotSystems.Records;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using JiShe.CollectBus.Common.Consts;
|
||||
using JiShe.CollectBus.EnergySystems.TableViews;
|
||||
using JiShe.CollectBus.FreeSql;
|
||||
using Volo.Abp.Domain.Repositories;
|
||||
using JiShe.CollectBus.IotSystems.PrepayModel;
|
||||
using JiShe.CollectBus.Ammeters;
|
||||
using JiShe.CollectBus.Common.BuildSendDatas;
|
||||
using JiShe.CollectBus.Common.Enums;
|
||||
using JiShe.CollectBus.Common.Helpers;
|
||||
|
||||
namespace JiShe.CollectBus.EnergySystem
{
    /// <summary>
    /// Cache maintenance service: loads the focus/ammeter view from the energy
    /// database and stores it in Redis, one hash per concentrator.
    /// </summary>
    public class CacheAppService : CollectBusAppService, ICacheAppService
    {
        /// <summary>
        /// Rebuilds the ammeter-per-concentrator hash caches. Each group key is
        /// the concatenation of focus area code and focus address; each hash
        /// field key is "{meter id}_{meter address}".
        /// </summary>
        /// <param name="key">Unused by the current implementation.</param>
        public async Task SetHashByKey(string key)
        {
            var views = await SqlProvider.Instance.Change(DbEnum.EnergyDB).Select<V_FocusAmmeter>().ToListAsync();

            foreach (var focusGroup in views.GroupBy(v => $"{v.FocusAreaCode}{v.FocusAddress}"))
            {
                var fields = focusGroup.ToDictionary(v => $"{v.ID}_{v.Address}", v => v);
                await FreeRedisProvider.Instance.HSetAsync($"{RedisConst.CacheAmmeterFocusKey}:{focusGroup.Key}", fields);
            }
        }
    }
}
||||
@ -1,46 +0,0 @@
|
||||
using JiShe.CollectBus.Samples;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Volo.Abp.DependencyInjection;
|
||||
using Volo.Abp.EventBus.Distributed;
|
||||
|
||||
namespace JiShe.CollectBus.Handlers
{
    /// <summary>
    /// Sample distributed-event handler for <see cref="SampleDto"/> (topic Test1);
    /// logs the received value.
    /// </summary>
    public class SampleHandler : IDistributedEventHandler<SampleDto>,
        ITransientDependency
    {
        private readonly ILogger<SampleHandler> _logger;

        public SampleHandler(ILogger<SampleHandler> logger)
        {
            _logger = logger;
        }

        /// <summary>Logs the payload of a Test1 event.</summary>
        public Task HandleEventAsync(SampleDto eventData)
        {
            // Fix: dropped the explicit .ToString() — interpolation formats the
            // value itself and, unlike .ToString(), does not throw when Value is null.
            // Also removed async-without-await (CS1998) by returning a completed task.
            _logger.LogWarning($"topic Test1 message: {eventData.Value}");
            return Task.CompletedTask;
        }
    }

    /// <summary>
    /// Sample distributed-event handler for <see cref="SampleDto2"/> (topic Test2);
    /// logs the received value.
    /// </summary>
    public class SampleHandler2 : IDistributedEventHandler<SampleDto2>,
        ITransientDependency
    {
        private readonly ILogger<SampleHandler2> _logger;

        public SampleHandler2(ILogger<SampleHandler2> logger)
        {
            _logger = logger;
        }

        /// <summary>Logs the payload of a Test2 event.</summary>
        public Task HandleEventAsync(SampleDto2 eventData)
        {
            // Same null-safe logging fix as SampleHandler.
            _logger.LogWarning($"topic Test2 message: {eventData.Value}");
            return Task.CompletedTask;
        }
    }
}
|
||||
@ -1,278 +0,0 @@
|
||||
using System;
|
||||
using System.Linq;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Threading.Tasks;
|
||||
using DeviceDetectorNET.Parser.Device;
|
||||
using DotNetCore.CAP;
|
||||
using JiShe.CollectBus.Ammeters;
|
||||
using JiShe.CollectBus.Common.Consts;
|
||||
using JiShe.CollectBus.Common.Enums;
|
||||
using JiShe.CollectBus.Common.Extensions;
|
||||
using JiShe.CollectBus.Common.Helpers;
|
||||
using JiShe.CollectBus.Enums;
|
||||
using JiShe.CollectBus.Interceptors;
|
||||
using JiShe.CollectBus.IotSystems.Devices;
|
||||
using JiShe.CollectBus.IotSystems.MessageReceiveds;
|
||||
using JiShe.CollectBus.Kafka.Producer;
|
||||
using JiShe.CollectBus.Protocol.Contracts;
|
||||
using MassTransit;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using TouchSocket.Core;
|
||||
using TouchSocket.Sockets;
|
||||
using Volo.Abp.Caching;
|
||||
using Volo.Abp.DependencyInjection;
|
||||
using Volo.Abp.Domain.Entities;
|
||||
using Volo.Abp.Domain.Repositories;
|
||||
using static FreeSql.Internal.GlobalFilter;
|
||||
|
||||
namespace JiShe.CollectBus.Plugins
{
    /// <summary>
    /// TouchSocket TCP plugin that handles concentrator sessions: first-pass
    /// parsing of received frames (AFN/FN/address), login and heartbeat handling
    /// with device upsert, and forwarding of all other frames to Kafka.
    /// </summary>
    public partial class TcpMonitor : PluginBase, ITransientDependency, ITcpReceivedPlugin, ITcpConnectingPlugin, ITcpConnectedPlugin, ITcpClosedPlugin
    {
        // NOTE(review): only referenced from commented-out publishing code below;
        // kept for the CAP-based publishing path.
        private readonly ICapPublisher _producerBus;
        private readonly IProducerService _producerService;
        private readonly ILogger<TcpMonitor> _logger;
        private readonly IRepository<Device, Guid> _deviceRepository;
        // NOTE(review): unused in this partial; presumably used by another part — verify.
        private readonly IDistributedCache<AmmeterInfo> _ammeterInfoCache;

        /// <summary>
        /// Creates the plugin.
        /// </summary>
        /// <param name="producerBus">CAP publisher (currently unused; see commented code).</param>
        /// <param name="producerService">Kafka producer used for all event publishing.</param>
        /// <param name="logger">Logger.</param>
        /// <param name="deviceRepository">Repository of concentrator devices.</param>
        /// <param name="ammeterInfoCache">Distributed cache of ammeter info.</param>
        public TcpMonitor(ICapPublisher producerBus, IProducerService producerService,
            ILogger<TcpMonitor> logger,
            IRepository<Device, Guid> deviceRepository,
            IDistributedCache<AmmeterInfo> ammeterInfoCache)
        {
            _producerBus = producerBus;
            _producerService = producerService;
            _logger = logger;
            _deviceRepository = deviceRepository;
            _ammeterInfoCache = ammeterInfoCache;
        }

        /// <summary>
        /// First-pass dispatch of a received frame. Extracts AFN, FN and the
        /// concentrator address from the hex payload; link-check frames (login
        /// FN=1, heartbeat FN=3) are handled locally, everything else is pushed
        /// to Kafka via <see cref="OnTcpNormalReceived"/>. Unparsable frames are
        /// only logged.
        /// </summary>
        public async Task OnTcpReceived(ITcpSession client, ReceivedDataEventArgs e)
        {
            var messageHexString = Convert.ToHexString(e.ByteBlock.Span);
            var hexStringList = messageHexString.StringToPairs();
            var aFn = (int?)hexStringList.GetAnalyzeValue(CommandChunkEnum.AFN);
            var fn = (int?)hexStringList.GetAnalyzeValue(CommandChunkEnum.FN);
            // aTuple: Item1 = concentrator address (string), Item2 = int — exact
            // semantics of Item2 not visible here.
            var aTuple = (Tuple<string, int>)hexStringList.GetAnalyzeValue(CommandChunkEnum.A);
            if (aFn.HasValue && fn.HasValue && aTuple != null && !string.IsNullOrWhiteSpace(aTuple.Item1))
            {
                var tcpSessionClient = (ITcpSessionClient)client;

                // AFN.链路接口检测 = link interface check (login/heartbeat frames).
                if ((AFN)aFn == AFN.链路接口检测)
                {
                    switch (fn)
                    {
                        case 1:
                            // FN=1: login frame.
                            await OnTcpLoginReceived(tcpSessionClient, messageHexString, aTuple.Item1);
                            break;
                        case 3:
                            // FN=3: heartbeat frame. Two situations exist:
                            // 1. the concentrator sent a login frame first, then heartbeats;
                            // 2. the concentrator never sent a login frame, only heartbeats.
                            await OnTcpHeartbeatReceived(tcpSessionClient, messageHexString, aTuple.Item1);
                            break;
                        default:
                            _logger.LogError($"指令初步解析失败,指令内容:{messageHexString}");
                            break;
                    }
                }
                else
                {
                    // Any other AFN: forward as a normal frame (AFN zero-padded to 2 chars).
                    await OnTcpNormalReceived(tcpSessionClient, messageHexString, aTuple.Item1,aFn.ToString()!.PadLeft(2,'0'));
                }
            }
            else
            {
                _logger.LogError($"指令初步解析失败,指令内容:{messageHexString}");
            }

            // Let the next plugin in the TouchSocket chain run.
            await e.InvokeNext();
        }

        //[GeneratorPlugin(typeof(ITcpConnectingPlugin))]
        /// <summary>Logs an incoming connection attempt.</summary>
        public async Task OnTcpConnecting(ITcpSession client, ConnectingEventArgs e)
        {
            var tcpSessionClient = (ITcpSessionClient)client;

            _logger.LogInformation($"[TCP] ID:{tcpSessionClient.Id} IP:{client.GetIPPort()}正在连接中...");
            await e.InvokeNext();
        }

        //[GeneratorPlugin(typeof(ITcpConnectedPlugin))]
        /// <summary>Logs an established connection.</summary>
        public async Task OnTcpConnected(ITcpSession client, ConnectedEventArgs e)
        {
            var tcpSessionClient = (ITcpSessionClient)client;

            _logger.LogInformation($"[TCP] ID:{tcpSessionClient.Id} IP:{client.GetIPPort()}已连接");
            await e.InvokeNext();
        }

        //[GeneratorPlugin(typeof(ITcpClosedPlugin))]//ITcpSessionClient
        /// <summary>
        /// On session close: finds the device by its session client id and marks
        /// it closed; logs a warning when no matching device exists.
        /// </summary>
        public async Task OnTcpClosed(ITcpSession client, ClosedEventArgs e)
        {
            var tcpSessionClient = (ITcpSessionClient)client;
            var entity = await _deviceRepository.FindAsync(a => a.ClientId == tcpSessionClient.Id);
            if (entity != null)
            {
                entity.UpdateByOnClosed();
                await _deviceRepository.UpdateAsync(entity);
            }
            else
            {
                _logger.LogWarning($"[TCP] ID:{tcpSessionClient.Id} IP:{client.GetIPPort()}已关闭连接,但采集程序检索失败");
            }

            await e.InvokeNext();
        }

        /// <summary>
        /// Login frame handling: renames the TCP session to the concentrator
        /// number, upserts the Device record, then publishes a login event to Kafka.
        /// Bails out (after the session rename) when the concentrator number maps
        /// to more than one device.
        /// </summary>
        /// <param name="client">Session client.</param>
        /// <param name="messageHexString">Raw frame as hex.</param>
        /// <param name="deviceNo">Concentrator number.</param>
        private async Task OnTcpLoginReceived(ITcpSessionClient client, string messageHexString, string deviceNo)
        {
            string oldClientId = $"{client.Id}";

            // Re-key the session to the concentrator number so later sends can
            // address it directly.
            await client.ResetIdAsync(deviceNo);

            var deviceInfoList= await _deviceRepository.GetListAsync(a => a.Number == deviceNo);
            if (deviceInfoList != null && deviceInfoList.Count > 1)
            {
                // TODO: push an alert for duplicated concentrator numbers.
                _logger.LogError($"集中器编号:{deviceNo},存在多个集中器,请检查集中器编号是否重复");
                return;
            }

            var entity = deviceInfoList?.FirstOrDefault(a => a.Number == deviceNo);
            if (entity == null)
            {
                // First login ever: create the device as online.
                await _deviceRepository.InsertAsync(new Device(deviceNo, oldClientId, DateTime.Now, DateTime.Now, DeviceStatus.Online));
            }
            else
            {
                entity.UpdateByLoginAndHeartbeat(oldClientId);
                await _deviceRepository.UpdateAsync(entity);
            }

            var messageReceivedLoginEvent = new MessageReceivedLogin
            {
                ClientId = deviceNo,
                ClientIp = client.IP,
                ClientPort = client.Port,
                MessageHexString = messageHexString,
                DeviceNo = deviceNo,
                MessageId = NewId.NextGuid().ToString()
            };

            //await _producerBus.PublishAsync(ProtocolConst.SubscriberLoginReceivedEventName, messageReceivedLoginEvent);

            await _producerService.ProduceAsync(ProtocolConst.SubscriberLoginReceivedEventName, messageReceivedLoginEvent);

            //await _producerBus.Publish( messageReceivedLoginEvent);
        }

        /// <summary>
        /// Heartbeat frame handling: upserts the Device record (covering
        /// concentrators that never sent a login frame), re-keys the session when
        /// needed, then publishes a heartbeat event to Kafka.
        /// </summary>
        private async Task OnTcpHeartbeatReceived(ITcpSessionClient client, string messageHexString, string deviceNo)
        {
            string clientId = deviceNo;
            string oldClientId = $"{client.Id}";

            var deviceInfoList = await _deviceRepository.GetListAsync(a => a.Number == deviceNo);
            if (deviceInfoList != null && deviceInfoList.Count > 1)
            {
                // TODO: push an alert for duplicated concentrator numbers.
                _logger.LogError($"集中器编号:{deviceNo},存在多个集中器,请检查集中器编号是否重复");
                return;
            }

            var entity = deviceInfoList?.FirstOrDefault(a => a.Number == deviceNo);
            if (entity == null) // device that only sends heartbeats, no login frame
            {
                await client.ResetIdAsync(clientId);
                await _deviceRepository.InsertAsync(new Device(deviceNo, oldClientId, DateTime.Now, DateTime.Now, DeviceStatus.Online));
            }
            else
            {
                if (clientId != oldClientId)
                {
                    // Session id still differs from the concentrator number:
                    // record the old id while updating.
                    entity.UpdateByLoginAndHeartbeat(oldClientId);
                }
                else
                {
                    entity.UpdateByLoginAndHeartbeat();
                }

                await _deviceRepository.UpdateAsync(entity);
            }

            var messageReceivedHeartbeatEvent = new MessageReceivedHeartbeat
            {
                ClientId = clientId,
                ClientIp = client.IP,
                ClientPort = client.Port,
                MessageHexString = messageHexString,
                DeviceNo = deviceNo,
                MessageId = NewId.NextGuid().ToString()
            };
            //await _producerBus.PublishAsync(ProtocolConst.SubscriberHeartbeatReceivedEventName, messageReceivedHeartbeatEvent);

            await _producerService.ProduceAsync(ProtocolConst.SubscriberHeartbeatReceivedEventName, messageReceivedHeartbeatEvent);
            //await _producerBus.Publish(messageReceivedHeartbeatEvent);
        }

        /// <summary>
        /// Normal frame handling: publishes the frame to the shared "received"
        /// Kafka topic for AFN-specific downstream processing.
        /// </summary>
        /// <param name="client">Session client.</param>
        /// <param name="messageHexString">Raw frame as hex.</param>
        /// <param name="deviceNo">Concentrator number.</param>
        /// <param name="aFn">Two-character zero-padded AFN (currently unused here;
        /// see the commented per-AFN topic routing below).</param>
        private async Task OnTcpNormalReceived(ITcpSessionClient client, string messageHexString, string deviceNo,string aFn)
        {
            //await _producerBus.Publish(new MessageReceived
            //{
            //    ClientId = client.Id,
            //    ClientIp = client.IP,
            //    ClientPort = client.Port,
            //    MessageHexString = messageHexString,
            //    DeviceNo = deviceNo,
            //    MessageId = NewId.NextGuid().ToString()
            //});

            //string topicName = string.Format(ProtocolConst.AFNTopicNameFormat, aFn);
            // TODO: how should the data timestamp be determined? Concentrator
            // collection frequency is fixed, so on upload the expected collection
            // time is back-computed from the current time — but how should it be
            // computed when the upload was delayed by network problems?
            //await _producerBus.PublishAsync(ProtocolConst.SubscriberReceivedEventName, new MessageReceived
            //{
            //    ClientId = client.Id,
            //    ClientIp = client.IP,
            //    ClientPort = client.Port,
            //    MessageHexString = messageHexString,
            //    DeviceNo = deviceNo,
            //    MessageId = NewId.NextGuid().ToString()
            //});
            await _producerService.ProduceAsync(ProtocolConst.SubscriberReceivedEventName, new MessageReceived
            {
                ClientId = client.Id,
                ClientIp = client.IP,
                ClientPort = client.Port,
                MessageHexString = messageHexString,
                DeviceNo = deviceNo,
                MessageId = NewId.NextGuid().ToString()
            });
        }
    }
}
|
||||
@ -1,762 +0,0 @@
|
||||
using Confluent.Kafka;
|
||||
using FreeRedis;
|
||||
using JiShe.CollectBus.Application.Contracts;
|
||||
using JiShe.CollectBus.Common.Extensions;
|
||||
using JiShe.CollectBus.Common.Helpers;
|
||||
using JiShe.CollectBus.Common.Models;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using JiShe.CollectBus.FreeRedis;
|
||||
using Volo.Abp.DependencyInjection;
|
||||
using static FreeSql.Internal.GlobalFilter;
|
||||
using static System.Runtime.InteropServices.JavaScript.JSType;
|
||||
using static Volo.Abp.UI.Navigation.DefaultMenuNames.Application;
|
||||
|
||||
namespace JiShe.CollectBus.RedisDataCache
|
||||
{
|
||||
/// <summary>
|
||||
/// 数据缓存服务接口
|
||||
/// </summary>
|
||||
public class RedisDataCacheService : IRedisDataCacheService, ITransientDependency
|
||||
{
|
||||
private readonly IFreeRedisProvider _freeRedisProvider;
|
||||
private readonly ILogger<RedisDataCacheService> _logger;
|
||||
private RedisClient Instance { get; set; }
|
||||
|
||||
/// <summary>
/// Creates the cache service and captures the shared Redis client from the
/// injected provider.
/// </summary>
/// <param name="freeRedisProvider">Provider exposing the FreeRedis client.</param>
/// <param name="logger">Logger for cache diagnostics.</param>
public RedisDataCacheService(IFreeRedisProvider freeRedisProvider,
    ILogger<RedisDataCacheService> logger)
{
    _freeRedisProvider = freeRedisProvider;
    _logger = logger;
    Instance = freeRedisProvider.Instance;
}
|
||||
|
||||
/// <summary>
/// Inserts a single cached item: the serialized payload goes into the main
/// hash, the member id into the set index, and (member, score) into the ZSET
/// index — all inside one Redis MULTI/EXEC transaction.
/// </summary>
/// <typeparam name="T">Cached model type.</typeparam>
/// <param name="redisHashCacheKey">Main hash cache key.</param>
/// <param name="redisSetIndexCacheKey">Set index cache key.</param>
/// <param name="redisZSetScoresIndexCacheKey">ZSET score index cache key.</param>
/// <param name="data">Item to cache.</param>
public async Task InsertDataAsync<T>(
    string redisHashCacheKey,
    string redisSetIndexCacheKey,
    string redisZSetScoresIndexCacheKey,
    T data) where T : DeviceCacheBasicModel
{
    // Guard clause: reject null data or any blank key.
    var argumentsValid = data != null
        && !string.IsNullOrWhiteSpace(redisHashCacheKey)
        && !string.IsNullOrWhiteSpace(redisSetIndexCacheKey)
        && !string.IsNullOrWhiteSpace(redisZSetScoresIndexCacheKey);
    if (!argumentsValid)
    {
        _logger.LogError($"{nameof(InsertDataAsync)} 参数异常,-101");
        return;
    }

    // A transaction keeps hash, set index and ZSET index atomic together.
    using (var trans = Instance.Multi())
    {
        trans.HSet(redisHashCacheKey, data.MemberId, data.Serialize());   // main payload
        trans.SAdd(redisSetIndexCacheKey, data.MemberId);                 // membership index
        trans.ZAdd(redisZSetScoresIndexCacheKey, data.ScoreValue, data.MemberId); // score index

        var results = trans.Exec();
        if (results is not { Length: > 0 })
        {
            _logger.LogError($"{nameof(InsertDataAsync)} 添加事务提交失败,-102");
        }
    }

    await Task.CompletedTask;
}
|
||||
|
||||
/// <summary>
/// Bulk-inserts cached items in pipelined batches of 1000, with bounded
/// parallelism (2× processor count). For every item: serialized payload into
/// the main hash, member id into the set index, (member, score) into the ZSET
/// index. The task completes only after all batches have been written.
/// </summary>
/// <typeparam name="T">Cached model type.</typeparam>
/// <param name="redisHashCacheKey">Main hash cache key.</param>
/// <param name="redisSetIndexCacheKey">Set index cache key.</param>
/// <param name="redisZSetScoresIndexCacheKey">ZSET score index cache key.</param>
/// <param name="items">Items to cache.</param>
public async Task BatchInsertDataAsync<T>(
    string redisHashCacheKey,
    string redisSetIndexCacheKey,
    string redisZSetScoresIndexCacheKey,
    IEnumerable<T> items) where T : DeviceCacheBasicModel
{
    // Any() instead of Count() <= 0: avoids fully enumerating the sequence.
    if (items == null
        || !items.Any()
        || string.IsNullOrWhiteSpace(redisHashCacheKey)
        || string.IsNullOrWhiteSpace(redisSetIndexCacheKey)
        || string.IsNullOrWhiteSpace(redisZSetScoresIndexCacheKey))
    {
        _logger.LogError($"{nameof(BatchInsertDataAsync)} 参数异常,-101");
        return;
    }

    const int BATCH_SIZE = 1000; // 1000 items per pipelined batch
    using var semaphore = new SemaphoreSlim(Environment.ProcessorCount * 2);
    var pending = new List<Task>();

    foreach (var batch in items.Batch(BATCH_SIZE))
    {
        await semaphore.WaitAsync();

        // Fix: the tasks were previously fire-and-forget (`_ = Task.Run(...)`),
        // so the method returned before any data was written and task
        // exceptions were unobserved. Collect and await them instead; release
        // the semaphore in finally so a failed batch cannot leak a slot.
        pending.Add(Task.Run(() =>
        {
            try
            {
                using (var pipe = Instance.StartPipe())
                {
                    foreach (var item in batch)
                    {
                        pipe.HSet(redisHashCacheKey, item.MemberId, item.Serialize());       // main payload
                        pipe.SAdd(redisSetIndexCacheKey, item.MemberId);                     // membership index
                        pipe.ZAdd(redisZSetScoresIndexCacheKey, item.ScoreValue, item.MemberId); // score index
                    }
                    pipe.EndPipe();
                }
            }
            finally
            {
                semaphore.Release();
            }
        }));
    }

    await Task.WhenAll(pending);
}
|
||||
|
||||
/// <summary>
/// Removes a cached item atomically via a Lua script: deletes the hash field
/// and unconditionally clears the set and ZSET index entries. Logs an error
/// when the hash field did not exist.
/// </summary>
/// <typeparam name="T">Cached model type.</typeparam>
/// <param name="redisHashCacheKey">Main hash cache key.</param>
/// <param name="redisSetIndexCacheKey">Set index cache key.</param>
/// <param name="redisZSetScoresIndexCacheKey">ZSET score index cache key.</param>
/// <param name="data">Cached item to remove (only MemberId is used).</param>
public async Task RemoveCacheDataAsync<T>(
    string redisHashCacheKey,
    string redisSetIndexCacheKey,
    string redisZSetScoresIndexCacheKey,
    T data) where T : DeviceCacheBasicModel
{
    if (data == null
        || string.IsNullOrWhiteSpace(redisHashCacheKey)
        || string.IsNullOrWhiteSpace(redisSetIndexCacheKey)
        || string.IsNullOrWhiteSpace(redisZSetScoresIndexCacheKey))
    {
        _logger.LogError($"{nameof(RemoveCacheDataAsync)} 参数异常,-101");
        return;
    }

    const string luaScript = @"
        local hashCacheKey = KEYS[1]
        local setIndexCacheKey = KEYS[2]
        local zsetScoresIndexCacheKey = KEYS[3]
        local member = ARGV[1]

        local deleted = 0
        if redis.call('HDEL', hashCacheKey, member) > 0 then
            deleted = 1
        end

        redis.call('SREM', setIndexCacheKey, member)
        redis.call('ZREM', zsetScoresIndexCacheKey, member)
        return deleted
    ";

    var keys = new[]
    {
        redisHashCacheKey,
        redisSetIndexCacheKey,
        redisZSetScoresIndexCacheKey
    };

    var result = await Instance.EvalAsync(luaScript, keys, new[] { data.MemberId });

    // Fix: the Lua integer reply is boxed as a long; the previous (int) cast
    // was an invalid unbox and threw InvalidCastException at runtime.
    if (Convert.ToInt64(result) == 0)
    {
        _logger.LogError($"{nameof(RemoveCacheDataAsync)} 删除指定Key{redisHashCacheKey}的{data.MemberId}数据失败,-102");
    }
}
|
||||
|
||||
/// <summary>
/// Updates a cached item in place when its member id (mapping) has NOT
/// changed: the Lua script verifies the hash field exists and overwrites the
/// payload. Index entries are untouched. Logs an error when the field is missing.
/// </summary>
/// <typeparam name="T">Cached model type.</typeparam>
/// <param name="redisHashCacheKey">Main hash cache key.</param>
/// <param name="redisSetIndexCacheKey">Set index cache key (unused by this overload's script).</param>
/// <param name="redisZSetScoresIndexCacheKey">ZSET score index cache key (unused by this overload's script).</param>
/// <param name="newData">Updated item to store.</param>
public async Task ModifyDataAsync<T>(
    string redisHashCacheKey,
    string redisSetIndexCacheKey,
    string redisZSetScoresIndexCacheKey,
    T newData) where T : DeviceCacheBasicModel
{
    if (newData == null
        || string.IsNullOrWhiteSpace(redisHashCacheKey)
        || string.IsNullOrWhiteSpace(redisSetIndexCacheKey)
        || string.IsNullOrWhiteSpace(redisZSetScoresIndexCacheKey))
    {
        _logger.LogError($"{nameof(ModifyDataAsync)} 参数异常,-101");
        return;
    }

    var luaScript = @"
        local hashCacheKey = KEYS[1]
        local member = ARGV[1]
        local newData = ARGV[2]

        -- verify existence
        if redis.call('HEXISTS', hashCacheKey, member) == 0 then
            return 0
        end

        -- overwrite the main payload
        redis.call('HSET', hashCacheKey, member, newData)

        return 1
    ";

    var result = await Instance.EvalAsync(luaScript,
        new[]
        {
            redisHashCacheKey
        },
        new object[]
        {
            newData.MemberId,
            newData.Serialize()
        });

    // Fix: the Lua integer reply is boxed as a long; the previous (int) cast
    // was an invalid unbox and threw InvalidCastException at runtime.
    if (Convert.ToInt64(result) == 0)
    {
        _logger.LogError($"{nameof(ModifyDataAsync)} 更新指定Key{redisHashCacheKey}的{newData.MemberId}数据失败,-102");
    }
}
|
||||
|
||||
/// <summary>
/// Updates a cached item whose member id (mapping) HAS changed: the Lua
/// script verifies the old hash field exists, deletes it, writes the new
/// payload under the new member id, and swaps the set and ZSET index entries
/// (index swap only when a non-empty score is supplied). Logs an error when
/// the old field is missing.
/// </summary>
/// <typeparam name="T">Cached model type.</typeparam>
/// <param name="redisHashCacheKey">Main hash cache key.</param>
/// <param name="redisSetIndexCacheKey">Set index cache key.</param>
/// <param name="oldMemberId">Previous member id being replaced.</param>
/// <param name="redisZSetScoresIndexCacheKey">ZSET score index cache key.</param>
/// <param name="newData">Updated item to store under its new member id.</param>
public async Task ModifyDataAsync<T>(
    string redisHashCacheKey,
    string redisSetIndexCacheKey,
    string oldMemberId,
    string redisZSetScoresIndexCacheKey,
    T newData) where T : DeviceCacheBasicModel
{
    if (newData == null
        || string.IsNullOrWhiteSpace(redisHashCacheKey)
        || string.IsNullOrWhiteSpace(redisSetIndexCacheKey)
        || string.IsNullOrWhiteSpace(oldMemberId)
        || string.IsNullOrWhiteSpace(redisZSetScoresIndexCacheKey))
    {
        _logger.LogError($"{nameof(ModifyDataAsync)} 参数异常,-101");
        return;
    }

    var luaScript = @"
        local hashCacheKey = KEYS[1]
        local setIndexCacheKey = KEYS[2]
        local zsetScoresIndexCacheKey = KEYS[3]
        local member = ARGV[1]
        local oldMember = ARGV[2]
        local newData = ARGV[3]
        local newScore = ARGV[4]

        -- verify existence
        if redis.call('HEXISTS', hashCacheKey, oldMember) == 0 then
            return 0
        end

        -- delete the old payload
        redis.call('HDEL', hashCacheKey, oldMember)

        -- insert the new payload
        redis.call('HSET', hashCacheKey, member, newData)

        -- swap index entries
        if newScore ~= '' then
            -- drop old index entries
            redis.call('SREM', setIndexCacheKey, oldMember)
            redis.call('ZREM', zsetScoresIndexCacheKey, oldMember)

            -- add new index entries
            redis.call('SADD', setIndexCacheKey, member)
            redis.call('ZADD', zsetScoresIndexCacheKey, newScore, member)
        end

        return 1
    ";

    var result = await Instance.EvalAsync(luaScript,
        new[]
        {
            redisHashCacheKey,
            redisSetIndexCacheKey,
            redisZSetScoresIndexCacheKey
        },
        new object[]
        {
            newData.MemberId,
            oldMemberId,
            newData.Serialize(),
            newData.ScoreValue.ToString() ?? "",
        });

    // Fix: the Lua integer reply is boxed as a long; the previous (int) cast
    // was an invalid unbox and threw InvalidCastException at runtime.
    if (Convert.ToInt64(result) == 0)
    {
        _logger.LogError($"{nameof(ModifyDataAsync)} 更新指定Key{redisHashCacheKey}的{newData.MemberId}数据失败,-102");
    }
}
|
||||
|
||||
|
||||
/// <summary>
/// Paged retrieval of cached items restricted to the given concentrator ids,
/// driven by the concentrator/meter ZSET score index. Not implemented yet.
/// </summary>
/// <typeparam name="T">Cached model type.</typeparam>
/// <param name="redisCacheKey">Main hash cache key.</param>
/// <param name="redisCacheFocusScoresIndexKey">Concentrator/meter ZSET score index key.</param>
/// <param name="focusIds">Concentrator ids to include.</param>
/// <param name="pageSize">Page size.</param>
/// <param name="lastScore">Score of the last item of the previous page (cursor).</param>
/// <param name="lastMember">Member of the last item of the previous page (cursor).</param>
/// <param name="descending">Sort direction.</param>
/// <exception cref="NotImplementedException">Always; the method is a stub.</exception>
public async Task<BusCacheGlobalPagedResult<T>> GetPagedData<T>(
    string redisCacheKey,
    string redisCacheFocusScoresIndexKey,
    IEnumerable<int> focusIds,
    int pageSize = 10,
    decimal? lastScore = null,
    string lastMember = null,
    bool descending = true)
    where T : DeviceCacheBasicModel
{
    // Fix: the stub previously threw a bare System.Exception, which callers
    // cannot distinguish from a real failure. NotImplementedException states
    // the actual condition.
    throw new NotImplementedException($"{nameof(GetPagedData)} is not implemented yet.");
}
|
||||
|
||||
/// <summary>
|
||||
/// 通过ZSET索引获取数据,支持10万级别数据处理,控制在13秒以内。
|
||||
/// </summary>
|
||||
/// <typeparam name="T"></typeparam>
|
||||
/// <param name="redisHashCacheKey">主数据存储Hash缓存Key</param>
|
||||
/// <param name="redisZSetScoresIndexCacheKey">ZSET索引缓存Key</param>
|
||||
/// <param name="pageSize">分页尺寸</param>
|
||||
/// <param name="lastScore">最后一个索引</param>
|
||||
/// <param name="lastMember">最后一个唯一标识</param>
|
||||
/// <param name="descending">排序方式</param>
|
||||
/// <returns></returns>
|
||||
public async Task<BusCacheGlobalPagedResult<T>> GetAllPagedData<T>(
|
||||
string redisHashCacheKey,
|
||||
string redisZSetScoresIndexCacheKey,
|
||||
int pageSize = 1000,
|
||||
decimal? lastScore = null,
|
||||
string lastMember = null,
|
||||
bool descending = true)
|
||||
where T : DeviceCacheBasicModel
|
||||
{
|
||||
// 参数校验增强
|
||||
if (string.IsNullOrWhiteSpace(redisHashCacheKey) ||
|
||||
string.IsNullOrWhiteSpace(redisZSetScoresIndexCacheKey))
|
||||
{
|
||||
_logger.LogError($"{nameof(GetAllPagedData)} 参数异常,-101");
|
||||
return new BusCacheGlobalPagedResult<T> { Items = new List<T>() };
|
||||
}
|
||||
|
||||
pageSize = Math.Clamp(pageSize, 1, 10000);
|
||||
|
||||
var luaScript = @"
|
||||
local command = ARGV[1]
|
||||
local range_start = ARGV[2]
|
||||
local range_end = ARGV[3]
|
||||
local limit = tonumber(ARGV[4])
|
||||
local last_score = ARGV[5]
|
||||
local last_member = ARGV[6]
|
||||
|
||||
-- 获取原始数据
|
||||
local members
|
||||
if command == 'ZRANGEBYSCORE' then
|
||||
members = redis.call(command, KEYS[1], range_start, range_end, 'WITHSCORES', 'LIMIT', 0, limit * 2)
|
||||
else
|
||||
members = redis.call('ZREVRANGEBYSCORE', KEYS[1], range_start, range_end, 'WITHSCORES', 'LIMIT', 0, limit * 2)
|
||||
end
|
||||
|
||||
-- 过滤数据
|
||||
local filtered_members = {}
|
||||
local count = 0
|
||||
for i = 1, #members, 2 do
|
||||
local member = members[i]
|
||||
local score = members[i+1]
|
||||
local include = true
|
||||
if last_score ~= '' and last_member ~= '' then
|
||||
if command == 'ZRANGEBYSCORE' then
|
||||
-- 升序:score > last_score 或 (score == last_score 且 member > last_member)
|
||||
if score == last_score then
|
||||
include = member > last_member
|
||||
else
|
||||
include = tonumber(score) > tonumber(last_score)
|
||||
end
|
||||
else
|
||||
-- 降序:score < last_score 或 (score == last_score 且 member < last_member)
|
||||
if score == last_score then
|
||||
include = member < last_member
|
||||
else
|
||||
include = tonumber(score) < tonumber(last_score)
|
||||
end
|
||||
end
|
||||
end
|
||||
if include then
|
||||
table.insert(filtered_members, member)
|
||||
table.insert(filtered_members, score)
|
||||
count = count + 1
|
||||
if count >= limit then
|
||||
break
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- 提取有效数据
|
||||
local result_members, result_scores = {}, {}
|
||||
for i=1,#filtered_members,2 do
|
||||
table.insert(result_members, filtered_members[i])
|
||||
table.insert(result_scores, filtered_members[i+1])
|
||||
end
|
||||
|
||||
if #result_members == 0 then return {0,{},{},{}} end
|
||||
|
||||
-- 获取Hash数据
|
||||
local hash_data = redis.call('HMGET', KEYS[2], unpack(result_members))
|
||||
return {#result_members, result_members, result_scores, hash_data}";
|
||||
|
||||
// 调整范围构造逻辑(移除排他符号)
|
||||
string rangeStart, rangeEnd;
|
||||
if (descending)
|
||||
{
|
||||
rangeStart = lastScore.HasValue ? lastScore.Value.ToString() : "+inf";
|
||||
rangeEnd = "-inf";
|
||||
}
|
||||
else
|
||||
{
|
||||
rangeStart = lastScore.HasValue ? lastScore.Value.ToString() : "-inf";
|
||||
rangeEnd = "+inf";
|
||||
}
|
||||
|
||||
var scriptResult = (object[])await Instance.EvalAsync(luaScript,
|
||||
new[] { redisZSetScoresIndexCacheKey, redisHashCacheKey },
|
||||
new object[]
|
||||
{
|
||||
descending ? "ZREVRANGEBYSCORE" : "ZRANGEBYSCORE",
|
||||
rangeStart,
|
||||
rangeEnd,
|
||||
(pageSize + 1).ToString(), // 获取pageSize+1条以判断是否有下一页
|
||||
lastScore?.ToString() ?? "",
|
||||
lastMember ?? ""
|
||||
});
|
||||
|
||||
if ((long)scriptResult[0] == 0)
|
||||
return new BusCacheGlobalPagedResult<T> { Items = new List<T>() };
|
||||
|
||||
// 处理结果集
|
||||
var members = ((object[])scriptResult[1]).Cast<string>().ToList();
|
||||
var scores = ((object[])scriptResult[2]).Cast<string>().Select(decimal.Parse).ToList();
|
||||
var hashData = ((object[])scriptResult[3]).Cast<string>().ToList();
|
||||
|
||||
var validItems = members.AsParallel()
|
||||
.Select((m, i) =>
|
||||
{
|
||||
try { return BusJsonSerializer.Deserialize<T>(hashData[i]); }
|
||||
catch { return null; }
|
||||
})
|
||||
.Where(x => x != null)
|
||||
.ToList();
|
||||
|
||||
var hasNext = validItems.Count > pageSize;
|
||||
var actualItems = hasNext ? validItems.Take(pageSize) : validItems;
|
||||
|
||||
//分页锚点索引
|
||||
decimal? nextScore = null;
|
||||
string nextMember = null;
|
||||
if (hasNext && actualItems.Any())
|
||||
{
|
||||
var lastIndex = actualItems.Count() - 1; // 使用actualItems的最后一个索引
|
||||
nextScore = scores[lastIndex];
|
||||
nextMember = members[lastIndex];
|
||||
}
|
||||
|
||||
return new BusCacheGlobalPagedResult<T>
|
||||
{
|
||||
Items = actualItems.ToList(),
|
||||
HasNext = hasNext,
|
||||
NextScore = nextScore,
|
||||
NextMember = nextMember,
|
||||
TotalCount = await GetTotalCount(redisZSetScoresIndexCacheKey),
|
||||
PageSize = pageSize,
|
||||
};
|
||||
}
|
||||
|
||||
///// <summary>
|
||||
///// 通过集中器与表计信息排序索引获取数据
|
||||
///// </summary>
|
||||
///// <typeparam name="T"></typeparam>
|
||||
///// <param name="redisHashCacheKey">主数据存储Hash缓存Key</param>
|
||||
///// <param name="redisZSetScoresIndexCacheKey">ZSET索引缓存Key</param>
|
||||
///// <param name="pageSize">分页尺寸</param>
|
||||
///// <param name="lastScore">最后一个索引</param>
|
||||
///// <param name="lastMember">最后一个唯一标识</param>
|
||||
///// <param name="descending">排序方式</param>
|
||||
///// <returns></returns>
|
||||
//public async Task<BusCacheGlobalPagedResult<T>> GetAllPagedData<T>(
|
||||
//string redisHashCacheKey,
|
||||
//string redisZSetScoresIndexCacheKey,
|
||||
//int pageSize = 1000,
|
||||
//decimal? lastScore = null,
|
||||
//string lastMember = null,
|
||||
//bool descending = true)
|
||||
//where T : DeviceCacheBasicModel
|
||||
//{
|
||||
// // 参数校验增强
|
||||
// if (string.IsNullOrWhiteSpace(redisHashCacheKey) || string.IsNullOrWhiteSpace(redisZSetScoresIndexCacheKey))
|
||||
// {
|
||||
// _logger.LogError($"{nameof(GetAllPagedData)} 参数异常,-101");
|
||||
// return null;
|
||||
// }
|
||||
|
||||
// if (pageSize < 1 || pageSize > 10000)
|
||||
// {
|
||||
// _logger.LogError($"{nameof(GetAllPagedData)} 分页大小应在1-10000之间,-102");
|
||||
// return null;
|
||||
// }
|
||||
|
||||
// //// 分页参数解析
|
||||
// //var (startScore, excludeMember) = descending
|
||||
// // ? (lastScore ?? decimal.MaxValue, lastMember)
|
||||
// // : (lastScore ?? 0, lastMember);
|
||||
|
||||
// //执行分页查询(整合游标处理)
|
||||
// var pageResult = await GetPagedMembers(
|
||||
// redisZSetScoresIndexCacheKey,
|
||||
// pageSize,
|
||||
// lastScore,
|
||||
// lastMember,
|
||||
// descending);
|
||||
|
||||
// // 批量获取数据(优化内存分配)
|
||||
// var dataDict = await BatchGetData<T>(redisHashCacheKey, pageResult.Members);
|
||||
|
||||
// return new BusCacheGlobalPagedResult<T>
|
||||
// {
|
||||
// Items = pageResult.Members.Select(m => dataDict.TryGetValue(m, out var v) ? v : default)
|
||||
// .Where(x => x != null).ToList(),
|
||||
// HasNext = pageResult.HasNext,
|
||||
// NextScore = pageResult.NextScore,
|
||||
// NextMember = pageResult.NextMember,
|
||||
// TotalCount = await GetTotalCount(redisZSetScoresIndexCacheKey),
|
||||
// PageSize = pageSize,
|
||||
// };
|
||||
//}
|
||||
|
||||
        /// <summary>
        /// Cursor-based page query over the score-index ZSET, returning raw members.
        /// </summary>
        /// <param name="redisZSetScoresIndexCacheKey">ZSET cache key of the score index.</param>
        /// <param name="pageSize">Number of members per page.</param>
        /// <param name="lastScore">Score cursor from the previous page.</param>
        /// <param name="lastMember">Member cursor from the previous page.</param>
        /// <param name="descending">Sort direction.</param>
        /// <returns>The page members, a has-next flag, and the next cursor pair.</returns>
        private async Task<(List<string> Members, bool HasNext, decimal? NextScore, string NextMember)> GetPagedMembers(
        string redisZSetScoresIndexCacheKey,
        int pageSize,
        decimal? lastScore,
        string lastMember,
        bool descending)
        {
            // Seed the cursor by sort direction: max score for descending,
            // 0 for ascending.
            long initialScore = descending ? long.MaxValue : 0;
            decimal? currentScore = lastScore ?? initialScore;
            string currentMember = lastMember;
            var members = new List<string>(pageSize + 1);

            // Fetch batches until pageSize + 1 members are collected; the extra
            // member is only used to detect whether a further page exists.
            while (members.Count < pageSize + 1 && currentScore.HasValue)
            {
                var (batch, hasMore) = await GetNextBatch(
                    redisZSetScoresIndexCacheKey,
                    pageSize + 1 - members.Count,
                    currentScore.Value,
                    currentMember,
                    descending);

                if (!batch.Any()) break;

                members.AddRange(batch);

                // Advance the cursor past the last member fetched.
                currentMember = batch.LastOrDefault();
                currentScore = await GetNextScore(redisZSetScoresIndexCacheKey, currentMember, descending);
            }

            // Collecting more than pageSize members means another page exists;
            // only the first pageSize members are returned.
            bool hasNext = members.Count > pageSize;
            var resultMembers = members.Take(pageSize).ToList();

            // NOTE(review): when hasNext is true, currentScore/currentMember refer to
            // the (pageSize + 1)-th fetched member, not to the last member of the
            // truncated result page — the following page may skip one entry.
            // Confirm against callers before relying on this cursor.
            // NOTE(review): GetNextScore shifts the score by ±1, which looks like it
            // could skip members sharing the same score — verify for dense scores.
            return (
                resultMembers,
                hasNext,
                currentScore,
                currentMember
            );
        }
|
||||
|
||||
/// <summary>
|
||||
/// 批量获取指定分页的数据
|
||||
/// </summary>
|
||||
/// <typeparam name="T"></typeparam>
|
||||
/// <param name="redisHashCacheKey">Hash表缓存key</param>
|
||||
/// <param name="members">Hash表字段集合</param>
|
||||
/// <returns></returns>
|
||||
private async Task<Dictionary<string, T>> BatchGetData<T>(
|
||||
string redisHashCacheKey,
|
||||
IEnumerable<string> members)
|
||||
where T : DeviceCacheBasicModel
|
||||
{
|
||||
using var pipe = Instance.StartPipe();
|
||||
|
||||
foreach (var member in members)
|
||||
{
|
||||
pipe.HGet<T>(redisHashCacheKey, member);
|
||||
}
|
||||
|
||||
var results = pipe.EndPipe();
|
||||
return await Task.FromResult(members.Zip(results, (k, v) => new { k, v })
|
||||
.ToDictionary(x => x.k, x => (T)x.v));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// 处理下一个分页数据
|
||||
/// </summary>
|
||||
/// <param name="zsetKey"></param>
|
||||
/// <param name="limit"></param>
|
||||
/// <param name="score"></param>
|
||||
/// <param name="excludeMember"></param>
|
||||
/// <param name="descending"></param>
|
||||
/// <returns></returns>
|
||||
private async Task<(string[] Batch, bool HasMore)> GetNextBatch(
|
||||
string zsetKey,
|
||||
int limit,
|
||||
decimal score,
|
||||
string excludeMember,
|
||||
bool descending)
|
||||
{
|
||||
var query = descending
|
||||
? await Instance.ZRevRangeByScoreAsync(
|
||||
zsetKey,
|
||||
max: score,
|
||||
min: 0,
|
||||
offset: 0,
|
||||
count: limit)
|
||||
: await Instance.ZRangeByScoreAsync(
|
||||
zsetKey,
|
||||
min: score,
|
||||
max: long.MaxValue,
|
||||
offset: 0,
|
||||
count: limit);
|
||||
|
||||
return (query, query.Length >= limit);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// 获取下一页游标
|
||||
/// </summary>
|
||||
/// <param name="redisZSetScoresIndexCacheKey">排序索引ZSET缓存Key</param>
|
||||
/// <param name="lastMember">最后一个唯一标识</param>
|
||||
/// <param name="descending">排序方式</param>
|
||||
/// <returns></returns>
|
||||
private async Task<decimal?> GetNextScore(
|
||||
string redisZSetScoresIndexCacheKey,
|
||||
string lastMember,
|
||||
bool descending)
|
||||
{
|
||||
if (string.IsNullOrEmpty(lastMember)) return null;
|
||||
|
||||
var score = await Instance.ZScoreAsync(redisZSetScoresIndexCacheKey, lastMember);
|
||||
if (!score.HasValue) return null;
|
||||
|
||||
// 根据排序方向调整score
|
||||
return descending
|
||||
? score.Value - 1 // 降序时下页查询小于当前score
|
||||
: score.Value + 1; // 升序时下页查询大于当前score
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// 获取指定ZSET区间内的总数量
|
||||
/// </summary>
|
||||
/// <param name="zsetKey"></param>
|
||||
/// <param name="min"></param>
|
||||
/// <param name="max"></param>
|
||||
/// <returns></returns>
|
||||
public async Task<long> GetCount(string zsetKey, long min, long max)
|
||||
{
|
||||
// 缓存计数优化
|
||||
var cacheKey = $"{zsetKey}_count_{min}_{max}";
|
||||
var cached = await Instance.GetAsync<long?>(cacheKey);
|
||||
|
||||
if (cached.HasValue)
|
||||
return cached.Value;
|
||||
|
||||
var count = await Instance.ZCountAsync(zsetKey, min, max);
|
||||
await Instance.SetExAsync(cacheKey, 60, count); // 缓存60秒
|
||||
return count;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// 获取指定ZSET的总数量
|
||||
/// </summary>
|
||||
/// <param name="redisZSetScoresIndexCacheKey"></param>
|
||||
/// <returns></returns>
|
||||
private async Task<long> GetTotalCount(string redisZSetScoresIndexCacheKey)
|
||||
{
|
||||
// 缓存计数优化
|
||||
var cacheKey = $"{redisZSetScoresIndexCacheKey}_total_count";
|
||||
var cached = await Instance.GetAsync<long?>(cacheKey);
|
||||
|
||||
if (cached.HasValue)
|
||||
return cached.Value;
|
||||
|
||||
var count = await Instance.ZCountAsync(redisZSetScoresIndexCacheKey, 0, decimal.MaxValue);
|
||||
await Instance.SetExAsync(cacheKey, 30, count); // 缓存30秒
|
||||
return count;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,290 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
using Apache.IoTDB.DataStructure;
|
||||
using Apache.IoTDB;
|
||||
using Confluent.Kafka;
|
||||
using JiShe.CollectBus.Ammeters;
|
||||
using JiShe.CollectBus.FreeSql;
|
||||
using JiShe.CollectBus.IotSystems.PrepayModel;
|
||||
using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using JiShe.CollectBus.IotSystems.AFNEntity;
|
||||
using JiShe.CollectBus.Protocol.Contracts.Interfaces;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using JiShe.CollectBus.Common.Consts;
|
||||
using JiShe.CollectBus.Common.Enums;
|
||||
using System.Diagnostics.Metrics;
|
||||
using JiShe.CollectBus.Common.DeviceBalanceControl;
|
||||
using JiShe.CollectBus.Kafka.Attributes;
|
||||
using System.Text.Json;
|
||||
using JiShe.CollectBus.Kafka;
|
||||
using JiShe.CollectBus.Application.Contracts;
|
||||
using JiShe.CollectBus.Common.Models;
|
||||
using System.Diagnostics;
|
||||
using JiShe.CollectBus.IoTDB.Context;
|
||||
using JiShe.CollectBus.IoTDB.Interface;
|
||||
using JiShe.CollectBus.IoTDB.Options;
|
||||
|
||||
namespace JiShe.CollectBus.Samples;
|
||||
|
||||
/// <summary>
/// Sample application service used to exercise IoTDB session pools, the device
/// group balance control algorithm, Redis paged reads, and Kafka subscription.
/// </summary>
public class SampleAppService : CollectBusAppService, ISampleAppService, IKafkaSubscribe
{
    private readonly ILogger<SampleAppService> _logger;
    private readonly IIoTDBProvider _iotDBProvider;
    private readonly IoTDBRuntimeContext _dbContext;
    private readonly IoTDBOptions _options;
    private readonly IRedisDataCacheService _redisDataCacheService;

    public SampleAppService(IIoTDBProvider iotDBProvider, IOptions<IoTDBOptions> options,
        IoTDBRuntimeContext dbContext, ILogger<SampleAppService> logger, IRedisDataCacheService redisDataCacheService)
    {
        _iotDBProvider = iotDBProvider;
        _options = options.Value;
        _dbContext = dbContext;
        _logger = logger;
        _redisDataCacheService = redisDataCacheService;
    }

    /// <summary>
    /// Tests inserting a sample electricity meter record via the session pool.
    /// </summary>
    /// <param name="timestamps">Unix epoch milliseconds; 0 means use the current time.</param>
    /// <returns>A task representing the insert.</returns>
    [HttpGet]
    public async Task UseSessionPool(long timestamps)
    {
        string? messageHexString = null;
        if (timestamps == 0)
        {
            // No timestamp supplied: use "now" and log the generated value.
            timestamps = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds();
            _logger.LogError($"timestamps_{timestamps}");
        }
        else
        {
            // Fold the supplied timestamp into the hex-string payload.
            messageHexString = messageHexString + timestamps;
        }

        ElectricityMeter meter = new ElectricityMeter()
        {
            SystemName = "energy",
            DeviceId = "402440506",
            DeviceType = "Ammeter",
            Current = 10,
            MeterModel = "DDZY-1980",
            ProjectCode = "10059",
            Voltage = 10,
            IssuedMessageHexString = messageHexString,
            Timestamps = timestamps,
        };
        await _iotDBProvider.InsertAsync(meter);
    }

    /// <summary>
    /// Tests switching to the table session pool between two inserts.
    /// </summary>
    /// <returns>A task representing the inserts.</returns>
    [HttpGet]
    public async Task UseTableSessionPool()
    {
        ElectricityMeter meter2 = new ElectricityMeter()
        {
            SystemName = "energy",
            DeviceId = "402440506",
            DeviceType = "Ammeter",
            Current = 10,
            MeterModel = "DDZY-1980",
            ProjectCode = "10059",
            Voltage = 10,
            Timestamps = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds(),
        };

        await _iotDBProvider.InsertAsync(meter2);

        // Second insert goes through the table session pool.
        _dbContext.UseTableSessionPool = true;

        ElectricityMeter meter = new ElectricityMeter()
        {
            SystemName = "energy",
            DeviceId = "402440506",
            DeviceType = "Ammeter",
            Current = 10,
            MeterModel = "DDZY-1980",
            ProjectCode = "10059",
            Voltage = 10,
            Timestamps = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds(),
        };
        await _iotDBProvider.InsertAsync(meter);
    }

    /// <summary>
    /// Tests the device group balance control algorithm against concentrator
    /// addresses loaded from the Redis meter-info cache.
    /// </summary>
    /// <param name="deviceCount">Intended device count (currently unused; cached meters are used instead).</param>
    /// <returns>A task representing the test.</returns>
    [HttpGet]
    public async Task TestDeviceGroupBalanceControl(int deviceCount = 200000)
    {
        //var deviceList = new List<string>();
        //for (int i = 0; i < deviceCount; i++)
        //{
        //    deviceList.Add($"Device_{Guid.NewGuid()}");
        //}

        //// 初始化缓存
        //DeviceGroupBalanceControl.InitializeCache(deviceList);

        var timeDensity = "15";
        // Load the cached meter info entries.
        var redisKeyList = $"{string.Format(RedisConst.CacheMeterInfoHashKey, "Energy", "JiSheCollectBus", MeterTypeEnum.Ammeter.ToString(), timeDensity)}*";

        var oneMinutekeyList = await FreeRedisProvider.Instance.KeysAsync(redisKeyList);
        var meterInfos = await GetMeterRedisCacheListData<AmmeterInfo>(oneMinutekeyList, "Energy", "JiSheCollectBus", timeDensity, MeterTypeEnum.Ammeter);
        List<string> focusAddressDataLista = new List<string>();
        foreach (var item in meterInfos)
        {
            focusAddressDataLista.Add(item.FocusAddress);
        }

        DeviceGroupBalanceControl.InitializeCache(focusAddressDataLista);

        // Print distribution statistics.
        DeviceGroupBalanceControl.PrintDistributionStats();

        await Task.CompletedTask;
    }

    /// <summary>
    /// Tests resolving the group id for a device address via the balance control algorithm.
    /// </summary>
    /// <param name="deviceAddress">Device address to look up.</param>
    /// <returns>A task representing the test.</returns>
    [HttpGet]
    public async Task TestGetDeviceGroupBalanceControl(string deviceAddress)
    {
        var groupId = DeviceGroupBalanceControl.GetDeviceGroupId(deviceAddress);
        Console.WriteLine(groupId);

        await Task.CompletedTask;
    }


    /// <summary>
    /// Tests inserting a single measuring-point AFN data item.
    /// </summary>
    /// <param name="measuring">Measuring point identifier.</param>
    /// <param name="value">Measured value.</param>
    /// <returns>A task representing the insert.</returns>
    [HttpGet]
    public async Task TestSingleMeasuringAFNData(string measuring, string value)
    {
        var meter = new SingleMeasuringAFNDataEntity<string>()
        {
            SystemName = "energy",
            DeviceId = "402440506",
            DeviceType = "Ammeter",
            ProjectCode = "10059",
            Timestamps = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds(),
            SingleMeasuring = new Tuple<string, string>(measuring, value)
        };
        await _iotDBProvider.InsertAsync(meter);
    }

    /// <summary>
    /// Tests Redis bulk-read performance by paging through ~100k cached meter
    /// records and feeding the result into the balance control algorithm.
    /// </summary>
    /// <returns>A task representing the test.</returns>
    [HttpGet]
    public async Task TestRedisCacheGetAllPagedData()
    {
        var timeDensity = "15";
        string SystemType = "Energy";
        string ServerTagName = "JiSheCollectBus2";
        var redisCacheMeterInfoHashKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoHashKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}";
        var redisCacheMeterInfoSetIndexKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoSetIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}";
        var redisCacheMeterInfoZSetScoresIndexKeyTemp = $"{string.Format(RedisConst.CacheMeterInfoZSetScoresIndexKey, SystemType, ServerTagName, MeterTypeEnum.Ammeter, timeDensity)}";

        var timer1 = Stopwatch.StartNew();
        decimal? cursor = null;
        string member = null;
        bool hasNext;
        List<AmmeterInfo> meterInfos = new List<AmmeterInfo>();
        // Drain every page via the cursor-based paged read.
        do
        {
            var page = await _redisDataCacheService.GetAllPagedData<AmmeterInfo>(
                redisCacheMeterInfoHashKeyTemp,
                redisCacheMeterInfoZSetScoresIndexKeyTemp,
                pageSize: 1000,
                lastScore: cursor,
                lastMember: member);

            meterInfos.AddRange(page.Items);
            cursor = page.HasNext ? page.NextScore : null;
            member = page.HasNext ? page.NextMember : null;
            hasNext = page.HasNext;
        } while (hasNext);

        timer1.Stop();
        _logger.LogError($"读取数据更花费时间{timer1.ElapsedMilliseconds}毫秒");

        List<string> focusAddressDataLista = new List<string>();
        foreach (var item in meterInfos)
        {
            focusAddressDataLista.Add(item.FocusAddress);
        }

        DeviceGroupBalanceControl.InitializeCache(focusAddressDataLista);

        // Print distribution statistics.
        DeviceGroupBalanceControl.PrintDistributionStats();

        await Task.CompletedTask;
    }


    /// <summary>
    /// Returns a fixed sample DTO (anonymous access).
    /// </summary>
    public Task<SampleDto> GetAsync()
    {
        return Task.FromResult(
            new SampleDto
            {
                Value = 42
            }
        );
    }

    /// <summary>
    /// Returns a fixed sample DTO, requiring an authorized caller.
    /// </summary>
    [Authorize]
    public Task<SampleDto> GetAuthorizedAsync()
    {
        return Task.FromResult(
            new SampleDto
            {
                Value = 42
            }
        );
    }

    /// <summary>
    /// Returns up to 10 base ammeter rows for customer 5 from the prepay database.
    /// </summary>
    [AllowAnonymous]
    public async Task<List<Vi_BaseAmmeterInfo>> Test()
    {

        var ammeterList = await SqlProvider.Instance.Change(DbEnum.PrepayDB).Select<Vi_BaseAmmeterInfo>().Where(d => d.TB_CustomerID == 5).Take(10).ToListAsync();
        return ammeterList;
    }

    /// <summary>
    /// Checks whether the keyed test protocol plugin is NOT registered
    /// (true when the plugin could not be resolved).
    /// </summary>
    [AllowAnonymous]
    public bool GetTestProtocol()
    {
        var aa = LazyServiceProvider.GetKeyedService<IProtocolPlugin>("TestProtocolPlugin");
        return aa == null;
    }

    /// <summary>
    /// Kafka subscription handler for the test topic; logs and acknowledges.
    /// </summary>
    /// <param name="obj">The received message payload.</param>
    /// <returns>A success acknowledgement.</returns>
    [KafkaSubscribe(ProtocolConst.TESTTOPIC)]
    public async Task<ISubscribeAck> KafkaSubscribeAsync(object obj)
    {
        _logger.LogWarning($"收到订阅消息: {obj}");
        return SubscribeAck.Success();
    }
}
|
||||
|
||||
@ -1,125 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
using Apache.IoTDB.DataStructure;
|
||||
using Apache.IoTDB;
|
||||
using Confluent.Kafka;
|
||||
using JiShe.CollectBus.Ammeters;
|
||||
using JiShe.CollectBus.FreeSql;
|
||||
using JiShe.CollectBus.IotSystems.PrepayModel;
|
||||
using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using JiShe.CollectBus.Common.Helpers;
|
||||
using JiShe.CollectBus.IotSystems.AFNEntity;
|
||||
using JiShe.CollectBus.Protocol.Contracts.Interfaces;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using JiShe.CollectBus.Cassandra;
|
||||
using JiShe.CollectBus.Common.Enums;
|
||||
using JiShe.CollectBus.Common.Extensions;
|
||||
using JiShe.CollectBus.IotSystems.MessageIssueds;
|
||||
using Volo.Abp.Application.Services;
|
||||
using JiShe.CollectBus.IotSystems.MessageReceiveds;
|
||||
using Volo.Abp.Domain.Repositories;
|
||||
using System.Diagnostics;
|
||||
using System.Linq;
|
||||
using Cassandra;
|
||||
|
||||
namespace JiShe.CollectBus.Samples;
|
||||
|
||||
/// <summary>
/// Test application service for benchmarking Cassandra inserts, both via the
/// repository (row-by-row) and via prepared-statement batches.
/// </summary>
[AllowAnonymous]
public class TestAppService : CollectBusAppService
{
    private readonly ILogger<TestAppService> _logger;
    private readonly ICassandraRepository<MessageIssued, string> _messageReceivedCassandraRepository;
    private readonly ICassandraProvider _cassandraProvider;




    public TestAppService(
        ILogger<TestAppService> logger,
        ICassandraRepository<MessageIssued, string> messageReceivedCassandraRepository, ICassandraProvider cassandraProvider)
    {
        _logger = logger;
        _messageReceivedCassandraRepository = messageReceivedCassandraRepository;
        _cassandraProvider = cassandraProvider;
    }

    /// <summary>
    /// Inserts 10,000 MessageIssued rows one-by-one through the repository and
    /// logs the elapsed time.
    /// </summary>
    public async Task AddMessageOfCassandra()
    {
        var stopwatch = Stopwatch.StartNew();
        for (int i = 1; i <= 10000; i++)
        {
            var str = Guid.NewGuid().ToString();
            await _messageReceivedCassandraRepository.InsertAsync(new MessageIssued
            {
                ClientId = str,
                DeviceNo = i.ToString(),
                MessageId = str,
                Type = IssuedEventType.Data,
                Id = str,
                Message = str.GetBytes()
            });
        }
        stopwatch.Stop();
        _logger.LogWarning($"插入 {10000} 条记录完成,耗时: {stopwatch.ElapsedMilliseconds} 毫秒");
    }

    /// <summary>
    /// Inserts 100,000 MessageIssued rows using a prepared statement and batched
    /// async execution, logging only the execution time (not record building).
    /// </summary>
    public async Task AddMessageOfBulkInsertCassandra()
    {
        var records = new List<MessageIssued>();
        var prepared = await _cassandraProvider.Session.PrepareAsync(
            $"INSERT INTO {_cassandraProvider.CassandraConfig.Keyspace}.{nameof(MessageIssued)} (id, clientid, message, deviceno,type,messageid) VALUES (?, ?, ?, ?, ?, ?)");

        for (int i = 1; i <= 100000; i++)
        {
            var str = Guid.NewGuid().ToString();
            records.Add(new MessageIssued
            {
                ClientId = str,
                DeviceNo = i.ToString(),
                MessageId = str,
                Type = IssuedEventType.Data,
                Id = str,
                Message = str.GetBytes()
            });
        }
        var stopwatch = Stopwatch.StartNew();
        await BulkInsertAsync(_cassandraProvider.Session, prepared, records);
        stopwatch.Stop();
        _logger.LogWarning($"插入 {100000} 条记录完成,耗时: {stopwatch.ElapsedMilliseconds} 毫秒");
    }

    /// <summary>
    /// Executes the inserts in batches of up to 1000 bound statements and awaits
    /// all in-flight batches at the end.
    /// </summary>
    /// <param name="session">Cassandra session used to execute the batches.</param>
    /// <param name="prepared">Prepared INSERT statement to bind each record to.</param>
    /// <param name="records">Records to insert.</param>
    private static async Task BulkInsertAsync(ISession session, PreparedStatement prepared, List<MessageIssued> records)
    {
        var tasks = new List<Task>();
        var batch = new BatchStatement();

        for (int i = 0; i < records.Count; i++)
        {
            var record = records[i];
            var boundStatement = prepared.Bind(
                record.Id,
                record.ClientId,
                record.Message,
                record.DeviceNo,
                (int)record.Type,
                record.MessageId);

            // Consistency level ONE favors throughput over stronger guarantees.
            boundStatement.SetConsistencyLevel(ConsistencyLevel.One);

            batch.Add(boundStatement);

            // Flush when the batch reaches 1000 statements or at the last record.
            if (batch.Statements.Count() >= 1000 || i == records.Count - 1)
            {
                tasks.Add(session.ExecuteAsync(batch));
                batch = new BatchStatement();
            }
        }

        // Wait for every submitted batch to complete.
        await Task.WhenAll(tasks);
    }
}
|
||||
File diff suppressed because it is too large
Load Diff
@ -1,216 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
using Confluent.Kafka;
|
||||
using DotNetCore.CAP;
|
||||
using JiShe.CollectBus.Ammeters;
|
||||
using JiShe.CollectBus.Application.Contracts;
|
||||
using JiShe.CollectBus.Common.Consts;
|
||||
using JiShe.CollectBus.Common.DeviceBalanceControl;
|
||||
using JiShe.CollectBus.Common.Helpers;
|
||||
using JiShe.CollectBus.FreeSql;
|
||||
using JiShe.CollectBus.GatherItem;
|
||||
using JiShe.CollectBus.IoTDB.Interface;
|
||||
using JiShe.CollectBus.IotSystems.Devices;
|
||||
using JiShe.CollectBus.IotSystems.MessageIssueds;
|
||||
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
|
||||
using JiShe.CollectBus.IotSystems.Watermeter;
|
||||
using JiShe.CollectBus.Kafka;
|
||||
using JiShe.CollectBus.Kafka.Producer;
|
||||
using JiShe.CollectBus.Repository;
|
||||
using JiShe.CollectBus.Repository.MeterReadingRecord;
|
||||
using MassTransit;
|
||||
using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Volo.Abp.Domain.Repositories;
|
||||
using Volo.Abp.Uow;
|
||||
|
||||
namespace JiShe.CollectBus.ScheduledMeterReading
|
||||
{
|
||||
/// <summary>
|
||||
/// 能耗系统定时采集服务
|
||||
/// </summary>
|
||||
[AllowAnonymous]
|
||||
//[Route($"/energy/app/scheduled")]
|
||||
public class EnergySystemScheduledMeterReadingService : BasicScheduledMeterReadingService
|
||||
{
|
||||
        // Server tag read from the Kafka options; backs the ServerTagName property.
        string serverTagName = string.Empty;

        /// <summary>
        /// Creates the energy-system scheduled meter reading service, forwarding
        /// its dependencies to the base scheduled reading service.
        /// </summary>
        public EnergySystemScheduledMeterReadingService(
            ILogger<EnergySystemScheduledMeterReadingService> logger,
            IIoTDBProvider dbProvider,
            IMeterReadingRecordRepository meterReadingRecordRepository,
            IOptions<KafkaOptionConfig> kafkaOptions,
            IProducerService producerService,
            IRedisDataCacheService redisDataCacheService)
            : base(logger,
                meterReadingRecordRepository,
                producerService,
                redisDataCacheService,
                dbProvider,
                kafkaOptions)
        {
            serverTagName = kafkaOptions.Value.ServerTagName;
        }
||||
|
||||
        /// <summary>System type handled by this service (Energy).</summary>
        public sealed override string SystemType => SystemTypeConst.Energy;

        /// <summary>Server tag name taken from the Kafka configuration.</summary>
        public sealed override string ServerTagName => serverTagName;
|
||||
|
||||
        /// <summary>
        /// Gets the list of enabled gather items (DataType/ItemCode pairs) from the
        /// energy database.
        /// </summary>
        /// <returns>The gather item list, or null when the query fails.</returns>
        public override async Task<List<GatherItemInfo>> GetGatherItemByDataTypes()
        {
            try
            {
                // Only items whose State is 0 (enabled) are returned.
                string sql = $"SELECT DataType,ItemCode FROM TB_GatherItem(NOLOCK) WHERE [State]=0";
                return await SqlProvider.Instance.Change(DbEnum.EnergyDB)
                    .Ado
                    .QueryAsync<GatherItemInfo>(sql, null);
            }
            catch
            {
                // NOTE(review): failures are silently swallowed and surfaced to the
                // caller as null — callers must null-check; consider logging the
                // exception before returning.
                return null;
            }
        }
|
||||
|
||||
        /// <summary>
        /// Gets electricity meter info (meter, concentrator, gather item and project
        /// fields joined together) from the energy database.
        /// </summary>
        /// <param name="gatherCode">Gather endpoint code (currently not applied — the filter is commented out).</param>
        /// <returns>The ammeter info list.</returns>
        //[HttpGet]
        //[Route($"ammeter/list")]
        public override async Task<List<AmmeterInfo>> GetAmmeterInfoList(string gatherCode = "V4-Gather-8890")
        {

            //List<AmmeterInfo> ammeterInfos = new List<AmmeterInfo>();
            //ammeterInfos.Add(new AmmeterInfo()
            //{
            //    Baudrate = 2400,
            //    FocusAddress = "402440506",
            //    Name = "张家祠工务(三相电表)",
            //    FocusId = 95780,
            //    DatabaseBusiID = 1,
            //    MeteringCode = 1,
            //    AmmerterAddress = "402410040506",
            //    MeterId = 127035,
            //    TypeName = 3,
            //    DataTypes = "449,503,581,582,583,584,585,586,587,588,589,590,591,592,593,594,597,598,599,600,601,602,603,604,605,606,607,608,661,663,677,679",
            //    TimeDensity = 15,
            //});
            //ammeterInfos.Add(new AmmeterInfo()
            //{
            //    Baudrate = 2400,
            //    FocusAddress = "542400504",
            //    Name = "五号配(长芦二所四排)(单相电表)",
            //    FocusId = 69280,
            //    DatabaseBusiID = 1,
            //    MeteringCode = 2,
            //    AmmerterAddress = "542410000504",
            //    MeterId = 95594,
            //    TypeName = 1,
            //    DataTypes = "581,589,592,597,601",
            //    TimeDensity = 15,
            //});

            //return ammeterInfos;

            string sql = $@"SELECT C.ID as MeterId,C.Name,C.FocusID as FocusId,C.SingleRate,C.MeteringCode,C.Code AS BrandType,C.Baudrate,C.Password,C.MeteringPort,C.[Address] AS AmmerterAddress,C.TypeName,C.Protocol,C.TripState,C.[State],B.[Address],B.AreaCode,B.AutomaticReport,D.DataTypes,B.TimeDensity,A.GatherCode,C.Special,C.[ProjectID],B.AbnormalState,B.LastTime,CONCAT(B.AreaCode, B.[Address]) AS FocusAddress,(select top 1 DatabaseBusiID from TB_Project where ID = B.ProjectID) AS DatabaseBusiID
                FROM TB_GatherInfo(NOLOCK) AS A
                INNER JOIN TB_FocusInfo(NOLOCK) AS B ON A.ID = B.GatherInfoID AND B.RemoveState >= 0 AND B.State>=0
                INNER JOIN TB_AmmeterInfo(NOLOCK) AS C ON B.ID = C.FocusID AND C.State>= 0 AND C.State<100
                INNER JOIN TB_AmmeterGatherItem(NOLOCK) AS D ON C.ID = D.AmmeterID AND D.State>=0
                WHERE 1=1 and C.Special = 0 ";
            // TODO remember to remove the special-meter filter (C.Special = 0).

            //if (!string.IsNullOrWhiteSpace(gatherCode))
            //{
            //    sql = $@"{sql} AND A.GatherCode = '{gatherCode}'";
            //}
            return await SqlProvider.Instance.Change(DbEnum.EnergyDB)
                .Ado
                .QueryAsync<AmmeterInfo>(sql);
        }
|
||||
|
||||
/// <summary>
/// Gets the water-meter archive list: meter (A) -> concentrator (focus, B) ->
/// collector (C). Only non-removed, non-retired meters are returned.
/// </summary>
/// <param name="gatherCode">Collector code; when non-blank, results are filtered to that collector.</param>
/// <returns>Active water meters with their concentrator and collector info.</returns>
//[HttpGet]
//[Route($"ammeter/list")]
public override async Task<List<WatermeterInfo>> GetWatermeterInfoList(string gatherCode = "V4-Gather-8890")
{
    string sql = $@"SELECT
A.ID as MeterId,
A.Name,
A.FocusID as FocusId,
A.MeteringCode,
A.Baudrate,
A.MeteringPort,
A.[Address] AS MeterAddress,
A.[Password],
A.TypeName,
A.Protocol,
A.Code,
A.LinkType,
A.HaveValve,
A.MeterType AS MeterTypeName,
A.MeterBrand,
A.TimesRate,
A.TimeDensity,
A.TripState,
B.[Address],
B.AreaCode,
B.AutomaticReport,
A.[State],
C.GatherCode,
A.[ProjectID],
B.AbnormalState,
B.LastTime,
CONCAT(B.AreaCode, B.[Address]) AS FocusAddress,
(select top 1 DatabaseBusiID from TB_Project where ID = b.ProjectID) AS DatabaseBusiID
FROM [dbo].[TB_WatermeterInfo](NOLOCK) AS A
INNER JOIN [dbo].[TB_FocusInfo](NOLOCK) AS B ON A.FocusID=B.ID AND B.RemoveState >= 0 AND B.State>=0
INNER JOIN [dbo].[TB_GatherInfo](NOLOCK) AS C ON B.GatherInfoID=C.ID
WHERE A.State>=0 AND A.State<100 ";

    if (!string.IsNullOrWhiteSpace(gatherCode))
    {
        // Security fix: gatherCode can originate from an HTTP caller and was
        // previously interpolated into the SQL text verbatim (SQL injection).
        // Doubling single quotes neutralizes string-breakout in T-SQL; prefer a
        // parameterized query if the Ado layer supports one.
        sql = $@"{sql} AND C.GatherCode= '{gatherCode.Replace("'", "''")}'";
    }
    return await SqlProvider.Instance.Change(DbEnum.EnergyDB)
        .Ado
        .QueryAsync<WatermeterInfo>(sql);
}
|
||||
|
||||
|
||||
/// <summary>
/// Exercises the device-group balancing algorithm with synthetic device ids and
/// prints the resulting distribution statistics.
/// </summary>
/// <param name="deviceCount">How many fake device ids to generate.</param>
/// <returns>A completed task; output goes to the balance-control's own reporting.</returns>
[HttpGet]
public async Task TestDeviceGroupBalanceControl(int deviceCount = 200000)
{
    // Generate deviceCount unique synthetic ids (presized to avoid regrowth).
    var devices = new List<string>(deviceCount);
    var remaining = deviceCount;
    while (remaining-- > 0)
    {
        devices.Add($"Device_{Guid.NewGuid()}");
    }

    // Seed the balancing cache, then dump how the devices were distributed.
    DeviceGroupBalanceControl.InitializeCache(devices);
    DeviceGroupBalanceControl.PrintDistributionStats();

    await Task.CompletedTask;
}
|
||||
}
|
||||
}
|
||||
@ -1,224 +0,0 @@
|
||||
using DotNetCore.CAP;
|
||||
using JiShe.CollectBus.Common.Consts;
|
||||
using JiShe.CollectBus.Common.Enums;
|
||||
using JiShe.CollectBus.Common.Helpers;
|
||||
using JiShe.CollectBus.Common.Models;
|
||||
using JiShe.CollectBus.IotSystems.Devices;
|
||||
using JiShe.CollectBus.IotSystems.MessageReceiveds;
|
||||
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
|
||||
using JiShe.CollectBus.Kafka;
|
||||
using JiShe.CollectBus.Kafka.Attributes;
|
||||
using JiShe.CollectBus.Protocol.Contracts;
|
||||
using JiShe.CollectBus.Protocol.Contracts.Interfaces;
|
||||
using JiShe.CollectBus.Protocol.Contracts.Models;
|
||||
using JiShe.CollectBus.Repository.MeterReadingRecord;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using System;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using JiShe.CollectBus.IoTDB.Interface;
|
||||
using TouchSocket.Sockets;
|
||||
using Volo.Abp.Domain.Repositories;
|
||||
|
||||
namespace JiShe.CollectBus.Subscribers
|
||||
{
|
||||
/// <summary>
/// Consumes protocol-level messages (login, heartbeat, data frames) from Kafka/CAP,
/// persists them, and forwards issued replies back to concentrators over TCP.
/// </summary>
public class SubscriberAppService : CollectBusAppService, ISubscriberAppService, ICapSubscribe, IKafkaSubscribe
{
    private readonly ILogger<SubscriberAppService> _logger;
    private readonly ITcpService _tcpService;
    private readonly IServiceProvider _serviceProvider;
    private readonly IRepository<MessageReceivedLogin, Guid> _messageReceivedLoginEventRepository;
    private readonly IRepository<MessageReceivedHeartbeat, Guid> _messageReceivedHeartbeatEventRepository;
    private readonly IRepository<MessageReceived, Guid> _messageReceivedEventRepository;
    private readonly IRepository<Device, Guid> _deviceRepository;
    private readonly IMeterReadingRecordRepository _meterReadingRecordsRepository;
    private readonly IIoTDBProvider _dbProvider;

    /// <summary>
    /// Initializes a new instance of the <see cref="SubscriberAppService"/> class.
    /// </summary>
    /// <param name="logger">The logger.</param>
    /// <param name="tcpService">The TCP service used to send frames to concentrators.</param>
    /// <param name="serviceProvider">The service provider (used to resolve keyed protocol plugins).</param>
    /// <param name="messageReceivedLoginEventRepository">The message received login event repository.</param>
    /// <param name="messageReceivedHeartbeatEventRepository">The message received heartbeat event repository.</param>
    /// <param name="messageReceivedEventRepository">The message received event repository.</param>
    /// <param name="deviceRepository">The device repository.</param>
    /// <param name="dbProvider">The IoTDB provider.</param>
    /// <param name="meterReadingRecordsRepository">The meter-reading record repository.</param>
    public SubscriberAppService(ILogger<SubscriberAppService> logger,
        ITcpService tcpService, IServiceProvider serviceProvider,
        IRepository<MessageReceivedLogin, Guid> messageReceivedLoginEventRepository,
        IRepository<MessageReceivedHeartbeat, Guid> messageReceivedHeartbeatEventRepository,
        IRepository<MessageReceived, Guid> messageReceivedEventRepository,
        IRepository<Device, Guid> deviceRepository,
        IIoTDBProvider dbProvider,
        IMeterReadingRecordRepository meterReadingRecordsRepository)
    {
        _logger = logger;
        _tcpService = tcpService;
        _serviceProvider = serviceProvider;
        _messageReceivedLoginEventRepository = messageReceivedLoginEventRepository;
        _messageReceivedHeartbeatEventRepository = messageReceivedHeartbeatEventRepository;
        _messageReceivedEventRepository = messageReceivedEventRepository;
        _deviceRepository = deviceRepository;
        _meterReadingRecordsRepository = meterReadingRecordsRepository;
        _dbProvider = dbProvider;
    }

    /// <summary>
    /// Handles a login-reply "issued" event: marks the stored login message as
    /// acknowledged and forwards the reply frame to the concentrator over TCP.
    /// Returns Success only when the message type was Login and the ack was recorded.
    /// </summary>
    [KafkaSubscribe(ProtocolConst.SubscriberLoginIssuedEventName)]
    //[CapSubscribe(ProtocolConst.SubscriberLoginIssuedEventName)]
    public async Task<ISubscribeAck> LoginIssuedEvent(IssuedEventMessage issuedEventMessage)
    {
        bool isAck = false;
        switch (issuedEventMessage.Type)
        {
            case IssuedEventType.Heartbeat:
                break;
            case IssuedEventType.Login:
                // Log the reply payload sent to the concentrator (ClientId = concentrator address).
                _logger.LogWarning($"集中器地址{issuedEventMessage.ClientId} 登录回复下发内容:{issuedEventMessage.Serialize()}");
                var loginEntity = await _messageReceivedLoginEventRepository.GetAsync(a => a.MessageId == issuedEventMessage.MessageId);
                loginEntity.AckTime = Clock.Now;
                loginEntity.IsAck = true;
                await _messageReceivedLoginEventRepository.UpdateAsync(loginEntity);
                isAck = true;
                break;
            case IssuedEventType.Data:
                break;
            default:
                throw new ArgumentOutOfRangeException();
        }

        //var device = await _deviceRepository.FindAsync(a => a.Number == issuedEventMessage.DeviceNo);
        //if (device != null)
        //{
        //    await _tcpService.SendAsync(device.ClientId, issuedEventMessage.Message);
        //}

        // The frame is sent regardless of ack state; only the return value differs.
        await _tcpService.SendAsync(issuedEventMessage.ClientId, issuedEventMessage.Message);
        return isAck? SubscribeAck.Success(): SubscribeAck.Fail();
    }

    /// <summary>
    /// Handles a heartbeat-reply "issued" event: marks the stored heartbeat message
    /// as acknowledged and forwards the reply frame to the concentrator over TCP.
    /// </summary>
    [KafkaSubscribe(ProtocolConst.SubscriberHeartbeatIssuedEventName)]
    //[CapSubscribe(ProtocolConst.SubscriberHeartbeatIssuedEventName)]
    public async Task<ISubscribeAck> HeartbeatIssuedEvent(IssuedEventMessage issuedEventMessage)
    {
        bool isAck = false;
        switch (issuedEventMessage.Type)
        {
            case IssuedEventType.Heartbeat:
                // Log the reply payload sent to the concentrator.
                _logger.LogWarning($"集中器地址{issuedEventMessage.ClientId} 心跳回复下发内容:{issuedEventMessage.Serialize()}");
                var heartbeatEntity = await _messageReceivedHeartbeatEventRepository.GetAsync(a => a.MessageId == issuedEventMessage.MessageId);
                heartbeatEntity.AckTime = Clock.Now;
                heartbeatEntity.IsAck = true;
                await _messageReceivedHeartbeatEventRepository.UpdateAsync(heartbeatEntity);
                isAck = true;
                break;
            case IssuedEventType.Data:
                break;
            default:
                throw new ArgumentOutOfRangeException();
        }

        //var device = await _deviceRepository.FindAsync(a => a.Number == issuedEventMessage.DeviceNo);
        //if (device != null)
        //{
        //    await _tcpService.SendAsync(device.ClientId, issuedEventMessage.Message);
        //}

        await _tcpService.SendAsync(issuedEventMessage.ClientId, issuedEventMessage.Message);
        return isAck ? SubscribeAck.Success() : SubscribeAck.Fail();
    }

    /// <summary>
    /// Handles an upstream data frame: parses it with the keyed "StandardProtocolPlugin"
    /// (TB3761 framing), then inserts a meter-reading record if none exists for this frame.
    /// Always returns Success (parse failures are logged and dropped, not retried).
    /// </summary>
    [KafkaSubscribe(ProtocolConst.SubscriberReceivedEventName)]
    //[CapSubscribe(ProtocolConst.SubscriberReceivedEventName)]
    public async Task<ISubscribeAck> ReceivedEvent(MessageReceived receivedMessage)
    {
        var currentTime = Clock.Now;

        var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
        if (protocolPlugin == null)
        {
            _logger.LogError("协议不存在!");
        }
        else
        {
            // TODO: parsing should dispatch per protocol before doing business handling.
            TB3761 fN = await protocolPlugin.AnalyzeAsync<TB3761>(receivedMessage);
            if(fN == null)
            {
                Logger.LogError($"数据解析失败:{receivedMessage.Serialize()}");
                return SubscribeAck.Success();
            }
            // Only the first Fn entry of the parsed frame is used here.
            var tb3761FN = fN.FnList.FirstOrDefault();
            if (tb3761FN == null)
            {
                Logger.LogError($"数据解析失败:{receivedMessage.Serialize()}");
                return SubscribeAck.Success();
            }

            // Persist the raw frame as a meter-reading record.
            // NOTE(review): Pn/FocusAddress/MeterAddress are placeholders here — confirm
            // whether they should be filled from the parsed TB3761 frame.
            var entity = new MeterReadingRecords()
            {
                ReceivedMessageHexString = receivedMessage.MessageHexString,
                AFN = fN.Afn,
                Fn = tb3761FN.Fn,
                Pn = 0,
                FocusAddress = "",
                MeterAddress = "",
            };

            // Insert only when no matching record exists; existing records are left as-is.
            var updateEntity = await _meterReadingRecordsRepository.FirOrDefaultAsync(entity, currentTime);
            if (updateEntity == null)
            {
                await _meterReadingRecordsRepository.InsertAsync(entity, currentTime);
            }


            //_dbProvider.InsertAsync();
            // TODO: look up whether there is a pending issued task for this frame.

            //await _messageReceivedEventRepository.InsertAsync(receivedMessage);


        }
        return SubscribeAck.Success();
    }

    /// <summary>
    /// Handles an upstream heartbeat: lets the protocol plugin process it, then
    /// persists the heartbeat message. Always returns Success.
    /// </summary>
    [KafkaSubscribe(ProtocolConst.SubscriberHeartbeatReceivedEventName)]
    //[CapSubscribe(ProtocolConst.SubscriberHeartbeatReceivedEventName)]
    public async Task<ISubscribeAck> ReceivedHeartbeatEvent(MessageReceivedHeartbeat receivedHeartbeatMessage)
    {
        var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
        if (protocolPlugin == null)
        {
            _logger.LogError("协议不存在!");
        }
        else
        {
            await protocolPlugin.HeartbeatAsync(receivedHeartbeatMessage);
            await _messageReceivedHeartbeatEventRepository.InsertAsync(receivedHeartbeatMessage);
        }
        return SubscribeAck.Success();
    }

    /// <summary>
    /// Handles an upstream login: lets the protocol plugin process it, then
    /// persists the login message. Always returns Success.
    /// </summary>
    [KafkaSubscribe(ProtocolConst.SubscriberLoginReceivedEventName)]
    //[CapSubscribe(ProtocolConst.SubscriberLoginReceivedEventName)]
    public async Task<ISubscribeAck> ReceivedLoginEvent(MessageReceivedLogin receivedLoginMessage)
    {
        var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
        if (protocolPlugin == null)
        {
            _logger.LogError("协议不存在!");
        }
        else
        {
            await protocolPlugin.LoginAsync(receivedLoginMessage);
            await _messageReceivedLoginEventRepository.InsertAsync(receivedLoginMessage);
        }
        return SubscribeAck.Success();
    }
}
|
||||
}
|
||||
@ -1,189 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
using DeviceDetectorNET.Parser.Device;
|
||||
using DotNetCore.CAP;
|
||||
using JiShe.CollectBus.Common.Consts;
|
||||
using JiShe.CollectBus.Common.Enums;
|
||||
using JiShe.CollectBus.IotSystems.Devices;
|
||||
using JiShe.CollectBus.IotSystems.MessageIssueds;
|
||||
using JiShe.CollectBus.IotSystems.MessageReceiveds;
|
||||
using JiShe.CollectBus.IotSystems.MeterReadingRecords;
|
||||
using JiShe.CollectBus.Kafka;
|
||||
using JiShe.CollectBus.Kafka.Attributes;
|
||||
using JiShe.CollectBus.Protocol.Contracts;
|
||||
using JiShe.CollectBus.Protocol.Contracts.Interfaces;
|
||||
using JiShe.CollectBus.Repository.MeterReadingRecord;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using TouchSocket.Sockets;
|
||||
using Volo.Abp.Caching;
|
||||
using Volo.Abp.Domain.Repositories;
|
||||
|
||||
namespace JiShe.CollectBus.Subscribers
|
||||
{
|
||||
/// <summary>
/// Consumer subscriptions for scheduled meter-reading messages: receives downstream
/// read commands from Kafka/CAP, resolves the target concentrator by its focus
/// address, and forwards the raw hex frame to it over TCP.
/// </summary>
[Route($"/worker/app/subscriber")]
public class WorkerSubscriberAppService : CollectBusAppService, IWorkerSubscriberAppService, ICapSubscribe, IKafkaSubscribe
{
    private readonly ILogger<WorkerSubscriberAppService> _logger;
    private readonly ITcpService _tcpService;
    private readonly IServiceProvider _serviceProvider;
    private readonly IRepository<Device, Guid> _deviceRepository;
    private readonly IMeterReadingRecordRepository _meterReadingRecordsRepository;

    /// <summary>
    /// Initializes a new instance of the <see cref="WorkerSubscriberAppService"/> class.
    /// </summary>
    /// <param name="logger">The logger.</param>
    /// <param name="tcpService">The TCP service used to send frames to concentrators.</param>
    /// <param name="deviceRepository">The device repository.</param>
    /// <param name="meterReadingRecordsRepository">The meter-reading record repository.</param>
    /// <param name="serviceProvider">The service provider (used to resolve keyed protocol plugins).</param>
    public WorkerSubscriberAppService(ILogger<WorkerSubscriberAppService> logger,
        ITcpService tcpService,
        IRepository<Device, Guid> deviceRepository,
        IMeterReadingRecordRepository meterReadingRecordsRepository,
        IServiceProvider serviceProvider)
    {
        _logger = logger;
        _tcpService = tcpService;
        _serviceProvider = serviceProvider;
        _deviceRepository = deviceRepository;
        _meterReadingRecordsRepository = meterReadingRecordsRepository;
    }


    #region Ammeter message collection

    /// <summary>
    /// One-minute scheduled ammeter read: forwards the issued frame to the
    /// concentrator whose Number matches the message's FocusAddress.
    /// </summary>
    /// <param name="receivedMessage">The issued read command (hex frame + focus address).</param>
    /// <returns>Always Success; a missing plugin or device is logged/ignored.</returns>
    [HttpPost]
    [Route("ammeter/oneminute/issued-event")]
    [KafkaSubscribe(ProtocolConst.AmmeterSubscriberWorkerOneMinuteIssuedEventName)]
    //[CapSubscribe(ProtocolConst.AmmeterSubscriberWorkerOneMinuteIssuedEventName)]
    public async Task<ISubscribeAck> AmmeterScheduledMeterOneMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage receivedMessage)
    {
        _logger.LogInformation("1分钟采集电表数据下行消息消费队列开始处理");
        var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
        if (protocolPlugin == null)
        {
            _logger.LogError("【1分钟采集电表数据下行消息消费队列开始处理】协议不存在!");
        }
        else
        {
            var device = await _deviceRepository.FirstOrDefaultAsync(a => a.Number == receivedMessage.FocusAddress);
            if (device != null)
            {
                await _tcpService.SendAsync(device.ClientId, Convert.FromHexString(receivedMessage.MessageHexString));
            }
        }
        return SubscribeAck.Success();
    }

    /// <summary>
    /// Five-minute scheduled ammeter read: forwards the issued frame to the
    /// concentrator whose Number matches the message's FocusAddress.
    /// </summary>
    /// <param name="receivedMessage">The issued read command (hex frame + focus address).</param>
    /// <returns>Always Success; a missing plugin or device is logged/ignored.</returns>
    [HttpPost]
    [Route("ammeter/fiveminute/issued-event")]
    [KafkaSubscribe(ProtocolConst.AmmeterSubscriberWorkerFiveMinuteIssuedEventName)]
    //[CapSubscribe(ProtocolConst.AmmeterSubscriberWorkerFiveMinuteIssuedEventName)]
    public async Task<ISubscribeAck> AmmeterScheduledMeterFiveMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage receivedMessage)
    {
        _logger.LogInformation("5分钟采集电表数据下行消息消费队列开始处理");
        var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
        if (protocolPlugin == null)
        {
            _logger.LogError("【5分钟采集电表数据下行消息消费队列开始处理】协议不存在!");
        }
        else
        {
            var device = await _deviceRepository.FirstOrDefaultAsync(a => a.Number == receivedMessage.FocusAddress);
            if (device != null)
            {
                await _tcpService.SendAsync(device.ClientId, Convert.FromHexString(receivedMessage.MessageHexString));
            }
        }
        return SubscribeAck.Success();
    }

    /// <summary>
    /// Fifteen-minute scheduled ammeter read: forwards the issued frame to the
    /// concentrator whose Number matches the message's FocusAddress.
    /// </summary>
    /// <param name="receivedMessage">The issued read command (hex frame + focus address).</param>
    /// <returns>Success on completion; unexpected exceptions are logged and rethrown.</returns>
    [HttpPost]
    [Route("ammeter/fifteenminute/issued-event")]
    [KafkaSubscribe(ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName)]
    //[CapSubscribe(ProtocolConst.AmmeterSubscriberWorkerFifteenMinuteIssuedEventName)]
    public async Task<ISubscribeAck> AmmeterScheduledMeterFifteenMinuteReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage receivedMessage)
    {
        _logger.LogInformation("15分钟采集电表数据下行消息消费队列开始处理");
        try
        {
            var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
            if (protocolPlugin == null)
            {
                _logger.LogError("【15分钟采集电表数据下行消息消费队列开始处理】协议不存在!");
            }
            else
            {
                var device = await _deviceRepository.FirstOrDefaultAsync(a => a.Number == receivedMessage.FocusAddress);
                if (device != null)
                {
                    await _tcpService.SendAsync(device.ClientId, Convert.FromHexString(receivedMessage.MessageHexString));
                }
            }
            return SubscribeAck.Success();
        }
        catch (Exception ex)
        {
            // Fix: the original `throw ex;` reset the stack trace and the catch
            // added no value. Log for diagnostics, then rethrow with `throw;` so
            // the original exception (and its stack) propagates to the dispatcher.
            _logger.LogError(ex, "15分钟采集电表数据下行消息消费队列开始处理");
            throw;
        }
    }
    #endregion

    #region Water-meter message collection

    /// <summary>
    /// Scheduled water-meter read: forwards the issued frame to the concentrator
    /// whose Number matches the message's FocusAddress.
    /// </summary>
    /// <param name="receivedMessage">The issued read command (hex frame + focus address).</param>
    /// <returns>Always Success; a missing plugin or device is logged/ignored.</returns>
    [HttpPost]
    [Route("watermeter/fifteenminute/issued-event")]
    [KafkaSubscribe(ProtocolConst.WatermeterSubscriberWorkerAutoReadingIssuedEventName)]
    //[CapSubscribe(ProtocolConst.WatermeterSubscriberWorkerAutoReadingIssuedEventName)]
    public async Task<ISubscribeAck> WatermeterSubscriberWorkerAutoReadingIssuedEvent(ScheduledMeterReadingIssuedEventMessage receivedMessage)
    {
        _logger.LogInformation("15分钟采集水表数据下行消息消费队列开始处理");
        var protocolPlugin = _serviceProvider.GetKeyedService<IProtocolPlugin>("StandardProtocolPlugin");
        if (protocolPlugin == null)
        {
            _logger.LogError("【15分钟采集水表数据下行消息消费队列开始处理】协议不存在!");
        }
        else
        {
            // NOTE(review): the ammeter handlers use FirstOrDefaultAsync here while
            // this one uses FindAsync — confirm whether the difference is intentional.
            var device = await _deviceRepository.FindAsync(a => a.Number == receivedMessage.FocusAddress);
            if (device != null)
            {
                await _tcpService.SendAsync(device.ClientId, Convert.FromHexString(receivedMessage.MessageHexString));
            }
        }
        return SubscribeAck.Success();
    }
    #endregion
}
|
||||
}
|
||||
@ -1,40 +0,0 @@
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Hangfire;
|
||||
using JiShe.CollectBus.Common.Consts;
|
||||
using JiShe.CollectBus.ScheduledMeterReading;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Volo.Abp.BackgroundWorkers.Hangfire;
|
||||
using Volo.Abp.DependencyInjection;
|
||||
using Volo.Abp.Uow;
|
||||
|
||||
namespace JiShe.CollectBus.Workers
|
||||
{
|
||||
/// <summary>
/// Hangfire recurring worker that builds the pending "to be issued" command tasks.
/// The actual work is currently disabled (commented out in <see cref="DoWorkAsync"/>).
/// </summary>
public class CreateToBeIssueTaskWorker : HangfireBackgroundWorkerBase, ITransientDependency, ICollectWorker
{
    private readonly ILogger<CreateToBeIssueTaskWorker> _logger;
    private readonly IScheduledMeterReadingService _scheduledMeterReadingService;

    /// <summary>
    /// Initializes a new instance of the <see cref="CreateToBeIssueTaskWorker"/> class.
    /// </summary>
    /// <param name="logger">The logger.</param>
    /// <param name="scheduledMeterReadingService">Scheduled meter-reading service.</param>
    public CreateToBeIssueTaskWorker(ILogger<CreateToBeIssueTaskWorker> logger, IScheduledMeterReadingService scheduledMeterReadingService)
    {
        _logger = logger;
        RecurringJobId = nameof(CreateToBeIssueTaskWorker);
        // NOTE(review): Hangfire accepts a 6-field cron (seconds first), so
        // "* 10 * * * *" fires every second during minute 10 of each hour.
        // If "every 10 minutes" was intended, "0 */10 * * * *" is the likely
        // fix — confirm before changing.
        CronExpression = "* 10 * * * *";
        this._scheduledMeterReadingService = scheduledMeterReadingService;
    }


    // Job body; the task-creation call is currently disabled.
    public override async Task DoWorkAsync(CancellationToken cancellationToken = new CancellationToken())
    {
        // await _scheduledMeterReadingService.CreateToBeIssueTasks();
    }
}
|
||||
}
|
||||
@ -1,46 +0,0 @@
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Hangfire;
|
||||
using JiShe.CollectBus.ScheduledMeterReading;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Volo.Abp.BackgroundWorkers.Hangfire;
|
||||
using Volo.Abp.DependencyInjection;
|
||||
using Volo.Abp.Uow;
|
||||
|
||||
namespace JiShe.CollectBus.Workers
|
||||
{
|
||||
/// <summary>
/// Hangfire recurring worker for the 15-minute data-collection cycle.
/// The actual reading calls are currently disabled (commented out in <see cref="DoWorkAsync"/>).
/// </summary>
public class SubscriberFifteenMinuteWorker : HangfireBackgroundWorkerBase, ITransientDependency, ICollectWorker
{
    private readonly ILogger<SubscriberFifteenMinuteWorker> _logger;
    private readonly IScheduledMeterReadingService _scheduledMeterReadingService;

    /// <summary>
    /// Initializes a new instance of the <see cref="SubscriberFifteenMinuteWorker"/> class.
    /// </summary>
    /// <param name="logger">The logger.</param>
    /// <param name="scheduledMeterReadingService">Scheduled meter-reading service.</param>
    public SubscriberFifteenMinuteWorker(ILogger<SubscriberFifteenMinuteWorker> logger, IScheduledMeterReadingService scheduledMeterReadingService)
    {
        _logger = logger;
        RecurringJobId = nameof(SubscriberFifteenMinuteWorker);
        // NOTE(review): with Hangfire's 6-field cron (seconds first),
        // "* 15 * * * *" fires every second during minute 15 of each hour;
        // "every 15 minutes" would be "0 */15 * * * *" — confirm intent.
        CronExpression = "* 15 * * * *";
        this._scheduledMeterReadingService = scheduledMeterReadingService;
    }


    // Job body; the 15-minute ammeter/water-meter reads are currently disabled.
    public override async Task DoWorkAsync(CancellationToken cancellationToken = new CancellationToken())
    {
        //await _scheduledMeterReadingService.AmmeterScheduledMeterFifteenMinuteReading();
        //await _scheduledMeterReadingService.WatermeterScheduledMeterFifteenMinuteReading();

        //using (var uow = LazyServiceProvider.LazyGetRequiredService<IUnitOfWorkManager>().Begin())
        //{
        //    Logger.LogInformation("Executed MyLogWorker..!");
        //    return Task.CompletedTask;
        //}
    }
}
|
||||
}
|
||||
@ -1,40 +0,0 @@
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Hangfire;
|
||||
using JiShe.CollectBus.ScheduledMeterReading;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Volo.Abp.BackgroundWorkers.Hangfire;
|
||||
using Volo.Abp.DependencyInjection;
|
||||
using Volo.Abp.Uow;
|
||||
|
||||
namespace JiShe.CollectBus.Workers
|
||||
{
|
||||
/// <summary>
/// Hangfire recurring worker for the 5-minute data-collection cycle.
/// The actual reading calls are currently disabled (commented out in <see cref="DoWorkAsync"/>).
/// </summary>
public class SubscriberFiveMinuteWorker : HangfireBackgroundWorkerBase, ITransientDependency,ICollectWorker
{
    private readonly ILogger<SubscriberFiveMinuteWorker> _logger;
    private readonly IScheduledMeterReadingService _scheduledMeterReadingService;

    /// <summary>
    /// Initializes a new instance of the <see cref="SubscriberFiveMinuteWorker"/> class.
    /// </summary>
    /// <param name="logger">The logger.</param>
    /// <param name="scheduledMeterReadingService">Scheduled meter-reading service.</param>
    public SubscriberFiveMinuteWorker(ILogger<SubscriberFiveMinuteWorker> logger, IScheduledMeterReadingService scheduledMeterReadingService)
    {
        _logger = logger;
        RecurringJobId = nameof(SubscriberFiveMinuteWorker);
        // NOTE(review): with Hangfire's 6-field cron (seconds first),
        // "* 5 * * * *" fires every second during minute 5 of each hour;
        // "every 5 minutes" would be "0 */5 * * * *" — confirm intent.
        CronExpression = "* 5 * * * *";
        this._scheduledMeterReadingService = scheduledMeterReadingService;
    }


    // Job body; the 5-minute ammeter/water-meter reads are currently disabled.
    public override async Task DoWorkAsync(CancellationToken cancellationToken = new CancellationToken())
    {
        //await _scheduledMeterReadingService.AmmeterScheduledMeterFiveMinuteReading();
        //await _scheduledMeterReadingService.WatermeterScheduledMeterFiveMinuteReading();
    }
}
|
||||
}
|
||||
@ -1,42 +0,0 @@
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Hangfire;
|
||||
using JiShe.CollectBus.ScheduledMeterReading;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Volo.Abp.BackgroundWorkers.Hangfire;
|
||||
using Volo.Abp.DependencyInjection;
|
||||
using Volo.Abp.Uow;
|
||||
|
||||
namespace JiShe.CollectBus.Workers
|
||||
{
|
||||
/// <summary>
/// Hangfire recurring worker for the 1-minute data-collection cycle.
/// The actual reading calls are currently disabled (commented out in <see cref="DoWorkAsync"/>).
/// </summary>
public class SubscriberOneMinuteWorker : HangfireBackgroundWorkerBase, ITransientDependency,ICollectWorker
{
    private readonly ILogger<SubscriberOneMinuteWorker> _logger;
    private readonly IScheduledMeterReadingService _scheduledMeterReadingService;

    /// <summary>
    /// Initializes a new instance of the <see cref="SubscriberOneMinuteWorker"/> class.
    /// </summary>
    /// <param name="logger">The logger.</param>
    /// <param name="scheduledMeterReadingService">Scheduled meter-reading service.</param>
    public SubscriberOneMinuteWorker(ILogger<SubscriberOneMinuteWorker> logger, IScheduledMeterReadingService scheduledMeterReadingService)
    {
        _logger = logger;
        RecurringJobId = nameof(SubscriberOneMinuteWorker);
        // NOTE(review): with Hangfire's 6-field cron (seconds first),
        // "* 1 * * * *" fires every second during minute 1 of each hour;
        // "every minute" would be "0 * * * * *" — confirm intent.
        CronExpression = "* 1 * * * *";
        this._scheduledMeterReadingService = scheduledMeterReadingService;
    }


    // Job body; the 1-minute ammeter/water-meter reads are currently disabled.
    public override async Task DoWorkAsync(CancellationToken cancellationToken = new CancellationToken())
    {
        //await _scheduledMeterReadingService.AmmeterScheduledMeterOneMinuteReading();

        //await _scheduledMeterReadingService.WatermeterScheduledMeterOneMinuteReading();

    }
}
|
||||
}
|
||||
@ -1,37 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using JiShe.CollectBus.IoTDB.Attribute;
|
||||
using JiShe.CollectBus.IoTDB.Provider;
|
||||
|
||||
namespace JiShe.CollectBus.Ammeters
|
||||
{
|
||||
/// <summary>
/// IoTDB entity describing an electricity meter's time-series record.
/// [ATTRIBUTEColumn] / [FIELDColumn] are the project's IoTDB column mappings.
/// </summary>
public class ElectricityMeter : IoTEntity
{
    // Meter model identifier (stored as an IoTDB attribute column).
    [ATTRIBUTEColumn]
    public string MeterModel { get; set; }

    /// <summary>
    /// Issued (downstream) message payload as a hex string.
    /// </summary>
    [FIELDColumn]
    public string IssuedMessageHexString { get; set; }

    ///// <summary>
    ///// Issued message id
    ///// </summary>
    //[FIELDColumn]
    //public string IssuedMessageId { get; set; }

    [FIELDColumn]
    public double Voltage { get; set; }

    [FIELDColumn]
    public double Current { get; set; }

    // Computed, read-only: instantaneous power = Voltage * Current.
    // NOTE(review): [FIELDColumn] on a getter-only property — confirm the IoTDB
    // mapper tolerates properties without setters when reading rows back.
    [FIELDColumn]
    public double Power => Voltage * Current;
}
|
||||
}
|
||||
@ -1,264 +0,0 @@
|
||||
using System;
|
||||
using FreeSql.DataAnnotations;
|
||||
|
||||
namespace JiShe.CollectBus.EnergySystems.Entities
|
||||
{
|
||||
/// <summary>
/// Ammeter (electricity meter) archive record, mapped to table TB_AmmeterInfo.
/// Properties marked [Column(IsIgnore = true)] are view-model extensions not
/// persisted to the table.
/// </summary>
public class TB_AmmeterInfo
{

    /// <summary>
    /// Primary key.
    /// </summary>
    public int ID { get; set; }

    /// <summary>
    /// Meter number / meter model code.
    /// </summary>
    public string Code { get; set; }

    /// <summary>
    /// Foreign key to the area record.
    /// </summary>
    public int AreaID { get; set; }
    /// <summary>
    /// Area name (not persisted; populated for display).
    /// </summary>
    [Column(IsIgnore = true)]
    public string AreaName { get; set; }

    /// <summary>
    /// Meter display name (alias).
    /// </summary>
    public string Name { get; set; }

    /// <summary>
    /// Meter category (1 = single-phase, 2 = three-phase three-wire, 3 = three-phase four-wire).
    /// </summary>
    public int TypeName { get; set; }

    /// <summary>
    /// Installation address of the meter.
    /// </summary>
    public string Location { get; set; }
    /// <summary>
    /// Installation time of the meter.
    /// </summary>
    public DateTime? InstallTime { get; set; }

    /// <summary>
    /// Meter password.
    /// </summary>
    public string Password { get; set; }

    /// <summary>
    /// Meter communication address.
    /// </summary>
    public string Address { get; set; }

    /// <summary>
    /// Collector address.
    /// </summary>
    public string CollectorAddress { get; set; }

    /// <summary>
    /// Voltage ratio — potential transformer (PT).
    /// </summary>
    public double TimesV { get; set; }

    /// <summary>
    /// Current ratio — current transformer (CT).
    /// </summary>
    public double TimesA { get; set; }

    /// <summary>
    /// Whether this is a master (summary) meter.
    /// </summary>
    public int IsSum { get; set; }

    /// <summary>
    /// Id of the master (summary) meter.
    /// </summary>
    public int ParentID { get; set; }

    /// <summary>
    /// Free-form explanation/remarks.
    /// </summary>
    public string Explain { get; set; }

    /// <summary>
    /// Record creation date.
    /// </summary>
    public DateTime AddDate { get; set; }

    /// <summary>
    /// Meter state (maps to MeterStateEnum):
    /// 0 = new (not yet issued), 1 = running (set when the archive was issued successfully),
    /// 2 = paused, 100 = decommissioned (may be re-enabled later).
    /// Special value: -1 = deleted.
    /// </summary>
    public int State { get; set; }


    /// <summary>
    /// Rate type — single vs. multi-rate (SingleRate: true/1 = single rate, typically
    /// single-phase; false/0 = multi-rate). Independent of TypeName.
    /// Display: SingleRate ? "single" : "multi".
    /// Kept consistent with PayPlanID; corresponds to TB_PayPlan.Type: 1 = multi-rate, 2 = single-rate.
    /// </summary>
    public bool SingleRate { get; set; }

    /// <summary>
    /// Archive issue flag: false/0 = not issued, true/1 = issued.
    /// </summary>
    public bool IsSend { get; set; }

    /// <summary>
    /// Id of the creating user.
    /// </summary>
    public int CreateUserID { get; set; }

    /// <summary>
    /// Baud rate; default 2400.
    /// </summary>
    public int Baudrate { get; set; }
    /// <summary>
    /// Protocol code; default 30 for electricity meters.
    /// </summary>
    public int? Protocol { get; set; }
    /// <summary>
    /// Metering code; must be unique within a single concentrator.
    /// </summary>
    public int MeteringCode { get; set; }
    /// <summary>
    /// Metering port; limited set of enumerable port values.
    /// </summary>
    public int MeteringPort { get; set; }

    /// <summary>
    /// Foreign key to TB_PayPlan.
    /// </summary>
    public int PayPlanID { get; set; }


    public int ProjectID { get; set; }

    public int FocusID { get; set; }

    /// <summary>
    /// Concentrator name (not persisted; populated for display).
    /// </summary>
    [Column(IsIgnore = true)]
    public string FocusName { get; set; }

    /// <summary>
    /// Trip/close state: 0 = closed (powered), 1 = open (tripped).
    /// </summary>
    public int TripState { get; set; }
    /// <summary>
    /// Time of the most recent valve/breaker control operation.
    /// </summary>
    public DateTime? TripTime { get; set; }

    /// <summary>
    /// Sort order.
    /// </summary>
    public int Sort { get; set; }

    /// <summary>
    /// Meter kind: 0 = electronic, 1 = mechanical (e.g. Delixi mechanical meter, see Code).
    /// Legacy rows default to electronic. Not persisted.
    /// </summary>
    [Column(IsIgnore = true)]
    public int MeterKind { get; set; }

    /// <summary>
    /// Gather-plan id (not persisted).
    /// </summary>
    [Column(IsIgnore = true)]
    public int GatherPlanID { get; set; }

    /// <summary>
    /// Collection items (data types to read).
    /// </summary>
    public string ReadClass { get; set; }

    /// <summary>
    /// Last modification date.
    /// </summary>
    public DateTime? EditDate { get; set; }

    /// <summary>
    /// Id of the modifying user.
    /// </summary>
    public int? EditUserID { get; set; }

    /// <summary>
    /// Deletion time.
    /// </summary>
    public DateTime? RemoveDate { get; set; }

    /// <summary>
    /// Id of the deleting user.
    /// </summary>
    public int? RemoveUserID { get; set; }

    /// <summary>
    /// Power-down status (1 = not powered on, 2 = powered but in power-down).
    /// </summary>
    public int? PowerDownStatus { get; set; }

    /// <summary>
    /// Current specification (rating).
    /// </summary>
    public string CurrentSpec { get; set; }

    /// <summary>
    /// Voltage specification (rating).
    /// </summary>
    public string VoltageSpec { get; set; }

    /// <summary>
    /// Communication state: 1 = online, 0 = offline.
    /// </summary>
    public int LineState { get; set; }

    /// <summary>
    /// Special meter flag: 1 = yes, 0 = no.
    /// </summary>
    public int Special { get; set; }

    /*
    /// <summary>
    /// Total number of collection items.
    /// </summary>
    public int GatherTotal { get; set; }

    /// <summary>
    /// Collection items.
    /// </summary>
    public string GatherDataTypes { get; set; }
    */

    /// <summary>
    /// Multi-rate type (4 = four-rate, 8 = eight-rate).
    /// </summary>
    public int? MultipleRateType { get; set; }
}
|
||||
|
||||
public class VMAmmeterInfo : TB_AmmeterInfo
|
||||
{
|
||||
public decimal? Rate { get; set; }
|
||||
public decimal? Rate1 { get; set; }
|
||||
public decimal? Rate2 { get; set; }
|
||||
public decimal? Rate3 { get; set; }
|
||||
|
||||
public decimal? Rate4 { get; set; }
|
||||
|
||||
public decimal? Rate5 { get; set; }
|
||||
|
||||
public decimal? Rate6 { get; set; }
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user