diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 00000000..08fa753b
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,99 @@
+# editorconfig.org
+
+[*.cs]
+# Naming rules for fields
+dotnet_naming_rule.private_fields_with_underscore.symbols = private_field
+dotnet_naming_rule.private_fields_with_underscore.style = prefix_underscore
+dotnet_naming_rule.private_fields_with_underscore.severity = warning
+
+dotnet_naming_symbols.private_field.applicable_kinds = field
+dotnet_naming_symbols.private_field.applicable_accessibilities = private
+dotnet_naming_symbols.private_field.required_modifiers =
+
+dotnet_naming_style.prefix_underscore.capitalization = camel_case
+dotnet_naming_style.prefix_underscore.required_prefix = _
+
+# Naming rules for properties
+dotnet_naming_rule.private_properties_with_underscore.symbols = private_property
+dotnet_naming_rule.private_properties_with_underscore.style = prefix_underscore
+dotnet_naming_rule.private_properties_with_underscore.severity = warning
+
+dotnet_naming_symbols.private_property.applicable_kinds = property
+dotnet_naming_symbols.private_property.applicable_accessibilities = private
+dotnet_naming_symbols.private_property.required_modifiers =
+
+dotnet_naming_style.prefix_underscore.capitalization = camel_case
+dotnet_naming_style.prefix_underscore.required_prefix = _
+
+# Do not qualify private field access with 'this.'
+dotnet_style_qualification_for_field = false:warning
+
+dotnet_diagnostic.CA2007.severity = none
+dotnet_diagnostic.CS1591.severity = none
+
+# name all constant fields using PascalCase
+dotnet_naming_rule.constant_fields_should_be_pascal_case.severity = warning
+dotnet_naming_rule.constant_fields_should_be_pascal_case.symbols = constant_fields
+dotnet_naming_rule.constant_fields_should_be_pascal_case.style = pascal_case_style
+
+dotnet_naming_symbols.constant_fields.applicable_kinds = field
+dotnet_naming_symbols.constant_fields.required_modifiers = const
+
+dotnet_naming_style.pascal_case_style.capitalization = pascal_case
+
+# static fields should have s_ prefix
+dotnet_naming_rule.static_fields_should_have_prefix.severity = warning
+dotnet_naming_rule.static_fields_should_have_prefix.symbols = static_fields
+dotnet_naming_rule.static_fields_should_have_prefix.style = static_prefix_style
+
+dotnet_naming_symbols.static_fields.applicable_kinds = field
+dotnet_naming_symbols.static_fields.required_modifiers = static
+
+dotnet_naming_symbols.static_fields.applicable_accessibilities = private, internal, private_protected
+dotnet_naming_style.static_prefix_style.required_prefix = s_
+
+dotnet_naming_style.static_prefix_style.capitalization = camel_case
+
+csharp_indent_labels = one_less_than_current
+csharp_using_directive_placement = outside_namespace:silent
+csharp_prefer_simple_using_statement = true:suggestion
+csharp_prefer_braces = true:silent
+csharp_style_namespace_declarations = file_scoped:error
+csharp_style_prefer_method_group_conversion = true:silent
+csharp_style_prefer_top_level_statements = true:silent
+csharp_style_prefer_primary_constructors = true:suggestion
+csharp_style_expression_bodied_methods = false:silent
+csharp_style_expression_bodied_constructors = false:silent
+csharp_style_expression_bodied_operators = false:silent
+csharp_style_expression_bodied_properties = true:silent
+csharp_style_expression_bodied_indexers = true:silent
+csharp_style_expression_bodied_accessors = true:silent
+csharp_style_expression_bodied_lambdas = true:silent
+csharp_style_expression_bodied_local_functions = false:silent
+
+[*.{cs,vb}]
+dotnet_style_operator_placement_when_wrapping = beginning_of_line
+tab_width = 4
+indent_size = 4
+end_of_line = crlf
+dotnet_style_coalesce_expression = true:suggestion
+dotnet_style_null_propagation = true:suggestion
+dotnet_style_prefer_is_null_check_over_reference_equality_method = true:suggestion
+dotnet_style_prefer_auto_properties = true:silent
+dotnet_style_object_initializer = true:suggestion
+dotnet_style_prefer_collection_expression = true:suggestion
+dotnet_style_collection_initializer = true:suggestion
+dotnet_style_prefer_simplified_boolean_expressions = true:suggestion
+dotnet_style_prefer_conditional_expression_over_assignment = true:silent
+dotnet_style_prefer_conditional_expression_over_return = true:silent
+dotnet_style_explicit_tuple_names = true:suggestion
+dotnet_style_prefer_inferred_tuple_names = true:suggestion
+dotnet_style_prefer_inferred_anonymous_type_member_names = true:suggestion
+dotnet_style_prefer_compound_assignment = true:suggestion
+dotnet_style_prefer_simplified_interpolation = true:suggestion
\ No newline at end of file
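
Note: for reference, a minimal C# sketch of the conventions the .editorconfig above enforces — underscore-prefixed camelCase private fields, PascalCase constants, an s_ prefix on private static fields, no 'this.' qualification, and file-scoped namespaces. The type and member names are illustrative only and do not come from the repository.

using System;

namespace KafkaFlow.Retry.EditorConfigDemo;

public class RetryCounter
{
    // const fields: PascalCase (constant_fields_should_be_pascal_case)
    private const int MaxAttempts = 3;

    // private static fields: s_ prefix + camelCase (static_fields_should_have_prefix)
    private static int s_instanceCount;

    // private instance fields: _ prefix + camelCase (private_fields_with_underscore)
    private int _attempts;

    public RetryCounter()
    {
        s_instanceCount++;
    }

    public bool TryAgain()
    {
        // field access without 'this.' qualification
        _attempts++;
        return _attempts <= MaxAttempts;
    }
}
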
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index e6784451..36283b53 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -39,22 +39,22 @@ jobs:
echo "Branch name: $BRANCH_NAME"
echo "Repository name: $REPOSITORY_NAME"
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{github.event.pull_request.head.ref}}
repository: ${{github.event.pull_request.head.repo.full_name}}
- name: Setup .NET
- uses: actions/setup-dotnet@v1
+ uses: actions/setup-dotnet@v3
with:
dotnet-version: 6.0.x
- name: Restore dependencies
- run: dotnet restore src/KafkaFlow.Retry.sln
+ run: dotnet restore KafkaFlow.Retry.sln
- name: Build
- run: dotnet build --no-restore -c Release src/KafkaFlow.Retry.sln
+ run: dotnet build --no-restore -c Release KafkaFlow.Retry.sln
- name: Start SqlServer
run: docker run -d -p 1433:1433 -e ACCEPT_EULA=${{ env.ACCEPT_EULA }} -e SA_PASSWORD=${{ env.SQLSERVER_SA_PASSWORD }} -e MSSQL_PID=Developer mcr.microsoft.com/mssql/server:2017-latest
@@ -78,4 +78,4 @@ jobs:
auto create topic: "true" # Optional, auto create kafka topic
- name: Test
- run: dotnet test --no-build -c Release --verbosity normal --collect:"XPlat Code Coverage" --results-directory:"../../coverage-outputs" -m:1 src/KafkaFlow.Retry.sln
+ run: dotnet test --no-build -c Release --verbosity normal --collect:"XPlat Code Coverage" --results-directory:"../../coverage-outputs" -m:1 KafkaFlow.Retry.sln
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 55b3e873..be22e822 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -13,12 +13,12 @@ jobs:
NUGET_SOURCE: https://api.nuget.org/v3/index.json
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
- name: Setup .NET
- uses: actions/setup-dotnet@v1
+ uses: actions/setup-dotnet@v3
with:
- dotnet-version: 3.1.x
+ dotnet-version: 6.0.x
- name: Update project version
uses: roryprimrose/set-vs-sdk-project-version@v1
diff --git a/src/Directory.Build.props b/Directory.Build.props
similarity index 83%
rename from src/Directory.Build.props
rename to Directory.Build.props
index 758dfb02..4912d9e9 100644
--- a/src/Directory.Build.props
+++ b/Directory.Build.props
@@ -1,5 +1,6 @@
+    <LangVersion>10.0</LangVersion>
https://raw.githubusercontent.com/Farfetch/.github/master/images/fuse-logo-128.png
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.sln b/KafkaFlow.Retry.sln
similarity index 73%
rename from src/KafkaFlow.Retry.sln
rename to KafkaFlow.Retry.sln
index 85547a26..99d8cab8 100644
--- a/src/KafkaFlow.Retry.sln
+++ b/KafkaFlow.Retry.sln
@@ -7,45 +7,49 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Admin", "Admin", "{5AE4E956
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Framework", "Framework", "{1DF1AB0D-37CB-4AFC-B701-8F0F2B260E54}"
EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{F92E5E32-4611-44A1-9EF6-0091F337877B}"
-EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Repositories", "Repositories", "{90B43DD5-C3CE-4BF8-B63E-3A34B962DC96}"
EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry", "KafkaFlow.Retry\KafkaFlow.Retry.csproj", "{9DC50EFE-C511-4DEC-A8EA-199795CEFE01}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry", "src\KafkaFlow.Retry\KafkaFlow.Retry.csproj", "{9DC50EFE-C511-4DEC-A8EA-199795CEFE01}"
EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Samples", "Samples", "{B1D2A20A-0742-4D8E-A773-66EB95560152}"
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "samples", "samples", "{B1D2A20A-0742-4D8E-A773-66EB95560152}"
EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.Sample", "..\samples\KafkaFlow.Retry.Sample\KafkaFlow.Retry.Sample.csproj", "{745CB854-1FFE-4C60-AD15-69A3A784CC9F}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.Sample", "samples\KafkaFlow.Retry.Sample\KafkaFlow.Retry.Sample.csproj", "{745CB854-1FFE-4C60-AD15-69A3A784CC9F}"
EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.SqlServer", "KafkaFlow.Retry.SqlServer\KafkaFlow.Retry.SqlServer.csproj", "{C61CCF7F-E7C8-4FEF-9E7E-22AB3ECD148D}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.SqlServer", "src\KafkaFlow.Retry.SqlServer\KafkaFlow.Retry.SqlServer.csproj", "{C61CCF7F-E7C8-4FEF-9E7E-22AB3ECD148D}"
EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.MongoDb", "KafkaFlow.Retry.MongoDb\KafkaFlow.Retry.MongoDb.csproj", "{F06DD63E-8965-4C43-BEEE-4ABBB5914FF8}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.MongoDb", "src\KafkaFlow.Retry.MongoDb\KafkaFlow.Retry.MongoDb.csproj", "{F06DD63E-8965-4C43-BEEE-4ABBB5914FF8}"
EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.API", "KafkaFlow.Retry.API\KafkaFlow.Retry.API.csproj", "{D3664EBB-D77B-42C2-AF90-7B2F3E354C3F}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.API", "src\KafkaFlow.Retry.API\KafkaFlow.Retry.API.csproj", "{D3664EBB-D77B-42C2-AF90-7B2F3E354C3F}"
EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.API.Sample", "..\samples\KafkaFlow.Retry.API.Sample\KafkaFlow.Retry.API.Sample.csproj", "{F27309CD-D796-425B-B5D6-780B7B57E9C7}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.API.Sample", "samples\KafkaFlow.Retry.API.Sample\KafkaFlow.Retry.API.Sample.csproj", "{F27309CD-D796-425B-B5D6-780B7B57E9C7}"
EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.UnitTests", "KafkaFlow.Retry.UnitTests\KafkaFlow.Retry.UnitTests.csproj", "{9E3B34BA-E309-4DA4-93D4-C0DD72D4711D}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.UnitTests", "tests\KafkaFlow.Retry.UnitTests\KafkaFlow.Retry.UnitTests.csproj", "{9E3B34BA-E309-4DA4-93D4-C0DD72D4711D}"
EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.IntegrationTests", "KafkaFlow.Retry.IntegrationTests\KafkaFlow.Retry.IntegrationTests.csproj", "{A25A5E30-8D5A-40DB-BA21-7A5B4FB44DE0}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.IntegrationTests", "tests\KafkaFlow.Retry.IntegrationTests\KafkaFlow.Retry.IntegrationTests.csproj", "{A25A5E30-8D5A-40DB-BA21-7A5B4FB44DE0}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{9557F908-472F-4872-BCF8-8EC028EFDA9B}"
ProjectSection(SolutionItems) = preProject
- CodeCoverage.runsettings = CodeCoverage.runsettings
Directory.Build.props = Directory.Build.props
EndProjectSection
EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.SchemaRegistry.Sample", "..\samples\KafkaFlow.Retry.SchemaRegistry.Sample\KafkaFlow.Retry.SchemaRegistry.Sample.csproj", "{510D65E8-B62C-402C-9CE3-47C7055A29FF}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.SchemaRegistry.Sample", "samples\KafkaFlow.Retry.SchemaRegistry.Sample\KafkaFlow.Retry.SchemaRegistry.Sample.csproj", "{510D65E8-B62C-402C-9CE3-47C7055A29FF}"
EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.Common.Sample", "..\samples\KafkaFlow.Retry.Common.Sample\KafkaFlow.Retry.Common.Sample.csproj", "{B14C5859-85C5-4E2F-80C7-D8B29E36481A}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KafkaFlow.Retry.Common.Sample", "samples\KafkaFlow.Retry.Common.Sample\KafkaFlow.Retry.Common.Sample.csproj", "{B14C5859-85C5-4E2F-80C7-D8B29E36481A}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Workflows", "Workflows", "{2A0BC610-E0FE-4BC3-B232-A8B918BE7381}"
ProjectSection(SolutionItems) = preProject
- ..\.github\workflows\build.yml = ..\.github\workflows\build.yml
- ..\.github\workflows\publish.yml = ..\.github\workflows\publish.yml
+ .github\workflows\build.yml = .github\workflows\build.yml
+ .github\workflows\publish.yml = .github\workflows\publish.yml
EndProjectSection
EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "KafkaFlow.Retry.Postgres", "KafkaFlow.Retry.Postgres\KafkaFlow.Retry.Postgres.csproj", "{B7E4C23D-48DC-4056-8658-19D54AF7008A}"
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "KafkaFlow.Retry.Postgres", "src\KafkaFlow.Retry.Postgres\KafkaFlow.Retry.Postgres.csproj", "{B7E4C23D-48DC-4056-8658-19D54AF7008A}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{0192C262-63AF-4918-B142-EC07DBB9E501}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tests", "tests", "{18F1AB11-9DC0-442F-9B6D-7098A93727B8}"
+ ProjectSection(SolutionItems) = preProject
+ CodeCoverage.runsettings = tests\CodeCoverage.runsettings
+ EndProjectSection
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -108,12 +112,15 @@ Global
{F06DD63E-8965-4C43-BEEE-4ABBB5914FF8} = {90B43DD5-C3CE-4BF8-B63E-3A34B962DC96}
{D3664EBB-D77B-42C2-AF90-7B2F3E354C3F} = {5AE4E956-15A8-4117-9A9D-97B53060FE4C}
{F27309CD-D796-425B-B5D6-780B7B57E9C7} = {B1D2A20A-0742-4D8E-A773-66EB95560152}
- {9E3B34BA-E309-4DA4-93D4-C0DD72D4711D} = {F92E5E32-4611-44A1-9EF6-0091F337877B}
- {A25A5E30-8D5A-40DB-BA21-7A5B4FB44DE0} = {F92E5E32-4611-44A1-9EF6-0091F337877B}
{510D65E8-B62C-402C-9CE3-47C7055A29FF} = {B1D2A20A-0742-4D8E-A773-66EB95560152}
{B14C5859-85C5-4E2F-80C7-D8B29E36481A} = {B1D2A20A-0742-4D8E-A773-66EB95560152}
{2A0BC610-E0FE-4BC3-B232-A8B918BE7381} = {9557F908-472F-4872-BCF8-8EC028EFDA9B}
{B7E4C23D-48DC-4056-8658-19D54AF7008A} = {90B43DD5-C3CE-4BF8-B63E-3A34B962DC96}
+ {A25A5E30-8D5A-40DB-BA21-7A5B4FB44DE0} = {18F1AB11-9DC0-442F-9B6D-7098A93727B8}
+ {9E3B34BA-E309-4DA4-93D4-C0DD72D4711D} = {18F1AB11-9DC0-442F-9B6D-7098A93727B8}
+ {90B43DD5-C3CE-4BF8-B63E-3A34B962DC96} = {0192C262-63AF-4918-B142-EC07DBB9E501}
+ {1DF1AB0D-37CB-4AFC-B701-8F0F2B260E54} = {0192C262-63AF-4918-B142-EC07DBB9E501}
+ {5AE4E956-15A8-4117-9A9D-97B53060FE4C} = {0192C262-63AF-4918-B142-EC07DBB9E501}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {A953E534-FBCA-4F30-9CA5-96F67C1A49D8}
diff --git a/samples/KafkaFlow.Retry.API.Sample/Program.cs b/samples/KafkaFlow.Retry.API.Sample/Program.cs
index 4a62bb78..b2cf4f02 100644
--- a/samples/KafkaFlow.Retry.API.Sample/Program.cs
+++ b/samples/KafkaFlow.Retry.API.Sample/Program.cs
@@ -1,26 +1,18 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Threading.Tasks;
using Microsoft.AspNetCore.Hosting;
-using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Hosting;
-using Microsoft.Extensions.Logging;
-namespace KafkaFlow.Retry.API.Sample
+namespace KafkaFlow.Retry.API.Sample;
+
+public class Program
{
- public class Program
+ public static void Main(string[] args)
{
- public static void Main(string[] args)
- {
- CreateHostBuilder(args).Build().Run();
- }
+ CreateHostBuilder(args).Build().Run();
+ }
- public static IHostBuilder CreateHostBuilder(string[] args) =>
- Host.CreateDefaultBuilder(args)
- .ConfigureWebHostDefaults(webBuilder =>
- {
- webBuilder.UseStartup<Startup>();
- });
+ public static IHostBuilder CreateHostBuilder(string[] args)
+ {
+ return Host.CreateDefaultBuilder(args)
+ .ConfigureWebHostDefaults(webBuilder => { webBuilder.UseStartup<Startup>(); });
}
-}
+}
\ No newline at end of file
diff --git a/samples/KafkaFlow.Retry.API.Sample/Startup.cs b/samples/KafkaFlow.Retry.API.Sample/Startup.cs
index bd79f6ab..1460852b 100644
--- a/samples/KafkaFlow.Retry.API.Sample/Startup.cs
+++ b/samples/KafkaFlow.Retry.API.Sample/Startup.cs
@@ -5,56 +5,52 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
-namespace KafkaFlow.Retry.API.Sample
+namespace KafkaFlow.Retry.API.Sample;
+
+public class Startup
{
- public class Startup
+ public Startup(IConfiguration configuration)
{
- public Startup(IConfiguration configuration)
- {
- Configuration = configuration;
- }
+ Configuration = configuration;
+ }
- public IConfiguration Configuration { get; }
+ public IConfiguration Configuration { get; }
- // This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
- public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
+ // This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
+ public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
+ {
+ if (env.IsDevelopment())
{
- if (env.IsDevelopment())
- {
- app.UseDeveloperExceptionPage();
- }
+ app.UseDeveloperExceptionPage();
+ }
- app.UseHttpsRedirection();
+ app.UseHttpsRedirection();
- app.UseRouting();
+ app.UseRouting();
- app.UseAuthorization();
+ app.UseAuthorization();
- app.UseKafkaFlowRetryEndpoints();
+ app.UseKafkaFlowRetryEndpoints();
- app.UseEndpoints(endpoints =>
- {
- endpoints.MapControllers();
- });
- }
+ app.UseEndpoints(endpoints => { endpoints.MapControllers(); });
+ }
- // This method gets called by the runtime. Use this method to add services to the container.
- public void ConfigureServices(IServiceCollection services)
- {
- services.AddSingleton(sp =>
- new MongoDbDataProviderFactory()
- .TryCreate(
- new MongoDbSettings
- {
- ConnectionString = "mongodb://localhost:27017/SVC_KAFKA_FLOW_RETRY_DURABLE",
- DatabaseName = "SVC_KAFKA_FLOW_RETRY_DURABLE",
- RetryQueueCollectionName = "RetryQueues",
- RetryQueueItemCollectionName = "RetryQueueItems"
- }
- ).Result
- );
-
- services.AddControllers();
- }
+ // This method gets called by the runtime. Use this method to add services to the container.
+ public void ConfigureServices(IServiceCollection services)
+ {
+ services.AddSingleton(sp =>
+ new MongoDbDataProviderFactory()
+ .TryCreate(
+ new MongoDbSettings
+ {
+ ConnectionString = "mongodb://localhost:27017/SVC_KAFKA_FLOW_RETRY_DURABLE",
+ DatabaseName = "SVC_KAFKA_FLOW_RETRY_DURABLE",
+ RetryQueueCollectionName = "RetryQueues",
+ RetryQueueItemCollectionName = "RetryQueueItems"
+ }
+ ).Result
+ );
+
+ services.AddControllers();
}
}
\ No newline at end of file
diff --git a/samples/KafkaFlow.Retry.Common.Sample/Helpers/KafkaHelper.cs b/samples/KafkaFlow.Retry.Common.Sample/Helpers/KafkaHelper.cs
index 53e96734..1c8bc83c 100644
--- a/samples/KafkaFlow.Retry.Common.Sample/Helpers/KafkaHelper.cs
+++ b/samples/KafkaFlow.Retry.Common.Sample/Helpers/KafkaHelper.cs
@@ -1,61 +1,64 @@
-namespace KafkaFlow.Retry.Common.Sample.Helpers
-{
- using System;
- using System.Collections.Generic;
- using System.Linq;
- using System.Threading.Tasks;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using Confluent.Kafka;
+using Confluent.Kafka.Admin;
+
+namespace KafkaFlow.Retry.Common.Sample.Helpers;
- public static class KafkaHelper
+public static class KafkaHelper
+{
+ public static async Task CreateKafkaTopics(string kafkaBrokers, string[] topics)
{
- public static async Task CreateKafkaTopics(string kafkaBrokers, string[] topics)
+ using (var adminClient =
+ new AdminClientBuilder(new AdminClientConfig { BootstrapServers = kafkaBrokers }).Build())
{
- using (var adminClient = new Confluent.Kafka.AdminClientBuilder(new Confluent.Kafka.AdminClientConfig { BootstrapServers = kafkaBrokers }).Build())
+ foreach (var topic in topics)
{
- foreach (var topic in topics)
+ var topicMetadata = adminClient.GetMetadata(topic, TimeSpan.FromSeconds(20));
+ if (topicMetadata.Topics.First().Partitions.Count > 0)
{
- var topicMetadata = adminClient.GetMetadata(topic, TimeSpan.FromSeconds(20));
- if (topicMetadata.Topics.First().Partitions.Count > 0)
+ try
{
- try
+ var deleteTopicRecords = new List<TopicPartitionOffset>();
+ for (var i = 0; i < topicMetadata.Topics.First().Partitions.Count; i++)
{
- var deleteTopicRecords = new List<Confluent.Kafka.TopicPartitionOffset>();
- for (int i = 0; i < topicMetadata.Topics.First().Partitions.Count; i++)
- {
- deleteTopicRecords.Add(new Confluent.Kafka.TopicPartitionOffset(topic, i, Confluent.Kafka.Offset.End));
- }
- await adminClient.DeleteRecordsAsync(deleteTopicRecords).ConfigureAwait(false);
- }
- catch (Confluent.Kafka.Admin.DeleteRecordsException e)
- {
- Console.WriteLine($"An error occured deleting topic records: {e.Results[0].Error.Reason}");
+ deleteTopicRecords.Add(new Confluent.Kafka.TopicPartitionOffset(topic, i, Offset.End));
}
+
+ await adminClient.DeleteRecordsAsync(deleteTopicRecords).ConfigureAwait(false);
}
- else
+ catch (DeleteRecordsException e)
{
- try
- {
- await adminClient
- .CreatePartitionsAsync(
- new List<Confluent.Kafka.Admin.PartitionsSpecification>
+ Console.WriteLine($"An error occured deleting topic records: {e.Results[0].Error.Reason}");
+ }
+ }
+ else
+ {
+ try
+ {
+ await adminClient
+ .CreatePartitionsAsync(
+ new List<PartitionsSpecification>
+ {
+ new()
{
- new Confluent.Kafka.Admin.PartitionsSpecification
- {
- Topic = topic,
- IncreaseTo = 6
- }
- })
- .ConfigureAwait(false);
+ Topic = topic,
+ IncreaseTo = 6
+ }
+ })
+ .ConfigureAwait(false);
+ }
+ catch (CreateTopicsException e)
+ {
+ if (e.Results[0].Error.Code != ErrorCode.UnknownTopicOrPart)
+ {
+ Console.WriteLine($"An error occured creating a topic: {e.Results[0].Error.Reason}");
}
- catch (Confluent.Kafka.Admin.CreateTopicsException e)
+ else
{
- if (e.Results[0].Error.Code != Confluent.Kafka.ErrorCode.UnknownTopicOrPart)
- {
- Console.WriteLine($"An error occured creating a topic: {e.Results[0].Error.Reason}");
- }
- else
- {
- Console.WriteLine("Topic does not exists");
- }
+ Console.WriteLine("Topic does not exists");
}
}
}
diff --git a/samples/KafkaFlow.Retry.Common.Sample/Helpers/SqlServerHelper.cs b/samples/KafkaFlow.Retry.Common.Sample/Helpers/SqlServerHelper.cs
index df7af91d..0656ac6b 100644
--- a/samples/KafkaFlow.Retry.Common.Sample/Helpers/SqlServerHelper.cs
+++ b/samples/KafkaFlow.Retry.Common.Sample/Helpers/SqlServerHelper.cs
@@ -1,56 +1,56 @@
-namespace KafkaFlow.Retry.Common.Sample.Helpers
-{
- using System.Collections.Generic;
- using Microsoft.Data.SqlClient;
- using System.IO;
- using System.Linq;
- using System.Reflection;
- using System.Threading.Tasks;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Reflection;
+using System.Threading.Tasks;
+using Microsoft.Data.SqlClient;
+
+namespace KafkaFlow.Retry.Common.Sample.Helpers;
- public static class SqlServerHelper
+public static class SqlServerHelper
+{
+ public static async Task RecreateSqlSchema(string databaseName, string connectionString)
{
- public static async Task RecreateSqlSchema(string databaseName, string connectionString)
+ using (var openCon = new SqlConnection(connectionString))
{
- using (SqlConnection openCon = new SqlConnection(connectionString))
+ openCon.Open();
+
+ foreach (var script in GetScriptsForSchemaCreation())
{
- openCon.Open();
+ var batches = script.Split(new[] { "GO\r\n", "GO\t", "GO\n" }, StringSplitOptions.RemoveEmptyEntries);
- foreach (var script in GetScriptsForSchemaCreation())
+ foreach (var batch in batches)
{
- string[] batches = script.Split(new[] { "GO\r\n", "GO\t", "GO\n" }, System.StringSplitOptions.RemoveEmptyEntries);
+ var replacedBatch = batch.Replace("@dbname", databaseName);
- foreach (var batch in batches)
+ using (var queryCommand = new SqlCommand(replacedBatch))
{
- string replacedBatch = batch.Replace("@dbname", databaseName);
-
- using (SqlCommand queryCommand = new SqlCommand(replacedBatch))
- {
- queryCommand.Connection = openCon;
+ queryCommand.Connection = openCon;
- await queryCommand.ExecuteNonQueryAsync().ConfigureAwait(false);
- }
+ await queryCommand.ExecuteNonQueryAsync().ConfigureAwait(false);
}
}
}
}
+ }
- private static IEnumerable<string> GetScriptsForSchemaCreation()
- {
- Assembly sqlServerAssembly = Assembly.LoadFrom("KafkaFlow.Retry.SqlServer.dll");
- return sqlServerAssembly
- .GetManifestResourceNames()
- .OrderBy(x => x)
- .Select(script =>
+ private static IEnumerable<string> GetScriptsForSchemaCreation()
+ {
+ var sqlServerAssembly = Assembly.LoadFrom("KafkaFlow.Retry.SqlServer.dll");
+ return sqlServerAssembly
+ .GetManifestResourceNames()
+ .OrderBy(x => x)
+ .Select(script =>
+ {
+ using (var s = sqlServerAssembly.GetManifestResourceStream(script))
{
- using (Stream s = sqlServerAssembly.GetManifestResourceStream(script))
+ using (var sr = new StreamReader(s))
{
- using (StreamReader sr = new StreamReader(s))
- {
- return sr.ReadToEnd();
- }
+ return sr.ReadToEnd();
}
- })
- .ToList();
- }
+ }
+ })
+ .ToList();
}
}
\ No newline at end of file
diff --git a/samples/KafkaFlow.Retry.Sample/Exceptions/RetryDurableTestException.cs b/samples/KafkaFlow.Retry.Sample/Exceptions/RetryDurableTestException.cs
index 20d06c0c..50c095b8 100644
--- a/samples/KafkaFlow.Retry.Sample/Exceptions/RetryDurableTestException.cs
+++ b/samples/KafkaFlow.Retry.Sample/Exceptions/RetryDurableTestException.cs
@@ -1,10 +1,10 @@
-namespace KafkaFlow.Retry.Sample.Exceptions
-{
- using System;
+using System;
+
+namespace KafkaFlow.Retry.Sample.Exceptions;
- public class RetryDurableTestException : Exception
+public class RetryDurableTestException : Exception
+{
+ public RetryDurableTestException(string message) : base(message)
{
- public RetryDurableTestException(string message) : base(message)
- { }
}
}
\ No newline at end of file
diff --git a/samples/KafkaFlow.Retry.Sample/Exceptions/RetryForeverTestException.cs b/samples/KafkaFlow.Retry.Sample/Exceptions/RetryForeverTestException.cs
index 63cce1a7..6cf3dc70 100644
--- a/samples/KafkaFlow.Retry.Sample/Exceptions/RetryForeverTestException.cs
+++ b/samples/KafkaFlow.Retry.Sample/Exceptions/RetryForeverTestException.cs
@@ -1,10 +1,9 @@
-namespace KafkaFlow.Retry.Sample.Exceptions
-{
- using System;
+using System;
+
+namespace KafkaFlow.Retry.Sample.Exceptions;
- public class RetrySimpleTestException : Exception
- {
- public RetrySimpleTestException(string message) : base(message)
- { }
- }
+public class RetrySimpleTestException : Exception
+{
+ public RetrySimpleTestException(string message) : base(message)
+ { }
}
\ No newline at end of file
diff --git a/samples/KafkaFlow.Retry.Sample/Exceptions/RetrySimpleTestException.cs b/samples/KafkaFlow.Retry.Sample/Exceptions/RetrySimpleTestException.cs
index ab6e7ea3..4182aaf3 100644
--- a/samples/KafkaFlow.Retry.Sample/Exceptions/RetrySimpleTestException.cs
+++ b/samples/KafkaFlow.Retry.Sample/Exceptions/RetrySimpleTestException.cs
@@ -1,10 +1,9 @@
-namespace KafkaFlow.Retry.Sample.Exceptions
-{
- using System;
+using System;
+
+namespace KafkaFlow.Retry.Sample.Exceptions;
- public class RetryForeverTestException : Exception
- {
- public RetryForeverTestException(string message) : base(message)
- { }
- }
+public class RetryForeverTestException : Exception
+{
+ public RetryForeverTestException(string message) : base(message)
+ { }
}
\ No newline at end of file
diff --git a/samples/KafkaFlow.Retry.Sample/Handlers/RetryDurableTestHandler.cs b/samples/KafkaFlow.Retry.Sample/Handlers/RetryDurableTestHandler.cs
index c88242f3..c178578a 100644
--- a/samples/KafkaFlow.Retry.Sample/Handlers/RetryDurableTestHandler.cs
+++ b/samples/KafkaFlow.Retry.Sample/Handlers/RetryDurableTestHandler.cs
@@ -1,22 +1,20 @@
-namespace KafkaFlow.Retry.Sample.Handlers
+using System;
+using System.Threading.Tasks;
+using KafkaFlow.Retry.Sample.Exceptions;
+using KafkaFlow.Retry.Sample.Messages;
+
+namespace KafkaFlow.Retry.Sample.Handlers;
+
+internal class RetryDurableTestHandler : IMessageHandler<RetryDurableTestMessage>
{
- using System;
- using System.Threading.Tasks;
- using KafkaFlow;
- using KafkaFlow.Retry.Sample.Exceptions;
- using KafkaFlow.Retry.Sample.Messages;
-
- internal class RetryDurableTestHandler : IMessageHandler<RetryDurableTestMessage>
+ public Task Handle(IMessageContext context, RetryDurableTestMessage message)
{
- public Task Handle(IMessageContext context, RetryDurableTestMessage message)
- {
- Console.WriteLine(
- "Partition: {0} | Offset: {1} | Message: {2}",
- context.ConsumerContext.Partition,
- context.ConsumerContext.Offset,
- message.Text);
+ Console.WriteLine(
+ "Partition: {0} | Offset: {1} | Message: {2}",
+ context.ConsumerContext.Partition,
+ context.ConsumerContext.Offset,
+ message.Text);
- throw new RetryDurableTestException($"Error: {message.Text}");
- }
+ throw new RetryDurableTestException($"Error: {message.Text}");
}
}
\ No newline at end of file
diff --git a/samples/KafkaFlow.Retry.Sample/Handlers/RetryForeverTestHandler.cs b/samples/KafkaFlow.Retry.Sample/Handlers/RetryForeverTestHandler.cs
index 93d6e6ec..a3d06eb6 100644
--- a/samples/KafkaFlow.Retry.Sample/Handlers/RetryForeverTestHandler.cs
+++ b/samples/KafkaFlow.Retry.Sample/Handlers/RetryForeverTestHandler.cs
@@ -1,23 +1,20 @@
-namespace KafkaFlow.Retry.Sample.Handlers
-{
- using System;
- using System.Threading.Tasks;
- using KafkaFlow;
- using KafkaFlow.Retry.Sample.Exceptions;
- using KafkaFlow.Retry.Sample.Messages;
-
+using System;
+using System.Threading.Tasks;
+using KafkaFlow.Retry.Sample.Exceptions;
+using KafkaFlow.Retry.Sample.Messages;
+
+namespace KafkaFlow.Retry.Sample.Handlers;
- internal class RetryForeverTestHandler : IMessageHandler<RetryForeverTestMessage>
+internal class RetryForeverTestHandler : IMessageHandler<RetryForeverTestMessage>
+{
+ public Task Handle(IMessageContext context, RetryForeverTestMessage message)
{
- public Task Handle(IMessageContext context, RetryForeverTestMessage message)
- {
- Console.WriteLine(
- "Partition: {0} | Offset: {1} | Message: {2}",
- context.ConsumerContext.Partition,
- context.ConsumerContext.Offset,
- message.Text);
+ Console.WriteLine(
+ "Partition: {0} | Offset: {1} | Message: {2}",
+ context.ConsumerContext.Partition,
+ context.ConsumerContext.Offset,
+ message.Text);
- throw new RetryForeverTestException($"Error: {message.Text}");
- }
+ throw new RetryForeverTestException($"Error: {message.Text}");
}
}
\ No newline at end of file
diff --git a/samples/KafkaFlow.Retry.Sample/Handlers/RetrySimpleTestHandler.cs b/samples/KafkaFlow.Retry.Sample/Handlers/RetrySimpleTestHandler.cs
index f17c5bff..ed5929df 100644
--- a/samples/KafkaFlow.Retry.Sample/Handlers/RetrySimpleTestHandler.cs
+++ b/samples/KafkaFlow.Retry.Sample/Handlers/RetrySimpleTestHandler.cs
@@ -1,23 +1,20 @@
-namespace KafkaFlow.Retry.Sample.Handlers
-{
- using System;
- using System.Threading.Tasks;
- using KafkaFlow;
- using KafkaFlow.Retry.Sample.Exceptions;
- using KafkaFlow.Retry.Sample.Messages;
-
+using System;
+using System.Threading.Tasks;
+using KafkaFlow.Retry.Sample.Exceptions;
+using KafkaFlow.Retry.Sample.Messages;
+
+namespace KafkaFlow.Retry.Sample.Handlers;
- internal class RetrySimpleTestHandler : IMessageHandler<RetrySimpleTestMessage>
+internal class RetrySimpleTestHandler : IMessageHandler<RetrySimpleTestMessage>
+{
+ public Task Handle(IMessageContext context, RetrySimpleTestMessage message)
{
- public Task Handle(IMessageContext context, RetrySimpleTestMessage message)
- {
- Console.WriteLine(
- "Partition: {0} | Offset: {1} | Message: {2}",
- context.ConsumerContext.Partition,
- context.ConsumerContext.Offset,
- message.Text);
+ Console.WriteLine(
+ "Partition: {0} | Offset: {1} | Message: {2}",
+ context.ConsumerContext.Partition,
+ context.ConsumerContext.Offset,
+ message.Text);
- throw new RetrySimpleTestException($"Error: {message.Text}");
- }
+ throw new RetrySimpleTestException($"Error: {message.Text}");
}
}
\ No newline at end of file
diff --git a/samples/KafkaFlow.Retry.Sample/Helpers/KafkaClusterConfigurationBuilderHelper.cs b/samples/KafkaFlow.Retry.Sample/Helpers/KafkaClusterConfigurationBuilderHelper.cs
index 2a45fdab..cbdde234 100644
--- a/samples/KafkaFlow.Retry.Sample/Helpers/KafkaClusterConfigurationBuilderHelper.cs
+++ b/samples/KafkaFlow.Retry.Sample/Helpers/KafkaClusterConfigurationBuilderHelper.cs
@@ -1,280 +1,282 @@
-namespace KafkaFlow.Retry.Sample.Helpers
+using System;
+using Confluent.Kafka;
+using KafkaFlow.Configuration;
+using KafkaFlow.Retry.MongoDb;
+using KafkaFlow.Retry.Sample.Exceptions;
+using KafkaFlow.Retry.Sample.Handlers;
+using KafkaFlow.Retry.Sample.Messages;
+using KafkaFlow.Retry.SqlServer;
+using KafkaFlow.Serializer;
+
+namespace KafkaFlow.Retry.Sample.Helpers;
+
+internal static class KafkaClusterConfigurationBuilderHelper
{
- using System;
- using KafkaFlow.Configuration;
- using KafkaFlow.Retry.MongoDb;
- using KafkaFlow.Retry.Sample.Exceptions;
- using KafkaFlow.Retry.Sample.Handlers;
- using KafkaFlow.Retry.Sample.Messages;
- using KafkaFlow.Retry.SqlServer;
- using KafkaFlow.Serializer;
- internal static class KafkaClusterConfigurationBuilderHelper
+ internal static IClusterConfigurationBuilder SetupRetryDurableMongoDb(
+ this IClusterConfigurationBuilder cluster,
+ string mongoDbConnectionString,
+ string mongoDbDatabaseName,
+ string mongoDbRetryQueueCollectionName,
+ string mongoDbRetryQueueItemCollectionName)
{
- internal static IClusterConfigurationBuilder SetupRetryDurableMongoDb(
- this IClusterConfigurationBuilder cluster,
- string mongoDbConnectionString,
- string mongoDbDatabaseName,
- string mongoDbRetryQueueCollectionName,
- string mongoDbRetryQueueItemCollectionName)
- {
- cluster
- .AddProducer(
- "kafka-flow-retry-durable-mongodb-producer",
- producer => producer
- .DefaultTopic("sample-kafka-flow-retry-durable-mongodb-topic")
- .WithCompression(Confluent.Kafka.CompressionType.Gzip)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSerializer()
- )
- .WithAcks(Acks.All)
- )
- .AddConsumer(
- consumer => consumer
- .Topic("sample-kafka-flow-retry-durable-mongodb-topic")
- .WithGroupId("sample-consumer-kafka-flow-retry-durable-mongodb")
- .WithName("kafka-flow-retry-durable-mongodb-consumer")
- .WithBufferSize(10)
- .WithWorkersCount(20)
- .WithAutoOffsetReset(AutoOffsetReset.Latest)
- .AddMiddlewares(
- middlewares => middlewares
- .AddDeserializer()
- .RetryDurable(
- configure => configure
- .Handle()
- .WithMessageType(typeof(RetryDurableTestMessage))
- .WithMongoDbDataProvider(
- mongoDbConnectionString,
- mongoDbDatabaseName,
- mongoDbRetryQueueCollectionName,
- mongoDbRetryQueueItemCollectionName)
- .WithRetryPlanBeforeRetryDurable(
- configure => configure
- .TryTimes(3)
- .WithTimeBetweenTriesPlan(
- TimeSpan.FromMilliseconds(250),
- TimeSpan.FromMilliseconds(500),
- TimeSpan.FromMilliseconds(1000))
- .ShouldPauseConsumer(false)
- )
- .WithEmbeddedRetryCluster(
- cluster,
- configure => configure
- .WithRetryTopicName("sample-kafka-flow-retry-durable-mongodb-topic-retry")
- .WithRetryConsumerBufferSize(4)
- .WithRetryConsumerWorkersCount(2)
- .WithRetryConsumerStrategy(RetryConsumerStrategy.GuaranteeOrderedConsumption)
- .WithRetryTypedHandlers(
- handlers => handlers
- .WithHandlerLifetime(InstanceLifetime.Transient)
- .AddHandler()
- )
- .Enabled(true)
- )
- .WithPollingJobsConfiguration(
- configure => configure
- .WithSchedulerId("retry-durable-mongodb-polling-id")
- .WithRetryDurablePollingConfiguration(
- configure => configure
- .WithCronExpression("0 0/1 * 1/1 * ? *")
- .WithExpirationIntervalFactor(1)
- .WithFetchSize(10)
- .Enabled(true)
- )
- .WithCleanupPollingConfiguration(
- configure => configure
- .WithCronExpression("0 0 * 1/1 * ? *")
- .WithRowsPerRequest(1048)
- .WithTimeToLiveInDays(60)
- .Enabled(true)
- )
- ))
- .AddTypedHandlers(
- handlers => handlers
- .WithHandlerLifetime(InstanceLifetime.Transient)
- .AddHandler())
- )
- );
+ cluster
+ .AddProducer(
+ "kafka-flow-retry-durable-mongodb-producer",
+ producer => producer
+ .DefaultTopic("sample-kafka-flow-retry-durable-mongodb-topic")
+ .WithCompression(CompressionType.Gzip)
+ .AddMiddlewares(
+ middlewares => middlewares
+ .AddSerializer()
+ )
+ .WithAcks(Acks.All)
+ )
+ .AddConsumer(
+ consumer => consumer
+ .Topic("sample-kafka-flow-retry-durable-mongodb-topic")
+ .WithGroupId("sample-consumer-kafka-flow-retry-durable-mongodb")
+ .WithName("kafka-flow-retry-durable-mongodb-consumer")
+ .WithBufferSize(10)
+ .WithWorkersCount(20)
+ .WithAutoOffsetReset(AutoOffsetReset.Latest)
+ .AddMiddlewares(
+ middlewares => middlewares
+ .AddDeserializer()
+ .RetryDurable(
+ configure => configure
+ .Handle()
+ .WithMessageType(typeof(RetryDurableTestMessage))
+ .WithMongoDbDataProvider(
+ mongoDbConnectionString,
+ mongoDbDatabaseName,
+ mongoDbRetryQueueCollectionName,
+ mongoDbRetryQueueItemCollectionName)
+ .WithRetryPlanBeforeRetryDurable(
+ configure => configure
+ .TryTimes(3)
+ .WithTimeBetweenTriesPlan(
+ TimeSpan.FromMilliseconds(250),
+ TimeSpan.FromMilliseconds(500),
+ TimeSpan.FromMilliseconds(1000))
+ .ShouldPauseConsumer(false)
+ )
+ .WithEmbeddedRetryCluster(
+ cluster,
+ configure => configure
+ .WithRetryTopicName("sample-kafka-flow-retry-durable-mongodb-topic-retry")
+ .WithRetryConsumerBufferSize(4)
+ .WithRetryConsumerWorkersCount(2)
+ .WithRetryConsumerStrategy(
+ RetryConsumerStrategy.GuaranteeOrderedConsumption)
+ .WithRetryTypedHandlers(
+ handlers => handlers
+ .WithHandlerLifetime(InstanceLifetime.Transient)
+ .AddHandler()
+ )
+ .Enabled(true)
+ )
+ .WithPollingJobsConfiguration(
+ configure => configure
+ .WithSchedulerId("retry-durable-mongodb-polling-id")
+ .WithRetryDurablePollingConfiguration(
+ configure => configure
+ .WithCronExpression("0 0/1 * 1/1 * ? *")
+ .WithExpirationIntervalFactor(1)
+ .WithFetchSize(10)
+ .Enabled(true)
+ )
+ .WithCleanupPollingConfiguration(
+ configure => configure
+ .WithCronExpression("0 0 * 1/1 * ? *")
+ .WithRowsPerRequest(1048)
+ .WithTimeToLiveInDays(60)
+ .Enabled(true)
+ )
+ ))
+ .AddTypedHandlers(
+ handlers => handlers
+ .WithHandlerLifetime(InstanceLifetime.Transient)
+ .AddHandler())
+ )
+ );
- return cluster;
- }
+ return cluster;
+ }
- internal static IClusterConfigurationBuilder SetupRetryDurableSqlServer(
- this IClusterConfigurationBuilder cluster,
- string sqlServerConnectionString,
- string sqlServerDatabaseName)
- {
- cluster
- .AddProducer(
- "kafka-flow-retry-durable-sqlserver-producer",
- producer => producer
- .DefaultTopic("sample-kafka-flow-retry-durable-sqlserver-topic")
- .WithCompression(Confluent.Kafka.CompressionType.Gzip)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSerializer()
- )
- .WithAcks(Acks.All)
- )
- .AddConsumer(
- consumer => consumer
- .Topic("sample-kafka-flow-retry-durable-sqlserver-topic")
- .WithGroupId("sample-consumer-kafka-flow-retry-durable-sqlserver")
- .WithName("kafka-flow-retry-durable-sqlserver-consumer")
- .WithBufferSize(10)
- .WithWorkersCount(20)
- .WithAutoOffsetReset(AutoOffsetReset.Latest)
- .AddMiddlewares(
- middlewares => middlewares
- .AddDeserializer()
- .RetryDurable(
- configure => configure
- .Handle()
- .WithMessageType(typeof(RetryDurableTestMessage))
- .WithSqlServerDataProvider(
- sqlServerConnectionString,
- sqlServerDatabaseName)
- .WithRetryPlanBeforeRetryDurable(
- configure => configure
- .TryTimes(3)
- .WithTimeBetweenTriesPlan(
- TimeSpan.FromMilliseconds(250),
- TimeSpan.FromMilliseconds(500),
- TimeSpan.FromMilliseconds(1000))
- .ShouldPauseConsumer(false)
- )
- .WithEmbeddedRetryCluster(
- cluster,
- configure => configure
- .WithRetryTopicName("sample-kafka-flow-retry-durable-sqlserver-topic-retry")
- .WithRetryConsumerBufferSize(4)
- .WithRetryConsumerWorkersCount(2)
- .WithRetryConsumerStrategy(RetryConsumerStrategy.LatestConsumption)
- .WithRetryTypedHandlers(
- handlers => handlers
- .WithHandlerLifetime(InstanceLifetime.Transient)
- .AddHandler()
- )
- .Enabled(true)
- )
- .WithPollingJobsConfiguration(
- configure => configure
- .WithSchedulerId("retry-durable-sqlserver-polling-id")
- .WithRetryDurablePollingConfiguration(
- configure => configure
- .WithCronExpression("0 0/1 * 1/1 * ? *")
- .WithExpirationIntervalFactor(1)
- .WithFetchSize(10)
- .Enabled(true)
- )
- .WithCleanupPollingConfiguration(
- configure => configure
- .Enabled(false)
- )
- ))
- .AddTypedHandlers(
- handlers => handlers
- .WithHandlerLifetime(InstanceLifetime.Transient)
- .AddHandler())
- )
- );
+ internal static IClusterConfigurationBuilder SetupRetryDurableSqlServer(
+ this IClusterConfigurationBuilder cluster,
+ string sqlServerConnectionString,
+ string sqlServerDatabaseName)
+ {
+ cluster
+ .AddProducer(
+ "kafka-flow-retry-durable-sqlserver-producer",
+ producer => producer
+ .DefaultTopic("sample-kafka-flow-retry-durable-sqlserver-topic")
+ .WithCompression(CompressionType.Gzip)
+ .AddMiddlewares(
+ middlewares => middlewares
+ .AddSerializer()
+ )
+ .WithAcks(Acks.All)
+ )
+ .AddConsumer(
+ consumer => consumer
+ .Topic("sample-kafka-flow-retry-durable-sqlserver-topic")
+ .WithGroupId("sample-consumer-kafka-flow-retry-durable-sqlserver")
+ .WithName("kafka-flow-retry-durable-sqlserver-consumer")
+ .WithBufferSize(10)
+ .WithWorkersCount(20)
+ .WithAutoOffsetReset(AutoOffsetReset.Latest)
+ .AddMiddlewares(
+ middlewares => middlewares
+ .AddDeserializer()
+ .RetryDurable(
+ configure => configure
+ .Handle()
+ .WithMessageType(typeof(RetryDurableTestMessage))
+ .WithSqlServerDataProvider(
+ sqlServerConnectionString,
+ sqlServerDatabaseName)
+ .WithRetryPlanBeforeRetryDurable(
+ configure => configure
+ .TryTimes(3)
+ .WithTimeBetweenTriesPlan(
+ TimeSpan.FromMilliseconds(250),
+ TimeSpan.FromMilliseconds(500),
+ TimeSpan.FromMilliseconds(1000))
+ .ShouldPauseConsumer(false)
+ )
+ .WithEmbeddedRetryCluster(
+ cluster,
+ configure => configure
+ .WithRetryTopicName("sample-kafka-flow-retry-durable-sqlserver-topic-retry")
+ .WithRetryConsumerBufferSize(4)
+ .WithRetryConsumerWorkersCount(2)
+ .WithRetryConsumerStrategy(RetryConsumerStrategy.LatestConsumption)
+ .WithRetryTypedHandlers(
+ handlers => handlers
+ .WithHandlerLifetime(InstanceLifetime.Transient)
+ .AddHandler()
+ )
+ .Enabled(true)
+ )
+ .WithPollingJobsConfiguration(
+ configure => configure
+ .WithSchedulerId("retry-durable-sqlserver-polling-id")
+ .WithRetryDurablePollingConfiguration(
+ configure => configure
+ .WithCronExpression("0 0/1 * 1/1 * ? *")
+ .WithExpirationIntervalFactor(1)
+ .WithFetchSize(10)
+ .Enabled(true)
+ )
+ .WithCleanupPollingConfiguration(
+ configure => configure
+ .Enabled(false)
+ )
+ ))
+ .AddTypedHandlers(
+ handlers => handlers
+ .WithHandlerLifetime(InstanceLifetime.Transient)
+ .AddHandler())
+ )
+ );
- return cluster;
- }
+ return cluster;
+ }
- internal static IClusterConfigurationBuilder SetupRetryForever(this IClusterConfigurationBuilder cluster)
- {
- cluster
- .AddProducer(
- "kafka-flow-retry-forever-producer",
- producer => producer
- .DefaultTopic("sample-kafka-flow-retry-forever-topic")
- .WithCompression(Confluent.Kafka.CompressionType.Gzip)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSerializer()
- )
- .WithAcks(Acks.All)
- )
- .AddConsumer(
- consumer => consumer
- .Topic("sample-kafka-flow-retry-forever-topic")
- .WithGroupId("sample-consumer-kafka-flow-retry-forever")
- .WithName("kafka-flow-retry-forever-consumer")
- .WithBufferSize(10)
- .WithWorkersCount(20)
- .WithAutoOffsetReset(AutoOffsetReset.Latest)
- .AddMiddlewares(
- middlewares => middlewares
- .AddDeserializer()
- .RetryForever(
- (configure) => configure
- .Handle()
- .WithTimeBetweenTriesPlan(
- TimeSpan.FromMilliseconds(500),
- TimeSpan.FromMilliseconds(1000))
- )
- .AddTypedHandlers(
- handlers => handlers
- .WithHandlerLifetime(InstanceLifetime.Transient)
- .AddHandler())
- )
- );
+ internal static IClusterConfigurationBuilder SetupRetryForever(this IClusterConfigurationBuilder cluster)
+ {
+ cluster
+ .AddProducer(
+ "kafka-flow-retry-forever-producer",
+ producer => producer
+ .DefaultTopic("sample-kafka-flow-retry-forever-topic")
+ .WithCompression(CompressionType.Gzip)
+ .AddMiddlewares(
+ middlewares => middlewares
+ .AddSerializer()
+ )
+ .WithAcks(Acks.All)
+ )
+ .AddConsumer(
+ consumer => consumer
+ .Topic("sample-kafka-flow-retry-forever-topic")
+ .WithGroupId("sample-consumer-kafka-flow-retry-forever")
+ .WithName("kafka-flow-retry-forever-consumer")
+ .WithBufferSize(10)
+ .WithWorkersCount(20)
+ .WithAutoOffsetReset(AutoOffsetReset.Latest)
+ .AddMiddlewares(
+ middlewares => middlewares
+ .AddDeserializer()
+ .RetryForever(
+ configure => configure
+ .Handle()
+ .WithTimeBetweenTriesPlan(
+ TimeSpan.FromMilliseconds(500),
+ TimeSpan.FromMilliseconds(1000))
+ )
+ .AddTypedHandlers(
+ handlers => handlers
+ .WithHandlerLifetime(InstanceLifetime.Transient)
+ .AddHandler())
+ )
+ );
- return cluster;
- }
+ return cluster;
+ }
- internal static IClusterConfigurationBuilder SetupRetrySimple(this IClusterConfigurationBuilder cluster)
- {
- cluster
- .AddProducer(
- "kafka-flow-retry-simple-producer",
- producer => producer
- .DefaultTopic("sample-kafka-flow-retry-simple-topic")
- .WithCompression(Confluent.Kafka.CompressionType.Gzip)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSerializer()
- )
- .WithAcks(Acks.All)
- )
- .AddConsumer(
- consumer => consumer
- .Topic("sample-kafka-flow-retry-simple-topic")
- .WithGroupId("sample-consumer-kafka-flow-retry-simple")
- .WithName("kafka-flow-retry-simple-consumer")
- .WithBufferSize(10)
- .WithWorkersCount(20)
- .WithAutoOffsetReset(AutoOffsetReset.Latest)
- .AddMiddlewares(
- middlewares => middlewares
- .AddDeserializer()
- .RetrySimple(
- (configure) => configure
- .Handle()
- .TryTimes(2)
- .WithTimeBetweenTriesPlan((retryCount) =>
+ internal static IClusterConfigurationBuilder SetupRetrySimple(this IClusterConfigurationBuilder cluster)
+ {
+ cluster
+ .AddProducer(
+ "kafka-flow-retry-simple-producer",
+ producer => producer
+ .DefaultTopic("sample-kafka-flow-retry-simple-topic")
+ .WithCompression(CompressionType.Gzip)
+ .AddMiddlewares(
+ middlewares => middlewares
+ .AddSerializer()
+ )
+ .WithAcks(Acks.All)
+ )
+ .AddConsumer(
+ consumer => consumer
+ .Topic("sample-kafka-flow-retry-simple-topic")
+ .WithGroupId("sample-consumer-kafka-flow-retry-simple")
+ .WithName("kafka-flow-retry-simple-consumer")
+ .WithBufferSize(10)
+ .WithWorkersCount(20)
+ .WithAutoOffsetReset(AutoOffsetReset.Latest)
+ .AddMiddlewares(
+ middlewares => middlewares
+ .AddDeserializer()
+ .RetrySimple(
+ configure => configure
+ .Handle()
+ .TryTimes(2)
+ .WithTimeBetweenTriesPlan(retryCount =>
+ {
+ var plan = new[]
{
- var plan = new[]
- {
TimeSpan.FromMilliseconds(1500),
TimeSpan.FromMilliseconds(2000),
TimeSpan.FromMilliseconds(2000)
- };
+ };
- return plan[retryCount];
- })
- .ShouldPauseConsumer(false)
- )
- .AddTypedHandlers(
- handlers => handlers
- .WithHandlerLifetime(InstanceLifetime.Transient)
- .AddHandler())
- )
- );
+ return plan[retryCount];
+ })
+ .ShouldPauseConsumer(false)
+ )
+ .AddTypedHandlers(
+ handlers => handlers
+ .WithHandlerLifetime(InstanceLifetime.Transient)
+ .AddHandler())
+ )
+ );
- return cluster;
- }
+ return cluster;
}
}
\ No newline at end of file
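
Note: the fluent calls in the hunk above appear with their generic type arguments missing (for example Handle(), AddHandler(), AddSerializer()). Below is a minimal sketch of how the retry-forever registration presumably reads with those arguments restored, based on the exception, message, and handler types defined elsewhere in this PR; the names in angle brackets are inferred, and the serializer/deserializer middleware is omitted because its type argument cannot be recovered here.

using System;
using KafkaFlow;
using KafkaFlow.Configuration;
using KafkaFlow.Retry.Sample.Exceptions;
using KafkaFlow.Retry.Sample.Handlers;

namespace KafkaFlow.Retry.Sample.Helpers;

internal static class RetryForeverRegistrationSketch
{
    // Mirrors SetupRetryForever above, with the inferred generic arguments filled in.
    internal static IClusterConfigurationBuilder SetupRetryForeverSketch(this IClusterConfigurationBuilder cluster)
    {
        return cluster.AddConsumer(
            consumer => consumer
                .Topic("sample-kafka-flow-retry-forever-topic")
                .WithGroupId("sample-consumer-kafka-flow-retry-forever")
                .WithBufferSize(10)
                .WithWorkersCount(20)
                .AddMiddlewares(
                    middlewares => middlewares
                        // retry whenever the handler throws RetryForeverTestException
                        .RetryForever(
                            configure => configure
                                .Handle<RetryForeverTestException>()
                                .WithTimeBetweenTriesPlan(
                                    TimeSpan.FromMilliseconds(500),
                                    TimeSpan.FromMilliseconds(1000)))
                        // typed handler that consumes RetryForeverTestMessage
                        .AddTypedHandlers(
                            handlers => handlers
                                .WithHandlerLifetime(InstanceLifetime.Transient)
                                .AddHandler<RetryForeverTestHandler>())));
    }
}
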
diff --git a/samples/KafkaFlow.Retry.Sample/Messages/RetryDurableTestMessage.cs b/samples/KafkaFlow.Retry.Sample/Messages/RetryDurableTestMessage.cs
index 6c20b3a6..23069989 100644
--- a/samples/KafkaFlow.Retry.Sample/Messages/RetryDurableTestMessage.cs
+++ b/samples/KafkaFlow.Retry.Sample/Messages/RetryDurableTestMessage.cs
@@ -1,11 +1,9 @@
-namespace KafkaFlow.Retry.Sample.Messages
-{
- using System.Runtime.Serialization;
+using System.Runtime.Serialization;
+
+namespace KafkaFlow.Retry.Sample.Messages;
- [DataContract]
- public class RetryDurableTestMessage
- {
- [DataMember(Order = 1)]
- public string Text { get; set; }
- }
+[DataContract]
+public class RetryDurableTestMessage
+{
+ [DataMember(Order = 1)] public string Text { get; set; }
}
\ No newline at end of file
diff --git a/samples/KafkaFlow.Retry.Sample/Messages/RetryForeverTestMessage.cs b/samples/KafkaFlow.Retry.Sample/Messages/RetryForeverTestMessage.cs
index 70c3e06e..705c454d 100644
--- a/samples/KafkaFlow.Retry.Sample/Messages/RetryForeverTestMessage.cs
+++ b/samples/KafkaFlow.Retry.Sample/Messages/RetryForeverTestMessage.cs
@@ -1,11 +1,9 @@
-namespace KafkaFlow.Retry.Sample.Messages
-{
- using System.Runtime.Serialization;
+using System.Runtime.Serialization;
+
+namespace KafkaFlow.Retry.Sample.Messages;
- [DataContract]
- public class RetryForeverTestMessage
- {
- [DataMember(Order = 1)]
- public string Text { get; set; }
- }
+[DataContract]
+public class RetryForeverTestMessage
+{
+ [DataMember(Order = 1)] public string Text { get; set; }
}
\ No newline at end of file
diff --git a/samples/KafkaFlow.Retry.Sample/Messages/RetrySimpleTestMessage.cs b/samples/KafkaFlow.Retry.Sample/Messages/RetrySimpleTestMessage.cs
index c5a6491f..d2cdeb16 100644
--- a/samples/KafkaFlow.Retry.Sample/Messages/RetrySimpleTestMessage.cs
+++ b/samples/KafkaFlow.Retry.Sample/Messages/RetrySimpleTestMessage.cs
@@ -1,11 +1,9 @@
-namespace KafkaFlow.Retry.Sample.Messages
-{
- using System.Runtime.Serialization;
+using System.Runtime.Serialization;
+
+namespace KafkaFlow.Retry.Sample.Messages;
- [DataContract]
- public class RetrySimpleTestMessage
- {
- [DataMember(Order = 1)]
- public string Text { get; set; }
- }
+[DataContract]
+public class RetrySimpleTestMessage
+{
+ [DataMember(Order = 1)] public string Text { get; set; }
}
\ No newline at end of file
diff --git a/samples/KafkaFlow.Retry.Sample/Program.cs b/samples/KafkaFlow.Retry.Sample/Program.cs
index e11d62a0..d97cfec8 100644
--- a/samples/KafkaFlow.Retry.Sample/Program.cs
+++ b/samples/KafkaFlow.Retry.Sample/Program.cs
@@ -1,192 +1,192 @@
-namespace KafkaFlow.Retry.Sample
-{
- using System;
- using System.Globalization;
- using System.Linq;
- using System.Threading;
- using System.Threading.Tasks;
- using global::Microsoft.Extensions.DependencyInjection;
- using KafkaFlow;
- using KafkaFlow.Producers;
- using KafkaFlow.Retry.Common.Sample.Helpers;
- using KafkaFlow.Retry.Sample.Helpers;
- using KafkaFlow.Retry.Sample.Messages;
-
- internal static class Program
- {
- private static async Task Main()
- {
- var services = new ServiceCollection();
- var brokers = "localhost:9092";
- var mongoDbConnectionString = "mongodb://localhost:27017";
- var mongoDbDatabaseName = "kafka_flow_retry_durable_sample";
- var mongoDbRetryQueueCollectionName = "RetryQueues";
- var mongoDbRetryQueueItemCollectionName = "RetryQueueItems";
- var sqlServerConnectionString = "Server=localhost;Trusted_Connection=True; Pooling=true; Min Pool Size=1; Max Pool Size=100; MultipleActiveResultSets=true; Application Name=KafkaFlow Retry Sample";
- var sqlServerDatabaseName = "kafka_flow_retry_durable_sample";
- var topics = new[]
+using System;
+using System.Globalization;
+using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+using KafkaFlow.Producers;
+using KafkaFlow.Retry.Common.Sample.Helpers;
+using KafkaFlow.Retry.Sample.Helpers;
+using KafkaFlow.Retry.Sample.Messages;
+using Microsoft.Extensions.DependencyInjection;
+
+namespace KafkaFlow.Retry.Sample;
+
+internal static class Program
+{
+ private static async Task Main()
+ {
+ var services = new ServiceCollection();
+ var brokers = "localhost:9092";
+ var mongoDbConnectionString = "mongodb://localhost:27017";
+ var mongoDbDatabaseName = "kafka_flow_retry_durable_sample";
+ var mongoDbRetryQueueCollectionName = "RetryQueues";
+ var mongoDbRetryQueueItemCollectionName = "RetryQueueItems";
+ var sqlServerConnectionString =
+ "Server=localhost;Trusted_Connection=True; Pooling=true; Min Pool Size=1; Max Pool Size=100; MultipleActiveResultSets=true; Application Name=KafkaFlow Retry Sample";
+ var sqlServerDatabaseName = "kafka_flow_retry_durable_sample";
+ var topics = new[]
+ {
+ "sample-kafka-flow-retry-simple-topic",
+ "sample-kafka-flow-retry-forever-topic",
+ "sample-kafka-flow-retry-durable-sqlserver-topic",
+ "sample-kafka-flow-retry-durable-sqlserver-topic-retry",
+ "sample-kafka-flow-retry-durable-mongodb-topic",
+ "sample-kafka-flow-retry-durable-mongodb-topic-retry"
+ };
+
+ SqlServerHelper.RecreateSqlSchema(sqlServerDatabaseName, sqlServerConnectionString).GetAwaiter().GetResult();
+ KafkaHelper.CreateKafkaTopics(brokers, topics).GetAwaiter().GetResult();
+
+ services.AddKafka(
+ kafka => kafka
+ .UseConsoleLog()
+ .AddCluster(
+ cluster => cluster
+ .WithBrokers(new[] { brokers })
+ .EnableAdminMessages("kafka-flow.admin", Guid.NewGuid().ToString())
+ .SetupRetrySimple()
+ .SetupRetryForever()
+ .SetupRetryDurableMongoDb(
+ mongoDbConnectionString,
+ mongoDbDatabaseName,
+ mongoDbRetryQueueCollectionName,
+ mongoDbRetryQueueItemCollectionName)
+ .SetupRetryDurableSqlServer(
+ sqlServerConnectionString,
+ sqlServerDatabaseName)
+ )
+ );
+
+ var provider = services.BuildServiceProvider();
+
+ var bus = provider.CreateKafkaBus();
+
+ await bus.StartAsync();
+
+ // Wait partition assignment
+ Thread.Sleep(10000);
+
+ var producers = provider.GetRequiredService<IProducerAccessor>();
+
+ while (true)
+ {
+ Console.Write("retry-simple, retry-forever, retry-durable-mongodb, retry-durable-sqlserver or exit: ");
+ var input = Console.ReadLine().ToLower(CultureInfo.InvariantCulture);
+
+ switch (input)
{
- "sample-kafka-flow-retry-simple-topic",
- "sample-kafka-flow-retry-forever-topic",
- "sample-kafka-flow-retry-durable-sqlserver-topic",
- "sample-kafka-flow-retry-durable-sqlserver-topic-retry",
- "sample-kafka-flow-retry-durable-mongodb-topic",
- "sample-kafka-flow-retry-durable-mongodb-topic-retry",
- };
-
- SqlServerHelper.RecreateSqlSchema(sqlServerDatabaseName, sqlServerConnectionString).GetAwaiter().GetResult();
- KafkaHelper.CreateKafkaTopics(brokers, topics).GetAwaiter().GetResult();
-
- services.AddKafka(
- kafka => kafka
- .UseConsoleLog()
- .AddCluster(
- cluster => cluster
- .WithBrokers(new[] { brokers })
- .EnableAdminMessages("kafka-flow.admin", Guid.NewGuid().ToString())
- .SetupRetrySimple()
- .SetupRetryForever()
- .SetupRetryDurableMongoDb(
- mongoDbConnectionString,
- mongoDbDatabaseName,
- mongoDbRetryQueueCollectionName,
- mongoDbRetryQueueItemCollectionName)
- .SetupRetryDurableSqlServer(
- sqlServerConnectionString,
- sqlServerDatabaseName)
- )
- );
-
- var provider = services.BuildServiceProvider();
-
- var bus = provider.CreateKafkaBus();
-
- await bus.StartAsync();
-
- // Wait partition assignment
- Thread.Sleep(10000);
-
- var producers = provider.GetRequiredService<IProducerAccessor>();
-
- while (true)
- {
- Console.Write("retry-simple, retry-forever, retry-durable-mongodb, retry-durable-sqlserver or exit: ");
- var input = Console.ReadLine().ToLower(CultureInfo.InvariantCulture);
-
- switch (input)
- {
- case "retry-durable-mongodb":
- {
- Console.Write("Number of the distinct messages to produce: ");
- int.TryParse(Console.ReadLine(), out var numOfMessages);
- Console.Write("Number of messages with same partition key: ");
- int.TryParse(Console.ReadLine(), out var numOfMessagesWithSamePartitionkey);
- var messages = Enumerable
+ case "retry-durable-mongodb":
+ {
+ Console.Write("Number of the distinct messages to produce: ");
+ int.TryParse(Console.ReadLine(), out var numOfMessages);
+ Console.Write("Number of messages with same partition key: ");
+ int.TryParse(Console.ReadLine(), out var numOfMessagesWithSamePartitionkey);
+ var messages = Enumerable
+ .Range(0, numOfMessages)
+ .SelectMany(
+ x =>
+ {
+ var partitionKey = Guid.NewGuid().ToString();
+ return Enumerable
+ .Range(0, numOfMessagesWithSamePartitionkey)
+ .Select(y => new BatchProduceItem(
+ "sample-kafka-flow-retry-durable-mongodb-topic",
+ partitionKey,
+ new RetryDurableTestMessage { Text = $"Message({y}): {Guid.NewGuid()}" },
+ null))
+ .ToList();
+ }
+ )
+ .ToList();
+
+ await producers["kafka-flow-retry-durable-mongodb-producer"]
+ .BatchProduceAsync(messages)
+ .ConfigureAwait(false);
+ Console.WriteLine("Published");
+ }
+ break;
+
+ case "retry-durable-sqlserver":
+ {
+ Console.Write("Number of the distinct messages to produce: ");
+ int.TryParse(Console.ReadLine(), out var numOfMessages);
+ Console.Write("Number of messages with same partition key: ");
+ int.TryParse(Console.ReadLine(), out var numOfMessagesWithSamePartitionkey);
+
+ var messages = Enumerable
+ .Range(0, numOfMessages)
+ .SelectMany(
+ x =>
+ {
+ var partitionKey = Guid.NewGuid().ToString();
+ return Enumerable
+ .Range(0, numOfMessagesWithSamePartitionkey)
+ .Select(y => new BatchProduceItem(
+ "sample-kafka-flow-retry-durable-sqlserver-topic",
+ partitionKey,
+ new RetryDurableTestMessage { Text = $"Message({y}): {Guid.NewGuid()}" },
+ null))
+ .ToList();
+ }
+ )
+ .ToList();
+
+ await producers["kafka-flow-retry-durable-sqlserver-producer"]
+ .BatchProduceAsync(messages)
+ .ConfigureAwait(false);
+ Console.WriteLine("Published");
+ }
+ break;
+
+ case "retry-forever":
+ {
+ Console.Write("Number of messages to produce: ");
+ int.TryParse(Console.ReadLine(), out var numOfMessages);
+ await producers["kafka-flow-retry-forever-producer"]
+ .BatchProduceAsync(
+ Enumerable
.Range(0, numOfMessages)
- .SelectMany(
- x =>
- {
- var partitionKey = Guid.NewGuid().ToString();
- return Enumerable
- .Range(0, numOfMessagesWithSamePartitionkey)
- .Select(y => new BatchProduceItem(
- "sample-kafka-flow-retry-durable-mongodb-topic",
- partitionKey,
- new RetryDurableTestMessage { Text = $"Message({y}): {Guid.NewGuid()}" },
- null))
- .ToList();
- }
- )
- .ToList();
-
- await producers["kafka-flow-retry-durable-mongodb-producer"]
- .BatchProduceAsync(messages)
- .ConfigureAwait(false);
- Console.WriteLine("Published");
- }
- break;
-
- case "retry-durable-sqlserver":
- {
- Console.Write("Number of the distinct messages to produce: ");
- int.TryParse(Console.ReadLine(), out var numOfMessages);
- Console.Write("Number of messages with same partition key: ");
- int.TryParse(Console.ReadLine(), out var numOfMessagesWithSamePartitionkey);
-
- var messages = Enumerable
+ .Select(
+ x => new BatchProduceItem(
+ "sample-kafka-flow-retry-forever-topic",
+ "partition-key",
+ new RetryForeverTestMessage { Text = $"Message({x}): {Guid.NewGuid()}" },
+ null))
+ .ToList())
+ .ConfigureAwait(false);
+ Console.WriteLine("Published");
+ }
+ break;
+
+ case "retry-simple":
+ {
+ Console.Write("Number of messages to produce:");
+ int.TryParse(Console.ReadLine(), out var numOfMessages);
+ await producers["kafka-flow-retry-simple-producer"]
+ .BatchProduceAsync(
+ Enumerable
.Range(0, numOfMessages)
- .SelectMany(
- x =>
- {
- var partitionKey = Guid.NewGuid().ToString();
- return Enumerable
- .Range(0, numOfMessagesWithSamePartitionkey)
- .Select(y => new BatchProduceItem(
- "sample-kafka-flow-retry-durable-sqlserver-topic",
- partitionKey,
- new RetryDurableTestMessage { Text = $"Message({y}): {Guid.NewGuid()}" },
- null))
- .ToList();
- }
- )
- .ToList();
-
- await producers["kafka-flow-retry-durable-sqlserver-producer"]
- .BatchProduceAsync(messages)
- .ConfigureAwait(false);
- Console.WriteLine("Published");
- }
- break;
-
- case "retry-forever":
- {
- Console.Write("Number of messages to produce: ");
- int.TryParse(Console.ReadLine(), out var num_of_messages);
- await producers["kafka-flow-retry-forever-producer"]
- .BatchProduceAsync(
- Enumerable
- .Range(0, num_of_messages)
- .Select(
- x => new BatchProduceItem(
- "sample-kafka-flow-retry-forever-topic",
- "partition-key",
- new RetryForeverTestMessage { Text = $"Message({x}): {Guid.NewGuid()}" },
- null))
- .ToList())
- .ConfigureAwait(false);
- Console.WriteLine("Published");
- }
- break;
-
- case "retry-simple":
- {
- Console.Write("Number of messages to produce:");
- int.TryParse(Console.ReadLine(), out var num_of_messages);
- await producers["kafka-flow-retry-simple-producer"]
- .BatchProduceAsync(
- Enumerable
- .Range(0, num_of_messages)
- .Select(
- x => new BatchProduceItem(
- "sample-kafka-flow-retry-simple-topic",
- "partition-key",
- new RetrySimpleTestMessage { Text = $"Message({x}): {Guid.NewGuid()}" },
- null))
- .ToList())
- .ConfigureAwait(false);
- Console.WriteLine("Published");
- }
- break;
-
- case "exit":
- await bus.StopAsync();
- break;
-
- default:
- Console.Write("USE: retry-simple, retry-forever, retry-durable-mongodb, retry-durable-sqlserver or exit: ");
- break;
- }
- }
- }
- }
+ .Select(
+ x => new BatchProduceItem(
+ "sample-kafka-flow-retry-simple-topic",
+ "partition-key",
+ new RetrySimpleTestMessage { Text = $"Message({x}): {Guid.NewGuid()}" },
+ null))
+ .ToList())
+ .ConfigureAwait(false);
+ Console.WriteLine("Published");
+ }
+ break;
+
+ case "exit":
+ await bus.StopAsync();
+ break;
+
+ default:
+ Console.Write(
+ "USE: retry-simple, retry-forever, retry-durable-mongodb, retry-durable-sqlserver or exit: ");
+ break;
+ }
+ }
+ }
}
\ No newline at end of file
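The sample above resolves KafkaFlow's producer accessor and publishes batches to named producers. A minimal sketch of that pattern, assuming IProducerAccessor as the resolved service and reusing the retry-simple producer and topic names declared in this sample:

    // Sketch only: resolve a named producer and batch-produce one message.
    var producers = provider.GetRequiredService<IProducerAccessor>();

    var batch = new List<BatchProduceItem>
    {
        new BatchProduceItem(
            "sample-kafka-flow-retry-simple-topic",
            "partition-key",
            new RetrySimpleTestMessage { Text = "hello" },
            null)
    };

    await producers["kafka-flow-retry-simple-producer"]
        .BatchProduceAsync(batch)
        .ConfigureAwait(false);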
diff --git a/samples/KafkaFlow.Retry.SchemaRegistry.Sample/ContractResolvers/WritablePropertiesOnlyResolver.cs b/samples/KafkaFlow.Retry.SchemaRegistry.Sample/ContractResolvers/WritablePropertiesOnlyResolver.cs
index f878f1c4..dca61a68 100644
--- a/samples/KafkaFlow.Retry.SchemaRegistry.Sample/ContractResolvers/WritablePropertiesOnlyResolver.cs
+++ b/samples/KafkaFlow.Retry.SchemaRegistry.Sample/ContractResolvers/WritablePropertiesOnlyResolver.cs
@@ -1,17 +1,16 @@
-namespace KafkaFlow.Retry.SchemaRegistry.Sample.ContractResolvers
-{
- using System;
- using System.Collections.Generic;
- using System.Linq;
- using Newtonsoft.Json;
- using Newtonsoft.Json.Serialization;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Newtonsoft.Json;
+using Newtonsoft.Json.Serialization;
+
+namespace KafkaFlow.Retry.SchemaRegistry.Sample.ContractResolvers;
- internal class WritablePropertiesOnlyResolver : DefaultContractResolver
+internal class WritablePropertiesOnlyResolver : DefaultContractResolver
+{
+ protected override IList<JsonProperty> CreateProperties(Type type, MemberSerialization memberSerialization)
{
- protected override IList<JsonProperty> CreateProperties(Type type, MemberSerialization memberSerialization)
- {
- IList<JsonProperty> props = base.CreateProperties(type, memberSerialization);
- return props.Where(p => p.Writable).ToList();
- }
+ var props = base.CreateProperties(type, memberSerialization);
+ return props.Where(p => p.Writable).ToList();
}
}
\ No newline at end of file
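The resolver keeps only writable properties when Json.NET builds a contract, and it is how the retry-durable configuration below serializes messages (it is passed via WithMessageSerializeSettings). A small sketch of the effect, using a hypothetical ExampleMessage type:

    // Hypothetical type for illustration only.
    internal class ExampleMessage
    {
        public string Text { get; set; }          // writable -> serialized
        public string Upper => Text?.ToUpper();   // read-only -> dropped by the resolver
    }

    var settings = new JsonSerializerSettings
    {
        ContractResolver = new WritablePropertiesOnlyResolver()
    };

    // Produces {"Text":"example"}; the read-only Upper property is omitted.
    var json = JsonConvert.SerializeObject(new ExampleMessage { Text = "example" }, settings);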
diff --git a/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Exceptions/RetryDurableTestException.cs b/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Exceptions/RetryDurableTestException.cs
index 98f7ace0..1a2b2cd0 100644
--- a/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Exceptions/RetryDurableTestException.cs
+++ b/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Exceptions/RetryDurableTestException.cs
@@ -1,10 +1,10 @@
-namespace KafkaFlow.Retry.SchemaRegistry.Sample.Exceptions
-{
- using System;
+using System;
+
+namespace KafkaFlow.Retry.SchemaRegistry.Sample.Exceptions;
- public class RetryDurableTestException : Exception
+public class RetryDurableTestException : Exception
+{
+ public RetryDurableTestException(string message) : base(message)
{
- public RetryDurableTestException(string message) : base(message)
- { }
}
}
\ No newline at end of file
diff --git a/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Handlers/AvroMessageTestHandler.cs b/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Handlers/AvroMessageTestHandler.cs
index 076b1443..56b7edb8 100644
--- a/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Handlers/AvroMessageTestHandler.cs
+++ b/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Handlers/AvroMessageTestHandler.cs
@@ -1,21 +1,20 @@
-namespace KafkaFlow.Retry.SchemaRegistry.Sample.Handlers
-{
- using System;
- using System.Threading.Tasks;
- using global::SchemaRegistry;
- using KafkaFlow.Retry.SchemaRegistry.Sample.Exceptions;
+using System;
+using System.Threading.Tasks;
+using KafkaFlow.Retry.SchemaRegistry.Sample.Exceptions;
+using SchemaRegistry;
+
+namespace KafkaFlow.Retry.SchemaRegistry.Sample.Handlers;
- public class AvroMessageTestHandler : IMessageHandler<AvroLogMessage>
+public class AvroMessageTestHandler : IMessageHandler<AvroLogMessage>
+{
+ public Task Handle(IMessageContext context, AvroLogMessage message)
{
- public Task Handle(IMessageContext context, AvroLogMessage message)
- {
- Console.WriteLine(
- "Partition: {0} | Offset: {1} | Message: {2} | Avro",
- context.ConsumerContext.Partition,
- context.ConsumerContext.Offset,
- message.Severity.ToString());
+ Console.WriteLine(
+ "Partition: {0} | Offset: {1} | Message: {2} | Avro",
+ context.ConsumerContext.Partition,
+ context.ConsumerContext.Offset,
+ message.Severity.ToString());
- throw new RetryDurableTestException($"Error: {message.Severity}");
- }
+ throw new RetryDurableTestException($"Error: {message.Severity}");
}
}
\ No newline at end of file
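This handler throws on every message on purpose, so the sample always exercises the retry pipeline. For comparison, a handler that acknowledges the message simply completes the returned task (the class name here is illustrative):

    public class AvroMessageAckHandler : IMessageHandler<AvroLogMessage>
    {
        public Task Handle(IMessageContext context, AvroLogMessage message)
        {
            // No exception: the message is considered handled and is not retried.
            Console.WriteLine("Handled severity {0}", message.Severity);
            return Task.CompletedTask;
        }
    }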
diff --git a/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Helpers/KafkaClusterConfigurationBuilderHelper.cs b/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Helpers/KafkaClusterConfigurationBuilderHelper.cs
index 5a7d43c6..336e20e0 100644
--- a/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Helpers/KafkaClusterConfigurationBuilderHelper.cs
+++ b/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Helpers/KafkaClusterConfigurationBuilderHelper.cs
@@ -1,107 +1,109 @@
-namespace KafkaFlow.Retry.SchemaRegistry.Sample.Helpers
-{
- using System;
- using Confluent.SchemaRegistry;
- using Confluent.SchemaRegistry.Serdes;
- using global::SchemaRegistry;
- using KafkaFlow.Configuration;
- using KafkaFlow.Retry.MongoDb;
- using KafkaFlow.Retry.SchemaRegistry.Sample.ContractResolvers;
- using KafkaFlow.Retry.SchemaRegistry.Sample.Exceptions;
- using KafkaFlow.Retry.SchemaRegistry.Sample.Handlers;
- using Newtonsoft.Json;
+using System;
+using Confluent.Kafka;
+using Confluent.SchemaRegistry;
+using Confluent.SchemaRegistry.Serdes;
+using KafkaFlow.Configuration;
+using KafkaFlow.Retry.MongoDb;
+using KafkaFlow.Retry.SchemaRegistry.Sample.ContractResolvers;
+using KafkaFlow.Retry.SchemaRegistry.Sample.Exceptions;
+using KafkaFlow.Retry.SchemaRegistry.Sample.Handlers;
+using Newtonsoft.Json;
+using SchemaRegistry;
+
+namespace KafkaFlow.Retry.SchemaRegistry.Sample.Helpers;
- internal static class KafkaClusterConfigurationBuilderHelper
+internal static class KafkaClusterConfigurationBuilderHelper
+{
+ internal static IClusterConfigurationBuilder SetupRetryDurableMongoAvroDb(
+ this IClusterConfigurationBuilder cluster,
+ string mongoDbConnectionString,
+ string mongoDbDatabaseName,
+ string mongoDbRetryQueueCollectionName,
+ string mongoDbRetryQueueItemCollectionName)
{
- internal static IClusterConfigurationBuilder SetupRetryDurableMongoAvroDb(
- this IClusterConfigurationBuilder cluster,
- string mongoDbConnectionString,
- string mongoDbDatabaseName,
- string mongoDbRetryQueueCollectionName,
- string mongoDbRetryQueueItemCollectionName)
- {
- cluster
- .AddProducer(
- "kafka-flow-retry-durable-mongodb-avro-producer",
- producer => producer
- .DefaultTopic("sample-kafka-flow-retry-durable-mongodb-avro-topic")
- .WithCompression(Confluent.Kafka.CompressionType.Gzip)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSchemaRegistryAvroSerializer(
- new AvroSerializerConfig
+ cluster
+ .AddProducer(
+ "kafka-flow-retry-durable-mongodb-avro-producer",
+ producer => producer
+ .DefaultTopic("sample-kafka-flow-retry-durable-mongodb-avro-topic")
+ .WithCompression(CompressionType.Gzip)
+ .AddMiddlewares(
+ middlewares => middlewares
+ .AddSchemaRegistryAvroSerializer(
+ new AvroSerializerConfig
+ {
+ SubjectNameStrategy = SubjectNameStrategy.TopicRecord
+ })
+ )
+ .WithAcks(Acks.All)
+ )
+ .AddConsumer(
+ consumer => consumer
+ .Topic("sample-kafka-flow-retry-durable-mongodb-avro-topic")
+ .WithGroupId("sample-consumer-kafka-flow-retry-durable-mongodb-avro")
+ .WithName("kafka-flow-retry-durable-mongodb-avro-consumer")
+ .WithBufferSize(10)
+ .WithWorkersCount(20)
+ .WithAutoOffsetReset(AutoOffsetReset.Latest)
+ .AddMiddlewares(
+ middlewares => middlewares
+ .AddSchemaRegistryAvroDeserializer()
+ .RetryDurable(
+ configure => configure
+ .Handle<RetryDurableTestException>()
+ .WithMessageType(typeof(AvroLogMessage))
+ .WithMessageSerializeSettings(new JsonSerializerSettings
{
- SubjectNameStrategy = SubjectNameStrategy.TopicRecord
+ ContractResolver = new WritablePropertiesOnlyResolver()
})
- )
- .WithAcks(Acks.All)
- )
- .AddConsumer(
- consumer => consumer
- .Topic("sample-kafka-flow-retry-durable-mongodb-avro-topic")
- .WithGroupId("sample-consumer-kafka-flow-retry-durable-mongodb-avro")
- .WithName("kafka-flow-retry-durable-mongodb-avro-consumer")
- .WithBufferSize(10)
- .WithWorkersCount(20)
- .WithAutoOffsetReset(AutoOffsetReset.Latest)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSchemaRegistryAvroDeserializer()
- .RetryDurable(
- configure => configure
- .Handle<RetryDurableTestException>()
- .WithMessageType(typeof(AvroLogMessage))
- .WithMessageSerializeSettings(new JsonSerializerSettings
- {
- ContractResolver = new WritablePropertiesOnlyResolver()
- })
- .WithMongoDbDataProvider(
- mongoDbConnectionString,
- mongoDbDatabaseName,
- mongoDbRetryQueueCollectionName,
- mongoDbRetryQueueItemCollectionName)
- .WithRetryPlanBeforeRetryDurable(
- configure => configure
- .TryTimes(3)
- .WithTimeBetweenTriesPlan(
- TimeSpan.FromMilliseconds(250),
- TimeSpan.FromMilliseconds(500),
- TimeSpan.FromMilliseconds(1000))
- .ShouldPauseConsumer(false)
- )
- .WithEmbeddedRetryCluster(
- cluster,
- configure => configure
- .WithRetryTopicName("sample-kafka-flow-retry-durable-mongodb-avro-topic-retry")
- .WithRetryConsumerBufferSize(4)
- .WithRetryConsumerWorkersCount(2)
- .WithRetryConsumerStrategy(RetryConsumerStrategy.GuaranteeOrderedConsumption)
- .WithRetryTypedHandlers(
- handlers => handlers
- .WithHandlerLifetime(InstanceLifetime.Transient)
- .AddHandler<AvroMessageTestHandler>()
- )
- .Enabled(true)
- )
- .WithPollingJobsConfiguration(
- configure => configure
- .WithSchedulerId("retry-durable-mongodb-avro-polling-id")
- .WithRetryDurablePollingConfiguration(
- configure => configure
- .WithCronExpression("0 0/1 * 1/1 * ? *")
- .WithExpirationIntervalFactor(1)
- .WithFetchSize(10)
- .Enabled(true)
- )
- ))
- .AddTypedHandlers(
- handlers => handlers
- .WithHandlerLifetime(InstanceLifetime.Transient)
- .AddHandler<AvroMessageTestHandler>())
- )
- );
+ .WithMongoDbDataProvider(
+ mongoDbConnectionString,
+ mongoDbDatabaseName,
+ mongoDbRetryQueueCollectionName,
+ mongoDbRetryQueueItemCollectionName)
+ .WithRetryPlanBeforeRetryDurable(
+ configure => configure
+ .TryTimes(3)
+ .WithTimeBetweenTriesPlan(
+ TimeSpan.FromMilliseconds(250),
+ TimeSpan.FromMilliseconds(500),
+ TimeSpan.FromMilliseconds(1000))
+ .ShouldPauseConsumer(false)
+ )
+ .WithEmbeddedRetryCluster(
+ cluster,
+ configure => configure
+ .WithRetryTopicName(
+ "sample-kafka-flow-retry-durable-mongodb-avro-topic-retry")
+ .WithRetryConsumerBufferSize(4)
+ .WithRetryConsumerWorkersCount(2)
+ .WithRetryConsumerStrategy(
+ RetryConsumerStrategy.GuaranteeOrderedConsumption)
+ .WithRetryTypedHandlers(
+ handlers => handlers
+ .WithHandlerLifetime(InstanceLifetime.Transient)
+ .AddHandler<AvroMessageTestHandler>()
+ )
+ .Enabled(true)
+ )
+ .WithPollingJobsConfiguration(
+ configure => configure
+ .WithSchedulerId("retry-durable-mongodb-avro-polling-id")
+ .WithRetryDurablePollingConfiguration(
+ configure => configure
+ .WithCronExpression("0 0/1 * 1/1 * ? *")
+ .WithExpirationIntervalFactor(1)
+ .WithFetchSize(10)
+ .Enabled(true)
+ )
+ ))
+ .AddTypedHandlers(
+ handlers => handlers
+ .WithHandlerLifetime(InstanceLifetime.Transient)
+ .AddHandler<AvroMessageTestHandler>())
+ )
+ );
- return cluster;
- }
+ return cluster;
}
}
\ No newline at end of file
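The polling job above uses a Quartz-style cron expression; "0 0/1 * 1/1 * ? *" fires once per minute. Only that fragment of the builder chain needs to change to poll less aggressively, for example (an illustrative variant, not part of this change):

    .WithRetryDurablePollingConfiguration(
        configure => configure
            .WithCronExpression("0 0/5 * 1/1 * ? *") // every five minutes (Quartz syntax)
            .WithExpirationIntervalFactor(1)
            .WithFetchSize(10)
            .Enabled(true)
    )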
diff --git a/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Program.cs b/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Program.cs
index d04fcbf9..eb9ea6ef 100644
--- a/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Program.cs
+++ b/samples/KafkaFlow.Retry.SchemaRegistry.Sample/Program.cs
@@ -1,82 +1,81 @@
-namespace KafkaFlow.Retry.SchemaRegistry.Sample
-{
- using System;
- using System.Threading;
- using System.Threading.Tasks;
- using global::SchemaRegistry;
- using KafkaFlow.Producers;
- using KafkaFlow.Retry.Common.Sample.Helpers;
- using KafkaFlow.Retry.SchemaRegistry.Sample.Helpers;
- using Microsoft.Extensions.DependencyInjection;
+using System;
+using System.Threading;
+using System.Threading.Tasks;
+using KafkaFlow.Producers;
+using KafkaFlow.Retry.Common.Sample.Helpers;
+using KafkaFlow.Retry.SchemaRegistry.Sample.Helpers;
+using Microsoft.Extensions.DependencyInjection;
+using SchemaRegistry;
+
+namespace KafkaFlow.Retry.SchemaRegistry.Sample;
- internal class Program
+internal class Program
+{
+ private static async Task Main(string[] args)
{
- private static async Task Main(string[] args)
+ var services = new ServiceCollection();
+ var brokers = "localhost:9092";
+ var mongoDbConnectionString = "mongodb://localhost:27017";
+ var mongoDbDatabaseName = "kafka_flow_retry_durable_sample";
+ var mongoDbRetryQueueCollectionName = "RetryQueues";
+ var mongoDbRetryQueueItemCollectionName = "RetryQueueItems";
+
+ var topics = new[]
{
- var services = new ServiceCollection();
- var brokers = "localhost:9092";
- var mongoDbConnectionString = "mongodb://localhost:27017";
- var mongoDbDatabaseName = "kafka_flow_retry_durable_sample";
- var mongoDbRetryQueueCollectionName = "RetryQueues";
- var mongoDbRetryQueueItemCollectionName = "RetryQueueItems";
+ "sample-kafka-flow-retry-durable-mongodb-avro-topic",
+ "sample-kafka-flow-retry-durable-mongodb-avro-topic-retry"
+ };
- var topics = new[]
- {
- "sample-kafka-flow-retry-durable-mongodb-avro-topic",
- "sample-kafka-flow-retry-durable-mongodb-avro-topic-retry",
- };
+ KafkaHelper.CreateKafkaTopics(brokers, topics).GetAwaiter().GetResult();
- KafkaHelper.CreateKafkaTopics(brokers, topics).GetAwaiter().GetResult();
+ services.AddKafka(
+ kafka => kafka
+ .UseConsoleLog()
+ .AddCluster(
+ cluster => cluster
+ .WithBrokers(new[] { brokers })
+ .WithSchemaRegistry(config => config.Url = "localhost:8085")
+ .SetupRetryDurableMongoAvroDb(
+ mongoDbConnectionString,
+ mongoDbDatabaseName,
+ mongoDbRetryQueueCollectionName,
+ mongoDbRetryQueueItemCollectionName)
+ )
+ );
- services.AddKafka(
- kafka => kafka
- .UseConsoleLog()
- .AddCluster(
- cluster => cluster
- .WithBrokers(new[] { brokers })
- .WithSchemaRegistry(config => config.Url = "localhost:8085")
- .SetupRetryDurableMongoAvroDb(
- mongoDbConnectionString,
- mongoDbDatabaseName,
- mongoDbRetryQueueCollectionName,
- mongoDbRetryQueueItemCollectionName)
- )
- );
+ var provider = services.BuildServiceProvider();
- var provider = services.BuildServiceProvider();
+ var bus = provider.CreateKafkaBus();
- var bus = provider.CreateKafkaBus();
+ await bus.StartAsync();
- await bus.StartAsync();
+ // Wait for partition assignment
+ Thread.Sleep(10000);
- // Wait partition assignment
- Thread.Sleep(10000);
+ var producers = provider.GetRequiredService<IProducerAccessor>();
- var producers = provider.GetRequiredService<IProducerAccessor>();
+ while (true)
+ {
+ Console.WriteLine("Number of messages to produce or exit:");
+ var input = Console.ReadLine().ToLower();
- while (true)
+ switch (input)
{
- Console.WriteLine("Number of messages to produce or exit:");
- var input = Console.ReadLine().ToLower();
-
- switch (input)
- {
- case var _ when int.TryParse(input, out var count):
- for (var i = 0; i < count; i++)
- {
- await Task.WhenAll(
- producers["kafka-flow-retry-durable-mongodb-avro-producer"].ProduceAsync(
- Guid.NewGuid().ToString(),
- new AvroLogMessage { Severity = LogLevel.Info })
- );
- }
+ case var _ when int.TryParse(input, out var count):
+ for (var i = 0; i < count; i++)
+ {
+ await Task.WhenAll(
+ producers["kafka-flow-retry-durable-mongodb-avro-producer"].ProduceAsync(
+ Guid.NewGuid().ToString(),
+ new AvroLogMessage { Severity = LogLevel.Info })
+ );
+ }
- break;
+ break;
- case "exit":
- await bus.StopAsync();
- return;
- }
+ case "exit":
+ await bus.StopAsync();
+ return;
}
}
}
diff --git a/src/KafkaFlow.Retry.API/Adapters/Common/IRetryQueueItemAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/Common/IRetryQueueItemAdapter.cs
index 590a15a1..309105ce 100644
--- a/src/KafkaFlow.Retry.API/Adapters/Common/IRetryQueueItemAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/Common/IRetryQueueItemAdapter.cs
@@ -1,10 +1,9 @@
-namespace KafkaFlow.Retry.API.Adapters.Common
-{
- using KafkaFlow.Retry.API.Dtos.Common;
- using KafkaFlow.Retry.Durable.Repository.Model;
+using KafkaFlow.Retry.API.Dtos.Common;
+using KafkaFlow.Retry.Durable.Repository.Model;
+
+namespace KafkaFlow.Retry.API.Adapters.Common;
- internal interface IRetryQueueItemAdapter
- {
- RetryQueueItemDto Adapt(RetryQueueItem item, string queueGroupKey);
- }
+internal interface IRetryQueueItemAdapter
+{
+ RetryQueueItemDto Adapt(RetryQueueItem item, string queueGroupKey);
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/Common/IRetryQueueItemStatusDtoAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/Common/IRetryQueueItemStatusDtoAdapter.cs
index a412c13b..ab030096 100644
--- a/src/KafkaFlow.Retry.API/Adapters/Common/IRetryQueueItemStatusDtoAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/Common/IRetryQueueItemStatusDtoAdapter.cs
@@ -1,10 +1,9 @@
-namespace KafkaFlow.Retry.API.Adapters.Common
-{
- using KafkaFlow.Retry.API.Dtos.Common;
- using KafkaFlow.Retry.Durable.Repository.Model;
+using KafkaFlow.Retry.API.Dtos.Common;
+using KafkaFlow.Retry.Durable.Repository.Model;
+
+namespace KafkaFlow.Retry.API.Adapters.Common;
- internal interface IRetryQueueItemStatusDtoAdapter
- {
- RetryQueueItemStatus Adapt(RetryQueueItemStatusDto dto);
- }
+internal interface IRetryQueueItemStatusDtoAdapter
+{
+ RetryQueueItemStatus Adapt(RetryQueueItemStatusDto dto);
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/Common/Parsers/EnumParser.cs b/src/KafkaFlow.Retry.API/Adapters/Common/Parsers/EnumParser.cs
index 2cb2bfcf..c3cd0894 100644
--- a/src/KafkaFlow.Retry.API/Adapters/Common/Parsers/EnumParser.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/Common/Parsers/EnumParser.cs
@@ -1,33 +1,32 @@
-namespace KafkaFlow.Retry.API.Adapters.Common.Parsers
-{
- using System;
- using System.Collections.Generic;
- using System.Linq;
- using Dawn;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Dawn;
+
+namespace KafkaFlow.Retry.API.Adapters.Common.Parsers;
- internal class EnumParser<T> : IQueryParametersParser<T> where T : struct
+internal class EnumParser<T> : IQueryParametersParser<T> where T : struct
+{
+ public IEnumerable<T> Parse(IEnumerable<string> parameters, IEnumerable<T> defaultValue)
{
- public IEnumerable<T> Parse(IEnumerable<string> parameters, IEnumerable<T> defaultValue)
- {
- Guard.Argument(parameters, nameof(parameters)).NotNull();
- Guard.Argument(defaultValue, nameof(defaultValue)).NotNull();
+ Guard.Argument(parameters, nameof(parameters)).NotNull();
+ Guard.Argument(defaultValue, nameof(defaultValue)).NotNull();
- var items = new List<T>();
+ var items = new List<T>();
- if (parameters.Any())
+ if (parameters.Any())
+ {
+ foreach (var param in parameters)
{
- foreach (var param in parameters)
+ if (Enum.TryParse<T>(param, out var item))
{
- if (Enum.TryParse<T>(param, out var item))
- {
- items.Add(item);
- }
+ items.Add(item);
}
-
- return items;
}
- return defaultValue;
+ return items;
}
+
+ return defaultValue;
}
-}
+}
\ No newline at end of file
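EnumParser<T> keeps whichever supplied values parse into the enum and only falls back to the defaults when no values were supplied at all. A minimal usage sketch with the domain status enum used elsewhere in this PR:

    var parser = new EnumParser<RetryQueueItemStatus>();

    var statuses = parser.Parse(
        new[] { "Waiting", "NotAStatus" },           // raw query-string values
        new[] { RetryQueueItemStatus.InRetry });     // defaults, used only when no values arrive

    // statuses -> [ RetryQueueItemStatus.Waiting ]; the unparseable value is dropped.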
diff --git a/src/KafkaFlow.Retry.API/Adapters/Common/Parsers/IQueryParametersParser.cs b/src/KafkaFlow.Retry.API/Adapters/Common/Parsers/IQueryParametersParser.cs
index bc0b5fd8..8be9c588 100644
--- a/src/KafkaFlow.Retry.API/Adapters/Common/Parsers/IQueryParametersParser.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/Common/Parsers/IQueryParametersParser.cs
@@ -1,9 +1,8 @@
-namespace KafkaFlow.Retry.API.Adapters.Common.Parsers
-{
- using System.Collections.Generic;
+using System.Collections.Generic;
+
+namespace KafkaFlow.Retry.API.Adapters.Common.Parsers;
- internal interface IQueryParametersParser<T> where T : struct
- {
- IEnumerable<T> Parse(IEnumerable<string> parameters, IEnumerable<T> defaultValue);
- }
-}
+internal interface IQueryParametersParser<T> where T : struct
+{
+ IEnumerable<T> Parse(IEnumerable<string> parameters, IEnumerable<T> defaultValue);
+}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/Common/RetryQueueItemAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/Common/RetryQueueItemAdapter.cs
index b1df14a1..7ec988b8 100644
--- a/src/KafkaFlow.Retry.API/Adapters/Common/RetryQueueItemAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/Common/RetryQueueItemAdapter.cs
@@ -1,36 +1,35 @@
-namespace KafkaFlow.Retry.API.Adapters.Common
-{
- using Dawn;
- using KafkaFlow.Retry.API.Dtos.Common;
- using KafkaFlow.Retry.Durable.Repository.Model;
+using Dawn;
+using KafkaFlow.Retry.API.Dtos.Common;
+using KafkaFlow.Retry.Durable.Repository.Model;
+
+namespace KafkaFlow.Retry.API.Adapters.Common;
- internal class RetryQueueItemAdapter : IRetryQueueItemAdapter
+internal class RetryQueueItemAdapter : IRetryQueueItemAdapter
+{
+ public RetryQueueItemDto Adapt(RetryQueueItem item, string queueGroupKey)
{
- public RetryQueueItemDto Adapt(RetryQueueItem item, string queueGroupKey)
- {
- Guard.Argument(item, nameof(item)).NotNull();
- Guard.Argument(item.Message, nameof(item.Message)).NotNull();
+ Guard.Argument(item, nameof(item)).NotNull();
+ Guard.Argument(item.Message, nameof(item.Message)).NotNull();
- return new RetryQueueItemDto()
+ return new RetryQueueItemDto
+ {
+ Id = item.Id,
+ Status = item.Status,
+ SeverityLevel = item.SeverityLevel,
+ AttemptsCount = item.AttemptsCount,
+ CreationDate = item.CreationDate,
+ LastExecution = item.LastExecution,
+ Sort = item.Sort,
+ MessageInfo = new RetryQueuetItemMessageInfoDto
{
- Id = item.Id,
- Status = item.Status,
- SeverityLevel = item.SeverityLevel,
- AttemptsCount = item.AttemptsCount,
- CreationDate = item.CreationDate,
- LastExecution = item.LastExecution,
- Sort = item.Sort,
- MessageInfo = new RetryQueuetItemMessageInfoDto()
- {
- Key = item.Message.Key,
- Offset = item.Message.Offset,
- Partition = item.Message.Partition,
- Topic = item.Message.TopicName,
- UtcTimeStamp = item.Message.UtcTimeStamp
- },
- Description = item.Description,
- QueueGroupKey = queueGroupKey
- };
- }
+ Key = item.Message.Key,
+ Offset = item.Message.Offset,
+ Partition = item.Message.Partition,
+ Topic = item.Message.TopicName,
+ UtcTimeStamp = item.Message.UtcTimeStamp
+ },
+ Description = item.Description,
+ QueueGroupKey = queueGroupKey
+ };
}
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/Common/RetryQueueItemStatusDtoAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/Common/RetryQueueItemStatusDtoAdapter.cs
index 78562175..c524f830 100644
--- a/src/KafkaFlow.Retry.API/Adapters/Common/RetryQueueItemStatusDtoAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/Common/RetryQueueItemStatusDtoAdapter.cs
@@ -1,12 +1,12 @@
-namespace KafkaFlow.Retry.API.Adapters.Common
-{
- using KafkaFlow.Retry.API.Dtos.Common;
- using KafkaFlow.Retry.Durable.Repository.Model;
+using KafkaFlow.Retry.API.Dtos.Common;
+using KafkaFlow.Retry.Durable.Repository.Model;
+
+namespace KafkaFlow.Retry.API.Adapters.Common;
- internal class RetryQueueItemStatusDtoAdapter : IRetryQueueItemStatusDtoAdapter
+internal class RetryQueueItemStatusDtoAdapter : IRetryQueueItemStatusDtoAdapter
+{
+ public RetryQueueItemStatus Adapt(RetryQueueItemStatusDto dto)
{
- public RetryQueueItemStatus Adapt(RetryQueueItemStatusDto dto)
- {
switch (dto)
{
case RetryQueueItemStatusDto.Waiting:
@@ -25,5 +25,4 @@ public RetryQueueItemStatus Adapt(RetryQueueItemStatusDto dto)
return RetryQueueItemStatus.None;
}
}
- }
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/GetItems/GetItemsInputAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/GetItems/GetItemsInputAdapter.cs
index 1fbe1d4e..60c9187c 100644
--- a/src/KafkaFlow.Retry.API/Adapters/GetItems/GetItemsInputAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/GetItems/GetItemsInputAdapter.cs
@@ -1,23 +1,22 @@
-namespace KafkaFlow.Retry.API.Adapters.GetItems
+using Dawn;
+using KafkaFlow.Retry.API.Dtos;
+using KafkaFlow.Retry.Durable.Repository.Actions.Read;
+using KafkaFlow.Retry.Durable.Repository.Model;
+
+namespace KafkaFlow.Retry.API.Adapters.GetItems;
+
+internal class GetItemsInputAdapter : IGetItemsInputAdapter
{
- using Dawn;
- using KafkaFlow.Retry.API.Dtos;
- using KafkaFlow.Retry.Durable.Repository.Actions.Read;
- using KafkaFlow.Retry.Durable.Repository.Model;
+ private readonly GetQueuesSortOption _sortOption = GetQueuesSortOption.ByCreationDateDescending;
- internal class GetItemsInputAdapter : IGetItemsInputAdapter
+ public GetQueuesInput Adapt(GetItemsRequestDto requestDto)
{
- private readonly GetQueuesSortOption sortOption = GetQueuesSortOption.ByCreationDate_Descending;
+ Guard.Argument(requestDto, nameof(requestDto)).NotNull();
- public GetQueuesInput Adapt(GetItemsRequestDto requestDto)
+ return new GetQueuesInput(RetryQueueStatus.Active, requestDto.ItemsStatuses, _sortOption, requestDto.TopQueues)
{
- Guard.Argument(requestDto, nameof(requestDto)).NotNull();
-
- return new GetQueuesInput(RetryQueueStatus.Active, requestDto.ItemsStatuses, this.sortOption, requestDto.TopQueues)
- {
- SeverityLevels = requestDto.SeverityLevels,
- TopItemsByQueue = requestDto.TopItemsByQueue
- };
- }
+ SeverityLevels = requestDto.SeverityLevels,
+ TopItemsByQueue = requestDto.TopItemsByQueue
+ };
}
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/GetItems/GetItemsRequestDtoReader.cs b/src/KafkaFlow.Retry.API/Adapters/GetItems/GetItemsRequestDtoReader.cs
index dcd9e1e8..7e7adc15 100644
--- a/src/KafkaFlow.Retry.API/Adapters/GetItems/GetItemsRequestDtoReader.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/GetItems/GetItemsRequestDtoReader.cs
@@ -1,43 +1,49 @@
-namespace KafkaFlow.Retry.API.Adapters.GetItems
+using System.Collections.Generic;
+using System.Linq;
+using KafkaFlow.Retry.API.Adapters.Common.Parsers;
+using KafkaFlow.Retry.API.Dtos;
+using KafkaFlow.Retry.Durable.Common;
+using KafkaFlow.Retry.Durable.Repository.Model;
+using Microsoft.AspNetCore.Http;
+
+namespace KafkaFlow.Retry.API.Adapters.GetItems;
+
+internal class GetItemsRequestDtoReader : IGetItemsRequestDtoReader
{
- using System.Collections.Generic;
- using System.Linq;
- using KafkaFlow.Retry.API.Adapters.Common.Parsers;
- using KafkaFlow.Retry.API.Dtos;
- using KafkaFlow.Retry.Durable.Common;
- using KafkaFlow.Retry.Durable.Repository.Model;
- using Microsoft.AspNetCore.Http;
-
- internal class GetItemsRequestDtoReader : IGetItemsRequestDtoReader
- {
- private const int DefaultTopItemsByQueueValue = 100;
- private const int DefaultTopQueuesValue = 10000;
- private readonly IEnumerable<RetryQueueItemStatus> DefaultItemsStatuses = new RetryQueueItemStatus[] { RetryQueueItemStatus.Waiting, RetryQueueItemStatus.InRetry };
- private readonly IEnumerable<SeverityLevel> DefaultSeverityLevels = Enumerable.Empty<SeverityLevel>();
+ private const int DefaultTopItemsByQueueValue = 100;
+ private const int DefaultTopQueuesValue = 10000;
- private readonly EnumParser<SeverityLevel> severitiesParser;
- private readonly EnumParser<RetryQueueItemStatus> statusesParser;
+ private readonly IEnumerable<RetryQueueItemStatus> _defaultItemsStatuses =
+ new[] { RetryQueueItemStatus.Waiting, RetryQueueItemStatus.InRetry };
- public GetItemsRequestDtoReader()
- {
- this.statusesParser = new EnumParser<RetryQueueItemStatus>();
- this.severitiesParser = new EnumParser<SeverityLevel>();
- }
+ private readonly IEnumerable<SeverityLevel> _defaultSeverityLevels = Enumerable.Empty<SeverityLevel>();
+
+ private readonly EnumParser<SeverityLevel> _severitiesParser;
+ private readonly EnumParser<RetryQueueItemStatus> _statusesParser;
+
+ public GetItemsRequestDtoReader()
+ {
+ _statusesParser = new EnumParser<RetryQueueItemStatus>();
+ _severitiesParser = new EnumParser<SeverityLevel>();
+ }
+
+ public GetItemsRequestDto Read(HttpRequest request)
+ {
+ var statusIds = request.ReadQueryParams("status");
+ var severityIds = request.ReadQueryParams("severitylevel");
+ var topQueues = request.ReadQueryParams("topqueues");
+ var topItemsByQueue = request.ReadQueryParams("topitemsbyqueue");
- public GetItemsRequestDto Read(HttpRequest request)
+ return new GetItemsRequestDto
{
- var statusIds = request.ReadQueryParams("status");
- var severityIds = request.ReadQueryParams("severitylevel");
- var topQueues = request.ReadQueryParams("topqueues");
- var topItemsByQueue = request.ReadQueryParams("topitemsbyqueue");
-
- return new GetItemsRequestDto()
- {
- ItemsStatuses = this.statusesParser.Parse(statusIds, DefaultItemsStatuses),
- SeverityLevels = this.severitiesParser.Parse(severityIds, DefaultSeverityLevels),
- TopQueues = int.TryParse(topQueues.LastOrDefault(), out int parsedTopQueues) ? parsedTopQueues : DefaultTopQueuesValue,
- TopItemsByQueue = int.TryParse(topItemsByQueue.LastOrDefault(), out int parsedTopItemsByQueue) ? parsedTopItemsByQueue : DefaultTopItemsByQueueValue
- };
- }
+ ItemsStatuses = _statusesParser.Parse(statusIds, _defaultItemsStatuses),
+ SeverityLevels = _severitiesParser.Parse(severityIds, _defaultSeverityLevels),
+ TopQueues = int.TryParse(topQueues.LastOrDefault(), out var parsedTopQueues)
+ ? parsedTopQueues
+ : DefaultTopQueuesValue,
+ TopItemsByQueue = int.TryParse(topItemsByQueue.LastOrDefault(), out var parsedTopItemsByQueue)
+ ? parsedTopItemsByQueue
+ : DefaultTopItemsByQueueValue
+ };
}
}
\ No newline at end of file
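The reader falls back to the defaults above (Waiting/InRetry statuses, 10000 queues, 100 items per queue, no severity filter) whenever a query parameter is missing or unparseable. A rough sketch of the mapping, assuming ReadQueryParams returns the raw values for a key and using DefaultHttpContext purely for illustration:

    var request = new DefaultHttpContext().Request;
    request.QueryString = new QueryString("?status=Waiting&topqueues=50");

    var dto = new GetItemsRequestDtoReader().Read(request);
    // dto.ItemsStatuses   -> [ Waiting ]
    // dto.TopQueues       -> 50
    // dto.TopItemsByQueue -> 100   (default, parameter absent)
    // dto.SeverityLevels  -> []    (default, parameter absent)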
diff --git a/src/KafkaFlow.Retry.API/Adapters/GetItems/GetItemsResponseDtoAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/GetItems/GetItemsResponseDtoAdapter.cs
index d67cd1cd..cf29e7fd 100644
--- a/src/KafkaFlow.Retry.API/Adapters/GetItems/GetItemsResponseDtoAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/GetItems/GetItemsResponseDtoAdapter.cs
@@ -1,37 +1,36 @@
-namespace KafkaFlow.Retry.API.Adapters.GetItems
+using System.Collections.Generic;
+using Dawn;
+using KafkaFlow.Retry.API.Adapters.Common;
+using KafkaFlow.Retry.API.Dtos;
+using KafkaFlow.Retry.API.Dtos.Common;
+using KafkaFlow.Retry.Durable.Repository.Actions.Read;
+
+namespace KafkaFlow.Retry.API.Adapters.GetItems;
+
+internal class GetItemsResponseDtoAdapter : IGetItemsResponseDtoAdapter
{
- using System.Collections.Generic;
- using Dawn;
- using KafkaFlow.Retry.API.Adapters.Common;
- using KafkaFlow.Retry.API.Dtos;
- using KafkaFlow.Retry.API.Dtos.Common;
- using KafkaFlow.Retry.Durable.Repository.Actions.Read;
+ private readonly IRetryQueueItemAdapter _retryQueueItemAdapter;
- internal class GetItemsResponseDtoAdapter : IGetItemsResponseDtoAdapter
+ public GetItemsResponseDtoAdapter()
{
- private readonly IRetryQueueItemAdapter retryQueueItemAdapter;
-
- public GetItemsResponseDtoAdapter()
- {
- this.retryQueueItemAdapter = new RetryQueueItemAdapter();
- }
+ _retryQueueItemAdapter = new RetryQueueItemAdapter();
+ }
- public GetItemsResponseDto Adapt(GetQueuesResult getQueuesResult)
- {
- Guard.Argument(getQueuesResult, nameof(getQueuesResult)).NotNull();
- Guard.Argument(getQueuesResult.RetryQueues, nameof(getQueuesResult.RetryQueues)).NotNull();
+ public GetItemsResponseDto Adapt(GetQueuesResult getQueuesResult)
+ {
+ Guard.Argument(getQueuesResult, nameof(getQueuesResult)).NotNull();
+ Guard.Argument(getQueuesResult.RetryQueues, nameof(getQueuesResult.RetryQueues)).NotNull();
- var itemsDto = new List<RetryQueueItemDto>();
+ var itemsDto = new List<RetryQueueItemDto>();
- foreach (var queue in getQueuesResult.RetryQueues)
+ foreach (var queue in getQueuesResult.RetryQueues)
+ {
+ foreach (var item in queue.Items)
{
- foreach (var item in queue.Items)
- {
- itemsDto.Add(this.retryQueueItemAdapter.Adapt(item, queue.QueueGroupKey));
- }
+ itemsDto.Add(_retryQueueItemAdapter.Adapt(item, queue.QueueGroupKey));
}
-
- return new GetItemsResponseDto(itemsDto);
}
+
+ return new GetItemsResponseDto(itemsDto);
}
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/GetItems/IGetItemsInputAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/GetItems/IGetItemsInputAdapter.cs
index adb80b50..90db7176 100644
--- a/src/KafkaFlow.Retry.API/Adapters/GetItems/IGetItemsInputAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/GetItems/IGetItemsInputAdapter.cs
@@ -1,10 +1,9 @@
-namespace KafkaFlow.Retry.API.Adapters.GetItems
-{
- using KafkaFlow.Retry.API.Dtos;
- using KafkaFlow.Retry.Durable.Repository.Actions.Read;
+using KafkaFlow.Retry.API.Dtos;
+using KafkaFlow.Retry.Durable.Repository.Actions.Read;
+
+namespace KafkaFlow.Retry.API.Adapters.GetItems;
- public interface IGetItemsInputAdapter
- {
- GetQueuesInput Adapt(GetItemsRequestDto requestDto);
- }
+public interface IGetItemsInputAdapter
+{
+ GetQueuesInput Adapt(GetItemsRequestDto requestDto);
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/GetItems/IGetItemsRequestDtoReader.cs b/src/KafkaFlow.Retry.API/Adapters/GetItems/IGetItemsRequestDtoReader.cs
index a3670372..65f0ffa8 100644
--- a/src/KafkaFlow.Retry.API/Adapters/GetItems/IGetItemsRequestDtoReader.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/GetItems/IGetItemsRequestDtoReader.cs
@@ -1,10 +1,9 @@
-namespace KafkaFlow.Retry.API.Adapters.GetItems
-{
- using KafkaFlow.Retry.API.Dtos;
- using Microsoft.AspNetCore.Http;
+using KafkaFlow.Retry.API.Dtos;
+using Microsoft.AspNetCore.Http;
+
+namespace KafkaFlow.Retry.API.Adapters.GetItems;
- public interface IGetItemsRequestDtoReader
- {
- GetItemsRequestDto Read(HttpRequest request);
- }
-}
+public interface IGetItemsRequestDtoReader
+{
+ GetItemsRequestDto Read(HttpRequest request);
+}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/GetItems/IGetItemsResponseDtoAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/GetItems/IGetItemsResponseDtoAdapter.cs
index f3f04f44..3f347135 100644
--- a/src/KafkaFlow.Retry.API/Adapters/GetItems/IGetItemsResponseDtoAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/GetItems/IGetItemsResponseDtoAdapter.cs
@@ -1,10 +1,9 @@
-namespace KafkaFlow.Retry.API.Adapters.GetItems
-{
- using KafkaFlow.Retry.API.Dtos;
- using KafkaFlow.Retry.Durable.Repository.Actions.Read;
+using KafkaFlow.Retry.API.Dtos;
+using KafkaFlow.Retry.Durable.Repository.Actions.Read;
+
+namespace KafkaFlow.Retry.API.Adapters.GetItems;
- public interface IGetItemsResponseDtoAdapter
- {
- GetItemsResponseDto Adapt(GetQueuesResult getQueuesResult);
- }
+public interface IGetItemsResponseDtoAdapter
+{
+ GetItemsResponseDto Adapt(GetQueuesResult getQueuesResult);
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/UpdateItems/IUpdateItemsInputAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/UpdateItems/IUpdateItemsInputAdapter.cs
index 3ae1fa5d..c2afc3f1 100644
--- a/src/KafkaFlow.Retry.API/Adapters/UpdateItems/IUpdateItemsInputAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/UpdateItems/IUpdateItemsInputAdapter.cs
@@ -1,10 +1,9 @@
-namespace KafkaFlow.Retry.API.Adapters.UpdateItems
-{
- using KafkaFlow.Retry.API.Dtos;
- using KafkaFlow.Retry.Durable.Repository.Actions.Update;
+using KafkaFlow.Retry.API.Dtos;
+using KafkaFlow.Retry.Durable.Repository.Actions.Update;
+
+namespace KafkaFlow.Retry.API.Adapters.UpdateItems;
- public interface IUpdateItemsInputAdapter
- {
- UpdateItemsInput Adapt(UpdateItemsRequestDto requestDto);
- }
+public interface IUpdateItemsInputAdapter
+{
+ UpdateItemsInput Adapt(UpdateItemsRequestDto requestDto);
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/UpdateItems/IUpdateItemsResponseDtoAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/UpdateItems/IUpdateItemsResponseDtoAdapter.cs
index 2831f7ce..ba01353a 100644
--- a/src/KafkaFlow.Retry.API/Adapters/UpdateItems/IUpdateItemsResponseDtoAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/UpdateItems/IUpdateItemsResponseDtoAdapter.cs
@@ -1,10 +1,9 @@
-namespace KafkaFlow.Retry.API.Adapters.UpdateItems
-{
- using KafkaFlow.Retry.API.Dtos;
- using KafkaFlow.Retry.Durable.Repository.Actions.Update;
+using KafkaFlow.Retry.API.Dtos;
+using KafkaFlow.Retry.Durable.Repository.Actions.Update;
+
+namespace KafkaFlow.Retry.API.Adapters.UpdateItems;
- public interface IUpdateItemsResponseDtoAdapter
- {
- UpdateItemsResponseDto Adapt(UpdateItemsResult updateItemsResult);
- }
+public interface IUpdateItemsResponseDtoAdapter
+{
+ UpdateItemsResponseDto Adapt(UpdateItemsResult updateItemsResult);
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/UpdateItems/UpdateItemsInputAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/UpdateItems/UpdateItemsInputAdapter.cs
index 5102c600..db15b221 100644
--- a/src/KafkaFlow.Retry.API/Adapters/UpdateItems/UpdateItemsInputAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/UpdateItems/UpdateItemsInputAdapter.cs
@@ -1,27 +1,26 @@
-namespace KafkaFlow.Retry.API.Adapters.UpdateItems
+using Dawn;
+using KafkaFlow.Retry.API.Adapters.Common;
+using KafkaFlow.Retry.API.Dtos;
+using KafkaFlow.Retry.Durable.Repository.Actions.Update;
+
+namespace KafkaFlow.Retry.API.Adapters.UpdateItems;
+
+internal class UpdateItemsInputAdapter : IUpdateItemsInputAdapter
{
- using Dawn;
- using KafkaFlow.Retry.API.Adapters.Common;
- using KafkaFlow.Retry.API.Dtos;
- using KafkaFlow.Retry.Durable.Repository.Actions.Update;
+ private readonly IRetryQueueItemStatusDtoAdapter _retryQueueItemStatusDtoAdapter;
- internal class UpdateItemsInputAdapter : IUpdateItemsInputAdapter
+ public UpdateItemsInputAdapter()
{
- private readonly IRetryQueueItemStatusDtoAdapter retryQueueItemStatusDtoAdapter;
-
- public UpdateItemsInputAdapter()
- {
- this.retryQueueItemStatusDtoAdapter = new RetryQueueItemStatusDtoAdapter();
- }
+ _retryQueueItemStatusDtoAdapter = new RetryQueueItemStatusDtoAdapter();
+ }
- public UpdateItemsInput Adapt(UpdateItemsRequestDto requestDto)
- {
- Guard.Argument(requestDto, nameof(requestDto)).NotNull();
+ public UpdateItemsInput Adapt(UpdateItemsRequestDto requestDto)
+ {
+ Guard.Argument(requestDto, nameof(requestDto)).NotNull();
- return new UpdateItemsInput(
- requestDto.ItemIds,
- this.retryQueueItemStatusDtoAdapter.Adapt(requestDto.Status)
- );
- }
+ return new UpdateItemsInput(
+ requestDto.ItemIds,
+ _retryQueueItemStatusDtoAdapter.Adapt(requestDto.Status)
+ );
}
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/UpdateItems/UpdateItemsResponseDtoAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/UpdateItems/UpdateItemsResponseDtoAdapter.cs
index 41172fb9..474173f2 100644
--- a/src/KafkaFlow.Retry.API/Adapters/UpdateItems/UpdateItemsResponseDtoAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/UpdateItems/UpdateItemsResponseDtoAdapter.cs
@@ -1,23 +1,22 @@
-namespace KafkaFlow.Retry.API.Adapters.UpdateItems
-{
- using Dawn;
- using KafkaFlow.Retry.API.Dtos;
- using KafkaFlow.Retry.Durable.Repository.Actions.Update;
+using Dawn;
+using KafkaFlow.Retry.API.Dtos;
+using KafkaFlow.Retry.Durable.Repository.Actions.Update;
- internal class UpdateItemsResponseDtoAdapter : IUpdateItemsResponseDtoAdapter
- {
- public UpdateItemsResponseDto Adapt(UpdateItemsResult updateItemsResult)
- {
- Guard.Argument(updateItemsResult, nameof(updateItemsResult)).NotNull();
+namespace KafkaFlow.Retry.API.Adapters.UpdateItems;
- var resultDto = new UpdateItemsResponseDto();
+internal class UpdateItemsResponseDtoAdapter : IUpdateItemsResponseDtoAdapter
+{
+ public UpdateItemsResponseDto Adapt(UpdateItemsResult updateItemsResult)
+ {
+ Guard.Argument(updateItemsResult, nameof(updateItemsResult)).NotNull();
- foreach (var result in updateItemsResult.Results)
- {
- resultDto.UpdateItemsResults.Add(new UpdateItemResultDto(result.Id, result.Status));
- }
+ var resultDto = new UpdateItemsResponseDto();
- return resultDto;
+ foreach (var result in updateItemsResult.Results)
+ {
+ resultDto.UpdateItemsResults.Add(new UpdateItemResultDto(result.Id, result.Status));
}
+
+ return resultDto;
}
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/IUpdateQueuesInputAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/IUpdateQueuesInputAdapter.cs
index aeff6fd7..3bb4fcf7 100644
--- a/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/IUpdateQueuesInputAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/IUpdateQueuesInputAdapter.cs
@@ -1,10 +1,9 @@
-namespace KafkaFlow.Retry.API.Adapters.UpdateQueues
-{
- using KafkaFlow.Retry.API.Dtos;
- using KafkaFlow.Retry.Durable.Repository;
+using KafkaFlow.Retry.API.Dtos;
+using KafkaFlow.Retry.Durable.Repository;
+
+namespace KafkaFlow.Retry.API.Adapters.UpdateQueues;
- public interface IUpdateQueuesInputAdapter
- {
- UpdateQueuesInput Adapt(UpdateQueuesRequestDto dto);
- }
+public interface IUpdateQueuesInputAdapter
+{
+ UpdateQueuesInput Adapt(UpdateQueuesRequestDto dto);
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/IUpdateQueuesResponseDtoAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/IUpdateQueuesResponseDtoAdapter.cs
index 9c724851..4bbc9819 100644
--- a/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/IUpdateQueuesResponseDtoAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/IUpdateQueuesResponseDtoAdapter.cs
@@ -1,10 +1,9 @@
-namespace KafkaFlow.Retry.API.Adapters.UpdateQueues
-{
- using KafkaFlow.Retry.API.Dtos;
- using KafkaFlow.Retry.Durable.Repository.Actions.Update;
+using KafkaFlow.Retry.API.Dtos;
+using KafkaFlow.Retry.Durable.Repository.Actions.Update;
+
+namespace KafkaFlow.Retry.API.Adapters.UpdateQueues;
- public interface IUpdateQueuesResponseDtoAdapter
- {
- UpdateQueuesResponseDto Adapt(UpdateQueuesResult updateQueuesResult);
- }
+public interface IUpdateQueuesResponseDtoAdapter
+{
+ UpdateQueuesResponseDto Adapt(UpdateQueuesResult updateQueuesResult);
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/UpdateQueuesInputAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/UpdateQueuesInputAdapter.cs
index 0bc5aee9..a0882677 100644
--- a/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/UpdateQueuesInputAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/UpdateQueuesInputAdapter.cs
@@ -1,27 +1,26 @@
-namespace KafkaFlow.Retry.API.Adapters.UpdateQueues
+using Dawn;
+using KafkaFlow.Retry.API.Adapters.Common;
+using KafkaFlow.Retry.API.Dtos;
+using KafkaFlow.Retry.Durable.Repository;
+
+namespace KafkaFlow.Retry.API.Adapters.UpdateQueues;
+
+internal class UpdateQueuesInputAdapter : IUpdateQueuesInputAdapter
{
- using Dawn;
- using KafkaFlow.Retry.API.Adapters.Common;
- using KafkaFlow.Retry.API.Dtos;
- using KafkaFlow.Retry.Durable.Repository;
+ private readonly IRetryQueueItemStatusDtoAdapter _retryQueueItemStatusDtoAdapter;
- internal class UpdateQueuesInputAdapter : IUpdateQueuesInputAdapter
+ public UpdateQueuesInputAdapter()
{
- private readonly IRetryQueueItemStatusDtoAdapter retryQueueItemStatusDtoAdapter;
-
- public UpdateQueuesInputAdapter()
- {
- this.retryQueueItemStatusDtoAdapter = new RetryQueueItemStatusDtoAdapter();
- }
+ _retryQueueItemStatusDtoAdapter = new RetryQueueItemStatusDtoAdapter();
+ }
- public UpdateQueuesInput Adapt(UpdateQueuesRequestDto requestDto)
- {
- Guard.Argument(requestDto, nameof(requestDto)).NotNull();
+ public UpdateQueuesInput Adapt(UpdateQueuesRequestDto requestDto)
+ {
+ Guard.Argument(requestDto, nameof(requestDto)).NotNull();
- return new UpdateQueuesInput(
- requestDto.QueueGroupKeys,
- this.retryQueueItemStatusDtoAdapter.Adapt(requestDto.ItemStatus)
- );
- }
+ return new UpdateQueuesInput(
+ requestDto.QueueGroupKeys,
+ _retryQueueItemStatusDtoAdapter.Adapt(requestDto.ItemStatus)
+ );
}
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/UpdateQueuesResponseDtoAdapter.cs b/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/UpdateQueuesResponseDtoAdapter.cs
index 87a41a5e..211d765d 100644
--- a/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/UpdateQueuesResponseDtoAdapter.cs
+++ b/src/KafkaFlow.Retry.API/Adapters/UpdateQueues/UpdateQueuesResponseDtoAdapter.cs
@@ -1,23 +1,23 @@
-namespace KafkaFlow.Retry.API.Adapters.UpdateQueues
-{
- using Dawn;
- using KafkaFlow.Retry.API.Dtos;
- using KafkaFlow.Retry.Durable.Repository.Actions.Update;
+using Dawn;
+using KafkaFlow.Retry.API.Dtos;
+using KafkaFlow.Retry.Durable.Repository.Actions.Update;
- internal class UpdateQueuesResponseDtoAdapter : IUpdateQueuesResponseDtoAdapter
- {
- public UpdateQueuesResponseDto Adapt(UpdateQueuesResult updateQueuesResult)
- {
- Guard.Argument(updateQueuesResult, nameof(updateQueuesResult)).NotNull();
+namespace KafkaFlow.Retry.API.Adapters.UpdateQueues;
- var resultDto = new UpdateQueuesResponseDto();
+internal class UpdateQueuesResponseDtoAdapter : IUpdateQueuesResponseDtoAdapter
+{
+ public UpdateQueuesResponseDto Adapt(UpdateQueuesResult updateQueuesResult)
+ {
+ Guard.Argument(updateQueuesResult, nameof(updateQueuesResult)).NotNull();
- foreach (var res in updateQueuesResult.Results)
- {
- resultDto.UpdateQueuesResults.Add(new UpdateQueueResultDto(res.QueueGroupKey, res.Status, res.RetryQueueStatus));
- }
+ var resultDto = new UpdateQueuesResponseDto();
- return resultDto;
+ foreach (var res in updateQueuesResult.Results)
+ {
+ resultDto.UpdateQueuesResults.Add(new UpdateQueueResultDto(res.QueueGroupKey, res.Status,
+ res.RetryQueueStatus));
}
+
+ return resultDto;
}
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/AppBuilderExtensions.cs b/src/KafkaFlow.Retry.API/AppBuilderExtensions.cs
index 3c186e78..b892d3b2 100644
--- a/src/KafkaFlow.Retry.API/AppBuilderExtensions.cs
+++ b/src/KafkaFlow.Retry.API/AppBuilderExtensions.cs
@@ -1,73 +1,70 @@
-namespace KafkaFlow.Retry.API
-{
- using KafkaFlow.Retry.API.Adapters.GetItems;
- using KafkaFlow.Retry.API.Adapters.UpdateItems;
- using KafkaFlow.Retry.API.Adapters.UpdateQueues;
- using KafkaFlow.Retry.API.Handlers;
- using KafkaFlow.Retry.Durable.Repository;
- using Microsoft.AspNetCore.Builder;
+using KafkaFlow.Retry.API.Adapters.GetItems;
+using KafkaFlow.Retry.API.Adapters.UpdateItems;
+using KafkaFlow.Retry.API.Adapters.UpdateQueues;
+using KafkaFlow.Retry.API.Handlers;
+using KafkaFlow.Retry.Durable.Repository;
+using Microsoft.AspNetCore.Builder;
- public static class AppBuilderExtensions
- {
+namespace KafkaFlow.Retry.API;
- public static IApplicationBuilder UseKafkaFlowRetryEndpoints(
- this IApplicationBuilder appBuilder,
- string endpointPrefix)
- {
- var retryDurableQueueRepositoryProvider =
- appBuilder
- .ApplicationServices
- .GetService(typeof(IRetryDurableQueueRepositoryProvider)) as IRetryDurableQueueRepositoryProvider;
+public static class AppBuilderExtensions
+{
+ public static IApplicationBuilder UseKafkaFlowRetryEndpoints(
+ this IApplicationBuilder appBuilder,
+ string endpointPrefix)
+ {
+ var retryDurableQueueRepositoryProvider =
+ appBuilder
+ .ApplicationServices
+ .GetService(typeof(IRetryDurableQueueRepositoryProvider)) as IRetryDurableQueueRepositoryProvider;
- appBuilder.UseRetryEndpoints(retryDurableQueueRepositoryProvider, endpointPrefix);
+ appBuilder.UseRetryEndpoints(retryDurableQueueRepositoryProvider, endpointPrefix);
- return appBuilder;
- }
+ return appBuilder;
+ }
- public static IApplicationBuilder UseKafkaFlowRetryEndpoints(
+ public static IApplicationBuilder UseKafkaFlowRetryEndpoints(
this IApplicationBuilder appBuilder)
- {
- return appBuilder.UseKafkaFlowRetryEndpoints(string.Empty);
- }
-
- public static IApplicationBuilder UseRetryEndpoints(
- this IApplicationBuilder appBuilder,
- IRetryDurableQueueRepositoryProvider retryDurableQueueRepositoryProvider,
- string endpointPrefix
- )
- {
- appBuilder.UseMiddleware(
- new GetItemsHandler(
- retryDurableQueueRepositoryProvider,
- new GetItemsRequestDtoReader(),
- new GetItemsInputAdapter(),
- new GetItemsResponseDtoAdapter(),
- endpointPrefix));
+ {
+ return appBuilder.UseKafkaFlowRetryEndpoints(string.Empty);
+ }
- appBuilder.UseMiddleware(
- new PatchItemsHandler(
- retryDurableQueueRepositoryProvider,
- new UpdateItemsInputAdapter(),
- new UpdateItemsResponseDtoAdapter(),
- endpointPrefix));
+ public static IApplicationBuilder UseRetryEndpoints(
+ this IApplicationBuilder appBuilder,
+ IRetryDurableQueueRepositoryProvider retryDurableQueueRepositoryProvider,
+ string endpointPrefix
+ )
+ {
+ appBuilder.UseMiddleware(
+ new GetItemsHandler(
+ retryDurableQueueRepositoryProvider,
+ new GetItemsRequestDtoReader(),
+ new GetItemsInputAdapter(),
+ new GetItemsResponseDtoAdapter(),
+ endpointPrefix));
- appBuilder.UseMiddleware(
- new PatchQueuesHandler(
- retryDurableQueueRepositoryProvider,
- new UpdateQueuesInputAdapter(),
- new UpdateQueuesResponseDtoAdapter(),
- endpointPrefix));
+ appBuilder.UseMiddleware(
+ new PatchItemsHandler(
+ retryDurableQueueRepositoryProvider,
+ new UpdateItemsInputAdapter(),
+ new UpdateItemsResponseDtoAdapter(),
+ endpointPrefix));
- return appBuilder;
- }
+ appBuilder.UseMiddleware(
+ new PatchQueuesHandler(
+ retryDurableQueueRepositoryProvider,
+ new UpdateQueuesInputAdapter(),
+ new UpdateQueuesResponseDtoAdapter(),
+ endpointPrefix));
- public static IApplicationBuilder UseRetryEndpoints(
- this IApplicationBuilder appBuilder,
- IRetryDurableQueueRepositoryProvider retryDurableQueueRepositoryProvider
- )
- {
- return appBuilder.UseRetryEndpoints(retryDurableQueueRepositoryProvider, string.Empty);
- }
+ return appBuilder;
+ }
+ public static IApplicationBuilder UseRetryEndpoints(
+ this IApplicationBuilder appBuilder,
+ IRetryDurableQueueRepositoryProvider retryDurableQueueRepositoryProvider
+ )
+ {
+ return appBuilder.UseRetryEndpoints(retryDurableQueueRepositoryProvider, string.Empty);
}
}
\ No newline at end of file
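These extensions are what the hosting application calls to expose the retry HTTP endpoints. A minimal wiring sketch (the prefix value is illustrative; the parameterless overload mounts the endpoints without a prefix):

    // Inside the host's pipeline configuration, e.g. Startup.Configure.
    public void Configure(IApplicationBuilder app)
    {
        app.UseKafkaFlowRetryEndpoints("/kafka-flow-retry");
    }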
diff --git a/src/KafkaFlow.Retry.API/Dtos/Common/RetryQueueItemDto.cs b/src/KafkaFlow.Retry.API/Dtos/Common/RetryQueueItemDto.cs
index dc684158..7134a838 100644
--- a/src/KafkaFlow.Retry.API/Dtos/Common/RetryQueueItemDto.cs
+++ b/src/KafkaFlow.Retry.API/Dtos/Common/RetryQueueItemDto.cs
@@ -1,29 +1,28 @@
-namespace KafkaFlow.Retry.API.Dtos.Common
-{
- using System;
- using KafkaFlow.Retry.Durable.Common;
- using KafkaFlow.Retry.Durable.Repository.Model;
+using System;
+using KafkaFlow.Retry.Durable.Common;
+using KafkaFlow.Retry.Durable.Repository.Model;
+
+namespace KafkaFlow.Retry.API.Dtos.Common;
- public class RetryQueueItemDto
- {
- public int AttemptsCount { get; set; }
+public class RetryQueueItemDto
+{
+ public int AttemptsCount { get; set; }
- public DateTime CreationDate { get; set; }
+ public DateTime CreationDate { get; set; }
- public string Description { get; set; }
+ public string Description { get; set; }
- public Guid Id { get; set; }
+ public Guid Id { get; set; }
- public DateTime? LastExecution { get; set; }
+ public DateTime? LastExecution { get; set; }
- public RetryQueuetItemMessageInfoDto MessageInfo { get; set; }
+ public RetryQueuetItemMessageInfoDto MessageInfo { get; set; }
- public string QueueGroupKey { get; set; }
+ public string QueueGroupKey { get; set; }
- public SeverityLevel SeverityLevel { get; set; }
+ public SeverityLevel SeverityLevel { get; set; }
- public int Sort { get; set; }
+ public int Sort { get; set; }
- public RetryQueueItemStatus Status { get; set; }
- }
+ public RetryQueueItemStatus Status { get; set; }
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Dtos/Common/RetryQueueItemStatusDto.cs b/src/KafkaFlow.Retry.API/Dtos/Common/RetryQueueItemStatusDto.cs
index 3fd531cd..508fc61c 100644
--- a/src/KafkaFlow.Retry.API/Dtos/Common/RetryQueueItemStatusDto.cs
+++ b/src/KafkaFlow.Retry.API/Dtos/Common/RetryQueueItemStatusDto.cs
@@ -1,11 +1,10 @@
-namespace KafkaFlow.Retry.API.Dtos.Common
+namespace KafkaFlow.Retry.API.Dtos.Common;
+
+public enum RetryQueueItemStatusDto
{
- public enum RetryQueueItemStatusDto
- {
- None = 0,
- Waiting = 1,
- InRetry = 2,
- Done = 3,
- Cancelled = 4
- }
-}
+ None = 0,
+ Waiting = 1,
+ InRetry = 2,
+ Done = 3,
+ Cancelled = 4
+}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Dtos/Common/RetryQueuetItemMessageInfoDto.cs b/src/KafkaFlow.Retry.API/Dtos/Common/RetryQueuetItemMessageInfoDto.cs
index d6dab0a0..7af27fca 100644
--- a/src/KafkaFlow.Retry.API/Dtos/Common/RetryQueuetItemMessageInfoDto.cs
+++ b/src/KafkaFlow.Retry.API/Dtos/Common/RetryQueuetItemMessageInfoDto.cs
@@ -1,17 +1,16 @@
-namespace KafkaFlow.Retry.API.Dtos.Common
-{
- using System;
+using System;
+
+namespace KafkaFlow.Retry.API.Dtos.Common;
- public class RetryQueuetItemMessageInfoDto
- {
- public byte[] Key { get; set; }
+public class RetryQueuetItemMessageInfoDto
+{
+ public byte[] Key { get; set; }
- public long Offset { get; set; }
+ public long Offset { get; set; }
- public int Partition { get; set; }
+ public int Partition { get; set; }
- public string Topic { get; set; }
+ public string Topic { get; set; }
- public DateTimeOffset UtcTimeStamp { get; set; }
- }
-}
+ public DateTimeOffset UtcTimeStamp { get; set; }
+}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Dtos/GetItemsRequestDto.cs b/src/KafkaFlow.Retry.API/Dtos/GetItemsRequestDto.cs
index 3566bbe0..227e4c9a 100644
--- a/src/KafkaFlow.Retry.API/Dtos/GetItemsRequestDto.cs
+++ b/src/KafkaFlow.Retry.API/Dtos/GetItemsRequestDto.cs
@@ -1,14 +1,13 @@
-namespace KafkaFlow.Retry.API.Dtos
-{
- using System.Collections.Generic;
- using KafkaFlow.Retry.Durable.Common;
- using KafkaFlow.Retry.Durable.Repository.Model;
+using System.Collections.Generic;
+using KafkaFlow.Retry.Durable.Common;
+using KafkaFlow.Retry.Durable.Repository.Model;
+
+namespace KafkaFlow.Retry.API.Dtos;
- public class GetItemsRequestDto
- {
- public IEnumerable<RetryQueueItemStatus> ItemsStatuses { get; set; }
- public IEnumerable<SeverityLevel> SeverityLevels { get; set; }
- public int TopItemsByQueue { get; set; }
- public int TopQueues { get; set; }
- }
+public class GetItemsRequestDto
+{
+ public IEnumerable<RetryQueueItemStatus> ItemsStatuses { get; set; }
+ public IEnumerable<SeverityLevel> SeverityLevels { get; set; }
+ public int TopItemsByQueue { get; set; }
+ public int TopQueues { get; set; }
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Dtos/GetItemsResponseDto.cs b/src/KafkaFlow.Retry.API/Dtos/GetItemsResponseDto.cs
index 61a0a26b..b6f2ed0c 100644
--- a/src/KafkaFlow.Retry.API/Dtos/GetItemsResponseDto.cs
+++ b/src/KafkaFlow.Retry.API/Dtos/GetItemsResponseDto.cs
@@ -1,15 +1,14 @@
-namespace KafkaFlow.Retry.API.Dtos
-{
- using System.Collections.Generic;
- using KafkaFlow.Retry.API.Dtos.Common;
+using System.Collections.Generic;
+using KafkaFlow.Retry.API.Dtos.Common;
- public class GetItemsResponseDto
- {
- public GetItemsResponseDto(IEnumerable<RetryQueueItemDto> queueItemDtos)
- {
- this.QueueItems = queueItemDtos;
- }
+namespace KafkaFlow.Retry.API.Dtos;
- public IEnumerable<RetryQueueItemDto> QueueItems { get; set; }
+public class GetItemsResponseDto
+{
+ public GetItemsResponseDto(IEnumerable<RetryQueueItemDto> queueItemDtos)
+ {
+ QueueItems = queueItemDtos;
}
-}
+
+ public IEnumerable<RetryQueueItemDto> QueueItems { get; set; }
+}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Dtos/UpdateItemResultDto.cs b/src/KafkaFlow.Retry.API/Dtos/UpdateItemResultDto.cs
index ec5ac699..fc4120b8 100644
--- a/src/KafkaFlow.Retry.API/Dtos/UpdateItemResultDto.cs
+++ b/src/KafkaFlow.Retry.API/Dtos/UpdateItemResultDto.cs
@@ -1,18 +1,17 @@
-namespace KafkaFlow.Retry.API.Dtos
-{
- using System;
- using KafkaFlow.Retry.Durable.Repository.Actions.Update;
+using System;
+using KafkaFlow.Retry.Durable.Repository.Actions.Update;
+
+namespace KafkaFlow.Retry.API.Dtos;
- public class UpdateItemResultDto
+public class UpdateItemResultDto
+{
+ public UpdateItemResultDto(Guid itemId, UpdateItemResultStatus value)
{
- public UpdateItemResultDto(Guid itemId, UpdateItemResultStatus value)
- {
- this.ItemId = itemId;
- this.Result = value.ToString();
- }
+ ItemId = itemId;
+ Result = value.ToString();
+ }
- public Guid ItemId { get; set; }
+ public Guid ItemId { get; set; }
- public string Result { get; set; }
- }
+ public string Result { get; set; }
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Dtos/UpdateItemsRequestDto.cs b/src/KafkaFlow.Retry.API/Dtos/UpdateItemsRequestDto.cs
index ee0960e3..17d26341 100644
--- a/src/KafkaFlow.Retry.API/Dtos/UpdateItemsRequestDto.cs
+++ b/src/KafkaFlow.Retry.API/Dtos/UpdateItemsRequestDto.cs
@@ -1,13 +1,12 @@
-namespace KafkaFlow.Retry.API.Dtos
-{
- using System;
- using System.Collections.Generic;
- using KafkaFlow.Retry.API.Dtos.Common;
+using System;
+using System.Collections.Generic;
+using KafkaFlow.Retry.API.Dtos.Common;
+
+namespace KafkaFlow.Retry.API.Dtos;
- public class UpdateItemsRequestDto
- {
- public IEnumerable<Guid> ItemIds { get; set; }
+public class UpdateItemsRequestDto
+{
+ public IEnumerable<Guid> ItemIds { get; set; }
- public RetryQueueItemStatusDto Status { get; set; }
- }
-}
+ public RetryQueueItemStatusDto Status { get; set; }
+}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Dtos/UpdateItemsResponseDto.cs b/src/KafkaFlow.Retry.API/Dtos/UpdateItemsResponseDto.cs
index 8ae6ff1f..c325feea 100644
--- a/src/KafkaFlow.Retry.API/Dtos/UpdateItemsResponseDto.cs
+++ b/src/KafkaFlow.Retry.API/Dtos/UpdateItemsResponseDto.cs
@@ -1,14 +1,13 @@
-namespace KafkaFlow.Retry.API.Dtos
-{
- using System.Collections.Generic;
+using System.Collections.Generic;
- public class UpdateItemsResponseDto
- {
- public UpdateItemsResponseDto()
- {
- this.UpdateItemsResults = new List<UpdateItemResultDto>();
- }
+namespace KafkaFlow.Retry.API.Dtos;
- public IList<UpdateItemResultDto> UpdateItemsResults { get; set; }
+public class UpdateItemsResponseDto
+{
+ public UpdateItemsResponseDto()
+ {
+ UpdateItemsResults = new List<UpdateItemResultDto>();
}
-}
+
+ public IList<UpdateItemResultDto> UpdateItemsResults { get; set; }
+}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Dtos/UpdateQueueResultDto.cs b/src/KafkaFlow.Retry.API/Dtos/UpdateQueueResultDto.cs
index 3a805454..e8b0c197 100644
--- a/src/KafkaFlow.Retry.API/Dtos/UpdateQueueResultDto.cs
+++ b/src/KafkaFlow.Retry.API/Dtos/UpdateQueueResultDto.cs
@@ -1,21 +1,20 @@
-namespace KafkaFlow.Retry.API.Dtos
-{
- using KafkaFlow.Retry.Durable.Repository.Actions.Update;
- using KafkaFlow.Retry.Durable.Repository.Model;
+using KafkaFlow.Retry.Durable.Repository.Actions.Update;
+using KafkaFlow.Retry.Durable.Repository.Model;
+
+namespace KafkaFlow.Retry.API.Dtos;
- public class UpdateQueueResultDto
+public class UpdateQueueResultDto
+{
+ public UpdateQueueResultDto(string queueGroupKey, UpdateQueueResultStatus status, RetryQueueStatus retryQueueStatus)
{
- public UpdateQueueResultDto(string queueGroupKey, UpdateQueueResultStatus status, RetryQueueStatus retryQueueStatus)
- {
- this.QueueGroupKey = queueGroupKey;
- this.Result = status.ToString();
- this.QueueStatus = retryQueueStatus.ToString();
- }
+ QueueGroupKey = queueGroupKey;
+ Result = status.ToString();
+ QueueStatus = retryQueueStatus.ToString();
+ }
- public string QueueGroupKey { get; set; }
+ public string QueueGroupKey { get; set; }
- public string QueueStatus { get; set; }
+ public string QueueStatus { get; set; }
- public string Result { get; set; }
- }
+ public string Result { get; set; }
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Dtos/UpdateQueuesRequestDto.cs b/src/KafkaFlow.Retry.API/Dtos/UpdateQueuesRequestDto.cs
index f3965fdb..a6495ed8 100644
--- a/src/KafkaFlow.Retry.API/Dtos/UpdateQueuesRequestDto.cs
+++ b/src/KafkaFlow.Retry.API/Dtos/UpdateQueuesRequestDto.cs
@@ -1,12 +1,11 @@
-namespace KafkaFlow.Retry.API.Dtos
-{
- using System.Collections.Generic;
- using KafkaFlow.Retry.API.Dtos.Common;
+using System.Collections.Generic;
+using KafkaFlow.Retry.API.Dtos.Common;
+
+namespace KafkaFlow.Retry.API.Dtos;
- public class UpdateQueuesRequestDto
- {
- public RetryQueueItemStatusDto ItemStatus { get; set; }
+public class UpdateQueuesRequestDto
+{
+ public RetryQueueItemStatusDto ItemStatus { get; set; }
- public IEnumerable<string> QueueGroupKeys { get; set; }
- }
-}
+ public IEnumerable<string> QueueGroupKeys { get; set; }
+}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Dtos/UpdateQueuesResponseDto.cs b/src/KafkaFlow.Retry.API/Dtos/UpdateQueuesResponseDto.cs
index 12401ca2..3d1a13f1 100644
--- a/src/KafkaFlow.Retry.API/Dtos/UpdateQueuesResponseDto.cs
+++ b/src/KafkaFlow.Retry.API/Dtos/UpdateQueuesResponseDto.cs
@@ -1,14 +1,13 @@
-namespace KafkaFlow.Retry.API.Dtos
-{
- using System.Collections.Generic;
+using System.Collections.Generic;
- public class UpdateQueuesResponseDto
- {
- public UpdateQueuesResponseDto()
- {
- this.UpdateQueuesResults = new List<UpdateQueueResultDto>();
- }
+namespace KafkaFlow.Retry.API.Dtos;
- public IList<UpdateQueueResultDto> UpdateQueuesResults { get; set; }
+public class UpdateQueuesResponseDto
+{
+ public UpdateQueuesResponseDto()
+ {
+ UpdateQueuesResults = new List<UpdateQueueResultDto>();
}
-}
+
+ public IList<UpdateQueueResultDto> UpdateQueuesResults { get; set; }
+}
\ No newline at end of file
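
For orientation between the DTO and handler changes, a minimal illustrative sketch (not part of the diff itself) of how the two PATCH request DTOs above would be populated, assuming the Guid item ids and string queue group keys implied by RetryQueueItemDto and UpdateQueueResultDto:

```csharp
using System;
using KafkaFlow.Retry.API.Dtos;
using KafkaFlow.Retry.API.Dtos.Common;

// Cancel two specific retry items (real ids would come from a previous GET items call).
var patchItemsBody = new UpdateItemsRequestDto
{
    ItemIds = new[] { Guid.NewGuid(), Guid.NewGuid() },
    Status = RetryQueueItemStatusDto.Cancelled
};

// Cancel every item in the queues identified by these (hypothetical) group keys.
var patchQueuesBody = new UpdateQueuesRequestDto
{
    QueueGroupKeys = new[] { "sample-queue-group-key" },
    ItemStatus = RetryQueueItemStatusDto.Cancelled
};
```
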
diff --git a/src/KafkaFlow.Retry.API/Handlers/GetItemsHandler.cs b/src/KafkaFlow.Retry.API/Handlers/GetItemsHandler.cs
index cd0e854b..2a08e7d7 100644
--- a/src/KafkaFlow.Retry.API/Handlers/GetItemsHandler.cs
+++ b/src/KafkaFlow.Retry.API/Handlers/GetItemsHandler.cs
@@ -1,57 +1,57 @@
-namespace KafkaFlow.Retry.API.Handlers
+using System;
+using System.Net;
+using System.Threading.Tasks;
+using Dawn;
+using KafkaFlow.Retry.API.Adapters.GetItems;
+using KafkaFlow.Retry.Durable.Repository;
+using Microsoft.AspNetCore.Http;
+
+namespace KafkaFlow.Retry.API.Handlers;
+
+internal class GetItemsHandler : RetryRequestHandlerBase
{
- using System.Net;
- using System.Threading.Tasks;
- using Dawn;
- using KafkaFlow.Retry.API.Adapters.GetItems;
- using KafkaFlow.Retry.Durable.Repository;
- using Microsoft.AspNetCore.Http;
-
- internal class GetItemsHandler : RetryRequestHandlerBase
+ private readonly IGetItemsInputAdapter _getItemsInputAdapter;
+ private readonly IGetItemsRequestDtoReader _getItemsRequestDtoReader;
+ private readonly IGetItemsResponseDtoAdapter _getItemsResponseDtoAdapter;
+ private readonly IRetryDurableQueueRepositoryProvider _retryDurableQueueRepositoryProvider;
+
+ public GetItemsHandler(
+ IRetryDurableQueueRepositoryProvider retryDurableQueueRepositoryProvider,
+ IGetItemsRequestDtoReader getItemsRequestDtoReader,
+ IGetItemsInputAdapter getItemsInputAdapter,
+ IGetItemsResponseDtoAdapter getItemsResponseDtoAdapter,
+ string endpointPrefix) : base(endpointPrefix, "items")
{
- private readonly IGetItemsInputAdapter getItemsInputAdapter;
- private readonly IGetItemsRequestDtoReader getItemsRequestDtoReader;
- private readonly IGetItemsResponseDtoAdapter getItemsResponseDtoAdapter;
- private readonly IRetryDurableQueueRepositoryProvider retryDurableQueueRepositoryProvider;
-
- public GetItemsHandler(
- IRetryDurableQueueRepositoryProvider retryDurableQueueRepositoryProvider,
- IGetItemsRequestDtoReader getItemsRequestDtoReader,
- IGetItemsInputAdapter getItemsInputAdapter,
- IGetItemsResponseDtoAdapter getItemsResponseDtoAdapter,
- string endpointPrefix) : base(endpointPrefix, "items")
- {
- Guard.Argument(retryDurableQueueRepositoryProvider, nameof(retryDurableQueueRepositoryProvider)).NotNull();
- Guard.Argument(getItemsRequestDtoReader, nameof(getItemsRequestDtoReader)).NotNull();
- Guard.Argument(getItemsInputAdapter, nameof(getItemsInputAdapter)).NotNull();
- Guard.Argument(getItemsResponseDtoAdapter, nameof(getItemsResponseDtoAdapter)).NotNull();
-
- this.getItemsInputAdapter = getItemsInputAdapter;
- this.retryDurableQueueRepositoryProvider = retryDurableQueueRepositoryProvider;
- this.getItemsRequestDtoReader = getItemsRequestDtoReader;
- this.getItemsResponseDtoAdapter = getItemsResponseDtoAdapter;
- }
+ Guard.Argument(retryDurableQueueRepositoryProvider, nameof(retryDurableQueueRepositoryProvider)).NotNull();
+ Guard.Argument(getItemsRequestDtoReader, nameof(getItemsRequestDtoReader)).NotNull();
+ Guard.Argument(getItemsInputAdapter, nameof(getItemsInputAdapter)).NotNull();
+ Guard.Argument(getItemsResponseDtoAdapter, nameof(getItemsResponseDtoAdapter)).NotNull();
+
+ _getItemsInputAdapter = getItemsInputAdapter;
+ _retryDurableQueueRepositoryProvider = retryDurableQueueRepositoryProvider;
+ _getItemsRequestDtoReader = getItemsRequestDtoReader;
+ _getItemsResponseDtoAdapter = getItemsResponseDtoAdapter;
+ }
- protected override HttpMethod HttpMethod => HttpMethod.GET;
+ protected override HttpMethod HttpMethod => HttpMethod.GET;
- protected override async Task HandleRequestAsync(HttpRequest request, HttpResponse response)
+ protected override async Task HandleRequestAsync(HttpRequest request, HttpResponse response)
+ {
+ try
{
- try
- {
- var requestDto = this.getItemsRequestDtoReader.Read(request);
+ var requestDto = _getItemsRequestDtoReader.Read(request);
- var input = this.getItemsInputAdapter.Adapt(requestDto);
+ var input = _getItemsInputAdapter.Adapt(requestDto);
- var result = await this.retryDurableQueueRepositoryProvider.GetQueuesAsync(input).ConfigureAwait(false);
+ var result = await _retryDurableQueueRepositoryProvider.GetQueuesAsync(input).ConfigureAwait(false);
- var responseDto = this.getItemsResponseDtoAdapter.Adapt(result);
+ var responseDto = _getItemsResponseDtoAdapter.Adapt(result);
- await this.WriteResponseAsync(response, responseDto, (int)HttpStatusCode.OK).ConfigureAwait(false);
- }
- catch (System.Exception ex)
- {
- await this.WriteResponseAsync(response, ex, (int)HttpStatusCode.InternalServerError).ConfigureAwait(false);
- }
+ await WriteResponseAsync(response, responseDto, (int)HttpStatusCode.OK).ConfigureAwait(false);
+ }
+ catch (Exception ex)
+ {
+ await WriteResponseAsync(response, ex, (int)HttpStatusCode.InternalServerError).ConfigureAwait(false);
}
}
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/Handlers/PatchItemsHandler.cs b/src/KafkaFlow.Retry.API/Handlers/PatchItemsHandler.cs
index 008fc5c8..d5e00226 100644
--- a/src/KafkaFlow.Retry.API/Handlers/PatchItemsHandler.cs
+++ b/src/KafkaFlow.Retry.API/Handlers/PatchItemsHandler.cs
@@ -1,68 +1,67 @@
-namespace KafkaFlow.Retry.API.Handlers
+using System;
+using System.Net;
+using System.Threading.Tasks;
+using KafkaFlow.Retry.API.Adapters.UpdateItems;
+using KafkaFlow.Retry.API.Dtos;
+using KafkaFlow.Retry.Durable.Repository;
+using Microsoft.AspNetCore.Http;
+using Newtonsoft.Json;
+
+namespace KafkaFlow.Retry.API.Handlers;
+
+internal class PatchItemsHandler : RetryRequestHandlerBase
{
- using System;
- using System.Net;
- using System.Threading.Tasks;
- using KafkaFlow.Retry.API.Adapters.UpdateItems;
- using KafkaFlow.Retry.API.Dtos;
- using KafkaFlow.Retry.Durable.Repository;
- using Microsoft.AspNetCore.Http;
- using Newtonsoft.Json;
+ private readonly IRetryDurableQueueRepositoryProvider _retryDurableQueueRepositoryProvider;
+ private readonly IUpdateItemsInputAdapter _updateItemsInputAdapter;
+ private readonly IUpdateItemsResponseDtoAdapter _updateItemsResponseDtoAdapter;
- internal class PatchItemsHandler : RetryRequestHandlerBase
+ public PatchItemsHandler(
+ IRetryDurableQueueRepositoryProvider retryDurableQueueRepositoryProvider,
+ IUpdateItemsInputAdapter updateItemsInputAdapter,
+ IUpdateItemsResponseDtoAdapter updateItemsResponseDtoAdapter,
+ string endpointPrefix) : base(endpointPrefix, "items")
{
- private readonly IRetryDurableQueueRepositoryProvider retryDurableQueueRepositoryProvider;
- private readonly IUpdateItemsInputAdapter updateItemsInputAdapter;
- private readonly IUpdateItemsResponseDtoAdapter updateItemsResponseDtoAdapter;
+ _retryDurableQueueRepositoryProvider = retryDurableQueueRepositoryProvider;
+ _updateItemsInputAdapter = updateItemsInputAdapter;
+ _updateItemsResponseDtoAdapter = updateItemsResponseDtoAdapter;
+ }
- public PatchItemsHandler(
- IRetryDurableQueueRepositoryProvider retryDurableQueueRepositoryProvider,
- IUpdateItemsInputAdapter updateItemsInputAdapter,
- IUpdateItemsResponseDtoAdapter updateItemsResponseDtoAdapter,
- string endpointPrefix) : base(endpointPrefix, "items")
- {
- this.retryDurableQueueRepositoryProvider = retryDurableQueueRepositoryProvider;
- this.updateItemsInputAdapter = updateItemsInputAdapter;
- this.updateItemsResponseDtoAdapter = updateItemsResponseDtoAdapter;
- }
+ protected override HttpMethod HttpMethod => HttpMethod.PATCH;
- protected override HttpMethod HttpMethod => HttpMethod.PATCH;
+ protected override async Task HandleRequestAsync(HttpRequest request, HttpResponse response)
+ {
+ UpdateItemsRequestDto requestDto;
- protected override async Task HandleRequestAsync(HttpRequest request, HttpResponse response)
+ try
{
- UpdateItemsRequestDto requestDto;
-
- try
- {
- requestDto = await this.ReadRequestDtoAsync<UpdateItemsRequestDto>(request).ConfigureAwait(false);
- }
- catch (JsonSerializationException ex)
- {
- await this.WriteResponseAsync(response, ex, (int)HttpStatusCode.BadRequest).ConfigureAwait(false);
+ requestDto = await ReadRequestDtoAsync<UpdateItemsRequestDto>(request).ConfigureAwait(false);
+ }
+ catch (JsonSerializationException ex)
+ {
+ await WriteResponseAsync(response, ex, (int)HttpStatusCode.BadRequest).ConfigureAwait(false);
- return;
- }
- catch (Exception ex)
- {
- await this.WriteResponseAsync(response, ex, (int)HttpStatusCode.InternalServerError).ConfigureAwait(false);
+ return;
+ }
+ catch (Exception ex)
+ {
+ await WriteResponseAsync(response, ex, (int)HttpStatusCode.InternalServerError).ConfigureAwait(false);
- return;
- }
+ return;
+ }
- try
- {
- var input = this.updateItemsInputAdapter.Adapt(requestDto);
+ try
+ {
+ var input = _updateItemsInputAdapter.Adapt(requestDto);
- var result = await this.retryDurableQueueRepositoryProvider.UpdateItemsAsync(input).ConfigureAwait(false);
+ var result = await _retryDurableQueueRepositoryProvider.UpdateItemsAsync(input).ConfigureAwait(false);
- var responseDto = this.updateItemsResponseDtoAdapter.Adapt(result);
+ var responseDto = _updateItemsResponseDtoAdapter.Adapt(result);
- await this.WriteResponseAsync(response, responseDto, (int)HttpStatusCode.OK).ConfigureAwait(false);
- }
- catch (Exception ex)
- {
- await this.WriteResponseAsync(response, ex, (int)HttpStatusCode.InternalServerError).ConfigureAwait(false);
- }
+ await WriteResponseAsync(response, responseDto, (int)HttpStatusCode.OK).ConfigureAwait(false);
+ }
+ catch (Exception ex)
+ {
+ await WriteResponseAsync(response, ex, (int)HttpStatusCode.InternalServerError).ConfigureAwait(false);
}
}
}
\ No newline at end of file
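
Since PatchItemsHandler only wires the adapters to the repository provider, its observable contract is easiest to see from the caller's side. A hedged sketch of invoking the endpoint (host, port, and empty endpoint prefix are assumptions; the route shape and serializer settings come from RetryRequestHandlerBase later in this diff):

```csharp
using System;
using System.Net.Http;
using System.Text;

// With an empty endpoint prefix the handler answers PATCH /retry/items.
var client = new HttpClient { BaseAddress = new Uri("http://localhost:5000") };

// The handler uses Newtonsoft defaults for naming, so PascalCase property names are
// expected; the status enum can be sent by name or by number. The id is a placeholder.
var body = @"{ ""ItemIds"": [""0f8fad5b-d9cb-469f-a165-70867728950e""], ""Status"": ""Cancelled"" }";

var response = await client.PatchAsync(
    "/retry/items",
    new StringContent(body, Encoding.UTF8, "application/json"));

// 200 on success; serialization errors map to 400, any other failure to 500.
Console.WriteLine((int)response.StatusCode);
```
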
diff --git a/src/KafkaFlow.Retry.API/Handlers/PatchQueuesHandler.cs b/src/KafkaFlow.Retry.API/Handlers/PatchQueuesHandler.cs
index 2c9c6297..e55ef11b 100644
--- a/src/KafkaFlow.Retry.API/Handlers/PatchQueuesHandler.cs
+++ b/src/KafkaFlow.Retry.API/Handlers/PatchQueuesHandler.cs
@@ -1,69 +1,68 @@
-namespace KafkaFlow.Retry.API.Handlers
+using System;
+using System.Net;
+using System.Threading.Tasks;
+using KafkaFlow.Retry.API.Adapters.UpdateQueues;
+using KafkaFlow.Retry.API.Dtos;
+using KafkaFlow.Retry.Durable.Repository;
+using Microsoft.AspNetCore.Http;
+using Newtonsoft.Json;
+
+namespace KafkaFlow.Retry.API.Handlers;
+
+internal class PatchQueuesHandler : RetryRequestHandlerBase
{
- using System;
- using System.Net;
- using System.Threading.Tasks;
- using KafkaFlow.Retry.API.Adapters.UpdateQueues;
- using KafkaFlow.Retry.API.Dtos;
- using KafkaFlow.Retry.Durable.Repository;
- using Microsoft.AspNetCore.Http;
- using Newtonsoft.Json;
+ private readonly IRetryDurableQueueRepositoryProvider _retryDurableQueueRepositoryProvider;
+ private readonly IUpdateQueuesInputAdapter _updateQueuesInputAdapter;
+ private readonly IUpdateQueuesResponseDtoAdapter _updateQueuesResponseDtoAdapter;
- internal class PatchQueuesHandler : RetryRequestHandlerBase
+ public PatchQueuesHandler(
+ IRetryDurableQueueRepositoryProvider retryDurableQueueRepositoryProvider,
+ IUpdateQueuesInputAdapter updateQueuesInputAdapter,
+ IUpdateQueuesResponseDtoAdapter updateQueuesResponseDtoAdapter,
+ string endpointPrefix) : base(endpointPrefix, "queues")
{
- private readonly IRetryDurableQueueRepositoryProvider retryDurableQueueRepositoryProvider;
- private readonly IUpdateQueuesInputAdapter updateQueuesInputAdapter;
- private readonly IUpdateQueuesResponseDtoAdapter updateQueuesResponseDtoAdapter;
+ _retryDurableQueueRepositoryProvider = retryDurableQueueRepositoryProvider;
+ _updateQueuesInputAdapter = updateQueuesInputAdapter;
+ _updateQueuesResponseDtoAdapter = updateQueuesResponseDtoAdapter;
+ }
- public PatchQueuesHandler(
- IRetryDurableQueueRepositoryProvider retryDurableQueueRepositoryProvider,
- IUpdateQueuesInputAdapter updateQueuesInputAdapter,
- IUpdateQueuesResponseDtoAdapter updateQueuesResponseDtoAdapter,
- string endpointPrefix) : base(endpointPrefix, "queues")
- {
- this.retryDurableQueueRepositoryProvider = retryDurableQueueRepositoryProvider;
- this.updateQueuesInputAdapter = updateQueuesInputAdapter;
- this.updateQueuesResponseDtoAdapter = updateQueuesResponseDtoAdapter;
- }
+ protected override HttpMethod HttpMethod => HttpMethod.PATCH;
- protected override HttpMethod HttpMethod => HttpMethod.PATCH;
+ protected override async Task HandleRequestAsync(HttpRequest request, HttpResponse response)
+ {
+ UpdateQueuesRequestDto requestDto;
- protected override async Task HandleRequestAsync(HttpRequest request, HttpResponse response)
+ try
{
- UpdateQueuesRequestDto requestDto;
-
- try
- {
- requestDto = await this.ReadRequestDtoAsync<UpdateQueuesRequestDto>(request).ConfigureAwait(false);
- }
- catch (JsonSerializationException ex)
- {
- await this.WriteResponseAsync(response, ex, (int)HttpStatusCode.BadRequest).ConfigureAwait(false);
+ requestDto = await ReadRequestDtoAsync<UpdateQueuesRequestDto>(request).ConfigureAwait(false);
+ }
+ catch (JsonSerializationException ex)
+ {
+ await WriteResponseAsync(response, ex, (int)HttpStatusCode.BadRequest).ConfigureAwait(false);
- return;
- }
- catch (Exception ex)
- {
- await this.WriteResponseAsync(response, ex, (int)HttpStatusCode.InternalServerError).ConfigureAwait(false);
+ return;
+ }
+ catch (Exception ex)
+ {
+ await WriteResponseAsync(response, ex, (int)HttpStatusCode.InternalServerError).ConfigureAwait(false);
- return;
- }
+ return;
+ }
- try
- {
- var input = this.updateQueuesInputAdapter.Adapt(requestDto);
+ try
+ {
+ var input = _updateQueuesInputAdapter.Adapt(requestDto);
- var result = await this.retryDurableQueueRepositoryProvider.UpdateQueuesAsync(input).ConfigureAwait(false);
+ var result = await _retryDurableQueueRepositoryProvider.UpdateQueuesAsync(input).ConfigureAwait(false);
- var responseDto = this.updateQueuesResponseDtoAdapter.Adapt(result);
+ var responseDto = _updateQueuesResponseDtoAdapter.Adapt(result);
- await this.WriteResponseAsync(response, responseDto, (int)HttpStatusCode.OK).ConfigureAwait(false);
- }
- catch (Exception ex)
- {
- await this.WriteResponseAsync(response, ex, (int)HttpStatusCode.InternalServerError).ConfigureAwait(false);
- }
+ await WriteResponseAsync(response, responseDto, (int)HttpStatusCode.OK).ConfigureAwait(false);
+ }
+ catch (Exception ex)
+ {
+ await WriteResponseAsync(response, ex, (int)HttpStatusCode.InternalServerError).ConfigureAwait(false);
}
}
}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/HttpExtensions.cs b/src/KafkaFlow.Retry.API/HttpExtensions.cs
index b7b21edf..4f79f4c8 100644
--- a/src/KafkaFlow.Retry.API/HttpExtensions.cs
+++ b/src/KafkaFlow.Retry.API/HttpExtensions.cs
@@ -1,37 +1,35 @@
-namespace KafkaFlow.Retry.API
-{
- using System;
- using System.Collections.Generic;
- using Microsoft.AspNetCore.Http;
+using System.Collections.Generic;
+using Microsoft.AspNetCore.Http;
- internal static class HttpExtensions
- {
- private const char QueryStringDelimiter = ',';
+namespace KafkaFlow.Retry.API;
- private const char ResourcePathDelimiter = '/';
+internal static class HttpExtensions
+{
+ private const char QueryStringDelimiter = ',';
- public static void AddQueryParams(this HttpRequest httpRequest, string name, string value)
- {
- httpRequest.QueryString = httpRequest.QueryString.Add(name, value);
- }
+ private const char ResourcePathDelimiter = '/';
- public static string ExtendResourcePath(this string resource, string extension)
- {
- return String.Concat(resource, ResourcePathDelimiter, extension);
- }
+ public static void AddQueryParams(this HttpRequest httpRequest, string name, string value)
+ {
+ httpRequest.QueryString = httpRequest.QueryString.Add(name, value);
+ }
- public static IEnumerable<string> ReadQueryParams(this HttpRequest httpRequest, string paramKey)
- {
- var aggregatedParamValues = new List<string>();
+ public static string ExtendResourcePath(this string resource, string extension)
+ {
+ return string.Concat(resource, ResourcePathDelimiter, extension);
+ }
- var paramValues = httpRequest.Query[paramKey].ToArray();
+ public static IEnumerable<string> ReadQueryParams(this HttpRequest httpRequest, string paramKey)
+ {
+ var aggregatedParamValues = new List<string>();
- foreach (var value in paramValues)
- {
- aggregatedParamValues.AddRange(value.Split(QueryStringDelimiter));
- }
+ var paramValues = httpRequest.Query[paramKey].ToArray();
- return aggregatedParamValues;
+ foreach (var value in paramValues)
+ {
+ aggregatedParamValues.AddRange(value.Split(QueryStringDelimiter));
}
+
+ return aggregatedParamValues;
}
-}
+}
\ No newline at end of file
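
A short usage sketch for these (internal) helpers, written as if it ran inside the assembly or a test with InternalsVisibleTo; the query parameter name is only an example:

```csharp
using KafkaFlow.Retry.API;
using Microsoft.AspNetCore.Http;

var request = new DefaultHttpContext().Request;

// Values can arrive repeated and/or comma-separated; ReadQueryParams flattens both forms.
request.AddQueryParams("ItemsStatuses", "Waiting,InRetry");
request.AddQueryParams("ItemsStatuses", "Done");
var statuses = request.ReadQueryParams("ItemsStatuses"); // "Waiting", "InRetry", "Done"

// ExtendResourcePath joins segments with '/', which is how the handlers build their routes.
var route = "retry".ExtendResourcePath("items"); // "retry/items"
```
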
diff --git a/src/KafkaFlow.Retry.API/HttpMethod.cs b/src/KafkaFlow.Retry.API/HttpMethod.cs
index 05c64336..a2d7c4d0 100644
--- a/src/KafkaFlow.Retry.API/HttpMethod.cs
+++ b/src/KafkaFlow.Retry.API/HttpMethod.cs
@@ -1,12 +1,11 @@
-namespace KafkaFlow.Retry.API
+namespace KafkaFlow.Retry.API;
+
+internal enum HttpMethod
{
- internal enum HttpMethod
- {
- None = 0,
- GET = 1,
- POST = 2,
- PUT = 3,
- PATCH = 4,
- DELETE = 5
- }
-}
+ None = 0,
+ GET = 1,
+ POST = 2,
+ PUT = 3,
+ PATCH = 4,
+ DELETE = 5
+}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/IHttpRequestHandler.cs b/src/KafkaFlow.Retry.API/IHttpRequestHandler.cs
index 62eb7129..e1e0a867 100644
--- a/src/KafkaFlow.Retry.API/IHttpRequestHandler.cs
+++ b/src/KafkaFlow.Retry.API/IHttpRequestHandler.cs
@@ -1,10 +1,9 @@
-namespace KafkaFlow.Retry.API
-{
- using System.Threading.Tasks;
- using Microsoft.AspNetCore.Http;
+using System.Threading.Tasks;
+using Microsoft.AspNetCore.Http;
+
+namespace KafkaFlow.Retry.API;
- internal interface IHttpRequestHandler
- {
- Task<bool> HandleAsync(HttpRequest httpRequest, HttpResponse response);
- }
-}
+internal interface IHttpRequestHandler
+{
+ Task<bool> HandleAsync(HttpRequest httpRequest, HttpResponse response);
+}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.API/RetryMiddleware.cs b/src/KafkaFlow.Retry.API/RetryMiddleware.cs
index 3754a82a..8af54f07 100644
--- a/src/KafkaFlow.Retry.API/RetryMiddleware.cs
+++ b/src/KafkaFlow.Retry.API/RetryMiddleware.cs
@@ -1,30 +1,29 @@
-namespace KafkaFlow.Retry.API
+using System.Threading.Tasks;
+using Microsoft.AspNetCore.Http;
+
+namespace KafkaFlow.Retry.API;
+
+internal class RetryMiddleware
{
- using System.Threading.Tasks;
- using Microsoft.AspNetCore.Http;
+ private readonly IHttpRequestHandler _httpRequestHandler;
+ private readonly RequestDelegate _next;
- internal class RetryMiddleware
+ public RetryMiddleware(RequestDelegate next, IHttpRequestHandler httpRequestHandler)
{
- private readonly IHttpRequestHandler httpRequestHandler;
- private readonly RequestDelegate next;
+ _next = next;
+ _httpRequestHandler = httpRequestHandler;
+ }
- public RetryMiddleware(RequestDelegate next, IHttpRequestHandler httpRequestHandler)
- {
- this.next = next;
- this.httpRequestHandler = httpRequestHandler;
- }
+ public async Task InvokeAsync(HttpContext httpContext)
+ {
+ var handled = await _httpRequestHandler
+ .HandleAsync(httpContext.Request, httpContext.Response)
+ .ConfigureAwait(false);
- public async Task InvokeAsync(HttpContext httpContext)
+ if (!handled)
{
- var handled = await this.httpRequestHandler
- .HandleAsync(httpContext.Request, httpContext.Response)
- .ConfigureAwait(false);
-
- if (!handled)
- {
- // Call the next delegate/middleware in the pipeline
- await this.next(httpContext).ConfigureAwait(false);
- }
+ // Call the next delegate/middleware in the pipeline
+ await _next(httpContext).ConfigureAwait(false);
}
}
-}
+}
\ No newline at end of file
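
For context, a hedged sketch of how middleware with this shape is typically plugged into an ASP.NET Core pipeline; the registration extension the package actually ships may differ. UseMiddleware supplies the RequestDelegate automatically and forwards extra arguments to the remaining constructor parameters:

```csharp
using KafkaFlow.Retry.API;
using Microsoft.AspNetCore.Builder;

// Hypothetical registration helper, written as if it lived inside this assembly
// (RetryMiddleware and IHttpRequestHandler are internal types).
internal static class RetryEndpointSample
{
    public static IApplicationBuilder UseRetryEndpointSample(
        this IApplicationBuilder app, IHttpRequestHandler handler)
    {
        // The extra argument is matched to the non-RequestDelegate constructor
        // parameter of RetryMiddleware (IHttpRequestHandler).
        return app.UseMiddleware<RetryMiddleware>(handler);
    }
}
```
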
diff --git a/src/KafkaFlow.Retry.API/RetryRequestHandlerBase.cs b/src/KafkaFlow.Retry.API/RetryRequestHandlerBase.cs
index e2e64431..6cee44cd 100644
--- a/src/KafkaFlow.Retry.API/RetryRequestHandlerBase.cs
+++ b/src/KafkaFlow.Retry.API/RetryRequestHandlerBase.cs
@@ -1,98 +1,97 @@
-namespace KafkaFlow.Retry.API
-{
- using System.IO;
- using System.Text;
- using System.Threading.Tasks;
- using Dawn;
- using Microsoft.AspNetCore.Http;
- using Newtonsoft.Json;
-
- internal abstract class RetryRequestHandlerBase : IHttpRequestHandler
- {
+using System.IO;
+using System.Text;
+using System.Threading.Tasks;
+using Dawn;
+using Microsoft.AspNetCore.Http;
+using Newtonsoft.Json;
- private readonly string path;
- private const string RetryResource = "retry";
+namespace KafkaFlow.Retry.API;
+internal abstract class RetryRequestHandlerBase : IHttpRequestHandler
+{
+ private const string RetryResource = "retry";
- protected JsonSerializerSettings jsonSerializerSettings = new JsonSerializerSettings()
- {
- DateTimeZoneHandling = DateTimeZoneHandling.Utc,
- TypeNameHandling = TypeNameHandling.None
- };
+ private readonly string _path;
- protected abstract HttpMethod HttpMethod { get; }
- protected RetryRequestHandlerBase(string endpointPrefix, string resource)
- {
- Guard.Argument(resource, nameof(resource)).NotNull().NotEmpty();
+ protected JsonSerializerSettings JsonSerializerSettings = new()
+ {
+ DateTimeZoneHandling = DateTimeZoneHandling.Utc,
+ TypeNameHandling = TypeNameHandling.None
+ };
- if (!string.IsNullOrEmpty(endpointPrefix))
- {
- this.path = this.path
- .ExtendResourcePath(endpointPrefix);
- }
+ protected RetryRequestHandlerBase(string endpointPrefix, string resource)
+ {
+ Guard.Argument(resource, nameof(resource)).NotNull().NotEmpty();
- this.path = this.path
- .ExtendResourcePath(RetryResource)
- .ExtendResourcePath(resource);
+ if (!string.IsNullOrEmpty(endpointPrefix))
+ {
+ _path = _path
+ .ExtendResourcePath(endpointPrefix);
}
+ _path = _path
+ .ExtendResourcePath(RetryResource)
+ .ExtendResourcePath(resource);
+ }
- public virtual async Task<bool> HandleAsync(HttpRequest request, HttpResponse response)
- {
- if (!this.CanHandle(request))
- {
- return false;
- }
+ protected abstract HttpMethod HttpMethod { get; }
- await this.HandleRequestAsync(request, response).ConfigureAwait(false);
- return true;
- }
-
- protected bool CanHandle(HttpRequest httpRequest)
+ public virtual async Task<bool> HandleAsync(HttpRequest request, HttpResponse response)
+ {
+ if (!CanHandle(request))
{
- var resource = httpRequest.Path.ToUriComponent();
+ return false;
+ }
- if (!resource.Equals(this.path))
- {
- return false;
- }
+ await HandleRequestAsync(request, response).ConfigureAwait(false);
- var method = httpRequest.Method;
+ return true;
+ }
- if (!method.Equals(this.HttpMethod.ToString()))
- {
- return false;
- }
+ protected bool CanHandle(HttpRequest httpRequest)
+ {
+ var resource = httpRequest.Path.ToUriComponent();
- return true;
+ if (!resource.Equals(_path))
+ {
+ return false;
}
- protected abstract Task HandleRequestAsync(HttpRequest request, HttpResponse response);
+ var method = httpRequest.Method;
- protected virtual async Task<T> ReadRequestDtoAsync<T>(HttpRequest request)
+ if (!method.Equals(HttpMethod.ToString()))
{
- string requestMessage;
+ return false;
+ }
- using (var reader = new StreamReader(request.Body, Encoding.UTF8))
- {
- requestMessage = await reader.ReadToEndAsync().ConfigureAwait(false);
- }
+ return true;
+ }
- var requestDto = JsonConvert.DeserializeObject<T>(requestMessage, this.jsonSerializerSettings);
+ protected abstract Task HandleRequestAsync(HttpRequest request, HttpResponse response);
- return requestDto;
- }
+ protected virtual async Task<T> ReadRequestDtoAsync<T>(HttpRequest request)
+ {
+ string requestMessage;
- protected virtual async Task WriteResponseAsync<T>(HttpResponse response, T responseDto, int statusCode)
+ using (var reader = new StreamReader(request.Body, Encoding.UTF8))
{
- var body = JsonConvert.SerializeObject(responseDto, this.jsonSerializerSettings);
+ requestMessage = await reader.ReadToEndAsync().ConfigureAwait(false);
+ }
- response.ContentType = "application/json";
- response.StatusCode = statusCode;
+ var requestDto = JsonConvert.DeserializeObject<T>(requestMessage, JsonSerializerSettings);
- await response.WriteAsync(body, Encoding.UTF8).ConfigureAwait(false);
- }
+ return requestDto;
+ }
+
+ protected virtual async Task WriteResponseAsync<T>(HttpResponse response, T responseDto, int statusCode)
+ {
+ var body = JsonConvert.SerializeObject(responseDto, JsonSerializerSettings);
+
+ response.ContentType = "application/json";
+ response.StatusCode = statusCode;
+
+ await response.WriteAsync(body, Encoding.UTF8).ConfigureAwait(false);
}
-}
+}
\ No newline at end of file
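
To make the route composition above concrete, a small illustrative trace (the prefix value is an assumption) of what the ExtendResourcePath chain produces and what CanHandle later compares against:

```csharp
// Illustrative trace of the constructor, assuming endpointPrefix = "kafka-flow" and resource = "items".
// string.Concat treats the null seed as an empty string, so each step just prepends '/'.
string path = null;
path = string.Concat(path, '/', "kafka-flow"); // "/kafka-flow"
path = string.Concat(path, '/', "retry");      // "/kafka-flow/retry"
path = string.Concat(path, '/', "items");      // "/kafka-flow/retry/items"

// CanHandle then requires an exact match on both pieces:
//   request.Path.ToUriComponent() == "/kafka-flow/retry/items"
//   request.Method == "GET" (or PATCH, per the derived handler)
```
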
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/BootstrapperKafka.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/BootstrapperKafka.cs
deleted file mode 100644
index 1c2f7551..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/BootstrapperKafka.cs
+++ /dev/null
@@ -1,580 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Bootstrappers
-{
- using System;
- using Confluent.Kafka;
- using KafkaFlow.Configuration;
- using KafkaFlow.Retry.IntegrationTests.Core.Exceptions;
- using KafkaFlow.Retry.IntegrationTests.Core.Handlers;
- using KafkaFlow.Retry.IntegrationTests.Core.Messages;
- using KafkaFlow.Retry.IntegrationTests.Core.Producers;
- using KafkaFlow.Retry.MongoDb;
- using KafkaFlow.Retry.Postgres;
- using KafkaFlow.Retry.SqlServer;
- using KafkaFlow.Serializer;
- using Newtonsoft.Json;
-
- internal static class BootstrapperKafka
- {
- private const int NumberOfPartitions = 6;
- private const int ReplicationFactor = 1;
-
- private static readonly string[] TestTopics = new[]
- {
- "test-kafka-flow-retry-retry-simple",
- "test-kafka-flow-retry-retry-forever",
- "test-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-mongo-db",
- "test-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-mongo-db-retry",
- "test-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-sql-server",
- "test-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-sql-server-retry",
- "test-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-postgres",
- "test-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-postgres-retry",
- "test-kafka-flow-retry-retry-durable-latest-consumption-mongo-db",
- "test-kafka-flow-retry-retry-durable-latest-consumption-mongo-db-retry",
- "test-kafka-flow-retry-retry-durable-latest-consumption-sql-server",
- "test-kafka-flow-retry-retry-durable-latest-consumption-sql-server-retry",
- "test-kafka-flow-retry-retry-durable-latest-consumption-postgres",
- "test-kafka-flow-retry-retry-durable-latest-consumption-postgres-retry"
- };
-
- internal static IClusterConfigurationBuilder CreatAllTestTopicsIfNotExist(this IClusterConfigurationBuilder cluster)
- {
- foreach (var topic in TestTopics)
- {
- cluster.CreateTopicIfNotExists(topic, NumberOfPartitions, ReplicationFactor);
- }
-
- return cluster;
- }
-
- internal static IClusterConfigurationBuilder SetupRetryDurableGuaranteeOrderedConsumptionMongoDbCluster(
- this IClusterConfigurationBuilder cluster,
- string mongoDbConnectionString,
- string mongoDbDatabaseName,
- string mongoDbRetryQueueCollectionName,
- string mongoDbRetryQueueItemCollectionName)
- {
- cluster
- .AddProducer(
- producer => producer
- .DefaultTopic("test-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-mongo-db")
- .WithCompression(CompressionType.Gzip)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeSerializer()))
- .AddConsumer(
- consumer => consumer
- .Topic("test-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-mongo-db")
- .WithGroupId("test-consumer-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-mongo-db")
- .WithBufferSize(100)
- .WithWorkersCount(10)
- .WithAutoOffsetReset(KafkaFlow.AutoOffsetReset.Latest)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeDeserializer(typeof(RetryDurableTestMessage))
- .RetryDurable(
- (configure) => configure
- .Handle()
- .WithMessageType(typeof(RetryDurableTestMessage))
- .WithMessageSerializeSettings(
- new JsonSerializerSettings
- {
- DateTimeZoneHandling = DateTimeZoneHandling.Utc,
- TypeNameHandling = TypeNameHandling.Auto
- })
- .WithEmbeddedRetryCluster(
- cluster,
- configure => configure
- .Enabled(true)
- .WithRetryTopicName("test-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-mongo-db-retry")
- .WithRetryConsumerBufferSize(100)
- .WithRetryConsumerWorkersCount(10)
- .WithRetryConsumerStrategy(RetryConsumerStrategy.GuaranteeOrderedConsumption)
- .WithRetryTypedHandlers(
- handlers => handlers
- .WithHandlerLifetime(InstanceLifetime.Transient)
- .AddHandler()))
- .WithPollingJobsConfiguration(
- configure => configure
- .WithSchedulerId("custom_search_key_durable_guarantee_ordered_consumption_mongo_db")
- .WithRetryDurablePollingConfiguration(
- configure => configure
- .Enabled(true)
- .WithCronExpression("0/30 * * ? * * *")
- .WithExpirationIntervalFactor(1)
- .WithFetchSize(256))
- )
- .WithMongoDbDataProvider(
- mongoDbConnectionString,
- mongoDbDatabaseName,
- mongoDbRetryQueueCollectionName,
- mongoDbRetryQueueItemCollectionName)
- .WithRetryPlanBeforeRetryDurable(
- configure => configure
- .TryTimes(3)
- .WithTimeBetweenTriesPlan(
- TimeSpan.FromMilliseconds(250),
- TimeSpan.FromMilliseconds(500),
- TimeSpan.FromMilliseconds(1000))
- .ShouldPauseConsumer(false)))
- .AddTypedHandlers(
- handlers =>
- handlers
- .WithHandlerLifetime(InstanceLifetime.Singleton)
- .AddHandler())));
- return cluster;
- }
-
- internal static IClusterConfigurationBuilder SetupRetryDurableGuaranteeOrderedConsumptionSqlServerCluster(
- this IClusterConfigurationBuilder cluster,
- string sqlServerConnectionString,
- string sqlServerDatabaseName)
- {
- cluster
- .AddProducer(
- producer => producer
- .DefaultTopic("test-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-sql-server")
- .WithCompression(Confluent.Kafka.CompressionType.Gzip)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeSerializer()))
- .AddConsumer(
- consumer => consumer
- .Topic("test-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-sql-server")
- .WithGroupId("test-consumer-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-sql-server")
- .WithBufferSize(100)
- .WithWorkersCount(10)
- .WithAutoOffsetReset(KafkaFlow.AutoOffsetReset.Latest)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeDeserializer(typeof(RetryDurableTestMessage))
- .RetryDurable(
- (configure) => configure
- .Handle()
- .WithMessageType(typeof(RetryDurableTestMessage))
- .WithMessageSerializeSettings(
- new JsonSerializerSettings
- {
- DateTimeZoneHandling = DateTimeZoneHandling.Utc,
- TypeNameHandling = TypeNameHandling.Auto
- })
- .WithEmbeddedRetryCluster(
- cluster,
- configure => configure
- .Enabled(true)
- .WithRetryTopicName("test-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-sql-server-retry")
- .WithRetryConsumerBufferSize(100)
- .WithRetryConsumerWorkersCount(10)
- .WithRetryConsumerStrategy(RetryConsumerStrategy.GuaranteeOrderedConsumption)
- .WithRetryTypedHandlers(
- handlers => handlers
- .WithHandlerLifetime(InstanceLifetime.Transient)
- .AddHandler()))
- .WithPollingJobsConfiguration(
- configure => configure
- .WithSchedulerId("custom_search_key_durable_guarantee_ordered_consumption_sql_server")
- .WithRetryDurablePollingConfiguration(
- configure => configure
- .Enabled(true)
- .WithCronExpression("0/30 * * ? * * *")
- .WithExpirationIntervalFactor(1)
- .WithFetchSize(256))
- )
- .WithSqlServerDataProvider(
- sqlServerConnectionString,
- sqlServerDatabaseName)
- .WithRetryPlanBeforeRetryDurable(
- configure => configure
- .TryTimes(3)
- .WithTimeBetweenTriesPlan(
- TimeSpan.FromMilliseconds(250),
- TimeSpan.FromMilliseconds(500),
- TimeSpan.FromMilliseconds(1000))
- .ShouldPauseConsumer(false)))
- .AddTypedHandlers(
- handlers =>
- handlers
- .WithHandlerLifetime(InstanceLifetime.Singleton)
- .AddHandler())));
- return cluster;
- }
-
- internal static IClusterConfigurationBuilder SetupRetryDurableGuaranteeOrderedConsumptionPostgresCluster(
- this IClusterConfigurationBuilder cluster,
- string postgresConnectionString,
- string postgresDatabaseName)
- {
- cluster
- .AddProducer(
- producer => producer
- .DefaultTopic("test-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-postgres")
- .WithCompression(Confluent.Kafka.CompressionType.Gzip)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeSerializer()))
- .AddConsumer(
- consumer => consumer
- .Topic("test-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-postgres")
- .WithGroupId("test-consumer-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-postgres")
- .WithBufferSize(100)
- .WithWorkersCount(10)
- .WithAutoOffsetReset(KafkaFlow.AutoOffsetReset.Latest)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeDeserializer(typeof(RetryDurableTestMessage))
- .RetryDurable(
- (configure) => configure
- .Handle()
- .WithMessageType(typeof(RetryDurableTestMessage))
- .WithMessageSerializeSettings(
- new JsonSerializerSettings
- {
- DateTimeZoneHandling = DateTimeZoneHandling.Utc,
- TypeNameHandling = TypeNameHandling.Auto
- })
- .WithEmbeddedRetryCluster(
- cluster,
- configure => configure
- .Enabled(true)
- .WithRetryTopicName("test-kafka-flow-retry-retry-durable-guarantee-ordered-consumption-postgres-retry")
- .WithRetryConsumerBufferSize(100)
- .WithRetryConsumerWorkersCount(10)
- .WithRetryConsumerStrategy(RetryConsumerStrategy.GuaranteeOrderedConsumption)
- .WithRetryTypedHandlers(
- handlers => handlers
- .WithHandlerLifetime(InstanceLifetime.Transient)
- .AddHandler()))
- .WithPollingJobsConfiguration(
- configure => configure
- .WithSchedulerId("custom_search_key_durable_guarantee_ordered_consumption_postgres")
- .WithRetryDurablePollingConfiguration(
- configure => configure
- .Enabled(true)
- .WithCronExpression("0/30 * * ? * * *")
- .WithExpirationIntervalFactor(1)
- .WithFetchSize(256))
- )
- .WithPostgresDataProvider(
- postgresConnectionString,
- postgresDatabaseName)
- .WithRetryPlanBeforeRetryDurable(
- configure => configure
- .TryTimes(3)
- .WithTimeBetweenTriesPlan(
- TimeSpan.FromMilliseconds(250),
- TimeSpan.FromMilliseconds(500),
- TimeSpan.FromMilliseconds(1000))
- .ShouldPauseConsumer(false)))
- .AddTypedHandlers(
- handlers =>
- handlers
- .WithHandlerLifetime(InstanceLifetime.Singleton)
- .AddHandler())));
- return cluster;
- }
-
- internal static IClusterConfigurationBuilder SetupRetryDurableLatestConsumptionMongoDbCluster(
- this IClusterConfigurationBuilder cluster,
- string mongoDbConnectionString,
- string mongoDbDatabaseName,
- string mongoDbRetryQueueCollectionName,
- string mongoDbRetryQueueItemCollectionName)
- {
- cluster
- .AddProducer(
- producer => producer
- .DefaultTopic("test-kafka-flow-retry-retry-durable-latest-consumption-mongo-db")
- .WithCompression(Confluent.Kafka.CompressionType.Gzip)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeSerializer()))
- .AddConsumer(
- consumer => consumer
- .Topic("test-kafka-flow-retry-retry-durable-latest-consumption-mongo-db")
- .WithGroupId("test-consumer-kafka-flow-retry-retry-durable-latest-consumption-mongo-db")
- .WithBufferSize(100)
- .WithWorkersCount(10)
- .WithAutoOffsetReset(KafkaFlow.AutoOffsetReset.Latest)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeDeserializer(typeof(RetryDurableTestMessage))
- .RetryDurable(
- (configure) => configure
- .Handle()
- .WithMessageType(typeof(RetryDurableTestMessage))
- .WithMessageSerializeSettings(
- new JsonSerializerSettings
- {
- DateTimeZoneHandling = DateTimeZoneHandling.Utc,
- TypeNameHandling = TypeNameHandling.Auto
- })
- .WithEmbeddedRetryCluster(
- cluster,
- configure => configure
- .Enabled(true)
- .WithRetryTopicName("test-kafka-flow-retry-retry-durable-latest-consumption-mongo-db-retry")
- .WithRetryConsumerBufferSize(100)
- .WithRetryConsumerWorkersCount(10)
- .WithRetryConsumerStrategy(RetryConsumerStrategy.LatestConsumption)
- .WithRetryTypedHandlers(
- handlers => handlers
- .WithHandlerLifetime(InstanceLifetime.Transient)
- .AddHandler()))
- .WithPollingJobsConfiguration(
- configure => configure
- .WithSchedulerId("custom_search_key_durable_latest_consumption_mongo_db")
- .WithRetryDurablePollingConfiguration(
- configure => configure
- .Enabled(true)
- .WithCronExpression("0/30 * * ? * * *")
- .WithExpirationIntervalFactor(1)
- .WithFetchSize(256))
- )
- .WithMongoDbDataProvider(
- mongoDbConnectionString,
- mongoDbDatabaseName,
- mongoDbRetryQueueCollectionName,
- mongoDbRetryQueueItemCollectionName)
- .WithRetryPlanBeforeRetryDurable(
- configure => configure
- .TryTimes(3)
- .WithTimeBetweenTriesPlan(
- TimeSpan.FromMilliseconds(250),
- TimeSpan.FromMilliseconds(500),
- TimeSpan.FromMilliseconds(1000))
- .ShouldPauseConsumer(false)))
- .AddTypedHandlers(
- handlers =>
- handlers
- .WithHandlerLifetime(InstanceLifetime.Singleton)
- .AddHandler())));
- return cluster;
- }
-
- internal static IClusterConfigurationBuilder SetupRetryDurableLatestConsumptionSqlServerCluster(
- this IClusterConfigurationBuilder cluster,
- string sqlServerConnectionString,
- string sqlServerDatabaseName)
- {
- cluster
- .AddProducer(
- producer => producer
- .DefaultTopic("test-kafka-flow-retry-retry-durable-latest-consumption-sql-server")
- .WithCompression(Confluent.Kafka.CompressionType.Gzip)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeSerializer()))
- .AddConsumer(
- consumer => consumer
- .Topic("test-kafka-flow-retry-retry-durable-latest-consumption-sql-server")
- .WithGroupId("test-consumer-kafka-flow-retry-retry-durable-latest-consumption-sql-server")
- .WithBufferSize(100)
- .WithWorkersCount(10)
- .WithAutoOffsetReset((KafkaFlow.AutoOffsetReset)AutoOffsetReset.Latest)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeDeserializer(typeof(RetryDurableTestMessage))
- .RetryDurable(
- (configure) => configure
- .Handle()
- .WithMessageType(typeof(RetryDurableTestMessage))
- .WithMessageSerializeSettings(
- new JsonSerializerSettings
- {
- DateTimeZoneHandling = DateTimeZoneHandling.Utc,
- TypeNameHandling = TypeNameHandling.Auto
- })
- .WithEmbeddedRetryCluster(
- cluster,
- configure => configure
- .Enabled(true)
- .WithRetryTopicName("test-kafka-flow-retry-retry-durable-latest-consumption-sql-server-retry")
- .WithRetryConsumerBufferSize(100)
- .WithRetryConsumerWorkersCount(10)
- .WithRetryConsumerStrategy(RetryConsumerStrategy.LatestConsumption)
- .WithRetryTypedHandlers(
- handlers => handlers
- .WithHandlerLifetime(InstanceLifetime.Transient)
- .AddHandler()))
- .WithPollingJobsConfiguration(
- configure => configure
- .WithSchedulerId("custom_search_key_durable_latest_consumption_sql_server")
- .WithRetryDurablePollingConfiguration(
- configure => configure
- .Enabled(true)
- .WithCronExpression("0/30 * * ? * * *")
- .WithExpirationIntervalFactor(1)
- .WithFetchSize(256))
- )
-
- .WithSqlServerDataProvider(
- sqlServerConnectionString,
- sqlServerDatabaseName)
- .WithRetryPlanBeforeRetryDurable(
- configure => configure
- .TryTimes(3)
- .WithTimeBetweenTriesPlan(
- TimeSpan.FromMilliseconds(250),
- TimeSpan.FromMilliseconds(500),
- TimeSpan.FromMilliseconds(1000))
- .ShouldPauseConsumer(false)))
- .AddTypedHandlers(
- handlers =>
- handlers
- .WithHandlerLifetime(InstanceLifetime.Singleton)
- .AddHandler())));
- return cluster;
- }
-
- internal static IClusterConfigurationBuilder SetupRetryDurableLatestConsumptionPostgresCluster(
- this IClusterConfigurationBuilder cluster,
- string postgresConnectionString,
- string postgresDatabaseName)
- {
- cluster
- .AddProducer(
- producer => producer
- .DefaultTopic("test-kafka-flow-retry-retry-durable-latest-consumption-postgres")
- .WithCompression(Confluent.Kafka.CompressionType.Gzip)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeSerializer()))
- .AddConsumer(
- consumer => consumer
- .Topic("test-kafka-flow-retry-retry-durable-latest-consumption-postgres")
- .WithGroupId("test-consumer-kafka-flow-retry-retry-durable-latest-consumption-postgres")
- .WithBufferSize(100)
- .WithWorkersCount(10)
- .WithAutoOffsetReset((KafkaFlow.AutoOffsetReset)AutoOffsetReset.Latest)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeDeserializer(typeof(RetryDurableTestMessage))
- .RetryDurable(
- (configure) => configure
- .Handle()
- .WithMessageType(typeof(RetryDurableTestMessage))
- .WithMessageSerializeSettings(
- new JsonSerializerSettings
- {
- DateTimeZoneHandling = DateTimeZoneHandling.Utc,
- TypeNameHandling = TypeNameHandling.Auto
- })
- .WithEmbeddedRetryCluster(
- cluster,
- configure => configure
- .Enabled(true)
- .WithRetryTopicName("test-kafka-flow-retry-retry-durable-latest-consumption-postgres-retry")
- .WithRetryConsumerBufferSize(100)
- .WithRetryConsumerWorkersCount(10)
- .WithRetryConsumerStrategy(RetryConsumerStrategy.LatestConsumption)
- .WithRetryTypedHandlers(
- handlers => handlers
- .WithHandlerLifetime(InstanceLifetime.Transient)
- .AddHandler()))
- .WithPollingJobsConfiguration(
- configure => configure
- .WithSchedulerId("custom_search_key_durable_latest_consumption_postgres")
- .WithRetryDurablePollingConfiguration(
- configure => configure
- .Enabled(true)
- .WithCronExpression("0/30 * * ? * * *")
- .WithExpirationIntervalFactor(1)
- .WithFetchSize(256))
- )
- .WithPostgresDataProvider(
- postgresConnectionString,
- postgresDatabaseName)
- .WithRetryPlanBeforeRetryDurable(
- configure => configure
- .TryTimes(3)
- .WithTimeBetweenTriesPlan(
- TimeSpan.FromMilliseconds(250),
- TimeSpan.FromMilliseconds(500),
- TimeSpan.FromMilliseconds(1000))
- .ShouldPauseConsumer(false)))
- .AddTypedHandlers(
- handlers =>
- handlers
- .WithHandlerLifetime(InstanceLifetime.Singleton)
- .AddHandler())));
- return cluster;
- }
-
- internal static IClusterConfigurationBuilder SetupRetryForeverCluster(this IClusterConfigurationBuilder cluster)
- {
- cluster
- .AddProducer(
- producer => producer
- .DefaultTopic("test-kafka-flow-retry-retry-forever")
- .WithCompression(Confluent.Kafka.CompressionType.Gzip)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeSerializer()))
- .AddConsumer(
- consumer => consumer
- .Topic("test-kafka-flow-retry-retry-forever")
- .WithGroupId("test-consumer-kafka-flow-retry-retry-forever")
- .WithBufferSize(100)
- .WithWorkersCount(10)
- .WithAutoOffsetReset(KafkaFlow.AutoOffsetReset.Latest)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeDeserializer(typeof(RetryForeverTestMessage))
- .RetryForever(
- (configure) => configure
- .Handle()
- .WithTimeBetweenTriesPlan(TimeSpan.FromMilliseconds(100)))
- .AddTypedHandlers(
- handlers =>
- handlers
- .WithHandlerLifetime(InstanceLifetime.Singleton)
- .AddHandler())));
- return cluster;
- }
-
- internal static IClusterConfigurationBuilder SetupRetrySimpleCluster(this IClusterConfigurationBuilder cluster)
- {
- cluster
- .AddProducer(
- producer => producer
- .DefaultTopic("test-kafka-flow-retry-retry-simple")
- .WithCompression(Confluent.Kafka.CompressionType.Gzip)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeSerializer()))
- .AddConsumer(
- consumer => consumer
- .Topic("test-kafka-flow-retry-retry-simple")
- .WithGroupId("test-consumer-kafka-flow-retry-retry-simple")
- .WithBufferSize(100)
- .WithWorkersCount(10)
- .WithAutoOffsetReset(KafkaFlow.AutoOffsetReset.Latest)
- .AddMiddlewares(
- middlewares => middlewares
- .AddSingleTypeDeserializer(typeof(RetrySimpleTestMessage))
- .RetrySimple(
- (configure) => configure
- .Handle()
- .TryTimes(3)
- .ShouldPauseConsumer(false)
- .WithTimeBetweenTriesPlan(
- (retryCount) =>
- {
- var plan = new[]
- {
- TimeSpan.FromMilliseconds(100),
- TimeSpan.FromMilliseconds(100),
- TimeSpan.FromMilliseconds(100),
- TimeSpan.FromMilliseconds(100)
- };
-
- return plan[retryCount];
- }))
- .AddTypedHandlers(
- handlers =>
- handlers
- .WithHandlerLifetime(InstanceLifetime.Singleton)
- .AddHandler())));
- return cluster;
- }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/BootstrapperPostgresSchema.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/BootstrapperPostgresSchema.cs
deleted file mode 100644
index 73cf4947..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/BootstrapperPostgresSchema.cs
+++ /dev/null
@@ -1,71 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Bootstrappers
-{
- using System.Collections.Generic;
- using System.IO;
- using System.Linq;
- using System.Reflection;
- using System.Threading;
- using System.Threading.Tasks;
- using Npgsql;
-
- internal static class BootstrapperPostgresSchema
- {
- private static readonly SemaphoreSlim semaphoreOneThreadAtTime = new SemaphoreSlim(1, 1);
- private static bool schemaInitialized;
-
- internal static async Task RecreatePostgresSchemaAsync(string databaseName, string connectionString)
- {
- await semaphoreOneThreadAtTime.WaitAsync().ConfigureAwait(false);
- try
- {
- if (schemaInitialized)
- {
- return;
- }
-
- await using (var openCon = new NpgsqlConnection(connectionString))
- {
- openCon.Open();
- openCon.ChangeDatabase(databaseName);
-
- var scripts = GetScriptsForSchemaCreation();
-
- foreach (var script in scripts)
- {
- await using (var queryCommand = new NpgsqlCommand(script))
- {
- queryCommand.Connection = openCon;
-
- await queryCommand.ExecuteNonQueryAsync().ConfigureAwait(false);
- }
- }
- }
-
- schemaInitialized = true;
- }
- finally
- {
- semaphoreOneThreadAtTime.Release();
- }
- }
-
- private static IEnumerable<string> GetScriptsForSchemaCreation()
- {
- Assembly postgresAssembly = Assembly.LoadFrom("KafkaFlow.Retry.Postgres.dll");
- return postgresAssembly
- .GetManifestResourceNames()
- .OrderBy(x => x)
- .Select(script =>
- {
- using (Stream s = postgresAssembly.GetManifestResourceStream(script))
- {
- using (StreamReader sr = new StreamReader(s))
- {
- return sr.ReadToEnd();
- }
- }
- })
- .ToList();
- }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/BootstrapperSqlServerSchema.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/BootstrapperSqlServerSchema.cs
deleted file mode 100644
index 1bf81cc5..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/BootstrapperSqlServerSchema.cs
+++ /dev/null
@@ -1,77 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Bootstrappers
-{
- using System.Collections.Generic;
- using Microsoft.Data.SqlClient;
- using System.IO;
- using System.Linq;
- using System.Reflection;
- using System.Threading;
- using System.Threading.Tasks;
-
- internal static class BootstrapperSqlServerSchema
- {
- private static readonly SemaphoreSlim semaphoreOneThreadAtTime = new SemaphoreSlim(1, 1);
- private static bool schemaInitialized;
-
- internal static async Task RecreateSqlSchemaAsync(string databaseName, string connectionString)
- {
- await semaphoreOneThreadAtTime.WaitAsync().ConfigureAwait(false);
- try
- {
- if (schemaInitialized)
- {
- return;
- }
-
- using (SqlConnection openCon = new SqlConnection(connectionString))
- {
- openCon.Open();
-
- var scripts = GetScriptsForSchemaCreation();
-
- foreach (var script in scripts)
- {
- string[] batches = script.Split(new[] { "GO\r\n", "GO\t", "GO\n" }, System.StringSplitOptions.RemoveEmptyEntries);
-
- foreach (var batch in batches)
- {
- string replacedBatch = batch.Replace("@dbname", databaseName);
-
- using (SqlCommand queryCommand = new SqlCommand(replacedBatch))
- {
- queryCommand.Connection = openCon;
-
- await queryCommand.ExecuteNonQueryAsync().ConfigureAwait(false);
- }
- }
- }
- }
-
- schemaInitialized = true;
- }
- finally
- {
- semaphoreOneThreadAtTime.Release();
- }
- }
-
- private static IEnumerable<string> GetScriptsForSchemaCreation()
- {
- Assembly sqlServerAssembly = Assembly.LoadFrom("KafkaFlow.Retry.SqlServer.dll");
- return sqlServerAssembly
- .GetManifestResourceNames()
- .OrderBy(x => x)
- .Select(script =>
- {
- using (Stream s = sqlServerAssembly.GetManifestResourceStream(script))
- {
- using (StreamReader sr = new StreamReader(s))
- {
- return sr.ReadToEnd();
- }
- }
- })
- .ToList();
- }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/Fixtures/BootstrapperFixtureTemplate.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/Fixtures/BootstrapperFixtureTemplate.cs
deleted file mode 100644
index aab104e8..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/Fixtures/BootstrapperFixtureTemplate.cs
+++ /dev/null
@@ -1,92 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Bootstrappers.Fixtures
-{
- using System;
- using System.Collections.Generic;
- using Microsoft.Data.SqlClient;
- using System.Threading.Tasks;
- using Dawn;
- using global::Microsoft.Extensions.Configuration;
- using Npgsql;
- using KafkaFlow.Retry.IntegrationTests.Core.Settings;
- using KafkaFlow.Retry.IntegrationTests.Core.Storages.Repositories;
-
- public abstract class BootstrapperFixtureTemplate : IDisposable
- {
- protected const string ConfigurationFilePath = "conf/appsettings.json";
-
- private bool databasesInitialized;
-
- private IRepositoryProvider repositoryProvider;
-
- internal KafkaSettings KafkaSettings { get; private set; }
-
- internal MongoDbRepositorySettings MongoDbSettings { get; private set; }
-
- internal IRepositoryProvider RepositoryProvider => this.repositoryProvider ?? this.CreateRepositoryProvider();
-
- internal SqlServerRepositorySettings SqlServerSettings { get; private set; }
-
- internal PostgresRepositorySettings PostgresSettings { get; private set; }
-
- public abstract void Dispose();
-
- protected async Task InitializeDatabasesAsync(IConfiguration configuration)
- {
- this.InitializeMongoDb(configuration);
- await this.InitializeSqlServerAsync(configuration).ConfigureAwait(false);
- await this.InitializePostgresAsync(configuration).ConfigureAwait(false);
-
- this.databasesInitialized = true;
- }
-
- protected void InitializeKafka(IConfiguration configuration)
- {
- this.KafkaSettings = configuration.GetSection("Kafka").Get<KafkaSettings>();
- }
-
- private IRepositoryProvider CreateRepositoryProvider()
- {
- Guard.Argument(this.databasesInitialized, nameof(this.databasesInitialized)).True($"Call {nameof(this.InitializeDatabasesAsync)} first.");
-
- var repositories = new List<IRepository>
- {
- new MongoDbRepository( this.MongoDbSettings.ConnectionString, this.MongoDbSettings.DatabaseName, this.MongoDbSettings.RetryQueueCollectionName, this.MongoDbSettings.RetryQueueItemCollectionName),
- new SqlServerRepository(this.SqlServerSettings.ConnectionString, this.SqlServerSettings.DatabaseName),
- new PostgresRepository(this.PostgresSettings.ConnectionString, this.PostgresSettings.DatabaseName)
- };
-
- this.repositoryProvider = new RepositoryProvider(repositories);
-
- return this.repositoryProvider;
- }
-
- private void InitializeMongoDb(IConfiguration configuration)
- {
- this.MongoDbSettings = configuration.GetSection("MongoDbRepository").Get();
- }
-
- private async Task InitializeSqlServerAsync(IConfiguration configuration)
- {
- this.SqlServerSettings = configuration.GetSection("SqlServerRepository").Get();
-
- var sqlServerConnectionStringBuilder = new SqlConnectionStringBuilder(this.SqlServerSettings.ConnectionString);
- if (Environment.GetEnvironmentVariable("SQLSERVER_INTEGRATED_SECURITY") != null)
- {
- sqlServerConnectionStringBuilder.IntegratedSecurity = false;
- }
- this.SqlServerSettings.ConnectionString = sqlServerConnectionStringBuilder.ToString();
-
- await BootstrapperSqlServerSchema.RecreateSqlSchemaAsync(this.SqlServerSettings.DatabaseName, this.SqlServerSettings.ConnectionString).ConfigureAwait(false);
- }
-
- private async Task InitializePostgresAsync(IConfiguration configuration)
- {
- this.PostgresSettings = configuration.GetSection("PostgresRepository").Get();
-
- var postgresConnectionStringBuilder = new NpgsqlConnectionStringBuilder(this.PostgresSettings.ConnectionString);
- this.PostgresSettings.ConnectionString = postgresConnectionStringBuilder.ToString();
-
- await BootstrapperPostgresSchema.RecreatePostgresSchemaAsync(this.PostgresSettings.DatabaseName, this.PostgresSettings.ConnectionString).ConfigureAwait(false);
- }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/Fixtures/BootstrapperHostFixture.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/Fixtures/BootstrapperHostFixture.cs
deleted file mode 100644
index 59a6cb50..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/Fixtures/BootstrapperHostFixture.cs
+++ /dev/null
@@ -1,125 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Bootstrappers.Fixtures
-{
- using System;
- using System.IO;
- using System.Threading;
- using global::Microsoft.Extensions.Configuration;
- using global::Microsoft.Extensions.DependencyInjection;
- using global::Microsoft.Extensions.Hosting;
- using KafkaFlow.Retry.IntegrationTests.Core.Producers;
- using KafkaFlow.Retry.IntegrationTests.Core.Storages.Assertion;
- using KafkaFlow.Retry.IntegrationTests.Core.Storages.Repositories;
- using Xunit;
-
- [CollectionDefinition("BootstrapperHostCollection")]
- public class BootstrapperHostCollectionFixture : ICollectionFixture<BootstrapperHostFixture>
- { }
-
- public class BootstrapperHostFixture : BootstrapperFixtureTemplate
- {
- private readonly IKafkaBus kafkaBus;
-
- public BootstrapperHostFixture()
- {
- var config = new ConfigurationBuilder()
- .AddJsonFile(ConfigurationFilePath)
- .Build();
-
- this.InitializeDatabasesAsync(config).GetAwaiter().GetResult();
-
- var builder = Host
- .CreateDefaultBuilder()
- .ConfigureAppConfiguration(
- (_, config) =>
- {
- config
- .SetBasePath(Directory.GetCurrentDirectory())
- .AddJsonFile(
- ConfigurationFilePath,
- false,
- true)
- .AddEnvironmentVariables();
- })
- .ConfigureServices(SetupServices)
- .UseDefaultServiceProvider(
- (_, options) =>
- {
- options.ValidateScopes = true;
- options.ValidateOnBuild = true;
- });
-
- var host = builder.Build();
- kafkaBus = host.Services.CreateKafkaBus();
- kafkaBus.StartAsync().GetAwaiter().GetResult();
-
- // Wait partition assignment
- Thread.Sleep(10000);
-
- ServiceProvider = host.Services;
- }
-
- public IServiceProvider ServiceProvider { get; private set; }
-
- public override void Dispose()
- {
- kafkaBus.StopAsync().GetAwaiter().GetResult();
-
- var repositories = this.RepositoryProvider.GetAllRepositories();
-
- foreach (var repository in repositories)
- {
- repository.CleanDatabaseAsync().GetAwaiter().GetResult();
- }
- }
-
- private void SetupServices(HostBuilderContext context, IServiceCollection services)
- {
- this.InitializeKafka(context.Configuration);
-
- services.AddKafka(
- kafka => kafka
- .UseLogHandler()
- .AddCluster(
- cluster => cluster
- .WithBrokers(this.KafkaSettings.Brokers.Split(';'))
- .CreatAllTestTopicsIfNotExist()
- .SetupRetrySimpleCluster()
- .SetupRetryForeverCluster()
- .SetupRetryDurableGuaranteeOrderedConsumptionMongoDbCluster(
- this.MongoDbSettings.ConnectionString,
- this.MongoDbSettings.DatabaseName,
- this.MongoDbSettings.RetryQueueCollectionName,
- this.MongoDbSettings.RetryQueueItemCollectionName)
- .SetupRetryDurableGuaranteeOrderedConsumptionSqlServerCluster(
- this.SqlServerSettings.ConnectionString,
- this.SqlServerSettings.DatabaseName)
- .SetupRetryDurableGuaranteeOrderedConsumptionPostgresCluster(
- this.PostgresSettings.ConnectionString,
- this.PostgresSettings.DatabaseName)
- .SetupRetryDurableLatestConsumptionMongoDbCluster(
- this.MongoDbSettings.ConnectionString,
- this.MongoDbSettings.DatabaseName,
- this.MongoDbSettings.RetryQueueCollectionName,
- this.MongoDbSettings.RetryQueueItemCollectionName)
- .SetupRetryDurableLatestConsumptionSqlServerCluster(
- this.SqlServerSettings.ConnectionString,
- this.SqlServerSettings.DatabaseName)
- .SetupRetryDurableLatestConsumptionPostgresCluster(
- this.PostgresSettings.ConnectionString,
- this.PostgresSettings.DatabaseName)
- ));
-
- services.AddSingleton();
- services.AddSingleton();
- services.AddSingleton();
- services.AddSingleton();
- services.AddSingleton();
- services.AddSingleton();
- services.AddSingleton();
- services.AddSingleton();
- services.AddSingleton(sp => this.RepositoryProvider);
- services.AddSingleton();
- services.AddSingleton();
- }
- }
-}
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/Fixtures/BootstrapperRepositoryFixture.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/Fixtures/BootstrapperRepositoryFixture.cs
deleted file mode 100644
index 114b7f59..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Bootstrappers/Fixtures/BootstrapperRepositoryFixture.cs
+++ /dev/null
@@ -1,31 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Bootstrappers.Fixtures
-{
- using global::Microsoft.Extensions.Configuration;
- using Xunit;
-
- [CollectionDefinition("BootstrapperRepositoryCollection")]
- public class BootstrapperRepositoryCollectionFixture : ICollectionFixture<BootstrapperRepositoryFixture>
- { }
-
- public class BootstrapperRepositoryFixture : BootstrapperFixtureTemplate
- {
- public BootstrapperRepositoryFixture()
- {
- var config = new ConfigurationBuilder()
- .AddJsonFile(ConfigurationFilePath)
- .Build();
-
- this.InitializeDatabasesAsync(config).GetAwaiter().GetResult();
- }
-
- public override void Dispose()
- {
- var repositories = this.RepositoryProvider.GetAllRepositories();
-
- foreach (var repository in repositories)
- {
- repository.CleanDatabaseAsync().GetAwaiter().GetResult();
- }
- }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Exceptions/RetryDurableTestException.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Exceptions/RetryDurableTestException.cs
deleted file mode 100644
index 0fae959a..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Exceptions/RetryDurableTestException.cs
+++ /dev/null
@@ -1,8 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Exceptions
-{
- using System;
-
- public class RetryDurableTestException : Exception
- {
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Exceptions/RetryForeverTestException.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Exceptions/RetryForeverTestException.cs
deleted file mode 100644
index 521370ca..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Exceptions/RetryForeverTestException.cs
+++ /dev/null
@@ -1,8 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Exceptions
-{
- using System;
-
- public class RetryForeverTestException : Exception
- {
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Exceptions/RetrySimpleTestException.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Exceptions/RetrySimpleTestException.cs
deleted file mode 100644
index 5d9ffa58..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Exceptions/RetrySimpleTestException.cs
+++ /dev/null
@@ -1,8 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Exceptions
-{
- using System;
-
- public class RetrySimpleTestException : Exception
- {
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Handlers/RetryDurableTestMessageHandler.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Handlers/RetryDurableTestMessageHandler.cs
deleted file mode 100644
index f405f56a..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Handlers/RetryDurableTestMessageHandler.cs
+++ /dev/null
@@ -1,30 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Handlers
-{
- using System.Threading.Tasks;
- using KafkaFlow;
- using KafkaFlow.Retry.IntegrationTests.Core.Exceptions;
- using KafkaFlow.Retry.IntegrationTests.Core.Messages;
- using KafkaFlow.Retry.IntegrationTests.Core.Storages;
-
- internal class RetryDurableTestMessageHandler : IMessageHandler<RetryDurableTestMessage>
- {
- private readonly ILogHandler logHandler;
-
- public RetryDurableTestMessageHandler(ILogHandler logHandler)
- {
- this.logHandler = logHandler;
- }
-
- public Task Handle(IMessageContext context, RetryDurableTestMessage message)
- {
- InMemoryAuxiliarStorage<RetryDurableTestMessage>.Add(message);
-
- if (InMemoryAuxiliarStorage<RetryDurableTestMessage>.ThrowException)
- {
- throw new RetryDurableTestException();
- }
-
- return Task.CompletedTask;
- }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Handlers/RetryForeverTestMessageHandler.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Handlers/RetryForeverTestMessageHandler.cs
deleted file mode 100644
index a5decaf3..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Handlers/RetryForeverTestMessageHandler.cs
+++ /dev/null
@@ -1,23 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Handlers
-{
- using System.Threading.Tasks;
- using KafkaFlow;
- using KafkaFlow.Retry.IntegrationTests.Core.Exceptions;
- using KafkaFlow.Retry.IntegrationTests.Core.Messages;
- using KafkaFlow.Retry.IntegrationTests.Core.Storages;
-
- internal class RetryForeverTestMessageHandler : IMessageHandler<RetryForeverTestMessage>
- {
- public Task Handle(IMessageContext context, RetryForeverTestMessage message)
- {
- InMemoryAuxiliarStorage<RetryForeverTestMessage>.Add(message);
-
- if (InMemoryAuxiliarStorage<RetryForeverTestMessage>.ThrowException)
- {
- throw new RetryForeverTestException();
- }
-
- return Task.CompletedTask;
- }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Handlers/RetrySimpleTestMessageHandler.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Handlers/RetrySimpleTestMessageHandler.cs
deleted file mode 100644
index ae7eefab..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Handlers/RetrySimpleTestMessageHandler.cs
+++ /dev/null
@@ -1,18 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Handlers
-{
- using System.Threading.Tasks;
- using KafkaFlow;
- using KafkaFlow.Retry.IntegrationTests.Core.Exceptions;
- using KafkaFlow.Retry.IntegrationTests.Core.Messages;
- using KafkaFlow.Retry.IntegrationTests.Core.Storages;
-
- internal class RetrySimpleTestMessageHandler : IMessageHandler<RetrySimpleTestMessage>
- {
- public Task Handle(IMessageContext context, RetrySimpleTestMessage message)
- {
- InMemoryAuxiliarStorage<RetrySimpleTestMessage>.Add(message);
-
- throw new RetrySimpleTestException();
- }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Messages/ITestMessage.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Messages/ITestMessage.cs
deleted file mode 100644
index dfa1c274..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Messages/ITestMessage.cs
+++ /dev/null
@@ -1,8 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Messages
-{
- internal interface ITestMessage
- {
- string Key { get; set; }
- string Value { get; set; }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Messages/RetryDurableTestMessage.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Messages/RetryDurableTestMessage.cs
deleted file mode 100644
index 71e3439e..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Messages/RetryDurableTestMessage.cs
+++ /dev/null
@@ -1,14 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Messages
-{
- using System.Runtime.Serialization;
-
- [DataContract]
- internal class RetryDurableTestMessage : ITestMessage
- {
- [DataMember(Order = 1)]
- public string Key { get; set; }
-
- [DataMember(Order = 2)]
- public string Value { get; set; }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Messages/RetryForeverTestMessage.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Messages/RetryForeverTestMessage.cs
deleted file mode 100644
index dc7b677b..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Messages/RetryForeverTestMessage.cs
+++ /dev/null
@@ -1,14 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Messages
-{
- using System.Runtime.Serialization;
-
- [DataContract]
- internal class RetryForeverTestMessage : ITestMessage
- {
- [DataMember(Order = 1)]
- public string Key { get; set; }
-
- [DataMember(Order = 2)]
- public string Value { get; set; }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Messages/RetrySimpleTestMessage.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Messages/RetrySimpleTestMessage.cs
deleted file mode 100644
index 0327b303..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Messages/RetrySimpleTestMessage.cs
+++ /dev/null
@@ -1,8 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Messages
-{
- internal class RetrySimpleTestMessage : ITestMessage
- {
- public string Key { get; set; }
- public string Value { get; set; }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableGuaranteeOrderedConsumptionMongoDbProducer.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableGuaranteeOrderedConsumptionMongoDbProducer.cs
deleted file mode 100644
index 21f2afc6..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableGuaranteeOrderedConsumptionMongoDbProducer.cs
+++ /dev/null
@@ -1,6 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Producers
-{
- internal class RetryDurableGuaranteeOrderedConsumptionMongoDbProducer
- {
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableGuaranteeOrderedConsumptionPostgresProducer.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableGuaranteeOrderedConsumptionPostgresProducer.cs
deleted file mode 100644
index f2da3a84..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableGuaranteeOrderedConsumptionPostgresProducer.cs
+++ /dev/null
@@ -1,6 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Producers
-{
- internal class RetryDurableGuaranteeOrderedConsumptionPostgresProducer
- {
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableGuaranteeOrderedConsumptionSqlServerProducer.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableGuaranteeOrderedConsumptionSqlServerProducer.cs
deleted file mode 100644
index 799bba18..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableGuaranteeOrderedConsumptionSqlServerProducer.cs
+++ /dev/null
@@ -1,6 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Producers
-{
- internal class RetryDurableGuaranteeOrderedConsumptionSqlServerProducer
- {
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableLatestConsumptionMongoDbProducer.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableLatestConsumptionMongoDbProducer.cs
deleted file mode 100644
index 6c46e157..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableLatestConsumptionMongoDbProducer.cs
+++ /dev/null
@@ -1,6 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Producers
-{
- internal class RetryDurableLatestConsumptionMongoDbProducer
- {
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableLatestConsumptionPostgresProducer.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableLatestConsumptionPostgresProducer.cs
deleted file mode 100644
index f8090e98..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableLatestConsumptionPostgresProducer.cs
+++ /dev/null
@@ -1,6 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Producers
-{
- internal class RetryDurableLatestConsumptionPostgresProducer
- {
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableLatestConsumptionSqlServerProducer.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableLatestConsumptionSqlServerProducer.cs
deleted file mode 100644
index 8145925a..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryDurableLatestConsumptionSqlServerProducer.cs
+++ /dev/null
@@ -1,6 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Producers
-{
- internal class RetryDurableLatestConsumptionSqlServerProducer
- {
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryForeverProducer.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryForeverProducer.cs
deleted file mode 100644
index ea9f81e4..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetryForeverProducer.cs
+++ /dev/null
@@ -1,6 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Producers
-{
- internal class RetryForeverProducer
- {
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetrySimpleProducer.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetrySimpleProducer.cs
deleted file mode 100644
index 4b044672..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Producers/RetrySimpleProducer.cs
+++ /dev/null
@@ -1,6 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Producers
-{
- internal class RetrySimpleProducer
- {
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Settings/KafkaSettings.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Settings/KafkaSettings.cs
deleted file mode 100644
index ccf76018..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Settings/KafkaSettings.cs
+++ /dev/null
@@ -1,9 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Settings
-{
- internal class KafkaSettings
- {
- public string Brokers { get; set; }
-
- public string SecurityProtocol { get; set; }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Settings/MongoDbRepositorySettings.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Settings/MongoDbRepositorySettings.cs
deleted file mode 100644
index 6e9ddbf1..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Settings/MongoDbRepositorySettings.cs
+++ /dev/null
@@ -1,13 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Settings
-{
- internal class MongoDbRepositorySettings
- {
- public string ConnectionString { get; set; }
-
- public string DatabaseName { get; set; }
-
- public string RetryQueueCollectionName { get; set; }
-
- public string RetryQueueItemCollectionName { get; set; }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Settings/PostgresRepositorySettings.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Settings/PostgresRepositorySettings.cs
deleted file mode 100644
index 7a35cff4..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Settings/PostgresRepositorySettings.cs
+++ /dev/null
@@ -1,9 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Settings
-{
- internal class PostgresRepositorySettings
- {
- public string ConnectionString { get; set; }
-
- public string DatabaseName { get; set; }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Settings/SqlServerRepositorySettings.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Settings/SqlServerRepositorySettings.cs
deleted file mode 100644
index 45c418f1..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Settings/SqlServerRepositorySettings.cs
+++ /dev/null
@@ -1,10 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Settings
-{
- internal class SqlServerRepositorySettings
- {
- public string ConnectionString { get; set; }
-
- public string DatabaseName { get; set; }
- public string Schema { get; set; }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Assertion/IPhysicalStorageAssert.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Assertion/IPhysicalStorageAssert.cs
deleted file mode 100644
index 2bacf35d..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Assertion/IPhysicalStorageAssert.cs
+++ /dev/null
@@ -1,15 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Storages.Assertion
-{
- using System.Threading.Tasks;
- using KafkaFlow.Retry.IntegrationTests.Core.Messages;
- using KafkaFlow.Retry.IntegrationTests.Core.Storages.Repositories;
-
- internal interface IPhysicalStorageAssert
- {
- Task AssertRetryDurableMessageCreationAsync(RepositoryType repositoryType, RetryDurableTestMessage message, int count);
-
- Task AssertRetryDurableMessageDoneAsync(RepositoryType repositoryType, RetryDurableTestMessage message);
-
- Task AssertRetryDurableMessageRetryingAsync(RepositoryType repositoryType, RetryDurableTestMessage message, int retryCount);
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Assertion/RetryDurableGuaranteeOrderedConsumptionPhysicalStorageAssert.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Assertion/RetryDurableGuaranteeOrderedConsumptionPhysicalStorageAssert.cs
deleted file mode 100644
index 656e0b2c..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Assertion/RetryDurableGuaranteeOrderedConsumptionPhysicalStorageAssert.cs
+++ /dev/null
@@ -1,100 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Storages.Assertion
-{
- using System;
- using System.Linq;
- using System.Threading.Tasks;
- using KafkaFlow.Retry.Durable.Repository.Model;
- using KafkaFlow.Retry.IntegrationTests.Core.Messages;
- using KafkaFlow.Retry.IntegrationTests.Core.Storages.Repositories;
- using MongoDB.Driver;
- using Xunit;
-
- internal class RetryDurableGuaranteeOrderedConsumptionPhysicalStorageAssert : IPhysicalStorageAssert
- {
- private readonly IRepositoryProvider repositoryProvider;
-
- public RetryDurableGuaranteeOrderedConsumptionPhysicalStorageAssert(IRepositoryProvider repositoryProvider)
- {
- this.repositoryProvider = repositoryProvider;
- }
-
- public async Task AssertRetryDurableMessageCreationAsync(RepositoryType repositoryType, RetryDurableTestMessage message, int count)
- {
- var retryQueue = await this
- .repositoryProvider
- .GetRepositoryOfType(repositoryType)
- .GetRetryQueueAsync(message.Key)
- .ConfigureAwait(false);
-
- Assert.True(retryQueue.Id != Guid.Empty, "Retry Durable Creation Get Retry Queue cannot be asserted.");
-
- var retryQueueItems = await this
- .repositoryProvider
- .GetRepositoryOfType(repositoryType)
- .GetRetryQueueItemsAsync(retryQueue.Id, rqi => rqi.Count() != count)
- .ConfigureAwait(false);
-
- Assert.True(retryQueueItems != null, "Retry Durable Creation Get Retry Queue Item Message cannot be asserted.");
-
- Assert.Equal(0, retryQueueItems.Sum(i => i.AttemptsCount));
- Assert.Equal(retryQueueItems.Count() - 1, retryQueueItems.Max(i => i.Sort));
- Assert.True(Enum.Equals(retryQueue.Status, RetryQueueStatus.Active));
- Assert.All(retryQueueItems, i => Enum.Equals(i.Status, RetryQueueItemStatus.Waiting));
- }
-
- public async Task AssertRetryDurableMessageDoneAsync(RepositoryType repositoryType, RetryDurableTestMessage message)
- {
- var retryQueue = await this
- .repositoryProvider
- .GetRepositoryOfType(repositoryType)
- .GetRetryQueueAsync(message.Key)
- .ConfigureAwait(false);
-
- Assert.True(retryQueue.Id != Guid.Empty, "Retry Durable Done Get Retry Queue cannot be asserted.");
-
- var retryQueueItems = await this
- .repositoryProvider
- .GetRepositoryOfType(repositoryType)
- .GetRetryQueueItemsAsync(
- retryQueue.Id,
- items =>
- {
- return items.All(item => item.Status != RetryQueueItemStatus.Done);
- }).ConfigureAwait(false);
-
- Assert.True(retryQueueItems != null, "Retry Durable Done Get Retry Queue Item Message cannot be asserted.");
-
- Assert.Equal(RetryQueueStatus.Done, retryQueue.Status);
- }
-
- public async Task AssertRetryDurableMessageRetryingAsync(RepositoryType repositoryType, RetryDurableTestMessage message, int retryCount)
- {
- var retryQueue = await this
- .repositoryProvider
- .GetRepositoryOfType(repositoryType)
- .GetRetryQueueAsync(message.Key).ConfigureAwait(false);
-
- Assert.True(retryQueue.Id != Guid.Empty, "Retry Durable Retrying Get Retry Queue cannot be asserted.");
-
- var retryQueueItems = await this
- .repositoryProvider
- .GetRepositoryOfType(repositoryType)
- .GetRetryQueueItemsAsync(
- retryQueue.Id,
- rqi =>
- {
- return
- rqi.Single(x => x.Sort == rqi.Min(i => i.Sort)).LastExecution >
- rqi.Single(x => x.Sort == rqi.Max(i => i.Sort)).LastExecution;
- }).ConfigureAwait(false);
-
- Assert.True(retryQueueItems != null, "Retry Durable Retrying Get Retry Queue Item Message cannot be asserted.");
-
- Assert.Equal(retryCount, retryQueueItems.Where(x => x.Sort == 0).Sum(i => i.AttemptsCount));
- Assert.Equal(0, retryQueueItems.Where(x => x.Sort != 0).Sum(i => i.AttemptsCount));
- Assert.Equal(retryQueueItems.Count() - 1, retryQueueItems.Max(i => i.Sort));
- Assert.True(Enum.Equals(retryQueue.Status, RetryQueueStatus.Active));
- Assert.All(retryQueueItems, i => Enum.Equals(i.Status, RetryQueueItemStatus.Waiting));
- }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Assertion/RetryDurableLatestConsumptionPhysicalStorageAssert.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Assertion/RetryDurableLatestConsumptionPhysicalStorageAssert.cs
deleted file mode 100644
index c44bebd5..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Assertion/RetryDurableLatestConsumptionPhysicalStorageAssert.cs
+++ /dev/null
@@ -1,100 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Storages.Assertion
-{
- using System;
- using System.Linq;
- using System.Threading.Tasks;
- using KafkaFlow.Retry.Durable.Repository.Model;
- using KafkaFlow.Retry.IntegrationTests.Core.Messages;
- using KafkaFlow.Retry.IntegrationTests.Core.Storages.Repositories;
- using MongoDB.Driver;
- using Xunit;
-
- internal class RetryDurableLatestConsumptionPhysicalStorageAssert : IPhysicalStorageAssert
- {
- private readonly IRepositoryProvider repositoryProvider;
-
- public RetryDurableLatestConsumptionPhysicalStorageAssert(IRepositoryProvider repositoryProvider)
- {
- this.repositoryProvider = repositoryProvider;
- }
-
- public async Task AssertRetryDurableMessageCreationAsync(RepositoryType repositoryType, RetryDurableTestMessage message, int count)
- {
- var retryQueue = await this
- .repositoryProvider
- .GetRepositoryOfType(repositoryType)
- .GetRetryQueueAsync(message.Key)
- .ConfigureAwait(false);
-
- Assert.True(retryQueue.Id != Guid.Empty, "Retry Durable Creation Get Retry Queue cannot be asserted.");
-
- var retryQueueItems = await this
- .repositoryProvider
- .GetRepositoryOfType(repositoryType)
- .GetRetryQueueItemsAsync(retryQueue.Id, rqi =>
- {
- return rqi.Count() != count;
- })
- .ConfigureAwait(false);
-
- Assert.True(retryQueueItems != null, "Retry Durable Creation Get Retry Queue Item Message cannot be asserted.");
-
- Assert.Equal(0, retryQueueItems.Sum(i => i.AttemptsCount));
- Assert.Equal(retryQueueItems.Count() - 1, retryQueueItems.Max(i => i.Sort));
- Assert.True(Enum.Equals(retryQueue.Status, RetryQueueStatus.Active));
- Assert.All(retryQueueItems, i => Enum.Equals(i.Status, RetryQueueItemStatus.Waiting));
- }
-
- public async Task AssertRetryDurableMessageDoneAsync(RepositoryType repositoryType, RetryDurableTestMessage message)
- {
- var retryQueue = await this
- .repositoryProvider
- .GetRepositoryOfType(repositoryType)
- .GetRetryQueueAsync(message.Key)
- .ConfigureAwait(false);
-
- Assert.True(retryQueue.Id != Guid.Empty, "Retry Durable Done Get Retry Queue cannot be asserted.");
-
- var retryQueueItems = await this
- .repositoryProvider
- .GetRepositoryOfType(repositoryType)
- .GetRetryQueueItemsAsync(
- retryQueue.Id,
- rqi =>
- {
- return rqi.OrderBy(x => x.Sort).Last().Status != RetryQueueItemStatus.Done;
- }).ConfigureAwait(false);
-
- Assert.True(retryQueueItems != null, "Retry Durable Done Get Retry Queue Item Message cannot be asserted.");
-
- Assert.True(Enum.Equals(retryQueue.Status, RetryQueueStatus.Done));
- }
-
- public async Task AssertRetryDurableMessageRetryingAsync(RepositoryType repositoryType, RetryDurableTestMessage message, int retryCount)
- {
- var retryQueue = await this
- .repositoryProvider
- .GetRepositoryOfType(repositoryType)
- .GetRetryQueueAsync(message.Key).ConfigureAwait(false);
-
- Assert.True(retryQueue.Id != Guid.Empty, "Retry Durable Retrying Get Retry Queue cannot be asserted.");
-
- var retryQueueItems = await this
- .repositoryProvider
- .GetRepositoryOfType(repositoryType)
- .GetRetryQueueItemsAsync(
- retryQueue.Id,
- rqi =>
- {
- return rqi.OrderBy(x => x.Sort).Last().AttemptsCount != retryCount;
- }).ConfigureAwait(false);
-
- Assert.True(retryQueueItems != null, "Retry Durable Retrying Get Retry Queue Item Message cannot be asserted.");
-
- Assert.True(Enum.Equals(retryQueue.Status, RetryQueueStatus.Active));
- Assert.Equal(retryQueueItems.Count() - 1, retryQueueItems.Max(i => i.Sort));
- Assert.Equal(RetryQueueItemStatus.Waiting, retryQueueItems.OrderBy(x => x.Sort).Last().Status);
- Assert.All(retryQueueItems.OrderByDescending(x => x.Sort).Skip(1), i => Enum.Equals(i.Status, RetryQueueItemStatus.Cancelled));
- }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/InMemoryAuxiliarStorage.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/InMemoryAuxiliarStorage.cs
deleted file mode 100644
index 135de05e..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/InMemoryAuxiliarStorage.cs
+++ /dev/null
@@ -1,44 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Storages
-{
- using System;
- using System.Collections.Concurrent;
- using System.Diagnostics;
- using System.Linq;
- using System.Threading.Tasks;
- using KafkaFlow.Retry.IntegrationTests.Core.Messages;
- using Xunit;
-
- internal static class InMemoryAuxiliarStorage<T> where T : ITestMessage
- {
- private const int TimeoutSec = 60;
- private static readonly ConcurrentBag<T> Message = new ConcurrentBag<T>();
-
- public static bool ThrowException { get; set; }
-
- public static void Add(T message)
- {
- Message.Add(message);
- }
-
- public static async Task AssertCountMessageAsync(T message, int count)
- {
- var start = DateTime.Now;
-
- while (Message.Count(x => x.Key == message.Key && x.Value == message.Value) != count)
- {
- if (DateTime.Now.Subtract(start).TotalSeconds > TimeoutSec && !Debugger.IsAttached)
- {
- Assert.True(false, "Message not received.");
- return;
- }
-
- await Task.Delay(100).ConfigureAwait(false);
- }
- }
-
- public static void Clear()
- {
- Message.Clear();
- }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/IRepository.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/IRepository.cs
deleted file mode 100644
index d98bfb7f..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/IRepository.cs
+++ /dev/null
@@ -1,25 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Storages.Repositories
-{
- using System;
- using System.Collections.Generic;
- using System.Threading.Tasks;
- using KafkaFlow.Retry.Durable.Repository;
- using KafkaFlow.Retry.Durable.Repository.Model;
-
- public interface IRepository
- {
- RepositoryType RepositoryType { get; }
-
- IRetryDurableQueueRepositoryProvider RetryQueueDataProvider { get; }
-
- Task CleanDatabaseAsync();
-
- Task CreateQueueAsync(RetryQueue queue);
-
- Task<RetryQueue> GetAllRetryQueueDataAsync(string queueGroupKey);
-
- Task<RetryQueue> GetRetryQueueAsync(string queueGroupKey);
-
- Task<IList<RetryQueueItem>> GetRetryQueueItemsAsync(Guid retryQueueId, Func<IEnumerable<RetryQueueItem>, bool> stopCondition);
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/IRepositoryProvider.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/IRepositoryProvider.cs
deleted file mode 100644
index 42d12393..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/IRepositoryProvider.cs
+++ /dev/null
@@ -1,14 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Storages.Repositories
-{
- using System;
- using System.Collections.Generic;
-
- internal interface IRepositoryProvider
- {
- IEnumerable<IRepository> GetAllRepositories();
-
- IRepository GetRepositoryOfType(RepositoryType repositoryType);
-
-
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/MongoDbRepository.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/MongoDbRepository.cs
deleted file mode 100644
index 45907e25..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/MongoDbRepository.cs
+++ /dev/null
@@ -1,206 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Storages.Repositories
-{
- using System;
- using System.Collections.Generic;
- using System.Diagnostics;
- using System.Linq;
- using System.Threading.Tasks;
- using Dawn;
- using KafkaFlow.Retry.Durable.Repository;
- using KafkaFlow.Retry.Durable.Repository.Model;
- using KafkaFlow.Retry.MongoDb;
- using KafkaFlow.Retry.MongoDb.Adapters;
- using KafkaFlow.Retry.MongoDb.Model;
- using MongoDB.Driver;
-
- internal class MongoDbRepository : IRepository
- {
- private const int TimeoutSec = 60;
- private readonly string databaseName;
-
- private readonly MongoClient mongoClient;
- private readonly QueuesAdapter queuesAdapter;
- private readonly IMongoCollection<RetryQueueItemDbo> retryQueueItemsCollection;
- private readonly IMongoCollection<RetryQueueDbo> retryQueuesCollection;
-
- public MongoDbRepository(
- string connectionString,
- string dbName,
- string retryQueueCollectionName,
- string retryQueueItemCollectionName)
- {
- this.databaseName = dbName;
- this.mongoClient = new MongoClient(connectionString);
- this.retryQueuesCollection = mongoClient.GetDatabase(dbName).GetCollection<RetryQueueDbo>(retryQueueCollectionName);
- this.retryQueueItemsCollection = mongoClient.GetDatabase(dbName).GetCollection<RetryQueueItemDbo>(retryQueueItemCollectionName);
-
- var dataProviderCreationResult = new MongoDbDataProviderFactory().TryCreate(
- new MongoDbSettings
- {
- ConnectionString = connectionString,
- DatabaseName = dbName,
- RetryQueueCollectionName = retryQueueCollectionName,
- RetryQueueItemCollectionName = retryQueueItemCollectionName
- });
-
- this.queuesAdapter =
- new QueuesAdapter(
- new ItemAdapter(
- new MessageAdapter(
- new HeaderAdapter())));
-
- Guard.Argument(dataProviderCreationResult, nameof(dataProviderCreationResult)).NotNull();
- Guard.Argument(dataProviderCreationResult.Success, nameof(dataProviderCreationResult.Success)).True(dataProviderCreationResult.Message);
-
- this.RetryQueueDataProvider = dataProviderCreationResult.Result;
- }
-
- public RepositoryType RepositoryType => RepositoryType.MongoDb;
-
- public IRetryDurableQueueRepositoryProvider RetryQueueDataProvider { get; }
-
- public async Task CleanDatabaseAsync()
- {
- await mongoClient.DropDatabaseAsync(databaseName).ConfigureAwait(false);
- }
-
- public async Task CreateQueueAsync(RetryQueue queue)
- {
- var queueDbo = new RetryQueueDbo
- {
- Id = queue.Id,
- CreationDate = queue.CreationDate,
- LastExecution = queue.LastExecution,
- QueueGroupKey = queue.QueueGroupKey,
- SearchGroupKey = queue.SearchGroupKey,
- Status = queue.Status,
- };
-
- await this.retryQueuesCollection.InsertOneAsync(queueDbo);
-
- foreach (var item in queue.Items)
- {
- var itemDbo = new RetryQueueItemDbo
- {
- Id = item.Id,
- CreationDate = item.CreationDate,
- LastExecution = item.LastExecution,
- ModifiedStatusDate = item.ModifiedStatusDate,
- AttemptsCount = item.AttemptsCount,
- RetryQueueId = queue.Id,
- Status = item.Status,
- SeverityLevel = item.SeverityLevel,
- Description = item.Description,
- Message = new RetryQueueItemMessageDbo
- {
- Headers = item.Message.Headers
- .Select(h => new RetryQueueHeaderDbo
- {
- Key = h.Key,
- Value = h.Value
- }),
- Key = item.Message.Key,
- Offset = item.Message.Offset,
- Partition = item.Message.Partition,
- TopicName = item.Message.TopicName,
- UtcTimeStamp = item.Message.UtcTimeStamp,
- Value = item.Message.Value
- },
- Sort = item.Sort
- };
-
- await this.retryQueueItemsCollection.InsertOneAsync(itemDbo);
- }
- }
-
- public async Task<RetryQueue> GetAllRetryQueueDataAsync(string queueGroupKey)
- {
- var queueCursor = await this.retryQueuesCollection.FindAsync(x => x.QueueGroupKey == queueGroupKey);
-
- var queue = await queueCursor.FirstOrDefaultAsync();
-
- if (queue is null)
- {
- return null;
- }
-
- var itemsCursor = await this.retryQueueItemsCollection.FindAsync(x => x.RetryQueueId == queue.Id);
-
- var items = await itemsCursor.ToListAsync();
-
- return this.queuesAdapter.Adapt(new[] { queue }, items).First();
- }
-
- public async Task<RetryQueue> GetRetryQueueAsync(string queueGroupKey)
- {
- var start = DateTime.Now;
- Guid retryQueueId = Guid.Empty;
- RetryQueueDbo retryQueueDbo = new RetryQueueDbo();
- do
- {
- if (DateTime.Now.Subtract(start).TotalSeconds > TimeoutSec && !Debugger.IsAttached)
- {
- return null;
- }
-
- await Task.Delay(100).ConfigureAwait(false);
-
- var retryQueueCursor = await this.retryQueuesCollection.FindAsync(x => x.QueueGroupKey.Contains(queueGroupKey)).ConfigureAwait(false);
- var retryQueues = await retryQueueCursor.ToListAsync().ConfigureAwait(false);
- if (retryQueues.Any())
- {
- retryQueueDbo = retryQueues.Single();
- retryQueueId = retryQueueDbo.Id;
- }
- } while (retryQueueId == Guid.Empty);
-
- return new RetryQueue(
- retryQueueDbo.Id,
- retryQueueDbo.SearchGroupKey,
- retryQueueDbo.QueueGroupKey,
- retryQueueDbo.CreationDate,
- retryQueueDbo.LastExecution,
- retryQueueDbo.Status);
- }
-
- public async Task<IList<RetryQueueItem>> GetRetryQueueItemsAsync(
- Guid retryQueueId,
- Func<IEnumerable<RetryQueueItem>, bool> stopCondition)
- {
- var start = DateTime.Now;
- List<RetryQueueItem> retryQueueItems = null;
- do
- {
- if (DateTime.Now.Subtract(start).TotalSeconds > TimeoutSec && !Debugger.IsAttached)
- {
- return null;
- }
-
- await Task.Delay(100).ConfigureAwait(false);
-
- var retryQueueItemsCursor = await retryQueueItemsCollection.FindAsync(x => x.RetryQueueId == retryQueueId).ConfigureAwait(false);
- var retryQueueItemsDbo = await retryQueueItemsCursor
- .ToListAsync()
- .ConfigureAwait(false);
-
- retryQueueItems = retryQueueItemsDbo
- .Select(
- x =>
- {
- return new RetryQueueItem(
- x.Id,
- x.AttemptsCount,
- x.CreationDate,
- x.Sort,
- x.LastExecution,
- x.ModifiedStatusDate,
- x.Status,
- x.SeverityLevel,
- x.Description);
- }).ToList();
- } while (stopCondition(retryQueueItems));
-
- return retryQueueItems ?? new List<RetryQueueItem>();
- }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/PostgresRepository.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/PostgresRepository.cs
deleted file mode 100644
index 17442046..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/PostgresRepository.cs
+++ /dev/null
@@ -1,287 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Storages.Repositories
-{
- using System;
- using System.Collections.Generic;
- using System.Diagnostics;
- using System.Linq;
- using System.Threading.Tasks;
- using KafkaFlow.Retry.Durable.Common;
- using KafkaFlow.Retry.Durable.Repository;
- using KafkaFlow.Retry.Durable.Repository.Model;
- using KafkaFlow.Retry.Postgres;
- using KafkaFlow.Retry.Postgres.Model;
- using KafkaFlow.Retry.Postgres.Readers;
- using KafkaFlow.Retry.Postgres.Readers.Adapters;
- using KafkaFlow.Retry.Postgres.Repositories;
- using Npgsql;
-
- internal class PostgresRepository : IRepository
- {
- private const int TimeoutSec = 60;
- private readonly ConnectionProvider connectionProvider;
-
- private readonly IRetryQueueItemMessageHeaderRepository retryQueueItemMessageHeaderRepository;
- private readonly IRetryQueueItemMessageRepository retryQueueItemMessageRepository;
- private readonly IRetryQueueItemRepository retryQueueItemRepository;
- private readonly RetryQueueReader retryQueueReader;
- private readonly IRetryQueueRepository retryQueueRepository;
- private readonly PostgresDbSettings postgresDbSettings;
-
- public PostgresRepository(
- string connectionString,
- string dbName)
- {
- this.postgresDbSettings = new PostgresDbSettings(connectionString, dbName);
-
- this.RetryQueueDataProvider = new PostgresDbDataProviderFactory().Create(this.postgresDbSettings);
-
- this.retryQueueItemMessageHeaderRepository = new RetryQueueItemMessageHeaderRepository();
- this.retryQueueItemMessageRepository = new RetryQueueItemMessageRepository();
- this.retryQueueItemRepository = new RetryQueueItemRepository();
- this.retryQueueRepository = new RetryQueueRepository();
-
- this.retryQueueReader = new RetryQueueReader(
- new RetryQueueAdapter(),
- new RetryQueueItemAdapter(),
- new RetryQueueItemMessageAdapter(),
- new RetryQueueItemMessageHeaderAdapter()
- );
-
- this.connectionProvider = new ConnectionProvider();
- }
-
- public RepositoryType RepositoryType => RepositoryType.Postgres;
-
- public IRetryDurableQueueRepositoryProvider RetryQueueDataProvider { get; }
-
- public async Task CleanDatabaseAsync()
- {
- using var dbConnection = this.connectionProvider.Create(this.postgresDbSettings);
- using var command = dbConnection.CreateCommand();
- command.CommandType = System.Data.CommandType.Text;
- command.CommandText = @"
- delete from retry_item_message_headers;
- delete from item_messages;
- delete from retry_queues;
- delete from retry_queue_items;
- ";
- await command.ExecuteNonQueryAsync();
- }
-
- public async Task CreateQueueAsync(RetryQueue queue)
- {
- var queueDbo = new RetryQueueDbo
- {
- IdDomain = queue.Id,
- CreationDate = queue.CreationDate,
- LastExecution = queue.LastExecution,
- QueueGroupKey = queue.QueueGroupKey,
- SearchGroupKey = queue.SearchGroupKey,
- Status = queue.Status,
- };
-
- using var dbConnection = this.connectionProvider.CreateWithinTransaction(this.postgresDbSettings);
-
- var queueId = await this.retryQueueRepository.AddAsync(dbConnection, queueDbo);
-
- foreach (var item in queue.Items)
- {
- // queue item
- var itemDbo = new RetryQueueItemDbo
- {
- IdDomain = item.Id,
- CreationDate = item.CreationDate,
- LastExecution = item.LastExecution,
- ModifiedStatusDate = item.ModifiedStatusDate,
- AttemptsCount = item.AttemptsCount,
- RetryQueueId = queueId,
- DomainRetryQueueId = queue.Id,
- Status = item.Status,
- SeverityLevel = item.SeverityLevel,
- Description = item.Description
- };
-
- var itemId = await this.retryQueueItemRepository.AddAsync(dbConnection, itemDbo);
-
- // item message
- var messageDbo = new RetryQueueItemMessageDbo
- {
- IdRetryQueueItem = itemId,
- Key = item.Message.Key,
- Offset = item.Message.Offset,
- Partition = item.Message.Partition,
- TopicName = item.Message.TopicName,
- UtcTimeStamp = item.Message.UtcTimeStamp,
- Value = item.Message.Value
- };
-
- await this.retryQueueItemMessageRepository.AddAsync(dbConnection, messageDbo);
-
- // message headers
- var messageHeadersDbos = item.Message.Headers
- .Select(h => new RetryQueueItemMessageHeaderDbo
- {
- RetryQueueItemMessageId = itemId,
- Key = h.Key,
- Value = h.Value
- });
-
- await this.retryQueueItemMessageHeaderRepository.AddAsync(dbConnection, messageHeadersDbos);
- }
-
- dbConnection.Commit();
- }
-
- public async Task<RetryQueue> GetAllRetryQueueDataAsync(string queueGroupKey)
- {
- using (var dbConnection = this.connectionProvider.Create(this.postgresDbSettings))
- {
- var retryQueueDbo = await this.retryQueueRepository.GetQueueAsync(dbConnection, queueGroupKey);
-
- if (retryQueueDbo is null)
- {
- return null;
- }
-
- var retryQueueItemsDbo = await this.retryQueueItemRepository.GetItemsByQueueOrderedAsync(dbConnection, retryQueueDbo.IdDomain);
- var itemMessagesDbo = await this.retryQueueItemMessageRepository.GetMessagesOrderedAsync(dbConnection, retryQueueItemsDbo);
- var messageHeadersDbo = await this.retryQueueItemMessageHeaderRepository.GetOrderedAsync(dbConnection, itemMessagesDbo);
-
- var dboWrapper = new RetryQueuesDboWrapper
- {
- QueuesDbos = new[] { retryQueueDbo },
- ItemsDbos = retryQueueItemsDbo,
- MessagesDbos = itemMessagesDbo,
- HeadersDbos = messageHeadersDbo
- };
-
- return this.retryQueueReader.Read(dboWrapper).FirstOrDefault();
- }
- }
-
- public async Task<RetryQueue> GetRetryQueueAsync(string queueGroupKey)
- {
- var start = DateTime.Now;
- Guid retryQueueId = Guid.Empty;
- RetryQueue retryQueue;
- do
- {
- if (DateTime.Now.Subtract(start).TotalSeconds > TimeoutSec && !Debugger.IsAttached)
- {
- return null;
- }
-
- await Task.Delay(100).ConfigureAwait(false);
-
- using (var dbConnection = this.connectionProvider.Create(this.postgresDbSettings))
- using (var command = dbConnection.CreateCommand())
- {
- command.CommandType = System.Data.CommandType.Text;
- command.CommandText = @"SELECT Id, IdDomain, IdStatus, SearchGroupKey, QueueGroupKey, CreationDate, LastExecution
- FROM retry_queues
- WHERE QueueGroupKey LIKE '%'||@QueueGroupKey
- ORDER BY Id";
-
- command.Parameters.AddWithValue("QueueGroupKey", queueGroupKey);
- retryQueue = await this.ExecuteSingleLineReaderAsync(command).ConfigureAwait(false);
- }
-
- if (retryQueue != null)
- {
- retryQueueId = retryQueue.Id;
- }
- } while (retryQueueId == Guid.Empty);
-
- return retryQueue;
- }
-
- public async Task<IList<RetryQueueItem>> GetRetryQueueItemsAsync(Guid retryQueueId, Func<IEnumerable<RetryQueueItem>, bool> stopCondition)
- {
- var start = DateTime.Now;
- IList<RetryQueueItem> retryQueueItems = null;
- do
- {
- if (DateTime.Now.Subtract(start).TotalSeconds > TimeoutSec && !Debugger.IsAttached)
- {
- return null;
- }
-
- await Task.Delay(100).ConfigureAwait(false);
-
- using (var dbConnection = this.connectionProvider.Create(this.postgresDbSettings))
- using (var command = dbConnection.CreateCommand())
- {
- command.CommandType = System.Data.CommandType.Text;
- command.CommandText = @"SELECT *
- FROM retry_queue_items
- WHERE IdDomainRetryQueue = @IdDomainRetryQueue
- ORDER BY Sort ASC";
-
- command.Parameters.AddWithValue("IdDomainRetryQueue", retryQueueId);
- retryQueueItems = await this.ExecuteReaderAsync(command).ConfigureAwait(false);
- }
- } while (stopCondition(retryQueueItems));
-
- return retryQueueItems ?? new List<RetryQueueItem>();
- }
-
- private async Task<IList<RetryQueueItem>> ExecuteReaderAsync(NpgsqlCommand command)
- {
- var items = new List<RetryQueueItem>();
-
- using (var reader = await command.ExecuteReaderAsync())
- {
- while (await reader.ReadAsync())
- {
- items.Add(this.FillRetryQueueItem(reader));
- }
- }
-
- return items;
- }
-
- private async Task<RetryQueue> ExecuteSingleLineReaderAsync(NpgsqlCommand command)
- {
- using (var reader = await command.ExecuteReaderAsync())
- {
- if (await reader.ReadAsync())
- {
- return this.FillRetryQueue(reader);
- }
- }
-
- return null;
- }
-
- private RetryQueue FillRetryQueue(NpgsqlDataReader reader)
- {
- return new RetryQueue(
- reader.GetGuid(reader.GetOrdinal("IdDomain")),
- reader.GetString(reader.GetOrdinal("SearchGroupKey")),
- reader.GetString(reader.GetOrdinal("QueueGroupKey")),
- reader.GetDateTime(reader.GetOrdinal("CreationDate")),
- reader.GetDateTime(reader.GetOrdinal("LastExecution")),
- (RetryQueueStatus)reader.GetByte(reader.GetOrdinal("IdStatus"))
- );
- }
-
- private RetryQueueItem FillRetryQueueItem(NpgsqlDataReader reader)
- {
- var lastExecutionOrdinal = reader.GetOrdinal("LastExecution");
- var modifiedStatusDateOrdinal = reader.GetOrdinal("ModifiedStatusDate");
- var descriptionOrdinal = reader.GetOrdinal("Description");
-
- return new RetryQueueItem(
- reader.GetGuid(reader.GetOrdinal("IdDomain")),
- reader.GetInt32(reader.GetOrdinal("AttemptsCount")),
- reader.GetDateTime(reader.GetOrdinal("CreationDate")),
- reader.GetInt32(reader.GetOrdinal("Sort")),
- reader.IsDBNull(lastExecutionOrdinal) ? null : (DateTime?)reader.GetDateTime(lastExecutionOrdinal),
- reader.IsDBNull(modifiedStatusDateOrdinal) ? null : (DateTime?)reader.GetDateTime(modifiedStatusDateOrdinal),
- (RetryQueueItemStatus)reader.GetByte(reader.GetOrdinal("IdItemStatus")),
- (SeverityLevel)reader.GetByte(reader.GetOrdinal("IdSeverityLevel")),
- reader.IsDBNull(descriptionOrdinal) ? null : reader.GetString(descriptionOrdinal)
- );
- }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/RepositoryProvider.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/RepositoryProvider.cs
deleted file mode 100644
index 79fff968..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/RepositoryProvider.cs
+++ /dev/null
@@ -1,22 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Storages.Repositories
-{
- using System.Collections.Generic;
- using System.Linq;
-
- internal class RepositoryProvider : IRepositoryProvider
- {
- private readonly IEnumerable repositories;
-
- public RepositoryProvider(IEnumerable repositories)
- {
- this.repositories = repositories;
- }
-
- public IEnumerable GetAllRepositories() => this.repositories;
-
- public IRepository GetRepositoryOfType(RepositoryType repositoryType)
- {
- return this.repositories.Single(r => r.RepositoryType == repositoryType);
- }
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/RepositoryType.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/RepositoryType.cs
deleted file mode 100644
index 36f94479..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/RepositoryType.cs
+++ /dev/null
@@ -1,11 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Storages.Repositories
-{
- public enum RepositoryType
- {
- Unknown = 0,
-
- SqlServer = 1,
- MongoDb = 2,
- Postgres = 3
- }
-}
\ No newline at end of file
diff --git a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/SqlServerRepository.cs b/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/SqlServerRepository.cs
deleted file mode 100644
index 483c53c4..00000000
--- a/src/KafkaFlow.Retry.IntegrationTests/Core/Storages/Repositories/SqlServerRepository.cs
+++ /dev/null
@@ -1,287 +0,0 @@
-namespace KafkaFlow.Retry.IntegrationTests.Core.Storages.Repositories
-{
- using System;
- using System.Collections.Generic;
- using Microsoft.Data.SqlClient;
- using System.Diagnostics;
- using System.Linq;
- using System.Threading.Tasks;
- using KafkaFlow.Retry.Durable.Common;
- using KafkaFlow.Retry.Durable.Repository;
- using KafkaFlow.Retry.Durable.Repository.Model;
- using KafkaFlow.Retry.SqlServer;
- using KafkaFlow.Retry.SqlServer.Model;
- using KafkaFlow.Retry.SqlServer.Readers;
- using KafkaFlow.Retry.SqlServer.Readers.Adapters;
- using KafkaFlow.Retry.SqlServer.Repositories;
-
- internal class SqlServerRepository : IRepository
- {
- private const string schema = "dbo";
- private const int TimeoutSec = 60;
- private readonly ConnectionProvider connectionProvider;
- private readonly IRetryQueueItemMessageHeaderRepository retryQueueItemMessageHeaderRepository;
- private readonly IRetryQueueItemMessageRepository retryQueueItemMessageRepository;
- private readonly IRetryQueueItemRepository retryQueueItemRepository;
- private readonly RetryQueueReader retryQueueReader;
- private readonly IRetryQueueRepository retryQueueRepository;
- private readonly SqlServerDbSettings sqlServerDbSettings;
-
- public SqlServerRepository(
- string connectionString,
- string dbName)
- {
- this.sqlServerDbSettings = new SqlServerDbSettings(connectionString, dbName, schema);
-
- this.RetryQueueDataProvider = new SqlServerDbDataProviderFactory().Create(this.sqlServerDbSettings);
-
- this.retryQueueItemMessageHeaderRepository = new RetryQueueItemMessageHeaderRepository();
- this.retryQueueItemMessageRepository = new RetryQueueItemMessageRepository();
- this.retryQueueItemRepository = new RetryQueueItemRepository();
- this.retryQueueRepository = new RetryQueueRepository();
-
- this.retryQueueReader = new RetryQueueReader(
- new RetryQueueAdapter(),
- new RetryQueueItemAdapter(),
- new RetryQueueItemMessageAdapter(),
- new RetryQueueItemMessageHeaderAdapter()
- );
-
- this.connectionProvider = new ConnectionProvider();
- }
-
- public RepositoryType RepositoryType => RepositoryType.SqlServer;
-
- public IRetryDurableQueueRepositoryProvider RetryQueueDataProvider { get; }
-
- public async Task CleanDatabaseAsync()
- {
- using var dbConnection = this.connectionProvider.Create(this.sqlServerDbSettings);
- using var command = dbConnection.CreateCommand();
- command.CommandType = System.Data.CommandType.Text;
- command.CommandText = @"
- delete from [dbo].[RetryItemMessageHeaders];
- delete from [dbo].[ItemMessages];
- delete from [dbo].[RetryQueues];
- delete from [dbo].[RetryQueueItems];
- ";
- await command.ExecuteNonQueryAsync();
- }
-
- public async Task CreateQueueAsync(RetryQueue queue)
- {
- var queueDbo = new RetryQueueDbo
- {
- IdDomain = queue.Id,
- CreationDate = queue.CreationDate,
- LastExecution = queue.LastExecution,
- QueueGroupKey = queue.QueueGroupKey,
- SearchGroupKey = queue.SearchGroupKey,
- Status = queue.Status,
- };
-
- using var dbConnection = this.connectionProvider.CreateWithinTransaction(this.sqlServerDbSettings);
-
- var queueId = await this.retryQueueRepository.AddAsync(dbConnection, queueDbo);
-
- foreach (var item in queue.Items)
- {
- // queue item
- var itemDbo = new RetryQueueItemDbo
- {
- IdDomain = item.Id,
- CreationDate = item.CreationDate,
- LastExecution = item.LastExecution,
- ModifiedStatusDate = item.ModifiedStatusDate,
- AttemptsCount = item.AttemptsCount,
- RetryQueueId = queueId,
- DomainRetryQueueId = queue.Id,
- Status = item.Status,
- SeverityLevel = item.SeverityLevel,
- Description = item.Description
- };
-
- var itemId = await this.retryQueueItemRepository.AddAsync(dbConnection, itemDbo);
-
- // item message
- var messageDbo = new RetryQueueItemMessageDbo
- {
- IdRetryQueueItem = itemId,
- Key = item.Message.Key,
- Offset = item.Message.Offset,
- Partition = item.Message.Partition,
- TopicName = item.Message.TopicName,
- UtcTimeStamp = item.Message.UtcTimeStamp,
- Value = item.Message.Value
- };
-
- await this.retryQueueItemMessageRepository.AddAsync(dbConnection, messageDbo);
-
- // message headers
- var messageHeadersDbos = item.Message.Headers
- .Select(h => new RetryQueueItemMessageHeaderDbo
- {
- RetryQueueItemMessageId = itemId,
- Key = h.Key,
- Value = h.Value
- });
-
- await this.retryQueueItemMessageHeaderRepository.AddAsync(dbConnection, messageHeadersDbos);
- }
-
- dbConnection.Commit();
- }
-
- public async Task