Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add transient failure retry to SharedConcurrencyLimitTest #7591

Merged
merged 1 commit into from
Nov 1, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ public static void Retry(Action func, Func<Exception, bool> shouldRetry, ITestOu
}
}

/// <summary>
/// Invokes <paramref name="func"/> and returns its result, retrying when
/// <paramref name="shouldRetry"/> classifies the thrown exception as transient,
/// up to <paramref name="maxRetryCount"/> total attempts.
/// </summary>
/// <typeparam name="T">Result type produced by <paramref name="func"/>.</typeparam>
/// <param name="func">The asynchronous operation to execute.</param>
/// <param name="shouldRetry">Predicate deciding whether a given exception warrants another attempt.</param>
/// <param name="outputHelper">xUnit output sink; each attempt number is logged here.</param>
/// <param name="maxRetryCount">Maximum number of attempts before the exception propagates.</param>
/// <returns>The result of the first successful invocation of <paramref name="func"/>.</returns>
public static async Task<T> RetryAsync<T>(Func<Task<T>> func, Func<Exception, bool> shouldRetry, ITestOutputHelper outputHelper, int maxRetryCount = 3)
{
    int attemptIteration = 0;
    while (true)
    {
        // NOTE(review): increment reconstructed from the collapsed diff region; without it
        // the retry guard below could never exhaust — confirm against the merged file.
        attemptIteration++;
        outputHelper.WriteLine("===== Attempt #{0} =====", attemptIteration);
        try
        {
            return await func();
        }
        // Exception filter: swallow and loop only while attempts remain AND the failure
        // is transient; otherwise the exception propagates with its original stack trace.
        catch (Exception ex) when (attemptIteration < maxRetryCount && shouldRetry(ex))
        {
        }
    }
}

/// <summary>
/// Non-generic convenience overload: runs <paramref name="func"/> with transient-failure
/// retries by delegating to the generic <see cref="RetryAsync{T}"/> implementation.
/// </summary>
public static async Task RetryAsync(Func<Task> func, Func<Exception, bool> shouldRetry, ITestOutputHelper outputHelper, int maxRetryCount = 3)
{
    // Adapt the result-less delegate to the generic overload by yielding a dummy value.
    Func<Task<object>> wrapped = async () =>
    {
        await func();
        return null;
    };

    await RetryAsync<object>(wrapped, shouldRetry, outputHelper, maxRetryCount);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -235,7 +235,7 @@ await ScenarioRunner.SingleTarget(
int processId = await appRunner.ProcessIdTask;

OperationResponse response1 = await EgressTraceWithDelay(apiClient, processId);
OperationResponse response3 = await EgressTraceWithDelay(apiClient, processId);
OperationResponse response2 = await EgressTraceWithDelay(apiClient, processId);
using HttpResponseMessage traceDirect1 = await TraceWithDelay(apiClient, processId);
Assert.Equal(HttpStatusCode.OK, traceDirect1.StatusCode);

Expand All @@ -253,10 +253,10 @@ await ScenarioRunner.SingleTarget(
Assert.Equal(await egressDirect.Content.ReadAsStringAsync(), await traceDirect.Content.ReadAsStringAsync());

await CancelEgressOperation(apiClient, response1);
OperationResponse response4 = await EgressTraceWithDelay(apiClient, processId, delay: false);
OperationResponse response3 = await EgressTraceWithDelay(apiClient, processId, delay: false);

await CancelEgressOperation(apiClient, response2);
await CancelEgressOperation(apiClient, response3);
await CancelEgressOperation(apiClient, response4);

await appRunner.SendCommandAsync(TestAppScenarios.AsyncWait.Commands.Continue);
},
Expand Down Expand Up @@ -426,27 +426,36 @@ await ScenarioRunner.SingleTarget(
});
}

/// <summary>
/// Starts an unbounded trace (durationSeconds=-1) for the target process via the HTTP API,
/// retrying the request on transient API failures, then optionally pauses one second
/// (presumably to let collection begin before callers race against it — confirm).
/// </summary>
/// <param name="client">API client used to issue the trace request.</param>
/// <param name="processId">Target process id.</param>
/// <param name="delay">When true, waits one second after the request succeeds.</param>
/// <returns>The HTTP response from the trace endpoint.</returns>
private async Task<HttpResponseMessage> TraceWithDelay(ApiClient client, int processId, bool delay = true)
{
    HttpResponseMessage message = await RetryUtilities.RetryAsync(
        func: () => client.ApiCall(FormattableString.Invariant($"/trace?pid={processId}&durationSeconds=-1")),
        shouldRetry: IsTransientApiFailure,
        outputHelper: _outputHelper);

    if (delay)
    {
        await Task.Delay(TimeSpan.FromSeconds(1));
    }
    return message;
}

/// <summary>
/// Requests a trace for the target process routed directly to the configured file
/// egress provider, retrying the HTTP call on transient API failures.
/// </summary>
/// <param name="client">API client used to issue the request.</param>
/// <param name="processId">Target process id.</param>
/// <returns>The HTTP response from the trace endpoint.</returns>
private async Task<HttpResponseMessage> EgressDirect(ApiClient client, int processId)
{
    return await RetryUtilities.RetryAsync(
        func: () => client.ApiCall(FormattableString.Invariant($"/trace?pid={processId}&egressProvider={FileProviderName}")),
        shouldRetry: IsTransientApiFailure,
        outputHelper: _outputHelper);
}

private static async Task<OperationResponse> EgressTraceWithDelay(ApiClient apiClient, int processId, bool delay = true)
private async Task<OperationResponse> EgressTraceWithDelay(ApiClient apiClient, int processId, bool delay = true)
{
try
{
OperationResponse response = await apiClient.EgressTraceAsync(processId, durationSeconds: -1, FileProviderName);
return response;
return await RetryUtilities.RetryAsync(
func: () => apiClient.EgressTraceAsync(processId, durationSeconds: -1, FileProviderName),
shouldRetry: IsTransientApiFailure,
outputHelper: _outputHelper);
}
finally
{
Expand All @@ -473,6 +482,11 @@ private static void ValidateOperation(OperationStatus expected, OperationSummary
Assert.Equal(expected.IsStoppable, summary.IsStoppable);
}

// When the process could not be found (due to transient responsiveness issues),
// dotnet-monitor APIs will return a 400 status code; treat exactly that case as transient.
private static bool IsTransientApiFailure(Exception ex)
{
    return ex is ValidationProblemDetailsException { StatusCode: HttpStatusCode.BadRequest };
}

public void Dispose()
{
_tempDirectory.Dispose();
Expand Down