Auto-reconnect support for .NET SignalR client (#9535)

This commit is contained in:
Stephen Halter 2019-05-10 11:08:18 -07:00 committed by GitHub
parent 6a630ca904
commit 1cb42b1374
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 2162 additions and 341 deletions

View File

@ -26,12 +26,15 @@ namespace Microsoft.AspNetCore.SignalR.Client
public static readonly System.TimeSpan DefaultServerTimeout;
public HubConnection(Microsoft.AspNetCore.SignalR.Client.IConnectionFactory connectionFactory, Microsoft.AspNetCore.SignalR.Protocol.IHubProtocol protocol, Microsoft.Extensions.Logging.ILoggerFactory loggerFactory) { }
public HubConnection(Microsoft.AspNetCore.SignalR.Client.IConnectionFactory connectionFactory, Microsoft.AspNetCore.SignalR.Protocol.IHubProtocol protocol, System.IServiceProvider serviceProvider, Microsoft.Extensions.Logging.ILoggerFactory loggerFactory) { }
public HubConnection(Microsoft.AspNetCore.SignalR.Client.IConnectionFactory connectionFactory, Microsoft.AspNetCore.SignalR.Protocol.IHubProtocol protocol, System.IServiceProvider serviceProvider, Microsoft.Extensions.Logging.ILoggerFactory loggerFactory, Microsoft.AspNetCore.SignalR.Client.IRetryPolicy reconnectPolicy) { }
public string ConnectionId { get { throw null; } }
public System.TimeSpan HandshakeTimeout { [System.Runtime.CompilerServices.CompilerGeneratedAttribute]get { throw null; } [System.Runtime.CompilerServices.CompilerGeneratedAttribute]set { } }
public System.TimeSpan KeepAliveInterval { [System.Runtime.CompilerServices.CompilerGeneratedAttribute]get { throw null; } [System.Runtime.CompilerServices.CompilerGeneratedAttribute]set { } }
public System.TimeSpan ServerTimeout { [System.Runtime.CompilerServices.CompilerGeneratedAttribute]get { throw null; } [System.Runtime.CompilerServices.CompilerGeneratedAttribute]set { } }
public Microsoft.AspNetCore.SignalR.Client.HubConnectionState State { get { throw null; } }
public event System.Func<System.Exception, System.Threading.Tasks.Task> Closed { add { } remove { } }
public event System.Func<string, System.Threading.Tasks.Task> Reconnected { add { } remove { } }
public event System.Func<System.Exception, System.Threading.Tasks.Task> Reconnecting { add { } remove { } }
[System.Diagnostics.DebuggerStepThroughAttribute]
public System.Threading.Tasks.Task DisposeAsync() { throw null; }
[System.Diagnostics.DebuggerStepThroughAttribute]
@ -65,6 +68,9 @@ namespace Microsoft.AspNetCore.SignalR.Client
public static partial class HubConnectionBuilderExtensions
{
public static Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder ConfigureLogging(this Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder hubConnectionBuilder, System.Action<Microsoft.Extensions.Logging.ILoggingBuilder> configureLogging) { throw null; }
public static Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder WithAutomaticReconnect(this Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder hubConnectionBuilder) { throw null; }
public static Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder WithAutomaticReconnect(this Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder hubConnectionBuilder, Microsoft.AspNetCore.SignalR.Client.IRetryPolicy retryPolicy) { throw null; }
public static Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder WithAutomaticReconnect(this Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder hubConnectionBuilder, System.TimeSpan[] reconnectDelays) { throw null; }
}
public static partial class HubConnectionExtensions
{
@ -152,6 +158,8 @@ namespace Microsoft.AspNetCore.SignalR.Client
{
Disconnected = 0,
Connected = 1,
Connecting = 2,
Reconnecting = 3,
}
public partial interface IConnectionFactory
{
@ -162,4 +170,15 @@ namespace Microsoft.AspNetCore.SignalR.Client
{
Microsoft.AspNetCore.SignalR.Client.HubConnection Build();
}
public partial interface IRetryPolicy
{
System.TimeSpan? NextRetryDelay(Microsoft.AspNetCore.SignalR.Client.RetryContext retryContext);
}
public sealed partial class RetryContext
{
public RetryContext() { }
public System.TimeSpan ElapsedTime { [System.Runtime.CompilerServices.CompilerGeneratedAttribute]get { throw null; } [System.Runtime.CompilerServices.CompilerGeneratedAttribute]set { } }
public long PreviousRetryCount { [System.Runtime.CompilerServices.CompilerGeneratedAttribute]get { throw null; } [System.Runtime.CompilerServices.CompilerGeneratedAttribute]set { } }
public System.Exception RetryReason { [System.Runtime.CompilerServices.CompilerGeneratedAttribute]get { throw null; } [System.Runtime.CompilerServices.CompilerGeneratedAttribute]set { } }
}
}

View File

@ -26,12 +26,15 @@ namespace Microsoft.AspNetCore.SignalR.Client
public static readonly System.TimeSpan DefaultServerTimeout;
public HubConnection(Microsoft.AspNetCore.SignalR.Client.IConnectionFactory connectionFactory, Microsoft.AspNetCore.SignalR.Protocol.IHubProtocol protocol, Microsoft.Extensions.Logging.ILoggerFactory loggerFactory) { }
public HubConnection(Microsoft.AspNetCore.SignalR.Client.IConnectionFactory connectionFactory, Microsoft.AspNetCore.SignalR.Protocol.IHubProtocol protocol, System.IServiceProvider serviceProvider, Microsoft.Extensions.Logging.ILoggerFactory loggerFactory) { }
public HubConnection(Microsoft.AspNetCore.SignalR.Client.IConnectionFactory connectionFactory, Microsoft.AspNetCore.SignalR.Protocol.IHubProtocol protocol, System.IServiceProvider serviceProvider, Microsoft.Extensions.Logging.ILoggerFactory loggerFactory, Microsoft.AspNetCore.SignalR.Client.IRetryPolicy reconnectPolicy) { }
public string ConnectionId { get { throw null; } }
public System.TimeSpan HandshakeTimeout { [System.Runtime.CompilerServices.CompilerGeneratedAttribute]get { throw null; } [System.Runtime.CompilerServices.CompilerGeneratedAttribute]set { } }
public System.TimeSpan KeepAliveInterval { [System.Runtime.CompilerServices.CompilerGeneratedAttribute]get { throw null; } [System.Runtime.CompilerServices.CompilerGeneratedAttribute]set { } }
public System.TimeSpan ServerTimeout { [System.Runtime.CompilerServices.CompilerGeneratedAttribute]get { throw null; } [System.Runtime.CompilerServices.CompilerGeneratedAttribute]set { } }
public Microsoft.AspNetCore.SignalR.Client.HubConnectionState State { get { throw null; } }
public event System.Func<System.Exception, System.Threading.Tasks.Task> Closed { add { } remove { } }
public event System.Func<string, System.Threading.Tasks.Task> Reconnected { add { } remove { } }
public event System.Func<System.Exception, System.Threading.Tasks.Task> Reconnecting { add { } remove { } }
[System.Diagnostics.DebuggerStepThroughAttribute]
public System.Threading.Tasks.Task DisposeAsync() { throw null; }
[System.Diagnostics.DebuggerStepThroughAttribute]
@ -64,6 +67,9 @@ namespace Microsoft.AspNetCore.SignalR.Client
public static partial class HubConnectionBuilderExtensions
{
public static Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder ConfigureLogging(this Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder hubConnectionBuilder, System.Action<Microsoft.Extensions.Logging.ILoggingBuilder> configureLogging) { throw null; }
public static Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder WithAutomaticReconnect(this Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder hubConnectionBuilder) { throw null; }
public static Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder WithAutomaticReconnect(this Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder hubConnectionBuilder, Microsoft.AspNetCore.SignalR.Client.IRetryPolicy retryPolicy) { throw null; }
public static Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder WithAutomaticReconnect(this Microsoft.AspNetCore.SignalR.Client.IHubConnectionBuilder hubConnectionBuilder, System.TimeSpan[] reconnectDelays) { throw null; }
}
public static partial class HubConnectionExtensions
{
@ -140,6 +146,8 @@ namespace Microsoft.AspNetCore.SignalR.Client
{
Disconnected = 0,
Connected = 1,
Connecting = 2,
Reconnecting = 3,
}
public partial interface IConnectionFactory
{
@ -150,4 +158,15 @@ namespace Microsoft.AspNetCore.SignalR.Client
{
Microsoft.AspNetCore.SignalR.Client.HubConnection Build();
}
public partial interface IRetryPolicy
{
System.TimeSpan? NextRetryDelay(Microsoft.AspNetCore.SignalR.Client.RetryContext retryContext);
}
public sealed partial class RetryContext
{
public RetryContext() { }
public System.TimeSpan ElapsedTime { [System.Runtime.CompilerServices.CompilerGeneratedAttribute]get { throw null; } [System.Runtime.CompilerServices.CompilerGeneratedAttribute]set { } }
public long PreviousRetryCount { [System.Runtime.CompilerServices.CompilerGeneratedAttribute]get { throw null; } [System.Runtime.CompilerServices.CompilerGeneratedAttribute]set { } }
public System.Exception RetryReason { [System.Runtime.CompilerServices.CompilerGeneratedAttribute]get { throw null; } [System.Runtime.CompilerServices.CompilerGeneratedAttribute]set { } }
}
}

View File

@ -198,6 +198,48 @@ namespace Microsoft.AspNetCore.SignalR.Client
private static readonly Action<ILogger, string, Exception> _completingStream =
LoggerMessage.Define<string>(LogLevel.Trace, new EventId(66, "CompletingStream"), "Sending completion message for stream '{StreamId}'.");
private static readonly Action<ILogger, HubConnectionState, HubConnectionState, HubConnectionState, Exception> _stateTransitionFailed =
LoggerMessage.Define<HubConnectionState, HubConnectionState, HubConnectionState>(LogLevel.Error, new EventId(67, "StateTransitionFailed"), "The HubConnection failed to transition from the {ExpectedState} state to the {NewState} state because it was actually in the {ActualState} state.");
private static readonly Action<ILogger, Exception> _reconnecting =
LoggerMessage.Define(LogLevel.Information, new EventId(68, "Reconnecting"), "HubConnection reconnecting.");
private static readonly Action<ILogger, Exception> _reconnectingWithError =
LoggerMessage.Define(LogLevel.Error, new EventId(69, "ReconnectingWithError"), "HubConnection reconnecting due to an error.");
private static readonly Action<ILogger, long, TimeSpan, Exception> _reconnected =
LoggerMessage.Define<long, TimeSpan>(LogLevel.Information, new EventId(70, "Reconnected"), "HubConnection reconnected successfully after {ReconnectAttempts} attempts and {ElapsedTime} elapsed.");
private static readonly Action<ILogger, long, TimeSpan, Exception> _reconnectAttemptsExhausted =
LoggerMessage.Define<long, TimeSpan>(LogLevel.Information, new EventId(71, "ReconnectAttemptsExhausted"), "Reconnect retries have been exhausted after {ReconnectAttempts} failed attempts and {ElapsedTime} elapsed. Disconnecting.");
private static readonly Action<ILogger, long, TimeSpan, Exception> _awaitingReconnectRetryDelay =
LoggerMessage.Define<long, TimeSpan>(LogLevel.Trace, new EventId(72, "AwaitingReconnectRetryDelay"), "Reconnect attempt number {ReconnectAttempts} will start in {RetryDelay}.");
private static readonly Action<ILogger, Exception> _reconnectAttemptFailed =
LoggerMessage.Define(LogLevel.Trace, new EventId(73, "ReconnectAttemptFailed"), "Reconnect attempt failed.");
private static readonly Action<ILogger, Exception> _errorDuringReconnectingEvent =
LoggerMessage.Define(LogLevel.Error, new EventId(74, "ErrorDuringReconnectingEvent"), "An exception was thrown in the handler for the Reconnecting event.");
private static readonly Action<ILogger, Exception> _errorDuringReconnectedEvent =
LoggerMessage.Define(LogLevel.Error, new EventId(75, "ErrorDuringReconnectedEvent"), "An exception was thrown in the handler for the Reconnected event.");
private static readonly Action<ILogger, Exception> _errorDuringNextRetryDelay =
LoggerMessage.Define(LogLevel.Error, new EventId(76, "ErrorDuringNextRetryDelay"), $"An exception was thrown from {nameof(IRetryPolicy)}.{nameof(IRetryPolicy.NextRetryDelay)}().");
private static readonly Action<ILogger, Exception> _firstReconnectRetryDelayNull =
LoggerMessage.Define(LogLevel.Warning, new EventId(77, "FirstReconnectRetryDelayNull"), "Connection not reconnecting because the IRetryPolicy returned null on the first reconnect attempt.");
private static readonly Action<ILogger, Exception> _reconnectingStoppedDuringRetryDelay =
LoggerMessage.Define(LogLevel.Trace, new EventId(78, "ReconnectingStoppedDueToStateChangeDuringRetryDelay"), "Connection stopped during reconnect delay. Done reconnecting.");
private static readonly Action<ILogger, Exception> _reconnectingStoppedDuringReconnectAttempt =
LoggerMessage.Define(LogLevel.Trace, new EventId(79, "ReconnectingStoppedDueToStateChangeDuringReconnectAttempt"), "Connection stopped during reconnect attempt. Done reconnecting.");
private static readonly Action<ILogger, HubConnectionState, HubConnectionState, Exception> _attemptingStateTransition =
LoggerMessage.Define<HubConnectionState, HubConnectionState>(LogLevel.Trace, new EventId(80, "AttemptingStateTransition"), "The HubConnection is attempting to transition from the {ExpectedState} state to the {NewState} state.");
public static void PreparingNonBlockingInvocation(ILogger logger, string target, int count)
{
_preparingNonBlockingInvocation(logger, target, count, null);
@ -528,6 +570,76 @@ namespace Microsoft.AspNetCore.SignalR.Client
{
_completingStream(logger, streamId, null);
}
public static void StateTransitionFailed(ILogger logger, HubConnectionState expectedState, HubConnectionState newState, HubConnectionState actualState)
{
_stateTransitionFailed(logger, expectedState, newState, actualState, null);
}
public static void Reconnecting(ILogger logger)
{
_reconnecting(logger, null);
}
public static void ReconnectingWithError(ILogger logger, Exception exception)
{
_reconnectingWithError(logger, exception);
}
public static void Reconnected(ILogger logger, long reconnectAttempts, TimeSpan elapsedTime)
{
_reconnected(logger, reconnectAttempts, elapsedTime, null);
}
public static void ReconnectAttemptsExhausted(ILogger logger, long reconnectAttempts, TimeSpan elapsedTime)
{
_reconnectAttemptsExhausted(logger, reconnectAttempts, elapsedTime, null);
}
public static void AwaitingReconnectRetryDelay(ILogger logger, long reconnectAttempts, TimeSpan retryDelay)
{
_awaitingReconnectRetryDelay(logger, reconnectAttempts, retryDelay, null);
}
public static void ReconnectAttemptFailed(ILogger logger, Exception exception)
{
_reconnectAttemptFailed(logger, exception);
}
public static void ErrorDuringReconnectingEvent(ILogger logger, Exception exception)
{
_errorDuringReconnectingEvent(logger, exception);
}
public static void ErrorDuringReconnectedEvent(ILogger logger, Exception exception)
{
_errorDuringReconnectedEvent(logger, exception);
}
public static void ErrorDuringNextRetryDelay(ILogger logger, Exception exception)
{
_errorDuringNextRetryDelay(logger, exception);
}
public static void FirstReconnectRetryDelayNull(ILogger logger)
{
_firstReconnectRetryDelayNull(logger, null);
}
public static void ReconnectingStoppedDuringRetryDelay(ILogger logger)
{
_reconnectingStoppedDuringRetryDelay(logger, null);
}
public static void ReconnectingStoppedDuringReconnectAttempt(ILogger logger)
{
_reconnectingStoppedDuringReconnectAttempt(logger, null);
}
public static void AttemptingStateTransition(ILogger logger, HubConnectionState expectedState, HubConnectionState newState)
{
_attemptingStateTransition(logger, expectedState, newState, null);
}
}
}
}

File diff suppressed because it is too large Load Diff

View File

@ -2,6 +2,7 @@
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using Microsoft.AspNetCore.SignalR.Client.Internal;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
@ -23,5 +24,44 @@ namespace Microsoft.AspNetCore.SignalR.Client
hubConnectionBuilder.Services.AddLogging(configureLogging);
return hubConnectionBuilder;
}
/// <summary>
/// Configures the <see cref="HubConnection"/> to automatically attempt to reconnect if the connection is lost.
/// The default schedule waits 0, 2, 10 and 30 seconds before each of up to four reconnect attempts.
/// </summary>
/// <param name="hubConnectionBuilder">The <see cref="IHubConnectionBuilder" /> to configure.</param>
/// <returns>The same instance of the <see cref="IHubConnectionBuilder"/> for chaining.</returns>
public static IHubConnectionBuilder WithAutomaticReconnect(this IHubConnectionBuilder hubConnectionBuilder)
{
    // Register the built-in policy with its default delay schedule.
    var defaultPolicy = new DefaultRetryPolicy();
    hubConnectionBuilder.Services.AddSingleton<IRetryPolicy>(defaultPolicy);
    return hubConnectionBuilder;
}
/// <summary>
/// Configures the <see cref="HubConnection"/> to automatically attempt to reconnect if the connection is lost.
/// </summary>
/// <param name="hubConnectionBuilder">The <see cref="IHubConnectionBuilder" /> to configure.</param>
/// <param name="reconnectDelays">
/// An array containing the delays before trying each reconnect attempt.
/// The length of the array represents how many failed reconnect attempts it takes before the client will stop attempting to reconnect.
/// </param>
/// <returns>The same instance of the <see cref="IHubConnectionBuilder"/> for chaining.</returns>
/// <exception cref="ArgumentNullException"><paramref name="reconnectDelays"/> is <see langword="null"/>.</exception>
public static IHubConnectionBuilder WithAutomaticReconnect(this IHubConnectionBuilder hubConnectionBuilder, TimeSpan[] reconnectDelays)
{
    // Fail fast with the caller's parameter name instead of letting DefaultRetryPolicy
    // throw a less descriptive NullReferenceException when it copies the delays.
    if (reconnectDelays == null)
    {
        throw new ArgumentNullException(nameof(reconnectDelays));
    }

    hubConnectionBuilder.Services.AddSingleton<IRetryPolicy>(new DefaultRetryPolicy(reconnectDelays));
    return hubConnectionBuilder;
}
/// <summary>
/// Configures the <see cref="HubConnection"/> to automatically attempt to reconnect if the connection is lost.
/// </summary>
/// <param name="hubConnectionBuilder">The <see cref="IHubConnectionBuilder" /> to configure.</param>
/// <param name="retryPolicy">An <see cref="IRetryPolicy"/> that controls the timing and number of reconnect attempts.</param>
/// <returns>The same instance of the <see cref="IHubConnectionBuilder"/> for chaining.</returns>
/// <exception cref="ArgumentNullException"><paramref name="retryPolicy"/> is <see langword="null"/>.</exception>
public static IHubConnectionBuilder WithAutomaticReconnect(this IHubConnectionBuilder hubConnectionBuilder, IRetryPolicy retryPolicy)
{
    // Validate here so the exception carries this method's parameter name rather than
    // AddSingleton's internal "implementationInstance" name.
    if (retryPolicy == null)
    {
        throw new ArgumentNullException(nameof(retryPolicy));
    }

    hubConnectionBuilder.Services.AddSingleton(retryPolicy);
    return hubConnectionBuilder;
}
}
}
}

View File

@ -15,6 +15,14 @@ namespace Microsoft.AspNetCore.SignalR.Client
/// <summary>
/// The hub connection is connected.
/// </summary>
Connected
Connected,
/// <summary>
/// The hub connection is connecting.
/// </summary>
Connecting,
/// <summary>
/// The hub connection is reconnecting.
/// </summary>
Reconnecting,
}
}
}

View File

@ -0,0 +1,27 @@
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
namespace Microsoft.AspNetCore.SignalR.Client
{
    /// <summary>
    /// An abstraction that controls when the client attempts to reconnect and how many times it does so.
    /// </summary>
    public interface IRetryPolicy
    {
        /// <summary>
        /// If passed to <see cref="HubConnectionBuilderExtensions.WithAutomaticReconnect(IHubConnectionBuilder, IRetryPolicy)"/>,
        /// this will be called after the transport loses a connection to determine if and for how long to wait before the next reconnect attempt.
        /// </summary>
        /// <param name="retryContext">
        /// Information related to the next possible reconnect attempt including the number of consecutive failed retries so far, time spent
        /// reconnecting so far and the error that led to this reconnect attempt.
        /// </param>
        /// <returns>
        /// A <see cref="TimeSpan"/> representing the amount of time to wait from now before starting the next reconnect attempt.
        /// <see langword="null" /> tells the client to stop retrying and close.
        /// </returns>
        TimeSpan? NextRetryDelay(RetryContext retryContext);
    }
}

View File

@ -0,0 +1,41 @@
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
namespace Microsoft.AspNetCore.SignalR.Client.Internal
{
    /// <summary>
    /// The <see cref="IRetryPolicy"/> registered by the parameterless and <see cref="TimeSpan"/>-array overloads of
    /// WithAutomaticReconnect. Returns delays from a fixed schedule indexed by the number of previous retries;
    /// a trailing <see langword="null"/> entry tells the caller to stop reconnecting.
    /// </summary>
    internal class DefaultRetryPolicy : IRetryPolicy
    {
        // Default schedule: retry immediately, then after 2, 10 and 30 seconds, then give up (null).
        // NOTE: despite the name, the entries are TimeSpan values, not millisecond counts.
        internal static readonly TimeSpan?[] DEFAULT_RETRY_DELAYS_IN_MILLISECONDS = new TimeSpan?[]
        {
            TimeSpan.Zero,
            TimeSpan.FromSeconds(2),
            TimeSpan.FromSeconds(10),
            TimeSpan.FromSeconds(30),
            null,
        };

        // The delay schedule for this instance; the final null entry terminates reconnect attempts.
        private readonly TimeSpan?[] _retryDelays;

        public DefaultRetryPolicy()
        {
            _retryDelays = DEFAULT_RETRY_DELAYS_IN_MILLISECONDS;
        }

        public DefaultRetryPolicy(TimeSpan[] retryDelays)
        {
            if (retryDelays == null)
            {
                throw new ArgumentNullException(nameof(retryDelays));
            }

            // Copy the caller's delays and append the null sentinel that ends reconnecting.
            _retryDelays = new TimeSpan?[retryDelays.Length + 1];

            for (var i = 0; i < retryDelays.Length; i++)
            {
                _retryDelays[i] = retryDelays[i];
            }
        }

        public TimeSpan? NextRetryDelay(RetryContext retryContext)
        {
            // PreviousRetryCount indexes directly into the schedule; once it reaches the
            // trailing null entry the caller stops retrying.
            return _retryDelays[retryContext.PreviousRetryCount];
        }
    }
}

View File

@ -3,4 +3,5 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("Microsoft.AspNetCore.SignalR.Client.FunctionalTests, PublicKey=0024000004800000940000000602000000240000525341310004000001000100f33a29044fa9d740c9b3213a93e57c84b472c84e0b8a0e1ae48e67a9f8f6de9d5f7f3d52ac23e48ac51801f1dc950abe901da34d2a9e3baadb141a17c77ef3c565dd5ee5054b91cf63bb3c6ab83f72ab3aafe93d0fc3c2348b764fafb0b1c0733de51459aeab46580384bf9d74c4e28164b7cde247f891ba07891c9d872ad2bb")]
[assembly: InternalsVisibleTo("Microsoft.AspNetCore.SignalR.Client.Tests, PublicKey=0024000004800000940000000602000000240000525341310004000001000100f33a29044fa9d740c9b3213a93e57c84b472c84e0b8a0e1ae48e67a9f8f6de9d5f7f3d52ac23e48ac51801f1dc950abe901da34d2a9e3baadb141a17c77ef3c565dd5ee5054b91cf63bb3c6ab83f72ab3aafe93d0fc3c2348b764fafb0b1c0733de51459aeab46580384bf9d74c4e28164b7cde247f891ba07891c9d872ad2bb")]

View File

@ -0,0 +1,29 @@
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
namespace Microsoft.AspNetCore.SignalR.Client
{
    /// <summary>
    /// The context passed to <see cref="IRetryPolicy.NextRetryDelay(RetryContext)"/> to help the policy determine
    /// how long to wait before the next retry and whether there should be another retry at all.
    /// </summary>
    public sealed class RetryContext
    {
        /// <summary>
        /// The number of consecutive failed retries so far.
        /// </summary>
        public long PreviousRetryCount { get; set; }

        /// <summary>
        /// The amount of time spent retrying so far.
        /// </summary>
        public TimeSpan ElapsedTime { get; set; }

        /// <summary>
        /// The error precipitating the current retry, if any.
        /// </summary>
        public Exception RetryReason { get; set; }
    }
}

View File

@ -36,11 +36,25 @@ namespace Microsoft.AspNetCore.SignalR.Client.FunctionalTests
string path = null,
HttpTransportType? transportType = null,
IHubProtocol protocol = null,
ILoggerFactory loggerFactory = null)
ILoggerFactory loggerFactory = null,
bool withAutomaticReconnect = false)
{
var hubConnectionBuilder = new HubConnectionBuilder();
hubConnectionBuilder.Services.AddSingleton(protocol);
hubConnectionBuilder.WithLoggerFactory(loggerFactory);
if (protocol != null)
{
hubConnectionBuilder.Services.AddSingleton(protocol);
}
if (loggerFactory != null)
{
hubConnectionBuilder.WithLoggerFactory(loggerFactory);
}
if (withAutomaticReconnect)
{
hubConnectionBuilder.WithAutomaticReconnect();
}
var delegateConnectionFactory = new DelegateConnectionFactory(
GetHttpConnectionFactory(url, loggerFactory, path, transportType ?? HttpTransportType.LongPolling | HttpTransportType.WebSockets | HttpTransportType.ServerSentEvents),
@ -1617,6 +1631,195 @@ namespace Microsoft.AspNetCore.SignalR.Client.FunctionalTests
}
}
[Theory]
[MemberData(nameof(TransportTypes))]
public async Task CanAutomaticallyReconnect(HttpTransportType transportType)
{
    // The reconnect is triggered by faking a server timeout, which surfaces as an error,
    // so HubConnection's "ReconnectingWithError" log is expected and must not fail the test.
    bool ExpectedErrors(WriteContext writeContext)
    {
        return writeContext.LoggerName == typeof(HubConnection).FullName &&
            writeContext.EventId.Name == "ReconnectingWithError";
    }

    using (StartServer<Startup>(out var server, ExpectedErrors))
    {
        var connection = CreateHubConnection(
            server.Url,
            path: HubPaths.First(),
            transportType: transportType,
            loggerFactory: LoggerFactory,
            withAutomaticReconnect: true);

        try
        {
            var echoMessage = "test";
            var reconnectingTcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
            var reconnectedTcs = new TaskCompletionSource<string>(TaskCreationOptions.RunContinuationsAsynchronously);

            connection.Reconnecting += _ =>
            {
                reconnectingTcs.SetResult(null);
                return Task.CompletedTask;
            };

            connection.Reconnected += connectionId =>
            {
                reconnectedTcs.SetResult(connectionId);
                return Task.CompletedTask;
            };

            await connection.StartAsync().OrTimeout();
            var initialConnectionId = connection.ConnectionId;

            // Simulate a lost connection by forcing a client-side server timeout.
            connection.OnServerTimeout();

            await reconnectingTcs.Task.OrTimeout();

            // A successful reconnect yields a different connection id, and the
            // connection's ConnectionId property reflects the new one.
            var newConnectionId = await reconnectedTcs.Task.OrTimeout();
            Assert.NotEqual(initialConnectionId, newConnectionId);
            Assert.Equal(connection.ConnectionId, newConnectionId);

            // The reconnected connection must be fully usable for hub invocations.
            var result = await connection.InvokeAsync<string>(nameof(TestHub.Echo), echoMessage).OrTimeout();
            Assert.Equal(echoMessage, result);
        }
        catch (Exception ex)
        {
            LoggerFactory.CreateLogger<HubConnectionTests>().LogError(ex, "{ExceptionType} from test", ex.GetType().FullName);
            throw;
        }
        finally
        {
            await connection.DisposeAsync().OrTimeout();
        }
    }
}
[Fact]
public async Task CanAutomaticallyReconnectAfterRedirect()
{
    // HubConnection logs the reconnect-triggering error; allow that specific event
    // so the log verifier does not fail the test.
    bool ExpectedErrors(WriteContext writeContext)
    {
        return writeContext.LoggerName == typeof(HubConnection).FullName &&
            writeContext.EventId.Name == "ReconnectingWithError";
    }

    using (StartServer<Startup>(out var server, ExpectedErrors))
    {
        // Connect through the "/redirect" endpoint so reconnection also exercises
        // the negotiate-redirect path.
        var connection = new HubConnectionBuilder()
            .WithLoggerFactory(LoggerFactory)
            .WithUrl(server.Url + "/redirect")
            .WithAutomaticReconnect()
            .Build();

        try
        {
            var echoMessage = "test";
            var reconnectingTcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
            var reconnectedTcs = new TaskCompletionSource<string>(TaskCreationOptions.RunContinuationsAsynchronously);

            connection.Reconnecting += _ =>
            {
                reconnectingTcs.SetResult(null);
                return Task.CompletedTask;
            };

            connection.Reconnected += connectionId =>
            {
                reconnectedTcs.SetResult(connectionId);
                return Task.CompletedTask;
            };

            await connection.StartAsync().OrTimeout();
            var initialConnectionId = connection.ConnectionId;

            // Simulate a lost connection by forcing a client-side server timeout.
            connection.OnServerTimeout();

            await reconnectingTcs.Task.OrTimeout();

            // The reconnect must produce a new connection id visible both to the
            // Reconnected handler and on the connection itself.
            var newConnectionId = await reconnectedTcs.Task.OrTimeout();
            Assert.NotEqual(initialConnectionId, newConnectionId);
            Assert.Equal(connection.ConnectionId, newConnectionId);

            // Verify the reconnected connection can still invoke hub methods.
            var result = await connection.InvokeAsync<string>(nameof(TestHub.Echo), echoMessage).OrTimeout();
            Assert.Equal(echoMessage, result);
        }
        catch (Exception ex)
        {
            LoggerFactory.CreateLogger<HubConnectionTests>().LogError(ex, "{ExceptionType} from test", ex.GetType().FullName);
            throw;
        }
        finally
        {
            await connection.DisposeAsync().OrTimeout();
        }
    }
}
[Fact]
public async Task CanAutomaticallyReconnectAfterSkippingNegotiation()
{
    // HubConnection logs the reconnect-triggering error; allow that specific event
    // so the log verifier does not fail the test.
    bool ExpectedErrors(WriteContext writeContext)
    {
        return writeContext.LoggerName == typeof(HubConnection).FullName &&
            writeContext.EventId.Name == "ReconnectingWithError";
    }

    using (StartServer<Startup>(out var server, ExpectedErrors))
    {
        var connectionBuilder = new HubConnectionBuilder()
            .WithLoggerFactory(LoggerFactory)
            .WithUrl(server.Url + HubPaths.First(), HttpTransportType.WebSockets)
            .WithAutomaticReconnect();

        // Skipping negotiation is only possible over WebSockets and means no
        // connection id is ever assigned by the server.
        connectionBuilder.Services.Configure<HttpConnectionOptions>(o =>
        {
            o.SkipNegotiation = true;
        });

        var connection = connectionBuilder.Build();

        try
        {
            var echoMessage = "test";
            var reconnectingTcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
            var reconnectedTcs = new TaskCompletionSource<string>(TaskCreationOptions.RunContinuationsAsynchronously);

            connection.Reconnecting += _ =>
            {
                reconnectingTcs.SetResult(null);
                return Task.CompletedTask;
            };

            connection.Reconnected += connectionId =>
            {
                reconnectedTcs.SetResult(connectionId);
                return Task.CompletedTask;
            };

            await connection.StartAsync().OrTimeout();

            // With negotiation skipped there is no connection id before...
            Assert.Null(connection.ConnectionId);

            // Simulate a lost connection by forcing a client-side server timeout.
            connection.OnServerTimeout();

            await reconnectingTcs.Task.OrTimeout();

            // ...or after reconnecting; the Reconnected handler also sees null.
            var newConnectionId = await reconnectedTcs.Task.OrTimeout();
            Assert.Null(newConnectionId);
            Assert.Null(connection.ConnectionId);

            // Verify the reconnected connection can still invoke hub methods.
            var result = await connection.InvokeAsync<string>(nameof(TestHub.Echo), echoMessage).OrTimeout();
            Assert.Equal(echoMessage, result);
        }
        catch (Exception ex)
        {
            LoggerFactory.CreateLogger<HubConnectionTests>().LogError(ex, "{ExceptionType} from test", ex.GetType().FullName);
            throw;
        }
        finally
        {
            await connection.DisposeAsync().OrTimeout();
        }
    }
}
private class PollTrackingMessageHandler : DelegatingHandler
{
public Task<HttpResponseMessage> ActivePoll { get; private set; }

View File

@ -67,7 +67,7 @@ namespace Microsoft.AspNetCore.SignalR.Client.Tests
}
[Fact]
public async Task StartAsyncWaitsForPreviousStartIfAlreadyStarting()
public async Task StartAsyncThrowsIfPreviousStartIsAlreadyStarting()
{
// Set up StartAsync to wait on the syncPoint when starting
var testConnection = new TestConnection(onStart: SyncPoint.Create(out var syncPoint));
@ -86,9 +86,9 @@ namespace Microsoft.AspNetCore.SignalR.Client.Tests
// Release the sync point
syncPoint.Continue();
// Both starts should finish fine
// The first start should finish fine, but the second throws an InvalidOperationException.
await firstStart;
await secondStart;
await Assert.ThrowsAsync<InvalidOperationException>(() => secondStart);
});
}
@ -147,16 +147,19 @@ namespace Microsoft.AspNetCore.SignalR.Client.Tests
var stopTask = connection.StopAsync().OrTimeout();
// Wait to hit DisposeAsync on TestConnection (which should be after StopAsync has cleared the connection state)
await syncPoint.WaitForSyncPoint();
await syncPoint.WaitForSyncPoint().OrTimeout();
// We should be able to start now, and StopAsync hasn't completed, nor will it complete while Starting
// We should not yet be able to start now because StopAsync hasn't completed
Assert.False(stopTask.IsCompleted);
await connection.StartAsync().OrTimeout();
var startTask = connection.StartAsync().OrTimeout();
Assert.False(stopTask.IsCompleted);
// When we release the sync point, the StopAsync task will finish
syncPoint.Continue();
await stopTask;
// Which will then allow StartAsync to finish.
await startTask;
});
}
@ -240,7 +243,7 @@ namespace Microsoft.AspNetCore.SignalR.Client.Tests
Assert.False(startTask.IsCompleted);
await syncPoint.WaitForSyncPoint();
Assert.Equal(HubConnectionState.Disconnected, connection.State);
Assert.Equal(HubConnectionState.Connecting, connection.State);
// Release the SyncPoint
syncPoint.Continue();
@ -442,6 +445,10 @@ namespace Microsoft.AspNetCore.SignalR.Client.Tests
// Stop and invoke the method. These two aren't synchronizable via a Sync Point any more because the transport is disposed
// outside the lock :(
var disposeTask = connection.StopAsync().OrTimeout();
// Wait to hit DisposeAsync on TestConnection (which should be after StopAsync has cleared the connection state)
await syncPoint.WaitForSyncPoint().OrTimeout();
var targetTask = method(connection).OrTimeout();
// Release the sync point

View File

@ -7,7 +7,7 @@ namespace Microsoft.AspNetCore.SignalR.Client.Tests
{
public partial class HubConnectionTests
{
private static HubConnection CreateHubConnection(TestConnection connection, IHubProtocol protocol = null, ILoggerFactory loggerFactory = null)
private static HubConnection CreateHubConnection(TestConnection connection, IHubProtocol protocol = null, ILoggerFactory loggerFactory = null, IRetryPolicy reconnectPolicy = null)
{
var builder = new HubConnectionBuilder();
@ -27,7 +27,12 @@ namespace Microsoft.AspNetCore.SignalR.Client.Tests
builder.Services.AddSingleton(protocol);
}
if (reconnectPolicy != null)
{
builder.WithAutomaticReconnect(reconnectPolicy);
}
return builder.Build();
}
}
}
}

View File

@ -0,0 +1,913 @@
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Connections;
using Microsoft.AspNetCore.Internal;
using Microsoft.AspNetCore.SignalR.Protocol;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Testing;
using Moq;
using Xunit;
namespace Microsoft.AspNetCore.SignalR.Client.Tests
{
public partial class HubConnectionTests
{
[Fact]
public async Task ReconnectIsNotEnabledByDefault()
{
    // Error-level log events that are expected fallout of the transport
    // failing; the verifiable log must not flag them as test failures.
    bool ExpectedErrors(WriteContext writeContext)
    {
        if (writeContext.LoggerName != typeof(HubConnection).FullName)
        {
            return false;
        }
        return writeContext.EventId.Name == "ShutdownWithError"
            || writeContext.EventId.Name == "ServerDisconnectedWithError";
    }
    using (StartVerifiableLog(ExpectedErrors))
    {
        var transportException = new Exception();
        var connection = new TestConnection();
        await using var hubConnection = CreateHubConnection(connection, loggerFactory: LoggerFactory);
        var sawReconnecting = false;
        var closedTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        hubConnection.Reconnecting += error =>
        {
            sawReconnecting = true;
            return Task.CompletedTask;
        };
        hubConnection.Closed += error =>
        {
            closedTcs.SetResult(error);
            return Task.CompletedTask;
        };
        await hubConnection.StartAsync().OrTimeout();
        // Simulate the transport dropping the connection with an error.
        connection.CompleteFromTransport(transportException);
        // Without WithAutomaticReconnect(), the connection closes immediately
        // with the transport error and Reconnecting never fires.
        Assert.Same(transportException, await closedTcs.Task.OrTimeout());
        Assert.False(sawReconnecting);
    }
}
[Fact]
public async Task ReconnectCanBeOptedInto()
{
    // Error-level log events that are expected fallout of the simulated
    // disconnect and the deliberately failed first reconnect attempt.
    bool ExpectedErrors(WriteContext writeContext)
    {
        return writeContext.LoggerName == typeof(HubConnection).FullName &&
            (writeContext.EventId.Name == "ServerDisconnectedWithError" ||
            writeContext.EventId.Name == "ReconnectingWithError");
    }
    // Held unresolved until the test is ready to fail the first reconnect attempt.
    var failReconnectTcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
    using (StartVerifiableLog(ExpectedErrors))
    {
        var builder = new HubConnectionBuilder().WithLoggerFactory(LoggerFactory);
        var testConnectionFactory = default(ReconnectingConnectionFactory);
        var startCallCount = 0;
        var originalConnectionId = "originalConnectionId";
        var reconnectedConnectionId = "reconnectedConnectionId";
        // Runs each time a TestConnection starts: call 1 = initial start,
        // call 2 = first (failed) reconnect attempt, call 3 = successful reconnect.
        async Task OnTestConnectionStart()
        {
            startCallCount++;
            // Only fail the first reconnect attempt.
            if (startCallCount == 2)
            {
                await failReconnectTcs.Task;
            }
            var testConnection = await testConnectionFactory.GetNextOrCurrentTestConnection();
            // Change the connection id before reconnecting.
            if (startCallCount == 3)
            {
                testConnection.ConnectionId = reconnectedConnectionId;
            }
            else
            {
                testConnection.ConnectionId = originalConnectionId;
            }
        }
        testConnectionFactory = new ReconnectingConnectionFactory(() => new TestConnection(OnTestConnectionStart));
        builder.Services.AddSingleton<IConnectionFactory>(testConnectionFactory);
        // Record every RetryContext the policy is handed and retry immediately.
        var retryContexts = new List<RetryContext>();
        var mockReconnectPolicy = new Mock<IRetryPolicy>();
        mockReconnectPolicy.Setup(p => p.NextRetryDelay(It.IsAny<RetryContext>())).Returns<RetryContext>(context =>
        {
            retryContexts.Add(context);
            return TimeSpan.Zero;
        });
        builder.WithAutomaticReconnect(mockReconnectPolicy.Object);
        await using var hubConnection = builder.Build();
        var reconnectingCount = 0;
        var reconnectedCount = 0;
        var reconnectingErrorTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        var reconnectedConnectionIdTcs = new TaskCompletionSource<string>(TaskCreationOptions.RunContinuationsAsynchronously);
        var closedErrorTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        hubConnection.Reconnecting += error =>
        {
            reconnectingCount++;
            reconnectingErrorTcs.SetResult(error);
            return Task.CompletedTask;
        };
        hubConnection.Reconnected += connectionId =>
        {
            reconnectedCount++;
            reconnectedConnectionIdTcs.SetResult(connectionId);
            return Task.CompletedTask;
        };
        hubConnection.Closed += error =>
        {
            closedErrorTcs.SetResult(error);
            return Task.CompletedTask;
        };
        await hubConnection.StartAsync().OrTimeout();
        Assert.Same(originalConnectionId, hubConnection.ConnectionId);
        // Simulate the transport dropping the connection.
        var firstException = new Exception();
        (await testConnectionFactory.GetNextOrCurrentTestConnection()).CompleteFromTransport(firstException);
        // First retry context carries the transport error and no retry history.
        Assert.Same(firstException, await reconnectingErrorTcs.Task.OrTimeout());
        Assert.Single(retryContexts);
        Assert.Same(firstException, retryContexts[0].RetryReason);
        Assert.Equal(0, retryContexts[0].PreviousRetryCount);
        Assert.Equal(TimeSpan.Zero, retryContexts[0].ElapsedTime);
        // Fail the in-progress first reconnect attempt; the second retry
        // context should then carry that reconnect failure.
        var reconnectException = new Exception();
        failReconnectTcs.SetException(reconnectException);
        Assert.Same(reconnectedConnectionId, await reconnectedConnectionIdTcs.Task.OrTimeout());
        Assert.Equal(2, retryContexts.Count);
        Assert.Same(reconnectException, retryContexts[1].RetryReason);
        Assert.Equal(1, retryContexts[1].PreviousRetryCount);
        Assert.True(TimeSpan.Zero <= retryContexts[1].ElapsedTime);
        // A manual stop closes the connection without an error, and the
        // events should have fired exactly once each.
        await hubConnection.StopAsync().OrTimeout();
        var closeError = await closedErrorTcs.Task.OrTimeout();
        Assert.Null(closeError);
        Assert.Equal(1, reconnectingCount);
        Assert.Equal(1, reconnectedCount);
    }
}
[Fact]
public async Task ReconnectStopsIfTheReconnectPolicyReturnsNull()
{
    // Error-level log events expected from the simulated disconnect and the
    // failed reconnect attempt.
    bool ExpectedErrors(WriteContext writeContext)
    {
        return writeContext.LoggerName == typeof(HubConnection).FullName &&
            (writeContext.EventId.Name == "ServerDisconnectedWithError" ||
            writeContext.EventId.Name == "ReconnectingWithError");
    }
    // Held unresolved until the test is ready to fail the reconnect attempt.
    var failReconnectTcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
    using (StartVerifiableLog(ExpectedErrors))
    {
        var builder = new HubConnectionBuilder().WithLoggerFactory(LoggerFactory);
        var startCallCount = 0;
        Task OnTestConnectionStart()
        {
            startCallCount++;
            // Fail every start after the initial one, i.e. all reconnect attempts.
            if (startCallCount > 1)
            {
                return failReconnectTcs.Task;
            }
            return Task.CompletedTask;
        }
        var testConnectionFactory = new ReconnectingConnectionFactory(() => new TestConnection(OnTestConnectionStart));
        builder.Services.AddSingleton<IConnectionFactory>(testConnectionFactory);
        // Policy allows exactly one retry: TimeSpan.Zero for the first attempt,
        // then null which tells HubConnection to give up reconnecting.
        var retryContexts = new List<RetryContext>();
        var mockReconnectPolicy = new Mock<IRetryPolicy>();
        mockReconnectPolicy.Setup(p => p.NextRetryDelay(It.IsAny<RetryContext>())).Returns<RetryContext>(context =>
        {
            retryContexts.Add(context);
            return context.PreviousRetryCount == 0 ? TimeSpan.Zero : (TimeSpan?)null;
        });
        builder.WithAutomaticReconnect(mockReconnectPolicy.Object);
        await using var hubConnection = builder.Build();
        var reconnectingCount = 0;
        var reconnectedCount = 0;
        var reconnectingErrorTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        var closedErrorTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        hubConnection.Reconnecting += error =>
        {
            reconnectingCount++;
            reconnectingErrorTcs.SetResult(error);
            return Task.CompletedTask;
        };
        hubConnection.Reconnected += connectionId =>
        {
            reconnectedCount++;
            return Task.CompletedTask;
        };
        hubConnection.Closed += error =>
        {
            closedErrorTcs.SetResult(error);
            return Task.CompletedTask;
        };
        await hubConnection.StartAsync().OrTimeout();
        // Simulate the transport dropping the connection.
        var firstException = new Exception();
        (await testConnectionFactory.GetNextOrCurrentTestConnection()).CompleteFromTransport(firstException);
        Assert.Same(firstException, await reconnectingErrorTcs.Task.OrTimeout());
        Assert.Single(retryContexts);
        Assert.Same(firstException, retryContexts[0].RetryReason);
        Assert.Equal(0, retryContexts[0].PreviousRetryCount);
        Assert.Equal(TimeSpan.Zero, retryContexts[0].ElapsedTime);
        // Fail the single reconnect attempt; the policy then returns null and
        // the connection closes for good with an OperationCanceledException.
        var reconnectException = new Exception();
        failReconnectTcs.SetException(reconnectException);
        var closeError = await closedErrorTcs.Task.OrTimeout();
        Assert.IsType<OperationCanceledException>(closeError);
        // The second retry context carries the reconnect failure.
        Assert.Equal(2, retryContexts.Count);
        Assert.Same(reconnectException, retryContexts[1].RetryReason);
        Assert.Equal(1, retryContexts[1].PreviousRetryCount);
        Assert.True(TimeSpan.Zero <= retryContexts[1].ElapsedTime);
        // Reconnecting fired once, but the connection never came back.
        Assert.Equal(1, reconnectingCount);
        Assert.Equal(0, reconnectedCount);
    }
}
[Fact]
public async Task ReconnectCanHappenMultipleTimes()
{
    // Error-level log events expected from each simulated disconnect.
    bool ExpectedErrors(WriteContext writeContext)
    {
        return writeContext.LoggerName == typeof(HubConnection).FullName &&
            (writeContext.EventId.Name == "ServerDisconnectedWithError" ||
            writeContext.EventId.Name == "ReconnectingWithError");
    }
    using (StartVerifiableLog(ExpectedErrors))
    {
        var builder = new HubConnectionBuilder().WithLoggerFactory(LoggerFactory);
        var testConnectionFactory = new ReconnectingConnectionFactory();
        builder.Services.AddSingleton<IConnectionFactory>(testConnectionFactory);
        // Record every RetryContext the policy is handed and retry immediately.
        var retryContexts = new List<RetryContext>();
        var mockReconnectPolicy = new Mock<IRetryPolicy>();
        mockReconnectPolicy.Setup(p => p.NextRetryDelay(It.IsAny<RetryContext>())).Returns<RetryContext>(context =>
        {
            retryContexts.Add(context);
            return TimeSpan.Zero;
        });
        builder.WithAutomaticReconnect(mockReconnectPolicy.Object);
        await using var hubConnection = builder.Build();
        var reconnectingCount = 0;
        var reconnectedCount = 0;
        var reconnectingErrorTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        var reconnectedConnectionIdTcs = new TaskCompletionSource<string>(TaskCreationOptions.RunContinuationsAsynchronously);
        var closedErrorTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        hubConnection.Reconnecting += error =>
        {
            reconnectingCount++;
            reconnectingErrorTcs.SetResult(error);
            return Task.CompletedTask;
        };
        hubConnection.Reconnected += connectionId =>
        {
            reconnectedCount++;
            reconnectedConnectionIdTcs.SetResult(connectionId);
            return Task.CompletedTask;
        };
        hubConnection.Closed += error =>
        {
            closedErrorTcs.SetResult(error);
            return Task.CompletedTask;
        };
        await hubConnection.StartAsync().OrTimeout();
        // First disconnect: the retry context carries the transport error.
        var firstException = new Exception();
        (await testConnectionFactory.GetNextOrCurrentTestConnection()).CompleteFromTransport(firstException);
        Assert.Same(firstException, await reconnectingErrorTcs.Task.OrTimeout());
        Assert.Single(retryContexts);
        Assert.Same(firstException, retryContexts[0].RetryReason);
        Assert.Equal(0, retryContexts[0].PreviousRetryCount);
        Assert.Equal(TimeSpan.Zero, retryContexts[0].ElapsedTime);
        // Wait for the first reconnect to complete; Closed must not have fired.
        await reconnectedConnectionIdTcs.Task.OrTimeout();
        Assert.Equal(1, reconnectingCount);
        Assert.Equal(1, reconnectedCount);
        Assert.Equal(TaskStatus.WaitingForActivation, closedErrorTcs.Task.Status);
        // Reset the one-shot TCSes so the second disconnect can be observed.
        reconnectingErrorTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        reconnectedConnectionIdTcs = new TaskCompletionSource<string>(TaskCreationOptions.RunContinuationsAsynchronously);
        // Second disconnect: PreviousRetryCount and ElapsedTime restart from
        // zero because the previous reconnect succeeded.
        var secondException = new Exception();
        (await testConnectionFactory.GetNextOrCurrentTestConnection()).CompleteFromTransport(secondException);
        Assert.Same(secondException, await reconnectingErrorTcs.Task.OrTimeout());
        Assert.Equal(2, retryContexts.Count);
        Assert.Same(secondException, retryContexts[1].RetryReason);
        Assert.Equal(0, retryContexts[1].PreviousRetryCount);
        Assert.Equal(TimeSpan.Zero, retryContexts[1].ElapsedTime);
        await reconnectedConnectionIdTcs.Task.OrTimeout();
        Assert.Equal(2, reconnectingCount);
        Assert.Equal(2, reconnectedCount);
        Assert.Equal(TaskStatus.WaitingForActivation, closedErrorTcs.Task.Status);
        // A manual stop closes the connection without an error.
        await hubConnection.StopAsync().OrTimeout();
        var closeError = await closedErrorTcs.Task.OrTimeout();
        Assert.Null(closeError);
        Assert.Equal(2, reconnectingCount);
        Assert.Equal(2, reconnectedCount);
    }
}
[Fact]
public async Task ReconnectEventsNotFiredIfFirstRetryDelayIsNull()
{
    // Only the server-disconnect error is expected in the logs here.
    bool ExpectedErrors(WriteContext writeContext)
    {
        if (writeContext.LoggerName != typeof(HubConnection).FullName)
        {
            return false;
        }
        return writeContext.EventId.Name == "ServerDisconnectedWithError";
    }
    using (StartVerifiableLog(ExpectedErrors))
    {
        var builder = new HubConnectionBuilder().WithLoggerFactory(LoggerFactory);
        var connectionFactory = new ReconnectingConnectionFactory();
        builder.Services.AddSingleton<IConnectionFactory>(connectionFactory);
        // A policy whose very first NextRetryDelay is null: reconnecting is
        // abandoned before it starts.
        var retryPolicyMock = new Mock<IRetryPolicy>();
        retryPolicyMock.Setup(p => p.NextRetryDelay(It.IsAny<RetryContext>())).Returns<TimeSpan?>(null);
        builder.WithAutomaticReconnect(retryPolicyMock.Object);
        await using var hubConnection = builder.Build();
        var reconnectingCalls = 0;
        var reconnectedCalls = 0;
        var closedTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        hubConnection.Reconnecting += error =>
        {
            reconnectingCalls++;
            return Task.CompletedTask;
        };
        hubConnection.Reconnected += connectionId =>
        {
            reconnectedCalls++;
            return Task.CompletedTask;
        };
        hubConnection.Closed += error =>
        {
            closedTcs.SetResult(error);
            return Task.CompletedTask;
        };
        await hubConnection.StartAsync().OrTimeout();
        // Drop the connection; since the policy never yields a delay, the
        // connection should close without either reconnect event firing.
        var transportError = new Exception();
        (await connectionFactory.GetNextOrCurrentTestConnection()).CompleteFromTransport(transportError);
        await closedTcs.Task.OrTimeout();
        Assert.Equal(0, reconnectingCalls);
        Assert.Equal(0, reconnectedCalls);
    }
}
[Fact]
public async Task ReconnectDoesNotStartIfConnectionIsLostDuringInitialHandshake()
{
    // Error-level log events expected when the handshake is interrupted.
    bool ExpectedErrors(WriteContext writeContext)
    {
        return writeContext.LoggerName == typeof(HubConnection).FullName &&
            (writeContext.EventId.Name == "ErrorReceivingHandshakeResponse" ||
            writeContext.EventId.Name == "ErrorStartingConnection");
    }
    using (StartVerifiableLog(ExpectedErrors))
    {
        var builder = new HubConnectionBuilder().WithLoggerFactory(LoggerFactory);
        // autoHandshake: false keeps the handshake pending so the transport
        // can be failed mid-handshake.
        var testConnectionFactory = new ReconnectingConnectionFactory(() => new TestConnection(autoHandshake: false));
        builder.Services.AddSingleton<IConnectionFactory>(testConnectionFactory);
        var mockReconnectPolicy = new Mock<IRetryPolicy>();
        mockReconnectPolicy.Setup(p => p.NextRetryDelay(It.IsAny<RetryContext>())).Returns<TimeSpan?>(null);
        builder.WithAutomaticReconnect(mockReconnectPolicy.Object);
        await using var hubConnection = builder.Build();
        var reconnectingCount = 0;
        var reconnectedCount = 0;
        var closedCount = 0;
        hubConnection.Reconnecting += error =>
        {
            reconnectingCount++;
            return Task.CompletedTask;
        };
        hubConnection.Reconnected += connectionId =>
        {
            reconnectedCount++;
            return Task.CompletedTask;
        };
        hubConnection.Closed += error =>
        {
            closedCount++;
            return Task.CompletedTask;
        };
        // Start, then fail the transport before the handshake completes.
        var startTask = hubConnection.StartAsync().OrTimeout();
        var firstException = new Exception();
        (await testConnectionFactory.GetNextOrCurrentTestConnection()).CompleteFromTransport(firstException);
        // StartAsync should surface the transport error directly; since the
        // connection never successfully started, no reconnect is attempted
        // and no lifecycle events fire.
        Assert.Same(firstException, await Assert.ThrowsAsync<Exception>(() => startTask).OrTimeout());
        Assert.Equal(HubConnectionState.Disconnected, hubConnection.State);
        Assert.Equal(0, reconnectingCount);
        Assert.Equal(0, reconnectedCount);
        Assert.Equal(0, closedCount);
    }
}
[Fact]
public async Task ReconnectContinuesIfConnectionLostDuringReconnectHandshake()
{
    // Error-level log events expected from the disconnects and the failed
    // reconnect handshake.
    bool ExpectedErrors(WriteContext writeContext)
    {
        return writeContext.LoggerName == typeof(HubConnection).FullName &&
            (writeContext.EventId.Name == "ServerDisconnectedWithError" ||
            writeContext.EventId.Name == "ReconnectingWithError" ||
            writeContext.EventId.Name == "ErrorReceivingHandshakeResponse" ||
            writeContext.EventId.Name == "ErrorStartingConnection");
    }
    using (StartVerifiableLog(ExpectedErrors))
    {
        var builder = new HubConnectionBuilder().WithLoggerFactory(LoggerFactory);
        // autoHandshake: false lets the test control exactly when (and
        // whether) each handshake completes.
        var testConnectionFactory = new ReconnectingConnectionFactory(() => new TestConnection(autoHandshake: false));
        builder.Services.AddSingleton<IConnectionFactory>(testConnectionFactory);
        // Record every RetryContext; signal when the second retry is requested.
        var retryContexts = new List<RetryContext>();
        var secondRetryDelayTcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
        var mockReconnectPolicy = new Mock<IRetryPolicy>();
        mockReconnectPolicy.Setup(p => p.NextRetryDelay(It.IsAny<RetryContext>())).Returns<RetryContext>(context =>
        {
            retryContexts.Add(context);
            if (retryContexts.Count == 2)
            {
                secondRetryDelayTcs.SetResult(null);
            }
            return TimeSpan.Zero;
        });
        builder.WithAutomaticReconnect(mockReconnectPolicy.Object);
        await using var hubConnection = builder.Build();
        var reconnectingCount = 0;
        var reconnectedCount = 0;
        var reconnectingErrorTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        var reconnectedConnectionIdTcs = new TaskCompletionSource<string>(TaskCreationOptions.RunContinuationsAsynchronously);
        var closedErrorTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        hubConnection.Reconnecting += error =>
        {
            reconnectingCount++;
            reconnectingErrorTcs.SetResult(error);
            return Task.CompletedTask;
        };
        hubConnection.Reconnected += connectionId =>
        {
            reconnectedCount++;
            reconnectedConnectionIdTcs.SetResult(connectionId);
            return Task.CompletedTask;
        };
        hubConnection.Closed += error =>
        {
            closedErrorTcs.SetResult(error);
            return Task.CompletedTask;
        };
        var startTask = hubConnection.StartAsync();
        // Complete handshake
        var currentTestConnection = await testConnectionFactory.GetNextOrCurrentTestConnection();
        await currentTestConnection.ReadHandshakeAndSendResponseAsync().OrTimeout();
        await startTask.OrTimeout();
        // Drop the established connection to trigger the first reconnect.
        var firstException = new Exception();
        currentTestConnection.CompleteFromTransport(firstException);
        Assert.Same(firstException, await reconnectingErrorTcs.Task.OrTimeout());
        Assert.Single(retryContexts);
        Assert.Same(firstException, retryContexts[0].RetryReason);
        Assert.Equal(0, retryContexts[0].PreviousRetryCount);
        Assert.Equal(TimeSpan.Zero, retryContexts[0].ElapsedTime);
        // Drop the new connection while its reconnect handshake is still
        // pending; reconnecting should continue with a second attempt.
        var secondException = new Exception();
        (await testConnectionFactory.GetNextOrCurrentTestConnection()).CompleteFromTransport(secondException);
        await secondRetryDelayTcs.Task.OrTimeout();
        Assert.Equal(2, retryContexts.Count);
        Assert.Same(secondException, retryContexts[1].RetryReason);
        Assert.Equal(1, retryContexts[1].PreviousRetryCount);
        // Fixed: this previously re-checked retryContexts[0], which had
        // already been asserted above; the second context is the one in play.
        Assert.True(TimeSpan.Zero <= retryContexts[1].ElapsedTime);
        // Complete handshake
        currentTestConnection = await testConnectionFactory.GetNextOrCurrentTestConnection();
        await currentTestConnection.ReadHandshakeAndSendResponseAsync().OrTimeout();
        await reconnectedConnectionIdTcs.Task.OrTimeout();
        // One Reconnecting/Reconnected pair despite two underlying attempts.
        Assert.Equal(1, reconnectingCount);
        Assert.Equal(1, reconnectedCount);
        Assert.Equal(TaskStatus.WaitingForActivation, closedErrorTcs.Task.Status);
        // A manual stop closes the connection without an error.
        await hubConnection.StopAsync().OrTimeout();
        var closeError = await closedErrorTcs.Task.OrTimeout();
        Assert.Null(closeError);
        Assert.Equal(1, reconnectingCount);
        Assert.Equal(1, reconnectedCount);
    }
}
[Fact]
public async Task ReconnectContinuesIfInvalidHandshakeResponse()
{
    // Error-level log events expected from the disconnect and the handshake
    // that the test intentionally answers with a server error.
    bool ExpectedErrors(WriteContext writeContext)
    {
        return writeContext.LoggerName == typeof(HubConnection).FullName &&
            (writeContext.EventId.Name == "ServerDisconnectedWithError" ||
            writeContext.EventId.Name == "ReconnectingWithError" ||
            writeContext.EventId.Name == "ErrorReceivingHandshakeResponse" ||
            writeContext.EventId.Name == "HandshakeServerError" ||
            writeContext.EventId.Name == "ErrorStartingConnection");
    }
    using (StartVerifiableLog(ExpectedErrors))
    {
        var builder = new HubConnectionBuilder().WithLoggerFactory(LoggerFactory);
        // autoHandshake: false lets the test hand-craft handshake responses.
        var testConnectionFactory = new ReconnectingConnectionFactory(() => new TestConnection(autoHandshake: false));
        builder.Services.AddSingleton<IConnectionFactory>(testConnectionFactory);
        // Record every RetryContext; signal when the second retry is requested.
        var retryContexts = new List<RetryContext>();
        var secondRetryDelayTcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
        var mockReconnectPolicy = new Mock<IRetryPolicy>();
        mockReconnectPolicy.Setup(p => p.NextRetryDelay(It.IsAny<RetryContext>())).Returns<RetryContext>(context =>
        {
            retryContexts.Add(context);
            if (retryContexts.Count == 2)
            {
                secondRetryDelayTcs.SetResult(null);
            }
            return TimeSpan.Zero;
        });
        builder.WithAutomaticReconnect(mockReconnectPolicy.Object);
        await using var hubConnection = builder.Build();
        var reconnectingCount = 0;
        var reconnectedCount = 0;
        var reconnectingErrorTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        var reconnectedConnectionIdTcs = new TaskCompletionSource<string>(TaskCreationOptions.RunContinuationsAsynchronously);
        var closedErrorTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        hubConnection.Reconnecting += error =>
        {
            reconnectingCount++;
            reconnectingErrorTcs.SetResult(error);
            return Task.CompletedTask;
        };
        hubConnection.Reconnected += connectionId =>
        {
            reconnectedCount++;
            reconnectedConnectionIdTcs.SetResult(connectionId);
            return Task.CompletedTask;
        };
        hubConnection.Closed += error =>
        {
            closedErrorTcs.SetResult(error);
            return Task.CompletedTask;
        };
        var startTask = hubConnection.StartAsync();
        // Complete handshake
        var currentTestConnection = await testConnectionFactory.GetNextOrCurrentTestConnection();
        await currentTestConnection.ReadHandshakeAndSendResponseAsync().OrTimeout();
        await startTask.OrTimeout();
        // Drop the established connection to trigger the first reconnect.
        var firstException = new Exception();
        currentTestConnection.CompleteFromTransport(firstException);
        Assert.Same(firstException, await reconnectingErrorTcs.Task.OrTimeout());
        Assert.Single(retryContexts);
        Assert.Same(firstException, retryContexts[0].RetryReason);
        Assert.Equal(0, retryContexts[0].PreviousRetryCount);
        Assert.Equal(TimeSpan.Zero, retryContexts[0].ElapsedTime);
        // Respond to handshake with error.
        currentTestConnection = await testConnectionFactory.GetNextOrCurrentTestConnection();
        await currentTestConnection.ReadSentTextMessageAsync().OrTimeout();
        var output = MemoryBufferWriter.Get();
        try
        {
            HandshakeProtocol.WriteResponseMessage(new HandshakeResponseMessage("Error!"), output);
            await currentTestConnection.Application.Output.WriteAsync(output.ToArray()).OrTimeout();
        }
        finally
        {
            MemoryBufferWriter.Return(output);
        }
        // The handshake error surfaces as a HubException and drives a second
        // reconnect attempt.
        await secondRetryDelayTcs.Task.OrTimeout();
        Assert.Equal(2, retryContexts.Count);
        Assert.IsType<HubException>(retryContexts[1].RetryReason);
        Assert.Equal(1, retryContexts[1].PreviousRetryCount);
        // Fixed: this previously re-checked retryContexts[0], which had
        // already been asserted above; the second context is the one in play.
        Assert.True(TimeSpan.Zero <= retryContexts[1].ElapsedTime);
        // Complete handshake
        currentTestConnection = await testConnectionFactory.GetNextOrCurrentTestConnection();
        await currentTestConnection.ReadHandshakeAndSendResponseAsync().OrTimeout();
        await reconnectedConnectionIdTcs.Task.OrTimeout();
        // One Reconnecting/Reconnected pair despite two underlying attempts.
        Assert.Equal(1, reconnectingCount);
        Assert.Equal(1, reconnectedCount);
        Assert.Equal(TaskStatus.WaitingForActivation, closedErrorTcs.Task.Status);
        // A manual stop closes the connection without an error.
        await hubConnection.StopAsync().OrTimeout();
        var closeError = await closedErrorTcs.Task.OrTimeout();
        Assert.Null(closeError);
        Assert.Equal(1, reconnectingCount);
        Assert.Equal(1, reconnectedCount);
    }
}
[Fact]
public async Task ReconnectCanBeStoppedWhileRestartingUnderlyingConnection()
{
    // Error-level log events expected from the disconnect and the reconnect
    // attempt that StopAsync interrupts.
    bool ExpectedErrors(WriteContext writeContext)
    {
        return writeContext.LoggerName == typeof(HubConnection).FullName &&
            (writeContext.EventId.Name == "ServerDisconnectedWithError" ||
            writeContext.EventId.Name == "ReconnectingWithError" ||
            writeContext.EventId.Name == "ErrorReceivingHandshakeResponse" ||
            writeContext.EventId.Name == "ErrorStartingConnection");
    }
    using (StartVerifiableLog(ExpectedErrors))
    {
        var builder = new HubConnectionBuilder().WithLoggerFactory(LoggerFactory);
        // Gates every TestConnection start; replaced after each use so each
        // start attempt waits on a fresh gate.
        var connectionStartTcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
        async Task OnTestConnectionStart()
        {
            try
            {
                await connectionStartTcs.Task;
            }
            finally
            {
                connectionStartTcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
            }
        }
        var testConnectionFactory = new ReconnectingConnectionFactory(() => new TestConnection(OnTestConnectionStart));
        builder.Services.AddSingleton<IConnectionFactory>(testConnectionFactory);
        // Record every RetryContext the policy is handed and retry immediately.
        var retryContexts = new List<RetryContext>();
        var mockReconnectPolicy = new Mock<IRetryPolicy>();
        mockReconnectPolicy.Setup(p => p.NextRetryDelay(It.IsAny<RetryContext>())).Returns<RetryContext>(context =>
        {
            retryContexts.Add(context);
            return TimeSpan.Zero;
        });
        builder.WithAutomaticReconnect(mockReconnectPolicy.Object);
        await using var hubConnection = builder.Build();
        var reconnectingCount = 0;
        var reconnectedCount = 0;
        var reconnectingErrorTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        var closedErrorTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        hubConnection.Reconnecting += error =>
        {
            reconnectingCount++;
            reconnectingErrorTcs.SetResult(error);
            return Task.CompletedTask;
        };
        hubConnection.Reconnected += connectionId =>
        {
            reconnectedCount++;
            return Task.CompletedTask;
        };
        hubConnection.Closed += error =>
        {
            closedErrorTcs.SetResult(error);
            return Task.CompletedTask;
        };
        // Allow the first connection to start successfully.
        connectionStartTcs.SetResult(null);
        await hubConnection.StartAsync().OrTimeout();
        // Drop the connection to trigger the first reconnect attempt, which
        // then blocks inside OnTestConnectionStart on the new gate.
        var firstException = new Exception();
        (await testConnectionFactory.GetNextOrCurrentTestConnection()).CompleteFromTransport(firstException);
        Assert.Same(firstException, await reconnectingErrorTcs.Task.OrTimeout());
        Assert.Single(retryContexts);
        Assert.Same(firstException, retryContexts[0].RetryReason);
        Assert.Equal(0, retryContexts[0].PreviousRetryCount);
        Assert.Equal(TimeSpan.Zero, retryContexts[0].ElapsedTime);
        // Call StopAsync while the reconnect attempt is mid-start, then
        // release the gate. The stop should win: the connection closes with
        // an OperationCanceledException and no further retry is attempted.
        // (Removed an unused `secondException` local that was never thrown.)
        var stopTask = hubConnection.StopAsync();
        connectionStartTcs.SetResult(null);
        Assert.IsType<OperationCanceledException>(await closedErrorTcs.Task.OrTimeout());
        Assert.Single(retryContexts);
        Assert.Equal(1, reconnectingCount);
        Assert.Equal(0, reconnectedCount);
        await stopTask.OrTimeout();
    }
}
[Fact]
public async Task ReconnectCanBeStoppedDuringRetryDelay()
{
    // Error-level log events expected from the disconnect that starts the
    // reconnect sequence StopAsync then interrupts.
    bool ExpectedErrors(WriteContext writeContext)
    {
        return writeContext.LoggerName == typeof(HubConnection).FullName &&
            (writeContext.EventId.Name == "ServerDisconnectedWithError" ||
            writeContext.EventId.Name == "ReconnectingWithError" ||
            writeContext.EventId.Name == "ErrorReceivingHandshakeResponse" ||
            writeContext.EventId.Name == "ErrorStartingConnection");
    }
    using (StartVerifiableLog(ExpectedErrors))
    {
        var builder = new HubConnectionBuilder().WithLoggerFactory(LoggerFactory);
        var testConnectionFactory = new ReconnectingConnectionFactory();
        builder.Services.AddSingleton<IConnectionFactory>(testConnectionFactory);
        // A long retry delay keeps the connection parked in the delay so
        // StopAsync can interrupt it there.
        var retryContexts = new List<RetryContext>();
        var mockReconnectPolicy = new Mock<IRetryPolicy>();
        mockReconnectPolicy.Setup(p => p.NextRetryDelay(It.IsAny<RetryContext>())).Returns<RetryContext>(context =>
        {
            retryContexts.Add(context);
            // Hopefully this test never takes over a minute.
            return TimeSpan.FromMinutes(1);
        });
        builder.WithAutomaticReconnect(mockReconnectPolicy.Object);
        await using var hubConnection = builder.Build();
        var reconnectingCount = 0;
        var reconnectedCount = 0;
        var reconnectingErrorTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        var closedErrorTcs = new TaskCompletionSource<Exception>(TaskCreationOptions.RunContinuationsAsynchronously);
        hubConnection.Reconnecting += error =>
        {
            reconnectingCount++;
            reconnectingErrorTcs.SetResult(error);
            return Task.CompletedTask;
        };
        hubConnection.Reconnected += connectionId =>
        {
            reconnectedCount++;
            return Task.CompletedTask;
        };
        hubConnection.Closed += error =>
        {
            closedErrorTcs.SetResult(error);
            return Task.CompletedTask;
        };
        // Allow the first connection to start successfully.
        await hubConnection.StartAsync().OrTimeout();
        // Drop the connection; the reconnect sequence enters the 1-minute delay.
        var firstException = new Exception();
        (await testConnectionFactory.GetNextOrCurrentTestConnection()).CompleteFromTransport(firstException);
        Assert.Same(firstException, await reconnectingErrorTcs.Task.OrTimeout());
        Assert.Single(retryContexts);
        Assert.Same(firstException, retryContexts[0].RetryReason);
        Assert.Equal(0, retryContexts[0].PreviousRetryCount);
        Assert.Equal(TimeSpan.Zero, retryContexts[0].ElapsedTime);
        // Stopping during the retry delay closes the connection immediately
        // with an OperationCanceledException; no second retry is requested.
        await hubConnection.StopAsync().OrTimeout();
        Assert.IsType<OperationCanceledException>(await closedErrorTcs.Task.OrTimeout());
        Assert.Single(retryContexts);
        Assert.Equal(1, reconnectingCount);
        Assert.Equal(0, reconnectedCount);
    }
}
// Test IConnectionFactory that hands each reconnect attempt a fresh
// TestConnection and lets the test observe the connection currently in use.
private class ReconnectingConnectionFactory : IConnectionFactory
{
    // Produces the TestConnection for each (re)connect attempt.
    public readonly Func<TestConnection> _testConnectionFactory;
    // Completed with the current TestConnection; swapped for a fresh TCS when
    // that connection is disposed or fails to start.
    public TaskCompletionSource<TestConnection> _testConnectionTcs = new TaskCompletionSource<TestConnection>(TaskCreationOptions.RunContinuationsAsynchronously);
    public ReconnectingConnectionFactory()
        : this(() => new TestConnection())
    {
    }
    public ReconnectingConnectionFactory(Func<TestConnection> testConnectionFactory)
    {
        _testConnectionFactory = testConnectionFactory;
    }
    // Awaited by tests to get the connection the HubConnection is using or is
    // about to use next.
    public Task<TestConnection> GetNextOrCurrentTestConnection()
    {
        return _testConnectionTcs.Task;
    }
    public async Task<ConnectionContext> ConnectAsync(TransferFormat transferFormat, CancellationToken cancellationToken = default)
    {
        var connection = _testConnectionFactory();
        // Publish the connection before starting it so tests can observe
        // attempts that are still starting (or that end up failing).
        _testConnectionTcs.SetResult(connection);
        try
        {
            return await connection.StartAsync(transferFormat);
        }
        catch
        {
            // A connection that failed to start must not remain observable as
            // the "current" one.
            _testConnectionTcs = new TaskCompletionSource<TestConnection>(TaskCreationOptions.RunContinuationsAsynchronously);
            throw;
        }
    }
    public async Task DisposeAsync(ConnectionContext connection)
    {
        var current = await _testConnectionTcs.Task;
        // Reset first so GetNextOrCurrentTestConnection now waits for the
        // next attempt rather than returning the disposed connection.
        _testConnectionTcs = new TaskCompletionSource<TestConnection>(TaskCreationOptions.RunContinuationsAsynchronously);
        await current.DisposeAsync();
    }
}
}
}

View File

@ -339,7 +339,6 @@ namespace Microsoft.AspNetCore.SignalR.Client.Tests
// after cancellation, don't send from the pipe
foreach (var number in new[] { 42, 43, 322, 3145, -1234 })
{
await channel.Writer.WriteAsync(number);
}

View File

@ -164,7 +164,7 @@ export class HubConnectionBuilder {
/** Configures the {@link @aspnet/signalr.HubConnection} to automatically attempt to reconnect if the connection is lost.
*
* @param {number[]} reconnectPolicy An {@link @aspnet/signalR.IReconnectPolicy} that controls the timing and number of reconnect attempts.
* @param {IReconnectPolicy} reconnectPolicy An {@link @aspnet/signalR.IReconnectPolicy} that controls the timing and number of reconnect attempts.
*/
public withAutomaticReconnect(reconnectPolicy: IReconnectPolicy): HubConnectionBuilder;
public withAutomaticReconnect(retryDelaysOrReconnectPolicy?: number[] | IReconnectPolicy): HubConnectionBuilder {

View File

@ -1,14 +1,13 @@
// Copyright (c) .NET Foundation. All rights reserved.
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.SignalR.Client;
using Microsoft.Extensions.CommandLineUtils;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
namespace ClientSample
@ -37,6 +36,11 @@ namespace ClientSample
logging.AddConsole();
});
connectionBuilder.Services.Configure<LoggerFilterOptions>(options =>
{
options.MinLevel = LogLevel.Trace;
});
if (uri.Scheme == "net.tcp")
{
connectionBuilder.WithEndPoint(uri);
@ -46,6 +50,8 @@ namespace ClientSample
connectionBuilder.WithUrl(uri);
}
connectionBuilder.WithAutomaticReconnect();
var connection = connectionBuilder.Build();
Console.CancelKeyPress += (sender, a) =>
@ -68,7 +74,8 @@ namespace ClientSample
return Task.CompletedTask;
};
while (true)
do
{
// Dispose the previous token
closedTokenSource?.Dispose();
@ -77,48 +84,34 @@ namespace ClientSample
closedTokenSource = new CancellationTokenSource();
// Connect to the server
if (!await ConnectAsync(connection))
} while (!await ConnectAsync(connection));
Console.WriteLine("Connected to {0}", uri);
// Handle the connected connection
while (true)
{
try
{
var line = Console.ReadLine();
if (line == null || closedTokenSource.Token.IsCancellationRequested)
{
break;
}
await connection.InvokeAsync<object>("Send", line);
}
catch (ObjectDisposedException)
{
// We're shutting down the client
break;
}
Console.WriteLine("Connected to {0}", uri); ;
// Handle the connected connection
while (true)
catch (Exception ex)
{
try
{
var line = Console.ReadLine();
if (line == null || closedTokenSource.Token.IsCancellationRequested)
{
break;
}
await connection.InvokeAsync<object>("Send", line);
}
catch (IOException)
{
// Process being shutdown
break;
}
catch (OperationCanceledException)
{
// The connection closed
break;
}
catch (ObjectDisposedException)
{
// We're shutting down the client
break;
}
catch (Exception ex)
{
// Send could have failed because the connection closed
System.Console.WriteLine(ex);
break;
}
// Send could have failed because the connection closed
// Continue to loop because we should be reconnecting.
Console.WriteLine(ex);
}
}