
I worked a little bit this weekend on a small new feature in Wolverine that we’ll need as part of our forthcoming “CritterWatch” tooling. What I was doing isn’t that interesting, but the killer problem was that it required me to write an integration test that would:
- Spin up multiple IHost instances for the same testing application
- Verify that Wolverine was correctly assigning running tasks to only the leader node
- Stop the leader node and watch leadership, and that same task, shift to the newly elected leader
- Make sure that task was really only ever running on the single leader node
Needless to say, it’s a long-running test, and it turned out to be non-trivial to get both the test harness and the necessary code exactly right. Honestly, I didn’t get this done until I stopped and integrated application logging directly into the xUnit.Net test harness (plus a Wolverine-specific event observer) so I could see what the heck was going on inside all of these application instances.
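To make that concrete, here’s roughly the shape of the test method, as a sketch only. It would live inside the test class shown at the bottom of this post, startHost() is the helper defined there, and waitForTaskToRunOnlyOnAsync() is a hypothetical polling assertion standing in for whatever leadership checks your application exposes:

```csharp
[Fact]
public async Task pinned_task_follows_the_leader()
{
    // In this sketch I'm assuming the first node started wins the election
    var leader = await startHost();
    var follower = await startHost();

    // Hypothetical helper that polls until the task is running
    // on this node -- and *only* this node
    await waitForTaskToRunOnlyOnAsync(leader);

    // Stop the leader to force a new leadership election...
    await leader.StopAsync();

    // ...and prove the task shifted to the newly elected leader
    await waitForTaskToRunOnlyOnAsync(follower);
}
```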
So without further ado, here’s the recipe we’re using (and copy/pasting around) in Wolverine to do that. First off, we need ILogger and ILoggerProvider implementations that will pipe logging output to xUnit’s ITestOutputHelper, like so:
```csharp
using Microsoft.Extensions.Logging;
using Xunit.Abstractions;

public class XUnitLogger : ILogger
{
    private readonly string _categoryName;

    // Messages containing these strings are just noise in the test output
    private readonly List<string> _ignoredStrings = new()
    {
        "Declared",
        "Successfully processed message"
    };

    private readonly ITestOutputHelper _testOutputHelper;

    public XUnitLogger(ITestOutputHelper testOutputHelper, string categoryName)
    {
        _testOutputHelper = testOutputHelper;
        _categoryName = categoryName;
    }

    public bool IsEnabled(LogLevel logLevel)
    {
        return logLevel != LogLevel.None;
    }

    public IDisposable BeginScope<TState>(TState state)
    {
        return new Disposable();
    }

    public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception exception,
        Func<TState, Exception, string> formatter)
    {
        if (exception is DivideByZeroException)
        {
            return;
        }

        if (exception is BadImageFormatException)
        {
            return;
        }

        // Obviously this is crude and you would do something different here...
        if (_categoryName == "Wolverine.Transports.Sending.BufferedSendingAgent" &&
            logLevel == LogLevel.Information) return;

        if (_categoryName == "Wolverine.Runtime.WolverineRuntime" &&
            logLevel == LogLevel.Information) return;

        if (_categoryName == "Microsoft.Hosting.Lifetime" &&
            logLevel == LogLevel.Information) return;

        if (_categoryName == "Wolverine.Transports.ListeningAgent" &&
            logLevel == LogLevel.Information) return;

        if (_categoryName == "JasperFx.Resources.ResourceSetupHostService" &&
            logLevel == LogLevel.Information) return;

        if (_categoryName == "Wolverine.Configuration.HandlerDiscovery" &&
            logLevel == LogLevel.Information) return;

        var text = formatter(state, exception);
        if (_ignoredStrings.Any(x => text.Contains(x))) return;

        _testOutputHelper.WriteLine($"{_categoryName}/{logLevel}: {text}");
        if (exception != null)
        {
            _testOutputHelper.WriteLine(exception.ToString());
        }
    }

    public class Disposable : IDisposable
    {
        public void Dispose()
        {
        }
    }
}
```
```csharp
public class OutputLoggerProvider : ILoggerProvider
{
    private readonly ITestOutputHelper _output;

    public OutputLoggerProvider(ITestOutputHelper output)
    {
        _output = output;
    }

    public void Dispose()
    {
    }

    public ILogger CreateLogger(string categoryName)
    {
        return new XUnitLogger(_output, categoryName);
    }
}
```
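By the way, you can sanity check the provider outside of a full IHost by wiring it into a standalone LoggerFactory. This is a minimal sketch, and nothing here is Wolverine-specific:

```csharp
// Exercising the provider directly, outside of any IHost
using var factory = LoggerFactory.Create(builder =>
{
    builder.AddProvider(new OutputLoggerProvider(_output));
});

var logger = factory.CreateLogger("SomeCategory");
logger.LogInformation("This should show up in the xUnit test output");
```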
And register it inside the test harness like so:
```csharp
public class leader_pinned_listener : IAsyncDisposable
{
    private readonly ITestOutputHelper _output;

    public leader_pinned_listener(ITestOutputHelper output)
    {
        _output = output;
    }

    private async Task<IHost> startHost()
    {
        await dropSchemaAsync();

        var host = await Host.CreateDefaultBuilder()
            .UseWolverine(opts =>
            {
                // This is where I'm adding in the custom ILoggerProvider
                opts.Services.AddSingleton<ILoggerProvider>(new OutputLoggerProvider(_output));

                // More configuration that isn't germane...
            }).StartAsync();

        return host;
    }

    // The rest of the harness, including DisposeAsync(), is elided...
}
```
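If you’d rather keep the logging registration out of the Wolverine configuration block, the same provider can be attached through the host’s logging pipeline instead. This is an equivalent sketch, assuming the same OutputLoggerProvider from above:

```csharp
var host = await Host.CreateDefaultBuilder()
    .ConfigureLogging(logging =>
    {
        // Same effect as the AddSingleton<ILoggerProvider>() registration above
        logging.AddProvider(new OutputLoggerProvider(_output));
    })
    .UseWolverine(opts =>
    {
        // Wolverine configuration as before...
    }).StartAsync();
```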
Hey, it’s crude, but the point here is that this kind of gnarly integration testing, especially with a lot of asynchronous behavior, is a lot easier to get through when you have more insight into how the code you’re testing is actually behaving.