Welcome Guest! To enable all features please Login or Register.

Notification

Icon
Error

Unstable Test Generation
Albatross
#1 Posted : Wednesday, February 20, 2019 3:21:27 PM(UTC)
Rank: Newbie

Groups: Registered
Joined: 2/20/2019(UTC)
Posts: 6
Location: United Kingdom

Thanks: 1 times
Having installed version 3.23.0.10 I now get the following message:

"NCrunch is unable to safely execute tests in this assembly because it contains unstable test generation. A previous discovery run over this assembly returned 2627 test cases, but when preparing NUnit to execute tests, there are now 2636 test cases. Usually this is caused by generating tests using TestCaseSource using random, inconsistent or otherwise unstable data."

In this case I am not using TestCaseSource and would appreciate help in resolving the problem.

I have many test files that look like this:

using NUnit.Framework;

namespace BS4Library.DataLayerTests.HotelUpdates.GetHotelUpdatesFromTours
{
    /// <summary>
    /// Statically generated fixture for test-data set 305. Every scenario is
    /// implemented in the <see cref="GenericTests"/> base class; this class only
    /// supplies the data-set number, so all tests here are discovered statically.
    /// </summary>
    [NCrunch.Framework.ExclusivelyUses("ASCIntegration")]
    [TestFixture]
    public class Tests305 : GenericTests
    {
        // Identifies which test-data set the inherited scenarios run against.
        private const int TestNumber = 305;

        [Test]
        public void TestWithoutPackages() => WithoutPackages(TestNumber);

        [Test]
        public void TestPackages() => Packages(TestNumber);

        [Test]
        public void TestMealPlans() => MealPlans(TestNumber);

        [Test]
        public void TestQuoteTourClasses() => QuoteTourClasses(TestNumber);

        [Test]
        public void TestDepartments() => Departments(TestNumber);

        [Test]
        public void TestParams() => Params(TestNumber);
    }
}

They inherit from:

using System;
using System.Linq;
using ASCLibrary.Security.BusinessLayer;
using ATG.Constants;
using BS4Library.DataLayerTests.Setup;
using NUnit.Framework;

namespace BS4Library.DataLayerTests.HotelUpdates.GetHotelUpdatesFromTours
{
    /// <summary>
    /// Base fixture for the GetHotelUpdatesFromTours integration tests. Derived
    /// fixtures (e.g. Tests305) call the internal scenario methods with their own
    /// test-data number. One-time setup builds the security session, thread session
    /// and data accessor shared by all scenarios; <see cref="IDisposable"/> releases
    /// them when NUnit disposes the fixture.
    /// NOTE(review): AssertionHelper is deprecated in later NUnit 3 releases —
    /// consider migrating Expect(...) calls to Assert.That(...).
    /// </summary>
    [NCrunch.Framework.ExclusivelyUses("ASCIntegration")]
    [TestFixture]
    public class GenericTests : AssertionHelper, IDisposable
    {
        private const string LocalHost = "localHost";

        private SecuritySession _ses;
        private ThreadSession _myThread;
        private DataLayer.HotelUpdates.HotelUpdatesDataAccessor _da;
        private SetupHotelUpdateLines _testing;
        private bool _disposed;

        #region Tests

        /// <summary>Checks hotel update lines generated with no package filter.</summary>
        /// <param name="testNumber">Test-data set supplied by the derived fixture.</param>
        internal void WithoutPackages(int testNumber)
        {
            // arrange
            using (var testing = new TestingSetup())
            {
                // act
                foreach (var test in _testing.HotelUpdateLinesPackages(Constants.HotelUpdateLineTestData.FromTour, null, testNumber))
                {
                    var records = _da.GetHotelUpdatesFromTours(test.Key, SetupHotelUpdateLines.CancelDays);

                    // assert
                    testing.CheckHotelUpdateLines(records.ToList(), test.Value);
                }
            }
        }

        /// <summary>Checks hotel update lines for each configured package.</summary>
        /// <param name="testNumber">Test-data set supplied by the derived fixture.</param>
        internal void Packages(int testNumber)
        {
            // arrange
            using (var testing = new TestingSetup())
            {
                foreach (var package in _testing.Packages)
                {
                    // act
                    foreach (var test in _testing.HotelUpdateLinesPackages(Constants.HotelUpdateLineTestData.FromTour, package, testNumber))
                    {
                        var records = _da.GetHotelUpdatesFromTours(test.Key, SetupHotelUpdateLines.CancelDays);

                        // assert
                        testing.CheckHotelUpdateLines(records.ToList(), test.Value);
                    }
                }
            }
        }

        /// <summary>Checks hotel update lines for each configured meal plan.</summary>
        /// <param name="testNumber">Test-data set supplied by the derived fixture.</param>
        internal void MealPlans(int testNumber)
        {
            // arrange
            using (var testing = new TestingSetup())
            {
                foreach (var mealPlan in _testing.MealPlans)
                {
                    // act
                    foreach (var test in _testing.HotelUpdateLinesMealPlans(Constants.HotelUpdateLineTestData.FromTour, mealPlan, testNumber))
                    {
                        var records = _da.GetHotelUpdatesFromTours(test.Key, SetupHotelUpdateLines.CancelDays);

                        // assert
                        testing.CheckHotelUpdateLines(records.ToList(), test.Value);
                    }
                }
            }
        }

        /// <summary>Checks hotel update lines for each configured quote tour class.</summary>
        /// <param name="testNumber">Test-data set supplied by the derived fixture.</param>
        internal void QuoteTourClasses(int testNumber)
        {
            // arrange
            using (var testing = new TestingSetup())
            {
                foreach (var tourClass in _testing.QuoteTourClasses)
                {
                    // act
                    foreach (var test in _testing.HotelUpdateLinesTourClasses(Constants.HotelUpdateLineTestData.FromTour, tourClass, testNumber))
                    {
                        var records = _da.GetHotelUpdatesFromTours(test.Key, SetupHotelUpdateLines.CancelDays);

                        // assert
                        testing.CheckHotelUpdateLines(records.ToList(), test.Value);
                    }
                }
            }
        }

        /// <summary>Checks hotel update lines for each configured department.</summary>
        /// <param name="testNumber">Test-data set supplied by the derived fixture.</param>
        internal void Departments(int testNumber)
        {
            // arrange
            // (A redundant extra brace block that differed from the sibling
            // scenario methods has been removed; behavior is unchanged.)
            using (var testing = new TestingSetup())
            {
                foreach (var department in _testing.Departments)
                {
                    // act
                    foreach (var test in _testing.HotelUpdateLinesDepartments(Constants.HotelUpdateLineTestData.FromTour, department, testNumber))
                    {
                        var records = _da.GetHotelUpdatesFromTours(test.Key, SetupHotelUpdateLines.CancelDays);

                        // assert
                        testing.CheckHotelUpdateLines(records.ToList(), test.Value);
                    }
                }
            }
        }

        /// <summary>Checks hotel update lines for each configured parameter set.</summary>
        /// <param name="testNumber">Test-data set supplied by the derived fixture.</param>
        internal void Params(int testNumber)
        {
            // arrange
            using (var testing = new TestingSetup())
            {
                foreach (var param in _testing.Params)
                {
                    // act
                    foreach (var test in _testing.HotelUpdateLinesParams(Constants.HotelUpdateLineTestData.FromTour, param, testNumber))
                    {
                        var records = _da.GetHotelUpdatesFromTours(test.Key, SetupHotelUpdateLines.CancelDays);

                        // assert
                        testing.CheckHotelUpdateLines(records.ToList(), test.Value);
                    }
                }
            }
        }

        #endregion

        /// <summary>
        /// One-time fixture setup: clears down existing BS4 data, then builds the
        /// security/thread sessions and the data accessor used by every scenario.
        /// </summary>
        [OneTimeSetUp]
        public void SetupData()
        {
            // Start from a clean slate before seeding fresh test data.
            ClearDownData();
            _testing = new SetupHotelUpdateLines();
            _ses = new SecuritySession(LocalHost, "ASCTest");
            Expect(_ses, Is.Not.Null);
            Expect(_ses.Environment, Is.Not.Null);
            _myThread = new ThreadSession(_ses, "CreateEditAssignedRole");
            _da = new DataLayer.HotelUpdates.HotelUpdatesDataAccessor(_myThread, false);
            Expect(_da, Is.Not.Null);
        }

        /// <summary>
        /// One-time fixture teardown: removes the BS4 test data. Also invoked at the
        /// start of <see cref="SetupData"/> to guarantee a clean starting state.
        /// </summary>
        [OneTimeTearDown]
        public static void ClearDownData()
        {
            TestingSetup.ClearBS4Data();
        }

        #region Implementation of IDisposable

        /// <summary>
        /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
        /// </summary>
        /// <filterpriority>2</filterpriority>
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        #endregion

        #region ProtectedMethods

        /// <summary>
        /// Free the resources of the class and alter which fields get disposed.
        /// </summary>
        /// <param name="cleanManaged">Whether to dispose managed objects as well as native ones.</param>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2213:DisposableFieldsShouldBeDisposed", MessageId = "_myThread")]
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2213:DisposableFieldsShouldBeDisposed", MessageId = "_testing")]
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2213:DisposableFieldsShouldBeDisposed", MessageId = "_ses")]
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2213:DisposableFieldsShouldBeDisposed", MessageId = "_da")]
        protected virtual void Dispose(bool cleanManaged)
        {
            // Idempotent: subsequent calls are no-ops.
            if (_disposed)
                return;

            if (cleanManaged)
            {
                _ses?.Dispose();
                _da?.Dispose();
                _testing?.Dispose();
                _myThread?.Dispose();
            }

            _disposed = true;
        }

        #endregion
    }
}

Currently only one of the test files is throwing the error. If all of them had the same error I may be able to understand it.

The problem does not occur in 3.22.0.1; the release notes for 3.23.0.10 indicate that this is an extra check introduced in the newer version.

I appreciate that this may not be the best way to run tests, but my main priority at this stage is to resolve the error.
Remco
#2 Posted : Wednesday, February 20, 2019 8:39:15 PM(UTC)
Rank: NCrunch Developer

Groups: Administrators
Joined: 4/16/2011(UTC)
Posts: 5,982

Thanks: 781 times
Was thanked: 1008 time(s) in 960 post(s)
Hi, thanks for sharing this issue.

The problem here is that when NCrunch asks NUnit for a list of tests, it's getting a different result even though the assembly is exactly the same.

It isn't possible for NCrunch to know specifically which tests are at fault here. It also isn't possible for NCrunch itself to narrow this down to a source file. The code you've pasted above cannot be responsible for this problem because all the tests in the file are statically generated. There must be something else in this particular project that is causing this.

Run a search on the project for TestCaseSource, ValueSource, DatapointSource. These are three mechanisms in NUnit that I know can generate tests dynamically. Any tests you have using these structures are suspect. If the problem is occurring consistently enough, you may be able to turn on NCrunch logging and cross-check the discovery XML returned by NUnit inside NCrunch's trace logs to try and find which differences are triggering the error. It's probably easier to solve the problem deductively by deactivating parts of your test suite using NUnit's Ignore attribute.

You may be tempted to roll back to an earlier version of NCrunch to avoid this error. I strongly advise you don't do this. The error was introduced to alert you to a very serious problem with your test suite. Earlier versions of NCrunch would end up matching the wrong tests with the wrong results (because the test lists are different) and really crazy things would happen. When this problem exists, no result from your test suite can be trusted.
2 users thanked Remco for this useful post.
Albatross on 2/21/2019(UTC), MendelMonteiro on 5/31/2019(UTC)
Albatross
#3 Posted : Thursday, February 21, 2019 3:52:05 PM(UTC)
Rank: Newbie

Groups: Registered
Joined: 2/20/2019(UTC)
Posts: 6
Location: United Kingdom

Thanks: 1 times
Thank you for the pointer on this.

I have removed some of the places where TestCaseSource is used in tests and this appears to have solved the problem.

I am not sure why the ones I changed have solved the problem, but am pleased that all appears to be running now.

Thank you.
ericschmidt
#4 Posted : Monday, February 24, 2020 7:19:08 PM(UTC)
Rank: Member

Groups: Registered
Joined: 12/30/2016(UTC)
Posts: 16
Location: United States of America

Was thanked: 1 time(s) in 1 post(s)
You listed TestCaseSource, ValueSource or DataPointSource, but could using regular TestCase also lead to this? I am currently writing tests for a parser object and started getting this error as well but I'm only using the TestCase attribute and there's no dynamic generation for the data going in, but it happens to be similar across multiple tests.
Remco
#5 Posted : Monday, February 24, 2020 10:56:25 PM(UTC)
Rank: NCrunch Developer

Groups: Administrators
Joined: 4/16/2011(UTC)
Posts: 5,982

Thanks: 781 times
Was thanked: 1008 time(s) in 960 post(s)
ericschmidt;14468 wrote:
You listed TestCaseSource, ValueSource or DataPointSource, but could using regular TestCase also lead to this? I am currently writing tests for a parser object and started getting this error as well but I'm only using the TestCase attribute and there's no dynamic generation for the data going in, but it happens to be similar across multiple tests.


I personally can't think of a way to make TestCases do this, but that doesn't mean it's impossible. Under the hood, these are very complex constructions.

The main issue is that NCrunch is receiving a different number of tests on repeated discovery runs over the same test assembly. Usually, this is caused by TestCaseSource. It might be possible to narrow down the point of inconsistency by removing or commenting out blocks of your test suite.
Albatross
#7 Posted : Tuesday, February 25, 2020 7:49:30 AM(UTC)
Rank: Newbie

Groups: Registered
Joined: 2/20/2019(UTC)
Posts: 6
Location: United Kingdom

Thanks: 1 times
As an update to my original post:

Even though I have removed all references to TestCaseSource I occasionally get this issue. When it happens I rename the tests that fail by adding 'X' to the end of the test name and then remove the 'X' when they have passed.

Doing this appears to resolve the issue until the next time that a full rebuild and run of all of the tests is triggered.

It is worth noting that the tests that fail with this message appear to be random - not the same tests each time. Also, it is not on every full build and run that this happens.
Remco
#8 Posted : Tuesday, February 25, 2020 8:37:08 AM(UTC)
Rank: NCrunch Developer

Groups: Administrators
Joined: 4/16/2011(UTC)
Posts: 5,982

Thanks: 781 times
Was thanked: 1008 time(s) in 960 post(s)
I recommend setting your Log Verbosity to detailed, then examining and comparing the trace logs that can be extracted from the discovery and test execution tasks inside the processing queue.

When NCrunch reports this issue, it does so by comparing test counts.

So it will discover all the tests in your assembly, and it will take note of the number of tests in the assembly when it does this.

Later, when it goes to execute a batch of tests in the assembly, it performs a discovery step to build the test suite inside the execution process. If the number of reported tests in the suite does not match the number of tests originally found during test discovery, this error will be shown. So the only way you're going to get this error is if you have inconsistent results during test discovery. The trace logs are the primary source of truth to this, as they include the actual XML that NUnit3 reports back to the test runner. If you can compare the XML reported by NUnit during discovery with the XML reported in the test execution process, you should hopefully be able to work out what the difference is between these sets of data and find the inconsistency.

I wanted to implement a diagnostic feature for NCrunch to do the above automatically, but it proved to be a ton of work to implement without introducing some very ugly performance issues.
bartj
#9 Posted : Tuesday, March 17, 2020 9:32:27 PM(UTC)
Rank: Member

Groups: Registered
Joined: 12/4/2013(UTC)
Posts: 19
Location: New Zealand

Thanks: 2 times
Was thanked: 1 time(s) in 1 post(s)
Are you able to give us some pointers on where we can find the information we need in the logs? I have found the output XML from NUnit, which states the test count and discovered tests, but can't seem to find anything else useful in the logs to compare it against in order to find where the discrepancy is coming from.

Note that we typically hit this problem when using the NCrunch Reset feature.
Remco
#10 Posted : Tuesday, March 17, 2020 11:51:37 PM(UTC)
Rank: NCrunch Developer

Groups: Administrators
Joined: 4/16/2011(UTC)
Posts: 5,982

Thanks: 781 times
Was thanked: 1008 time(s) in 960 post(s)
Here's a copy/paste of an extract from a log that contains NUnit's discovery data:

[12:49:18.8928-LocalAnalysisTask-43] Calling into task runner to analyse target assembly: F:\Workspaces\992\1\NUnitTestProject3\bin\Debug\netcoreapp2.2\NUnitTestProject3.dll
Process 10352: 12:49:18.9112 [4] - Loading tests into NUnit3

Process 10352: 12:49:18.9636 [4] - Exploring tests using NUnit3

Process 10352: 12:49:18.9722 [4] - <test-suite type="Assembly" id="1002" name="NUnitTestProject3.dll" fullname="NUnitTestProject3.dll" runstate="Runnable" testcasecount="1"><properties><property name="_PID" value="10352" /><property name="_APPDOMAIN" value="nCrunch.TaskRunner.DotNetCore.20.x64" /></properties><test-suite type="TestSuite" id="1003" name="Tests" fullname="Tests" runstate="Runnable" testcasecount="1"><test-suite type="TestFixture" id="1000" name="Tests" fullname="Tests.Tests" classname="Tests.Tests" runstate="Runnable" testcasecount="1"><test-case id="1001" name="Test1" fullname="Tests.Tests.Test1" methodname="Test1" classname="Tests.Tests" runstate="Runnable" seed="1819981412" /></test-suite></test-suite></test-suite>

Process 10352: 12:49:18.9796 [4] - Discovered test: Tests.Tests.Test1

Process 10352:


The <test-suite> tag is the root of an XML structure containing all data obtained from NUnit during a discovery run. If you use a comparison tool like KDiff3 to check this against the output of another discovery run, it can help to highlight the difference causing your problem.
bobby_bt
#11 Posted : Friday, March 27, 2020 8:45:57 PM(UTC)
Rank: Newbie

Groups: Registered
Joined: 3/27/2020(UTC)
Posts: 2
Location: United States of America

Since we upgraded our NCrunch licence we ran into this. I just wanted to mention that we had tests with TestCaseSourceAttribute applied, but the TestCaseData collections were defined as static IEnumerable property getters containing a simple list of yield return statements. So it shouldn't ever produce an unstable number of TestCaseData items to iterate through. What we did have was the TestFixture as a nested class defined in a partial class (the nested TestFixtures in the partial were looking at the same SUT but their code had no relation to each other; it was just for the sake of the naming hierarchy).

So I just took the partial class defs wrapping the nested test cases out and that error went away. If you have nested TestFixtures in a partial using TestCaseSources... try breaking them out.

It looked something like this (this code however doesn't seem to trigger it):

using NUnit.Framework;
using System.Collections;

namespace SomeNamespace
{
    // Partial container used purely for naming hierarchy; the nested fixtures
    // inside it are unrelated to each other.
    public partial class ContainerClass
    {
        /// <summary>First nested fixture; runs only when explicitly selected.</summary>
        [TestFixture]
        [Explicit]
        public class Nested1
        {
            [Test]
            public void SomeTest()
            {
                //...
            }
        }
    }
}

namespace SomeNamespace
{
    // Second half of the partial container; holds the parameterized fixture.
    public partial class ContainerClass
    {
        /// <summary>
        /// Nested fixture whose tests are fed from the static
        /// <see cref="ParameterSetTestData"/> properties; runs only when
        /// explicitly selected.
        /// </summary>
        [TestFixture]
        [Explicit]
        public class Nested2
        {
            [Test]
            [TestCaseSource(typeof(ParameterSetTestData), "Set1TestCases")]
            public void ParameterizedSet1Test(string param1, string param2, string param3, string param4)
            {
                //...
            }

            [Test]
            [TestCaseSource(typeof(ParameterSetTestData), "Set2TestCases")]
            public void ParameterizedSet2Test(string param1, string param2, string param3, string param4, string param5)
            {
                //...
            }
        }
    }

    /// <summary>
    /// Static, fixed test-case data: each property yields the same cases on
    /// every enumeration, so the generated test count is stable.
    /// </summary>
    public class ParameterSetTestData
    {
        // Four-parameter cases for ParameterizedSet1Test.
        public static IEnumerable Set1TestCases
        {
            get
            {
                yield return new TestCaseData("value1", "value2", "value3", "value4");
                yield return new TestCaseData("value5", "value6", "value7", "value8");
            }
        }

        // Five-parameter cases for ParameterizedSet2Test.
        public static IEnumerable Set2TestCases
        {
            get
            {
                yield return new TestCaseData("value1", "value2", "value3", "value4", "value5");
                yield return new TestCaseData("value6", "value7", "value8", "value9", "value0");
            }
        }
    }
}
Remco
#12 Posted : Friday, March 27, 2020 11:33:27 PM(UTC)
Rank: NCrunch Developer

Groups: Administrators
Joined: 4/16/2011(UTC)
Posts: 5,982

Thanks: 781 times
Was thanked: 1008 time(s) in 960 post(s)
Thanks for sharing this. Out of interest, were the declarations for your parameterised data being placed in a different assembly to the one declaring the tests themselves? At first look, this is about the only way I could see such a situation generating an inconsistent number of tests. The discovery system assumes that a test assembly will always return the same number of tests even if one of its dependencies changes, unless the 'Copy referenced assemblies to workspace' setting is on.
bobby_bt
#13 Posted : Monday, March 30, 2020 11:27:53 AM(UTC)
Rank: Newbie

Groups: Registered
Joined: 3/27/2020(UTC)
Posts: 2
Location: United States of America

Remco;14562 wrote:
Thanks for sharing this. Out of interest, were the declarations for your parameterised data being placed in a different assembly to the one declaring the tests themselves? At first look, this is about the only way I could see such a situation generating an inconsistent number of tests. The discovery system assumes that a test assembly will always return the same number of tests even if one of its dependencies changes, unless the 'Copy referenced assemblies to workspace' setting is on.

No, same assembly. And the shared setting for 'Copy referenced assemblies to workspace' is off. Unfortunately the code we were experiencing this under is proprietary; I'd love to duplicate the issue in a dummy project. I will say that since the change the test suite not only runs in its entirety, it is also much faster.
Users browsing this topic
Guest
Forum Jump  
You cannot post new topics in this forum.
You cannot reply to topics in this forum.
You cannot delete your posts in this forum.
You cannot edit your posts in this forum.
You cannot create polls in this forum.
You cannot vote in polls in this forum.

YAF | YAF © 2003-2011, Yet Another Forum.NET
This page was generated in 0.194 seconds.