Skip to main content

Overview

Ryujinx uses comprehensive testing to ensure emulation accuracy and prevent regressions. Tests are written using NUnit and run automatically in CI.
All test projects are in the src/ directory with the Ryujinx.Tests* naming pattern.

Test Project Structure

Ryujinx has several test projects:
| Project | Purpose | Location |
| --- | --- | --- |
| Ryujinx.Tests | Core CPU/GPU/HLE tests | src/Ryujinx.Tests/ |
| Ryujinx.Tests.Memory | Memory management tests | src/Ryujinx.Tests.Memory/ |
| Ryujinx.Tests.Unicorn | CPU emulation validation | src/Ryujinx.Tests.Unicorn/ |

Running Tests

Run All Tests

# Run all tests
dotnet test

# Run with verbose output
dotnet test -v detailed

# Run in Release configuration
dotnet test -c Release

Run Specific Tests

# Run tests matching a pattern
dotnet test --filter "FullyQualifiedName~ShaderCache"

# Run specific test method
dotnet test --filter "FullyQualifiedName=Ryujinx.Tests.Cpu.CpuTestAlu.Add_S_64bit"

Test Structure

From src/Ryujinx.Tests/Cpu/CpuTest.cs:
using NUnit.Framework;

namespace Ryujinx.Tests.Cpu
{
    [TestFixture]
    public class CpuTest
    {
        private ExecutionContext _context;
        private UnicornAArch64 _unicornEmu;
        
        [SetUp]
        public void Setup()
        {
            // Initialize test environment
            _context = CpuContext.CreateExecutionContext();
            _unicornEmu = new UnicornAArch64();
        }
        
        [TearDown]
        public void Teardown()
        {
            // Clean up resources
            _context.Dispose();
            _unicornEmu.Dispose();
        }
        
        [Test]
        public void Add_S_64bit()
        {
            // Test implementation
        }
    }
}

Key Components

1. **TestFixture attribute** — marks a class as containing tests
[TestFixture]
public class MyTests { }
2. **SetUp method** — runs before each test to initialize state
[SetUp]
public void Setup() { }
3. **TearDown method** — runs after each test to clean up
[TearDown]
public void Teardown() { }
4. **Test methods** — individual test cases
[Test]
public void MyTest() { }

Writing CPU Tests

CPU tests validate ARM instruction emulation against Unicorn Engine.

Example CPU Test

From src/Ryujinx.Tests/Cpu/CpuTestAlu.cs:
[Test]
public void Add_S_64bit([ValueSource(nameof(TestValues))] ulong x0,
                        [ValueSource(nameof(TestValues))] ulong x1)
{
    // Encode ARM instruction: ADD X2, X0, X1
    uint opcode = 0x8B010002; 
    
    // Execute and compare with Unicorn
    SingleOpcode(opcode, x0: x0, x1: x1);
    
    CompareAgainstUnicorn();
}

CPU Test Helpers

From src/Ryujinx.Tests/Cpu/CpuTest.cs:186-224:
protected ExecutionContext SingleOpcode(
    uint opcode,
    ulong x0 = 0, ulong x1 = 0,
    V128 v0 = default,
    bool runUnicorn = true)
{
    Opcode(opcode);
    Opcode(0xD65F03C0); // RET
    SetContext(x0, x1, v0: v0);
    ExecuteOpcodes(runUnicorn);
    return GetContext();
}
protected void CompareAgainstUnicorn(
    Fpsr fpsrMask = Fpsr.None,
    FpSkips fpSkips = FpSkips.None)
{
    // Compare all registers
    Assert.That(_context.GetX(0), Is.EqualTo(_unicornEmu.X[0]));
    Assert.That(_context.GetX(1), Is.EqualTo(_unicornEmu.X[1]));
    // ... all 32 registers
}
protected void SetWorkingMemory(ulong offset, byte[] data)
{
    _memory.Write(DataBaseAddress + offset, data);
    _unicornEmu.MemoryWrite(DataBaseAddress + offset, data);
    _usingMemory = true;
}

Writing Memory Tests

Memory tests validate the memory management system.

Example Memory Test

From src/Ryujinx.Tests.Memory/TrackingTests.cs:
[Test]
public void ReadWriteTracking()
{
    const ulong MemorySize = 0x1000;
    const ulong TestValue = 0x12345678;
    
    MemoryBlock memory = new MemoryBlock(MemorySize);
    
    // Setup tracking
    var tracking = new RegionHandle(memory, 0, MemorySize);
    
    // Write and verify tracking
    memory.Write(0, TestValue);
    Assert.IsTrue(tracking.Dirty);
    
    // Reprotect and verify
    tracking.Reprotect();
    Assert.IsFalse(tracking.Dirty);
    
    memory.Dispose();
}

Writing Audio/Renderer Tests

From src/Ryujinx.Tests/Audio/Renderer/:
using NUnit.Framework;

[TestFixture]
public class VoiceInfoTests
{
    [Test]
    public void TestVoiceInfoInitialization()
    {
        var voiceInfo = new VoiceInfo();
        
        Assert.AreEqual(0, voiceInfo.Volume);
        Assert.AreEqual(VoicePlayState.Stopped, voiceInfo.PlayState);
    }
}

Test Data

Value Sources

Generate test data using ValueSource:
private static ulong[] TestValues => new ulong[]
{
    0x0000000000000000,
    0x0000000000000001,
    0x7FFFFFFFFFFFFFFF,
    0xFFFFFFFFFFFFFFFF,
};

[Test]
public void MyTest([ValueSource(nameof(TestValues))] ulong value)
{
    // Test runs once for each value
}

Random Test Data

From src/Ryujinx.Tests/Cpu/CpuTest.cs:531-541:
protected static ushort GenNormalH()
{
    uint rnd;
    do
        rnd = TestContext.CurrentContext.Random.NextUShort();
    while ((rnd & 0x7C00u) == 0u || (~rnd & 0x7C00u) == 0u);
    return (ushort)rnd;
}

protected static uint GenNormalS()
{
    uint rnd;
    do
        rnd = TestContext.CurrentContext.Random.NextUInt();
    while ((rnd & 0x7F800000u) == 0u || (~rnd & 0x7F800000u) == 0u);
    return rnd;
}

Assertions

NUnit Assertions

Assert.That(actual, Is.EqualTo(expected));
Assert.AreEqual(expected, actual);
Assert.AreNotEqual(unexpected, actual);

Floating-Point Comparisons

// ULP (Units in Last Place) tolerance
Assert.That(actual, Is.EqualTo(expected).Within(1).Ulps);

// Percent tolerance
Assert.That(actual, Is.EqualTo(expected).Within(0.01).Percent);

CI Testing

From .github/workflows/build.yml:56-62:
- name: Test
  uses: TSRBerry/unstable-commands@v1
  with:
    commands: dotnet test --no-build -c "${{ matrix.configuration }}"
    timeout-minutes: 10
    retry-codes: 139
  if: matrix.platform.name != 'linux-arm64'

CI Test Characteristics

Automatic Execution

Tests run on every PR and commit

Multi-Platform

Tests run on Windows, Linux, and macOS

Timeout Protection

10-minute timeout prevents hanging tests

Retry on Crash

Retry on code 139 (segfault) for flaky tests

Test Best Practices

DO

1. **Test one thing per test** — each test should verify a single behavior
[Test]
public void Add_WithPositiveNumbers_ReturnsSum() { }

[Test]
public void Add_WithNegativeNumbers_ReturnsSum() { }
2. **Use descriptive names** — test names should describe what is being tested
[Test]
public void ShaderCache_GetProgram_WithInvalidId_ReturnsNull() { }
3. **Arrange-Act-Assert pattern** — structure each test as setup, execution, then verification

[Test]
public void MyTest()
{
    // Arrange
    var sut = new SystemUnderTest();
    
    // Act
    var result = sut.DoSomething();
    
    // Assert
    Assert.That(result, Is.EqualTo(expected));
}
4. **Clean up resources** — use TearDown or using statements
[TearDown]
public void Teardown()
{
    _context?.Dispose();
    _memory?.Dispose();
}

DON’T

  • Don’t test implementation details, test behavior
  • Don’t make tests depend on each other
  • Don’t use random data without a seed (makes failures unreproducible)
  • Don’t ignore failing tests - fix them or remove them

Debugging Tests

Debug a Single Test

Right-click test method → Debug Test(s)

Test Output

[Test]
public void MyTest()
{
    // Output appears in test results
    TestContext.WriteLine("Debug information");
    TestContext.Out.WriteLine($"Value: {someValue}");
}

Code Coverage

Generate Coverage Report

# Install coverlet
dotnet tool install -g coverlet.console

# Run tests with coverage
dotnet test /p:CollectCoverage=true /p:CoverletOutputFormat=opencover

# Generate HTML report
reportgenerator -reports:coverage.opencover.xml -targetdir:coverage-report

Coverage Tools

Performance Testing

For micro-benchmarks, see the Performance guide.

Simple Performance Test

[Test]
public void PerformanceTest()
{
    var sw = Stopwatch.StartNew();
    
    // Operation to test
    for (int i = 0; i < 1000000; i++)
    {
        DoOperation();
    }
    
    sw.Stop();
    
    // Assert performance requirement
    Assert.That(sw.ElapsedMilliseconds, Is.LessThan(1000),
        "Operation took too long");
}

Common Test Patterns

Parameterized Tests

[TestCase(0, 0, 0)]
[TestCase(1, 2, 3)]
[TestCase(-1, 1, 0)]
public void Add_ReturnsSum(int a, int b, int expected)
{
    Assert.That(a + b, Is.EqualTo(expected));
}

Combinatorial Tests

[Test]
public void TestAllCombinations(
    [Values(1, 2, 3)] int x,
    [Values("a", "b")] string y)
{
    // Runs 6 times (3 * 2 combinations)
}

Sequential Tests

[Test, Sequential]
public void TestPairs(
    [Values(1, 2, 3)] int x,
    [Values(10, 20, 30)] int y)
{
    // Runs 3 times: (1,10), (2,20), (3,30)
}

Test Configuration

From src/Ryujinx.Tests/Ryujinx.Tests.csproj:21-24:
<ItemGroup>
  <PackageReference Include="Microsoft.NET.Test.Sdk" />
  <PackageReference Include="NUnit" />
  <PackageReference Include="NUnit3TestAdapter" />
</ItemGroup>

Skipping Tests

// Skip always
[Test, Ignore("Not yet implemented")]
public void FutureTest() { }

// Skip conditionally
[Test]
public void PlatformSpecificTest()
{
    if (!OperatingSystem.IsWindows())
    {
        Assert.Ignore("Windows only test");
    }
}

Next Steps

Debugging

Debug failing tests

Performance

Benchmark and optimize

PR Guide

Submit your tests

Build docs developers (and LLMs) love