Toxicity AI Integration
Wolfix.Server integrates with a Toxicity AI API to automatically detect and prevent inappropriate content in product titles, descriptions, and reviews.

Overview

The Toxicity API provides:
  • Text analysis - Detect toxic, offensive, or inappropriate content
  • Real-time moderation - Check content before saving to database
  • Product safety - Ensure marketplace quality standards
  • Review filtering - Prevent abusive or harmful reviews

Configuration

Docker Container

The Toxicity API runs as a Docker container orchestrated by .NET Aspire:
Wolfix.AppHost/AppHost.cs
// Run the toxicity model as a sidecar container; Aspire picks the host port
// and exposes it through the named "http" endpoint.
var toxicApi = builder.AddContainer("toxic-api", "iluhahr/toxic-ai-api:latest")
    .WithHttpEndpoint(targetPort: 8000);

// Hand the container's resolved endpoint to the API via an environment
// variable, and delay API startup until the container is reachable.
builder.AddProject<Projects.Wolfix_API>("api")
    .WithEnvironment("TOXIC_API_BASE_URL", toxicApi.GetEndpoint("http"))
    .WaitFor(toxicApi);

Environment Variable

The API URL is configured via environment variable:
.env
TOXIC_API_BASE_URL=http://localhost:8000

Module Registration

The Catalog module registers the toxicity service:
Wolfix.API/Extensions/WebApplicationBuilderExtension.cs
/// <summary>
/// Wires the Catalog module into the host, resolving the toxicity API base
/// URL from configuration so a missing value fails fast at startup.
/// </summary>
private static WebApplicationBuilder AddCatalogModule(
    this WebApplicationBuilder builder, 
    string connectionString)
{
    var toxicApiBaseUrl = builder.Configuration.GetOrThrow("TOXIC_API_BASE_URL");
    builder.Services.AddCatalogModule(connectionString, toxicApiBaseUrl);
    return builder;
}
Catalog.Endpoints/Extensions/ServiceCollectionExtensions.cs
/// <summary>
/// Registers Catalog-module services, including the typed HTTP client used
/// by <see cref="IToxicityService"/> to reach the toxicity API.
/// </summary>
public static IServiceCollection AddCatalogModule(
    this IServiceCollection services,
    string connectionString,
    string toxicApiBaseUrl)
{
    // Typed client: DI injects a correctly-configured HttpClient into
    // ToxicityService, so the base address and timeout live in one place.
    services.AddHttpClient<IToxicityService, ToxicityService>(httpClient =>
    {
        httpClient.BaseAddress = new Uri(toxicApiBaseUrl);
        httpClient.Timeout = TimeSpan.FromSeconds(30);
    });

    // Other registrations...

    return services;
}

Implementation

Toxicity Service

The service sends text to the API for analysis:
Catalog.Infrastructure/Services/ToxicityService.cs
using System.Net.Http.Json;
using Catalog.Application.Contracts;
using Shared.Domain.Models;

namespace Catalog.Infrastructure.Services;

internal sealed class ToxicityService : IToxicityService
{
    private readonly HttpClient _httpClient;
    
    public ToxicityService(HttpClient httpClient)
    {
        _httpClient = httpClient;
    }
    
    /// <summary>
    /// Posts <paramref name="text"/> to the toxicity API's "check" endpoint
    /// and returns whether the API flagged it as toxic.
    /// </summary>
    /// <param name="text">The text to analyze.</param>
    /// <param name="ct">Cancellation token; caller cancellation propagates as
    /// <see cref="OperationCanceledException"/>.</param>
    /// <returns>Success with the verdict, or a failure result describing the
    /// API error.</returns>
    public async Task<Result<bool>> IsToxic(string text, CancellationToken ct)
    {
        try
        {
            var payload = new { text };

            var response = await _httpClient.PostAsJsonAsync("check", payload, ct);

            response.EnsureSuccessStatusCode();

            // API contract: a bare JSON boolean (true = toxic).
            var result = await response.Content.ReadFromJsonAsync<bool>(ct);
            
            return Result<bool>.Success(result);
        }
        catch (HttpRequestException ex)
        {
            return Result<bool>.Failure(
                $"Toxicity API error: {ex.Message}",
                HttpStatusCode.ServiceUnavailable
            );
        }
        catch (TaskCanceledException) when (!ct.IsCancellationRequested)
        {
            // HttpClient surfaces its Timeout as TaskCanceledException. The
            // filter keeps caller-requested cancellation out of this clause.
            // Previously this fell into the generic catch and was reported as
            // an internal error rather than service unavailability.
            return Result<bool>.Failure(
                "Toxicity API request timed out",
                HttpStatusCode.ServiceUnavailable
            );
        }
        catch (OperationCanceledException)
        {
            // Caller cancelled; propagate instead of fabricating a result.
            throw;
        }
        catch (Exception ex)
        {
            return Result<bool>.Failure(
                $"Unexpected error: {ex.Message}",
                HttpStatusCode.InternalServerError
            );
        }
    }
}

Service Interface

Catalog.Application/Contracts/IToxicityService.cs
/// <summary>
/// Abstraction over a content-moderation backend that classifies text as
/// toxic or acceptable.
/// </summary>
public interface IToxicityService
{
    /// <summary>
    /// Analyzes <paramref name="text"/> for toxic content.
    /// </summary>
    /// <param name="text">The text to check.</param>
    /// <param name="ct">Token to cancel the underlying request.</param>
    /// <returns>Success wrapping <c>true</c> when the text is toxic,
    /// <c>false</c> otherwise; a failure result when the check could not be
    /// performed.</returns>
    Task<Result<bool>> IsToxic(string text, CancellationToken ct);
}

Usage

Product Creation

Check product title and description before creation:
Catalog.Application/Services/ProductService.cs
/// <summary>
/// Creates a draft product for <paramref name="sellerId"/> after verifying
/// the category exists and moderating the title and description.
/// </summary>
/// <returns>The created product as a DTO with 201 Created, or a failure
/// describing why creation was rejected.</returns>
public async Task<Result<ProductDto>> CreateProductAsync(
    CreateProductDto dto, 
    Guid sellerId, 
    CancellationToken ct)
{
    // The target category must exist before any other work is done.
    bool categoryExists = await _categoryRepository.ExistsAsync(dto.CategoryId, ct);
    if (!categoryExists)
    {
        return Result<ProductDto>.Failure(
            "Category not found", 
            HttpStatusCode.NotFound
        );
    }

    // Moderate the title; a failed moderation call is propagated as-is.
    Result<bool> titleCheck = await _toxicityService.IsToxic(dto.Title, ct);
    if (titleCheck.IsFailure)
    {
        return Result<ProductDto>.Failure(titleCheck);
    }
    if (titleCheck.Value)
    {
        return Result<ProductDto>.Failure(
            "Product title contains inappropriate content",
            HttpStatusCode.BadRequest
        );
    }

    // Moderate the description the same way.
    Result<bool> descriptionCheck = await _toxicityService.IsToxic(dto.Description, ct);
    if (descriptionCheck.IsFailure)
    {
        return Result<ProductDto>.Failure(descriptionCheck);
    }
    if (descriptionCheck.Value)
    {
        return Result<ProductDto>.Failure(
            "Product description contains inappropriate content",
            HttpStatusCode.BadRequest
        );
    }

    // Domain-level validation happens inside the factory method.
    Result<Product> creation = Product.Create(
        dto.Title,
        dto.Description,
        dto.Price,
        ProductStatus.Draft,
        dto.CategoryId,
        sellerId
    );
    if (creation.IsFailure)
    {
        return Result<ProductDto>.Failure(creation);
    }

    Product product = creation.Value!;

    // Persist, then surface the new entity to the caller as a DTO.
    await _productRepository.AddAsync(product, ct);
    await _productRepository.SaveChangesAsync(ct);

    return Result<ProductDto>.Success(product.ToDto(), HttpStatusCode.Created);
}

Review Creation

Check review text before allowing it:
Catalog.Application/Services/ProductService.cs
/// <summary>
/// Adds a customer review to a product after moderating its title and text.
/// </summary>
/// <returns>Success, or a failure describing why the review was rejected.</returns>
public async Task<VoidResult> AddReviewAsync(
    Guid productId,
    AddReviewDto dto,
    Guid customerId,
    CancellationToken ct)
{
    // 1. The review must target an existing product.
    Product? product = await _productRepository.GetByIdAsync(productId, ct);
    if (product == null)
        return VoidResult.Failure("Product not found", HttpStatusCode.NotFound);
    
    // 2. Check review title for toxicity. Moderation failures are now
    //    propagated instead of silently ignored, matching CreateProductAsync;
    //    the fail-open policy belongs inside the toxicity service itself.
    Result<bool> titleToxicityResult = await _toxicityService.IsToxic(dto.Title, ct);
    if (titleToxicityResult.IsFailure)
        return VoidResult.Failure(
            titleToxicityResult.ErrorMessage!,
            HttpStatusCode.ServiceUnavailable
        );
    if (titleToxicityResult.Value)
        return VoidResult.Failure(
            "Review title contains inappropriate content",
            HttpStatusCode.BadRequest
        );
    
    // 3. Check review text for toxicity, same policy as the title.
    Result<bool> textToxicityResult = await _toxicityService.IsToxic(dto.Text, ct);
    if (textToxicityResult.IsFailure)
        return VoidResult.Failure(
            textToxicityResult.ErrorMessage!,
            HttpStatusCode.ServiceUnavailable
        );
    if (textToxicityResult.Value)
        return VoidResult.Failure(
            "Review text contains inappropriate content",
            HttpStatusCode.BadRequest
        );
    
    // 4. Domain rules (rating range, duplicates, ...) live in the entity.
    VoidResult addReviewResult = product.AddReview(
        dto.Title,
        dto.Text,
        dto.Rating,
        customerId
    );
    if (addReviewResult.IsFailure)
        return addReviewResult;
    
    // 5. Persist the updated aggregate.
    await _productRepository.UpdateAsync(product, ct);
    await _productRepository.SaveChangesAsync(ct);
    
    return VoidResult.Success();
}

API Contract

Request

POST http://localhost:8000/check
Content-Type: application/json

{
  "text": "This is the text to check"
}

Response

true  // true = toxic, false = not toxic

Error Handling

The service handles various failure scenarios:
/// <summary>
/// Checks text for toxicity, failing open (treating content as clean) when
/// the moderation API is down or times out, so legitimate users are never
/// blocked by infrastructure problems.
/// </summary>
public async Task<Result<bool>> IsToxic(string text, CancellationToken ct)
{
    try
    {
        var payload = new { text };
        var response = await _httpClient.PostAsJsonAsync("check", payload, ct);
        response.EnsureSuccessStatusCode();
        var result = await response.Content.ReadFromJsonAsync<bool>(ct);
        return Result<bool>.Success(result);
    }
    catch (HttpRequestException ex) when (ex.StatusCode == HttpStatusCode.ServiceUnavailable)
    {
        // API is down - log and allow content through
        _logger.LogWarning(ex, "Toxicity API unavailable");
        return Result<bool>.Success(false); // Fail open
    }
    catch (TaskCanceledException) when (!ct.IsCancellationRequested)
    {
        // HttpClient.Timeout elapsed. The filter is essential: without it
        // this clause also swallowed caller-requested cancellation and
        // turned an aborted request into a "not toxic" verdict.
        _logger.LogWarning("Toxicity API request timed out");
        return Result<bool>.Success(false); // Fail open
    }
    catch (OperationCanceledException)
    {
        // Caller cancelled - propagate rather than fabricate a result.
        throw;
    }
    catch (Exception ex)
    {
        // Unexpected error
        _logger.LogError(ex, "Toxicity API error");
        return Result<bool>.Failure(
            $"Content moderation failed: {ex.Message}",
            HttpStatusCode.InternalServerError
        );
    }
}
The service “fails open” (allows content through) when the API is unavailable to prevent blocking legitimate users.

Testing

Local Testing

The Toxicity API runs automatically when you start Aspire:
cd Wolfix.AppHost
dotnet run
Test with curl:
curl -X POST http://localhost:8000/check \
  -H "Content-Type: application/json" \
  -d '{"text": "This is a test"}'

Mock Service (Unit Tests)

Catalog.Tests/Application/ProductServiceTests.cs
[Fact]
public async Task CreateProduct_WithToxicTitle_ShouldFail()
{
    // Arrange: the category lookup succeeds, but moderation flags every text.
    var productRepo = new Mock<IProductRepository>();
    var categoryRepo = new Mock<ICategoryRepository>();
    var toxicity = new Mock<IToxicityService>();

    categoryRepo
        .Setup(r => r.ExistsAsync(It.IsAny<Guid>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(true);
    toxicity
        .Setup(t => t.IsToxic(It.IsAny<string>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(Result<bool>.Success(true)); // toxic content detected

    var sut = new ProductService(
        productRepo.Object,
        categoryRepo.Object,
        toxicity.Object
    );

    var dto = new CreateProductDto(
        "Offensive title",
        "Description",
        99.99m,
        Guid.NewGuid()
    );

    // Act
    var result = await sut.CreateProductAsync(dto, Guid.NewGuid(), CancellationToken.None);

    // Assert: creation is rejected with a moderation message.
    Assert.True(result.IsFailure);
    Assert.Contains("inappropriate content", result.ErrorMessage);
}

Batch Checking

Check multiple texts efficiently:
/// <summary>
/// Runs toxicity checks for all <paramref name="texts"/> concurrently and
/// returns a text-to-verdict map; texts whose check failed are omitted.
/// </summary>
public async Task<Result<Dictionary<string, bool>>> CheckMultipleAsync(
    List<string> texts, 
    CancellationToken ct)
{
    // Fire every check at once; Task.WhenAll preserves input order, so the
    // i-th outcome corresponds to the i-th text.
    Result<bool>[] outcomes = await Task.WhenAll(texts.Select(t => IsToxic(t, ct)));

    var verdicts = new Dictionary<string, bool>();
    foreach (var (text, outcome) in texts.Zip(outcomes))
    {
        if (outcome.IsSuccess)
        {
            verdicts[text] = outcome.Value;
        }
    }

    return Result<Dictionary<string, bool>>.Success(verdicts);
}

Caching Results

Cache toxicity checks to reduce API calls:
/// <summary>
/// Decorator that caches toxicity verdicts for 24 hours to cut repeated API
/// calls for identical text.
/// </summary>
public class CachedToxicityService : IToxicityService
{
    private readonly IToxicityService _inner;
    private readonly IMemoryCache _cache;
    
    public CachedToxicityService(IToxicityService inner, IMemoryCache cache)
    {
        _inner = inner;
        _cache = cache;
    }
    
    public async Task<Result<bool>> IsToxic(string text, CancellationToken ct)
    {
        // The previous key used text.GetHashCode(): a 32-bit, collision-prone
        // hash, so two different texts could share a key and return the wrong
        // verdict. A SHA-256 digest gives a collision-free key of bounded size.
        string cacheKey = "toxicity:" + Convert.ToHexString(
            System.Security.Cryptography.SHA256.HashData(
                System.Text.Encoding.UTF8.GetBytes(text)));
        
        if (_cache.TryGetValue(cacheKey, out bool cachedResult))
        {
            return Result<bool>.Success(cachedResult);
        }
        
        Result<bool> result = await _inner.IsToxic(text, ct);
        
        // Only cache definitive verdicts; failures should be retried.
        if (result.IsSuccess)
        {
            _cache.Set(cacheKey, result.Value, TimeSpan.FromHours(24));
        }
        
        return result;
    }
}

Production Deployment

Self-Hosted API

Deploy the toxicity API separately:
# Pull image
docker pull iluhahr/toxic-ai-api:latest

# Run container
docker run -d \
  --name toxic-api \
  -p 8000:8000 \
  --restart unless-stopped \
  iluhahr/toxic-ai-api:latest
Update environment variable:
TOXIC_API_BASE_URL=http://toxic-api:8000

Alternative: OpenAI Moderation API

Use OpenAI’s content moderation instead:
/// <summary>
/// IToxicityService implementation backed by OpenAI's moderation endpoint.
/// </summary>
public class OpenAIModerationService : IToxicityService
{
    private readonly HttpClient _httpClient;
    private readonly string _apiKey;
    
    // The original snippet never assigned its readonly fields; without this
    // constructor every request would dereference null.
    public OpenAIModerationService(HttpClient httpClient, string apiKey)
    {
        _httpClient = httpClient;
        _apiKey = apiKey;
    }
    
    public async Task<Result<bool>> IsToxic(string text, CancellationToken ct)
    {
        var request = new
        {
            input = text
        };
        
        using var message = new HttpRequestMessage(HttpMethod.Post, "https://api.openai.com/v1/moderations")
        {
            Content = JsonContent.Create(request),
            Headers =
            {
                { "Authorization", $"Bearer {_apiKey}" }
            }
        };
        
        var response = await _httpClient.SendAsync(message, ct);
        
        // A non-2xx response (bad key, rate limit, outage) previously fell
        // through to deserialization and produced a NullReferenceException.
        if (!response.IsSuccessStatusCode)
        {
            return Result<bool>.Failure(
                $"OpenAI moderation API returned {(int)response.StatusCode}",
                HttpStatusCode.ServiceUnavailable
            );
        }
        
        var result = await response.Content.ReadFromJsonAsync<ModerationResponse>(ct);
        
        // Guard against an empty/null body before indexing Results[0].
        if (result is null || result.Results is null)
        {
            return Result<bool>.Failure(
                "OpenAI moderation API returned an empty response",
                HttpStatusCode.ServiceUnavailable
            );
        }
        
        return Result<bool>.Success(result.Results[0].Flagged);
    }
}

Best Practices

1

Fail Open on Errors

Don’t block legitimate users when the API is down:
catch (HttpRequestException)
{
    _logger.LogWarning("Toxicity API unavailable");
    return Result<bool>.Success(false); // Allow content
}
2

Set Reasonable Timeouts

services.AddHttpClient<IToxicityService, ToxicityService>(client =>
{
    client.Timeout = TimeSpan.FromSeconds(5); // Fast timeout
});
3

Log Toxic Content

if (isToxic)
{
    _logger.LogWarning(
        "Toxic content detected from user {UserId}: {Text}",
        userId,
        text.Substring(0, 50)
    );
}
4

Provide Clear Feedback

Give users helpful error messages:
return Result<ProductDto>.Failure(
    "Your product title may contain inappropriate language. Please revise and try again."
);

Troubleshooting

API Not Responding

Issue: “Toxicity API unavailable” errors appear in the logs. Solution:
  1. Check Docker container is running: docker ps | grep toxic-api
  2. Check logs: docker logs toxic-api
  3. Verify the endpoint responds (it only accepts POST): curl -X POST http://localhost:8000/check -H "Content-Type: application/json" -d '{"text": "test"}'

False Positives

Issue: Legitimate content flagged as toxic. Solution: Implement manual review or allowlist:
private readonly HashSet<string> _allowedPhrases = new()
{
    "kill two birds",
    "shot in the dark"
};

if (_allowedPhrases.Any(phrase => text.Contains(phrase, StringComparison.OrdinalIgnoreCase)))
{
    return Result<bool>.Success(false);
}

Slow Performance

Issue: Toxicity checks slow down requests. Solution: Implement caching or async background checking.

Next Steps

Development Guide

Start building features

Stripe Payments

Payment processing

Build docs developers (and LLMs) love