using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Columns;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using BenchmarkDotNet.Reports;
using BenchmarkDotNet.Running;
using SqrtSpace.SpaceTime.Collections;
using SqrtSpace.SpaceTime.Core;
using SqrtSpace.SpaceTime.Linq;

namespace SqrtSpace.SpaceTime.Benchmarks;

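// Compares standard LINQ OrderBy against the library's external sort
// (OrderByExternal), both with its default buffer and with a √n-sized buffer.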
[MemoryDiagnoser]
[SimpleJob(RuntimeMoniker.Net60)]
[Config(typeof(Config))]
public class SortingBenchmarks
{
    private List<TestItem> _data = null!;
    private IEnumerable<TestItem> _enumerable = null!;

    [Params(1_000, 10_000, 100_000, 1_000_000)]
    public int Size { get; set; }

    [GlobalSetup]
    public void Setup()
    {
        var random = new Random(42);
        _data = Enumerable.Range(1, Size)
            .Select(i => new TestItem
            {
                Id = i,
                Value = random.Next(1000000),
                Name = $"Item{i}",
                Date = DateTime.Today.AddDays(-random.Next(365))
            })
            .ToList();

        _enumerable = _data;
    }

    [Benchmark(Baseline = true)]
    public List<TestItem> StandardLinqSort()
    {
        return _enumerable.OrderBy(x => x.Value).ToList();
    }

    [Benchmark]
    public List<TestItem> SpaceTimeExternalSort()
    {
        return _enumerable.OrderByExternal(x => x.Value).ToList();
    }

    [Benchmark]
    public List<TestItem> SpaceTimeExternalSortWithCustomBuffer()
    {
        // Buffer sized from the √n interval computed by the library
        var bufferSize = SpaceTimeCalculator.CalculateSqrtInterval(Size);
        return _enumerable.OrderByExternal(x => x.Value, bufferSize: bufferSize).ToList();
    }

    public class TestItem
    {
        public int Id { get; set; }
        public int Value { get; set; }
        public string Name { get; set; } = "";
        public DateTime Date { get; set; }
    }

    private class Config : ManualConfig
    {
        public Config()
        {
            AddColumn(StatisticColumn.Mean);
            AddColumn(StatisticColumn.StdDev);
            AddColumn(BaselineRatioColumn.RatioMean);
            AddColumn(new MemoryColumn());
            SummaryStyle = SummaryStyle.Default.WithRatioStyle(RatioStyle.Trend);
        }
    }

    private class MemoryColumn : IColumn
    {
        public string Id => nameof(MemoryColumn);
        public string ColumnName => "Memory Reduction";
        public string Legend => "Memory reduction compared to baseline";
        public UnitType UnitType => UnitType.Dimensionless;
        public bool AlwaysShow => true;
        public ColumnCategory Category => ColumnCategory.Custom;
        public int PriorityInCategory => 0;
        public bool IsNumeric => true;

        public bool IsAvailable(Summary summary) => true;
        public bool IsDefault(Summary summary, BenchmarkCase benchmarkCase) => false;

        public string GetValue(Summary summary, BenchmarkCase benchmarkCase)
        {
            // TODO: compute the memory reduction percentage relative to the baseline
            return "N/A";
        }

        public string GetValue(Summary summary, BenchmarkCase benchmarkCase, SummaryStyle style)
        {
            return GetValue(summary, benchmarkCase);
        }
    }
}

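// Compares standard LINQ GroupBy aggregation against the library's
// external grouping (GroupByExternal) over the same category-summary query.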
[MemoryDiagnoser]
[SimpleJob(RuntimeMoniker.Net60)]
public class GroupingBenchmarks
{
    private List<Transaction> _transactions = null!;

    [Params(10_000, 100_000, 1_000_000)]
    public int Size { get; set; }

    [GlobalSetup]
    public void Setup()
    {
        var random = new Random(42);
        var categories = new[] { "Food", "Electronics", "Clothing", "Books", "Home", "Sports", "Toys", "Health" };

        _transactions = Enumerable.Range(1, Size)
            .Select(i => new Transaction
            {
                Id = i,
                Category = categories[random.Next(categories.Length)],
                Amount = (decimal)(random.NextDouble() * 1000),
                Date = DateTime.Today.AddDays(-random.Next(365))
            })
            .ToList();
    }

    [Benchmark(Baseline = true)]
    public List<CategorySummary> StandardLinqGroupBy()
    {
        return _transactions
            .GroupBy(t => t.Category)
            .Select(g => new CategorySummary
            {
                Category = g.Key,
                Count = g.Count(),
                TotalAmount = g.Sum(t => t.Amount),
                AverageAmount = g.Average(t => t.Amount)
            })
            .ToList();
    }

    [Benchmark]
    public List<CategorySummary> SpaceTimeExternalGroupBy()
    {
        return _transactions
            .GroupByExternal(t => t.Category)
            .Select(g => new CategorySummary
            {
                Category = g.Key,
                Count = g.Count(),
                TotalAmount = g.Sum(t => t.Amount),
                AverageAmount = g.Average(t => t.Amount)
            })
            .ToList();
    }

    public class Transaction
    {
        public int Id { get; set; }
        public string Category { get; set; } = "";
        public decimal Amount { get; set; }
        public DateTime Date { get; set; }
    }

    public class CategorySummary
    {
        public string Category { get; set; } = "";
        public int Count { get; set; }
        public decimal TotalAmount { get; set; }
        public decimal AverageAmount { get; set; }
    }
}

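// Compares the built-in Dictionary and List against the library's adaptive
// collections for bulk inserts followed by a fixed number of random accesses.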
[MemoryDiagnoser]
[SimpleJob(RuntimeMoniker.Net60)]
public class CollectionBenchmarks
{
    private readonly Random _random = new(42);

    [Params(100, 10_000, 100_000)]
    public int Size { get; set; }

    [Benchmark(Baseline = true)]
    public Dictionary<int, string> StandardDictionary()
    {
        var dict = new Dictionary<int, string>();
        for (int i = 0; i < Size; i++)
        {
            dict[i] = $"Value{i}";
        }

        // Perform some operations
        for (int i = 0; i < 100; i++)
        {
            var key = _random.Next(Size);
            _ = dict[key];
            dict[key] = $"Updated{key}";
        }

        return dict;
    }

    [Benchmark]
    public AdaptiveDictionary<int, string> SpaceTimeAdaptiveDictionary()
    {
        var dict = new AdaptiveDictionary<int, string>();
        for (int i = 0; i < Size; i++)
        {
            dict[i] = $"Value{i}";
        }

        // Perform some operations
        for (int i = 0; i < 100; i++)
        {
            var key = _random.Next(Size);
            _ = dict[key];
            dict[key] = $"Updated{key}";
        }

        return dict;
    }

    [Benchmark]
    public List<string> StandardList()
    {
        var list = new List<string>();
        for (int i = 0; i < Size; i++)
        {
            list.Add($"Item{i}");
        }

        // Perform some operations
        for (int i = 0; i < 100; i++)
        {
            var index = _random.Next(list.Count);
            list[index] = $"Updated{index}";
        }

        return list;
    }

    [Benchmark]
    public AdaptiveList<string> SpaceTimeAdaptiveList()
    {
        var list = new AdaptiveList<string>();
        for (int i = 0; i < Size; i++)
        {
            list.Add($"Item{i}");
        }

        // Perform some operations
        for (int i = 0; i < 100; i++)
        {
            var index = _random.Next(list.Count);
            list[index] = $"Updated{index}";
        }

        return list;
    }
}

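// Measures the overhead of periodic checkpointing (CheckpointStrategy.SqrtN)
// on a simple accumulating loop, relative to running the loop uncheckpointed.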
[MemoryDiagnoser]
[SimpleJob(RuntimeMoniker.Net60)]
public class CheckpointingBenchmarks
{
    private CheckpointManager _checkpointManager = null!;
    private string _checkpointDir = null!;

    [Params(1_000, 10_000, 100_000)]
    public int OperationCount { get; set; }

    [GlobalSetup]
    public void Setup()
    {
        _checkpointDir = Path.Combine(Path.GetTempPath(), "benchmark_checkpoints", Guid.NewGuid().ToString());
        Directory.CreateDirectory(_checkpointDir);
        _checkpointManager = new CheckpointManager(_checkpointDir, strategy: CheckpointStrategy.SqrtN);
    }

    [GlobalCleanup]
    public void Cleanup()
    {
        if (Directory.Exists(_checkpointDir))
        {
            Directory.Delete(_checkpointDir, true);
        }
    }

    [Benchmark(Baseline = true)]
    public async Task ProcessWithoutCheckpointing()
    {
        var sum = 0L;
        for (int i = 0; i < OperationCount; i++)
        {
            sum += await SimulateWork(i);
        }
    }

    [Benchmark]
    public async Task ProcessWithCheckpointing()
    {
        var sum = 0L;
        var state = new ProcessingState();

        // Try to restore from a previous checkpoint
        var previousState = await _checkpointManager.RestoreLatestCheckpointAsync<ProcessingState>();
        if (previousState != null)
        {
            state = previousState;
            sum = state.Sum;
        }

        for (int i = state.ProcessedCount; i < OperationCount; i++)
        {
            sum += await SimulateWork(i);
            state.ProcessedCount = i;
            state.Sum = sum;

            if (_checkpointManager.ShouldCheckpoint())
            {
                await _checkpointManager.CreateCheckpointAsync(state);
            }
        }
    }

    private async Task<long> SimulateWork(int value)
    {
        // Simulate some CPU work
        var result = 0L;
        for (int i = 0; i < 100; i++)
        {
            result += value * i;
        }

        if (value % 1000 == 0)
            await Task.Yield();

        return result;
    }

    private class ProcessingState
    {
        public int ProcessedCount { get; set; }
        public long Sum { get; set; }
    }
}

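// Compares per-item async processing against √n batching and an async
// streaming pipeline built on the library's BatchBySqrtN extension.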
[MemoryDiagnoser]
[SimpleJob(RuntimeMoniker.Net60)]
public class StreamingBenchmarks
{
    private List<DataRecord> _data = null!;

    [Params(10_000, 100_000)]
    public int Size { get; set; }

    [GlobalSetup]
    public void Setup()
    {
        var random = new Random(42);
        _data = Enumerable.Range(1, Size)
            .Select(i => new DataRecord
            {
                Id = i,
                Name = $"Record{i}",
                Value = random.NextDouble() * 1000,
                Timestamp = DateTime.UtcNow.AddMinutes(-random.Next(10000)),
                Data = new byte[random.Next(100, 1000)]
            })
            .ToList();

        random.NextBytes(_data.Last().Data);
    }

    [Benchmark(Baseline = true)]
    public async Task<List<DataRecord>> StandardProcessing()
    {
        var results = new List<DataRecord>();

        foreach (var record in _data)
        {
            var processed = await ProcessRecord(record);
            results.Add(processed);
        }

        return results;
    }

    [Benchmark]
    public async Task<List<DataRecord>> BatchedProcessing()
    {
        var results = new List<DataRecord>();

        foreach (var batch in _data.BatchBySqrtN())
        {
            foreach (var record in batch)
            {
                var processed = await ProcessRecord(record);
                results.Add(processed);
            }
        }

        return results;
    }

    [Benchmark]
    public async Task<List<DataRecord>> StreamedProcessing()
    {
        var results = new List<DataRecord>();

        await foreach (var record in StreamRecordsAsync())
        {
            results.Add(record);
        }

        return results;
    }

    private async IAsyncEnumerable<DataRecord> StreamRecordsAsync()
    {
        foreach (var batch in _data.BatchBySqrtN())
        {
            foreach (var record in batch)
            {
                yield return await ProcessRecord(record);
            }

            await Task.Yield(); // Allow other work
        }
    }

    private async Task<DataRecord> ProcessRecord(DataRecord record)
    {
        // Simulate processing
        await Task.Yield();
        return new DataRecord
        {
            Id = record.Id,
            Name = record.Name.ToUpper(),
            Value = record.Value * 1.1,
            Timestamp = DateTime.UtcNow,
            Data = record.Data
        };
    }

    public class DataRecord
    {
        public int Id { get; set; }
        public string Name { get; set; } = "";
        public double Value { get; set; }
        public DateTime Timestamp { get; set; }
        public byte[] Data { get; set; } = Array.Empty<byte>();
    }
}

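// Entry point: runs each benchmark suite in turn with a shared config.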
public class Program
{
    public static void Main(string[] args)
    {
        var config = DefaultConfig.Instance
            .WithOptions(ConfigOptions.DisableOptimizationsValidator)
            .AddDiagnoser(BenchmarkDotNet.Diagnosers.MemoryDiagnoser.Default);

        BenchmarkRunner.Run<SortingBenchmarks>(config);
        BenchmarkRunner.Run<GroupingBenchmarks>(config);
        BenchmarkRunner.Run<CollectionBenchmarks>(config);
        BenchmarkRunner.Run<CheckpointingBenchmarks>(config);
        BenchmarkRunner.Run<StreamingBenchmarks>(config);
    }
}