Files
gitea/sdk/csharp/Gitea.SDK/ChunkedUpload.cs
logikonline e35aa8d878 sdk: add C# and Java SDK libraries with chunked upload support
Both SDKs provide:
- Full API client for users, repositories, and releases
- Chunked upload with parallel workers
- Progress tracking with speed/ETA
- SHA256 checksum verification
- Comprehensive exception handling

C# SDK (.NET 8.0):
- Modern record types for models
- Async/await pattern throughout
- System.Text.Json serialization

Java SDK (Java 17):
- Standard Maven project
- Jackson for JSON
- HttpClient for HTTP
- ExecutorService for parallel uploads

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-09 12:19:17 -05:00

173 lines
5.7 KiB
C#

// Copyright 2026 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
using System.Diagnostics;
using System.Security.Cryptography;
namespace Gitea.SDK;
/// <summary>
/// Uploads a single release asset in fixed-size chunks, sending up to
/// <see cref="ChunkedUploadOptions.Parallel"/> chunks concurrently and
/// reporting progress through the options' progress callback.
/// </summary>
public class ChunkedUpload
{
    private readonly GiteaClient _client;
    private readonly string _owner;
    private readonly string _repo;
    private readonly long _releaseId;
    private readonly ChunkedUploadOptions _options;
    private UploadSession? _session;

    internal ChunkedUpload(
        GiteaClient client,
        string owner,
        string repo,
        long releaseId,
        ChunkedUploadOptions options)
    {
        _client = client;
        _owner = owner;
        _repo = repo;
        _releaseId = releaseId;
        _options = options;
    }

    /// <summary>
    /// Gets the current upload session, or <c>null</c> before
    /// <see cref="UploadAsync"/> has created one.
    /// </summary>
    public UploadSession? Session => _session;

    /// <summary>
    /// Uploads <paramref name="fileStream"/> as <paramref name="filename"/> using chunked upload.
    /// </summary>
    /// <param name="fileStream">Seekable source stream; consumed from position 0 to the end.</param>
    /// <param name="filename">Asset name registered on the release.</param>
    /// <param name="cancellationToken">Cancels session creation, chunk uploads, and completion.</param>
    /// <returns>The server's result for the completed upload.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="fileStream"/> or <paramref name="filename"/> is null.</exception>
    /// <exception cref="ArgumentOutOfRangeException">The configured chunk size is not positive.</exception>
    public async Task<UploadResult> UploadAsync(
        Stream fileStream,
        string filename,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(fileStream);
        ArgumentNullException.ThrowIfNull(filename);

        var fileSize = fileStream.Length; // requires a seekable stream
        var chunkSize = _options.ChunkSize;
        // A zero/negative chunk size would make the ceiling division below
        // produce a garbage chunk count ((int)Infinity); fail fast instead.
        ArgumentOutOfRangeException.ThrowIfNegativeOrZero(chunkSize);
        var totalChunks = (int)Math.Ceiling((double)fileSize / chunkSize);

        // Optional end-to-end integrity check: hash the whole stream first,
        // then rewind so the chunk reads start at the beginning.
        string? checksum = null;
        if (_options.VerifyChecksum)
        {
            checksum = await ComputeChecksumAsync(fileStream, cancellationToken).ConfigureAwait(false);
            fileStream.Position = 0;
        }

        _session = await CreateSessionAsync(filename, fileSize, chunkSize, totalChunks, checksum, cancellationToken)
            .ConfigureAwait(false);
        var sessionId = _session.Id;

        // Progress accounting shared by all workers; guarded by progressLock.
        var stopwatch = Stopwatch.StartNew();
        var chunksCompleted = 0;
        var bytesCompleted = 0L;
        var progressLock = new object();

        // Bounds BOTH concurrent uploads and resident chunk buffers: a slot is
        // acquired before the next chunk is read into memory, so at most
        // `Parallel` buffers are alive at once. (Previously every chunk was
        // buffered up front, holding the entire file in memory regardless of
        // the parallelism setting.)
        // Deliberately not disposed: a worker may still hold the semaphore if
        // dispatch is cancelled part-way, and SemaphoreSlim.Dispose is only
        // required when AvailableWaitHandle has been used.
        var semaphore = new SemaphoreSlim(_options.Parallel);
        var tasks = new List<Task>(totalChunks);

        // Uploads one chunk, publishes progress, and frees the concurrency slot.
        // Plain async call instead of Task.Run: the work is pure async I/O, so
        // wrapping it in a thread-pool task would only add an extra hop.
        async Task UploadWorkerAsync(int index, byte[] data)
        {
            try
            {
                await UploadChunkAsync(sessionId, index, data, cancellationToken).ConfigureAwait(false);
                lock (progressLock)
                {
                    chunksCompleted++;
                    bytesCompleted += data.Length;
                    var elapsed = stopwatch.Elapsed.TotalSeconds;
                    var speed = elapsed > 0 ? bytesCompleted / elapsed : 0;
                    var remaining = fileSize - bytesCompleted;
                    var eta = speed > 0 ? TimeSpan.FromSeconds(remaining / speed) : TimeSpan.Zero;
                    _options.OnProgress?.Invoke(new Progress
                    {
                        BytesDone = bytesCompleted,
                        BytesTotal = fileSize,
                        ChunksDone = chunksCompleted,
                        ChunksTotal = totalChunks,
                        Percent = (double)bytesCompleted / fileSize * 100,
                        Speed = speed,
                        Eta = eta
                    });
                }
            }
            finally
            {
                semaphore.Release();
            }
        }

        for (var i = 0; i < totalChunks; i++)
        {
            await semaphore.WaitAsync(cancellationToken).ConfigureAwait(false);

            // Chunks are read sequentially on this loop (the stream is already
            // positioned at the chunk start), so no per-chunk seek is needed;
            // only the uploads themselves run in parallel.
            var size = (int)Math.Min(chunkSize, fileSize - (long)i * chunkSize);
            var buffer = new byte[size];
            // ReadExactlyAsync fills the whole buffer; a bare ReadAsync may
            // legally return fewer bytes and would silently upload a short,
            // partly-zeroed chunk.
            await fileStream.ReadExactlyAsync(buffer.AsMemory(0, size), cancellationToken).ConfigureAwait(false);

            tasks.Add(UploadWorkerAsync(i, buffer));
        }

        await Task.WhenAll(tasks).ConfigureAwait(false);

        // Finalize the session; the server assembles the chunks into the asset.
        return await CompleteUploadAsync(sessionId, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Computes the lowercase hex SHA-256 digest of <paramref name="stream"/>
    /// from its current position to the end.
    /// </summary>
    private static async Task<string> ComputeChecksumAsync(Stream stream, CancellationToken cancellationToken)
    {
        // One-shot static API: no hasher instance to create and dispose.
        var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    /// <summary>
    /// Creates the server-side upload session describing the file and its chunking.
    /// </summary>
    private async Task<UploadSession> CreateSessionAsync(
        string filename,
        long fileSize,
        long chunkSize,
        int totalChunks,
        string? checksum,
        CancellationToken cancellationToken)
    {
        var path = $"/api/v1/repos/{Uri.EscapeDataString(_owner)}/{Uri.EscapeDataString(_repo)}/releases/{_releaseId}/assets/upload";
        var body = new
        {
            filename,
            file_size = fileSize,
            chunk_size = chunkSize,
            total_chunks = totalChunks,
            checksum // null when checksum verification is disabled
        };
        return await _client.PostAsync<UploadSession>(path, body, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Uploads a single chunk's raw bytes to the session.
    /// </summary>
    private async Task UploadChunkAsync(string sessionId, int chunkIndex, byte[] data, CancellationToken cancellationToken)
    {
        var path = $"/api/v1/repos/uploads/{Uri.EscapeDataString(sessionId)}/chunks/{chunkIndex}";
        await _client.PutBinaryAsync(path, data, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Tells the server all chunks are uploaded and retrieves the final result.
    /// </summary>
    private async Task<UploadResult> CompleteUploadAsync(string sessionId, CancellationToken cancellationToken)
    {
        var path = $"/api/v1/repos/uploads/{Uri.EscapeDataString(sessionId)}/complete";
        return await _client.PostAsync<UploadResult>(path, null, cancellationToken).ConfigureAwait(false);
    }
}