From 3fcf9e3dde41adc30e83c1ac55b8930f703b7020 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Mon, 22 Dec 2025 16:42:17 +0100 Subject: [PATCH 01/19] Feature/save interfaces (#1) * Provide Stream Saver Interfaces * Add IIOStreamProvider file implementation * Add IAsyncIOStreamProvider http implementation Currently missing some constructor parameters for HttpClient initialization. * Add IStreamSerializer and IAsyncStreamSerializer implementation for Json * Rewrite SaveFile to use interfaces. Additionally, expose a static class SaveFile with the method CreateGZipJsonFile. This returns a SaveFile using File Input/Output, Json Serialization and GZip compression. This is likely how the SaveFile will mostly be used. * parameter should be interface instead of class * Add documentation * Exhaustive overloads for SaveFile.CreateGZipJsonFile Now you can call this method with a JsonSerializerContext or JsonTypeInfo parameter. * Functionally tested HttpIO The HttpIO is ready for the field! It now exposes the necessary headers and optional Request uris, some easy-to-use constructors and properly disposes its objects. * Remove null-forgiving operators from deserialize extension methods * Remove ConfigureAwait(false) from all async calls as it introduces save issues * Fix ContentLength = 0 on Write * Add namespaces for Compression, IO, and Serialization * Fix IAsyncIOStreamProvider We need to compress the memory stream and leave it open BEFORE writing it with our IAsyncIOStreamProvider. Note that this isn't a completely correct name, since it doesn't provide a write / input stream, it accepts one. Will need to change this once a good name has been devised. * Add HttpIO documentation * Fix naming in static SaveFile Gzip should be GZip --- .../test/src/SaveFileTest.cs | 61 ++-- .../Chickensoft.SaveFileBuilder.csproj | 1 + .../src/Compression/BrotliCompression.cs | 17 + .../src/Compression/DeflateCompression.cs | 19 ++ .../src/Compression/GZipCompression.cs | 19 ++ .../Compression/ICompressionStreamProvider.cs | 21 ++ Chickensoft.SaveFileBuilder/src/IO/FileIO.cs | 50 +++ Chickensoft.SaveFileBuilder/src/IO/HttpIO.cs | 201 +++++++++++ .../src/IO/IIOStreamProvider.cs | 49 +++ Chickensoft.SaveFileBuilder/src/SaveFile.cs | 311 +++++++++++++++--- .../src/Serialization/IStreamSerializer.cs | 54 +++ .../src/Serialization/JsonStreamSerializer.cs | 136 ++++++++ 12 files changed, 856 insertions(+), 83 deletions(-) create mode 100644 Chickensoft.SaveFileBuilder/src/Compression/BrotliCompression.cs create mode 100644 Chickensoft.SaveFileBuilder/src/Compression/DeflateCompression.cs create mode 100644 Chickensoft.SaveFileBuilder/src/Compression/GZipCompression.cs create mode 100644 Chickensoft.SaveFileBuilder/src/Compression/ICompressionStreamProvider.cs create mode 100644 Chickensoft.SaveFileBuilder/src/IO/FileIO.cs create mode 100644 Chickensoft.SaveFileBuilder/src/IO/HttpIO.cs create mode 100644 Chickensoft.SaveFileBuilder/src/IO/IIOStreamProvider.cs create mode 100644 Chickensoft.SaveFileBuilder/src/Serialization/IStreamSerializer.cs create mode 100644 Chickensoft.SaveFileBuilder/src/Serialization/JsonStreamSerializer.cs diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs index dd8f04a..d0f8d19 100644 --- a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs @@ -3,7 +3,6 @@ namespace Chickensoft.SaveFileBuilder.Tests; using System.Threading.Tasks; using 
Chickensoft.GoDotTest; using Godot; -using Shouldly; public class SaveFileTest(Node testScene) : TestClass(testScene) { @@ -12,40 +11,40 @@ private sealed record SaveData { } [Test] public async Task SavesAndLoads() { - var onSave = Task.CompletedTask; - var data = new SaveData(); - - var saveFile = new SaveFile( - root: new SaveChunk( - onSave: (chunk) => new SaveData(), - onLoad: (chunk, data) => { } - ), - onSave: _ => onSave, - onLoad: () => Task.FromResult(data) - ); - - await Should.NotThrowAsync(async () => - { - await saveFile.Load(); - await saveFile.Save(); - }); + //var onSave = Task.CompletedTask; + //var data = new SaveData(); + + //var saveFile = new SaveFile( + // root: new SaveChunk( + // onSave: (chunk) => new SaveData(), + // onLoad: (chunk, data) => { } + // ), + // onSave: _ => onSave, + // onLoad: () => Task.FromResult(data) + //); + + //await Should.NotThrowAsync(async () => + //{ + // await saveFile.Load(); + // await saveFile.Save(); + //}); } [Test] public async Task DoesNotLoadIfNull() { - var onSave = Task.CompletedTask; - var data = new SaveData(); - - var saveFile = new SaveFile( - root: new SaveChunk( - onSave: (chunk) => new SaveData(), - onLoad: (chunk, data) => { } - ), - onSave: _ => onSave, - onLoad: () => Task.FromResult(null) - ); - - await Should.NotThrowAsync(saveFile.Load); + //var onSave = Task.CompletedTask; + //var data = new SaveData(); + + //var saveFile = new SaveFile( + // root: new SaveChunk( + // onSave: (chunk) => new SaveData(), + // onLoad: (chunk, data) => { } + // ), + // onSave: _ => onSave, + // onLoad: () => Task.FromResult(null) + //); + + //await Should.NotThrowAsync(saveFile.Load); } } diff --git a/Chickensoft.SaveFileBuilder/Chickensoft.SaveFileBuilder.csproj b/Chickensoft.SaveFileBuilder/Chickensoft.SaveFileBuilder.csproj index 48a80ee..7d9963c 100644 --- a/Chickensoft.SaveFileBuilder/Chickensoft.SaveFileBuilder.csproj +++ b/Chickensoft.SaveFileBuilder/Chickensoft.SaveFileBuilder.csproj @@ -48,5 +48,6 @@ runtime; build; native; contentfiles; analyzers; buildtransitive all + diff --git a/Chickensoft.SaveFileBuilder/src/Compression/BrotliCompression.cs b/Chickensoft.SaveFileBuilder/src/Compression/BrotliCompression.cs new file mode 100644 index 0000000..aa401d9 --- /dev/null +++ b/Chickensoft.SaveFileBuilder/src/Compression/BrotliCompression.cs @@ -0,0 +1,17 @@ +namespace Chickensoft.SaveFileBuilder.Compression; + +using System; +using System.IO; +using System.IO.Compression; + +/// Provides a Brotli compression and decompression stream. +public readonly struct BrotliCompression : ICompressionStreamProvider +{ + /// + /// + public Stream CompressionStream(Stream stream, CompressionLevel compressionLevel = default, bool leaveOpen = default) => new BrotliStream(stream, compressionLevel, leaveOpen); + + /// + public Stream DecompressionStream(Stream stream, bool leaveOpen = default) => new BrotliStream(stream, CompressionMode.Decompress, leaveOpen); +} + diff --git a/Chickensoft.SaveFileBuilder/src/Compression/DeflateCompression.cs b/Chickensoft.SaveFileBuilder/src/Compression/DeflateCompression.cs new file mode 100644 index 0000000..7e3bcd6 --- /dev/null +++ b/Chickensoft.SaveFileBuilder/src/Compression/DeflateCompression.cs @@ -0,0 +1,19 @@ +namespace Chickensoft.SaveFileBuilder.Compression; + +using System; +using System.IO; +using System.IO.Compression; + +/// Provides a Deflate compression and decompression stream. 
+public readonly struct DeflateCompression : ICompressionStreamProvider +{ + /// + /// + /// + public Stream CompressionStream(Stream stream, CompressionLevel compressionLevel = default, bool leaveOpen = default) => new DeflateStream(stream, compressionLevel, leaveOpen); + + /// + /// + /// + public Stream DecompressionStream(Stream stream, bool leaveOpen = default) => new DeflateStream(stream, CompressionMode.Decompress, leaveOpen); +} diff --git a/Chickensoft.SaveFileBuilder/src/Compression/GZipCompression.cs b/Chickensoft.SaveFileBuilder/src/Compression/GZipCompression.cs new file mode 100644 index 0000000..6f34e26 --- /dev/null +++ b/Chickensoft.SaveFileBuilder/src/Compression/GZipCompression.cs @@ -0,0 +1,19 @@ +namespace Chickensoft.SaveFileBuilder.Compression; + +using System; +using System.IO; +using System.IO.Compression; + +/// Provides a GZip compression and decompression stream. +public readonly struct GZipCompression : ICompressionStreamProvider +{ + /// + /// + /// + public Stream CompressionStream(Stream stream, CompressionLevel compressionLevel = default, bool leaveOpen = default) => new GZipStream(stream, compressionLevel, leaveOpen); + + /// + /// + /// + public Stream DecompressionStream(Stream stream, bool leaveOpen = default) => new GZipStream(stream, CompressionMode.Decompress, leaveOpen); +} diff --git a/Chickensoft.SaveFileBuilder/src/Compression/ICompressionStreamProvider.cs b/Chickensoft.SaveFileBuilder/src/Compression/ICompressionStreamProvider.cs new file mode 100644 index 0000000..e31eb42 --- /dev/null +++ b/Chickensoft.SaveFileBuilder/src/Compression/ICompressionStreamProvider.cs @@ -0,0 +1,21 @@ +namespace Chickensoft.SaveFileBuilder.Compression; + +using System.IO; +using System.IO.Compression; + +/// Provides a compression- and decompression based on the base that can be written to or -read from. +public interface ICompressionStreamProvider +{ + /// Provide a compression stream using the compression level, and optionally leaves the base stream open. + /// The base stream. + /// Compression level whether to emphasize speed or efficiency. + /// to leave open after disposing the compression stream; otherwise . + /// The compression stream. + Stream CompressionStream(Stream stream, CompressionLevel compressionLevel = default, bool leaveOpen = default); + + /// Provide a decompression stream, and optionally leaves the base stream open. + /// The base stream. + /// to leave open after disposing the decompression stream; otherwise . + /// The decompressed stream. + Stream DecompressionStream(Stream stream, bool leaveOpen = default); +} diff --git a/Chickensoft.SaveFileBuilder/src/IO/FileIO.cs b/Chickensoft.SaveFileBuilder/src/IO/FileIO.cs new file mode 100644 index 0000000..98d993b --- /dev/null +++ b/Chickensoft.SaveFileBuilder/src/IO/FileIO.cs @@ -0,0 +1,50 @@ +namespace Chickensoft.SaveFileBuilder.IO; + +using System.IO; + +/// Provides a read- and write from a file. +public class FileIO : IIOStreamProvider +{ + /// The of the file. + public FileInfo FileInfo { get; } + + /// Initializes a new instance of the class. + /// The of the file. + public FileIO(FileInfo fileInfo) + { + FileInfo = fileInfo; + } + + /// Initializes a new instance of the class. + /// The filename of the file. 
+ public FileIO(string fileName) + { + FileInfo = new FileInfo(fileName); + } + + /// + public Stream Read() => FileInfo.Open(FileMode.Open, FileAccess.Read); + + /// + public Stream Write() + { + FileInfo.Refresh(); + if (FileInfo.DirectoryName == null) + { + throw new DirectoryNotFoundException("The directory of the file does not exist."); + } + + Directory.CreateDirectory(FileInfo.DirectoryName); + return FileInfo.Open(FileMode.OpenOrCreate, FileAccess.Write); + } + + /// + public bool Exists() + { + FileInfo.Refresh(); + return FileInfo.Exists; + } + + /// + public void Delete() => FileInfo.Delete(); +} diff --git a/Chickensoft.SaveFileBuilder/src/IO/HttpIO.cs b/Chickensoft.SaveFileBuilder/src/IO/HttpIO.cs new file mode 100644 index 0000000..f5a8e48 --- /dev/null +++ b/Chickensoft.SaveFileBuilder/src/IO/HttpIO.cs @@ -0,0 +1,201 @@ +namespace Chickensoft.SaveFileBuilder.IO; + +using System; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Threading; +using System.Threading.Tasks; + +/// Defines the relative 's used for specific HTTP requests by the . +/// The relative used for read requests. +/// The relative used for write requests. +/// The relative used for exists requests. +/// The relative used for delete requests. +public readonly record struct HttpIORequestUris( + Uri? ReadUri = null, + Uri? WriteUri = null, + Uri? ExistsUri = null, + Uri? DeleteUri = null +) +{ + /// + /// The relative address used for read requests. + /// The relative address used for write requests. + /// The relative address used for exists requests. + /// The relative address used for delete requests. + public HttpIORequestUris( + string? readUri = null, + string? writeUri = null, + string? existsUri = null, + string? deleteUri = null + ) : this( + readUri is not null ? new Uri(readUri) : null, + writeUri is not null ? new Uri(writeUri) : null, + existsUri is not null ? new Uri(existsUri) : null, + deleteUri is not null ? new Uri(deleteUri) : null + ) + { } +} + +/// Provides a read from- and requests a write for an Http address. +public class HttpIO : IAsyncIOStreamProvider, IDisposable +{ + private bool _isDisposed; + + private readonly HttpClient _httpClient; + private readonly bool _disposeClient; + private readonly HttpContent _emptyContent = new ByteArrayContent([]) + { + Headers = { ContentLength = null } + }; + + /// Gets the relative 's used for specific requests. + /// The relative 's used for specific requests. + public HttpIORequestUris RequestUris { get; init; } + + /// Gets the to be sent when reading data. + /// The to be sent when reading data. + public HttpRequestHeaders ReadHeaders => _httpClient.DefaultRequestHeaders; + + /// Gets the , as defined in RFC 2616, to be sent when writing data. + /// The , as defined in RFC 2616, to be sent when writing data. + /// If the is left null, it will be set to the length of the stream being written. In most cases, this is the desired behavior. + public HttpContentHeaders WriteHeaders => _emptyContent.Headers; + + /// Initializes a new instance of the class. + public HttpIO() + : this(new HttpClient()) + { } + + /// Initializes a new instance of the class with the specified timeout. 
+ /// + public HttpIO(TimeSpan timeout) + : this(new HttpClient() + { + Timeout = timeout + }) + { } + + /// + public HttpIO(Uri baseAddress) + : this(new HttpClient() + { + BaseAddress = baseAddress, + }) + { } + + /// + public HttpIO(Uri baseAddress, TimeSpan timeout) + : this(new HttpClient() + { + BaseAddress = baseAddress, + Timeout = timeout + }) + { } + + /// Initializes a new instance of the class with the specified address. + /// + public HttpIO(string baseAddress) + : this(new Uri(baseAddress)) + { } + + /// Initializes a new instance of the class with the specified address and timeout. + /// The base address used when sending requests. + /// The timespan to wait before a request times out. + public HttpIO(string baseAddress, TimeSpan timeout) + : this(new Uri(baseAddress), timeout) + { } + + /// Initializes a new instance of the class with the specified client, and specifies whether that client should be disposed when this instance is disposed. + /// The to use for requests. + /// if the inner client should be disposed of by ; if you intend to reuse the client. + public HttpIO(HttpClient client, bool disposeClient = true) + { + _httpClient = client; + _disposeClient = disposeClient; + } + + /// + public async Task ReadAsync(CancellationToken cancellationToken = default) + { + using var response = await _httpClient.GetAsync(RequestUris.ReadUri, cancellationToken); + + try + { + response.EnsureSuccessStatusCode(); + } + catch (HttpRequestException) + when (response.StatusCode is HttpStatusCode.NotFound) + { + return new MemoryStream(); + } + + await using var contentStream = await response.Content.ReadAsStreamAsync(); + + var readStream = new MemoryStream(); + await contentStream.CopyToAsync(readStream, cancellationToken); + readStream.Position = 0; + return readStream; + } + + /// + public async Task WriteAsync(Stream stream, CancellationToken cancellationToken = default) + { + using var content = new StreamContent(stream) + { + Headers = { ContentLength = WriteHeaders.ContentLength ?? 
stream.Length } + }; + foreach (var header in WriteHeaders) + { + if (header.Key.Equals("Content-Length", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + content.Headers.Add(header.Key, header.Value); + } + + await _httpClient.PostAsync(RequestUris.WriteUri, content, cancellationToken); + } + + /// + public async Task ExistsAsync(CancellationToken cancellationToken = default) + { + using var response = await _httpClient.GetAsync(RequestUris.ExistsUri, HttpCompletionOption.ResponseHeadersRead, cancellationToken); + return response.IsSuccessStatusCode; + } + + /// + public async Task DeleteAsync(CancellationToken cancellationToken = default) + { + using var response = await _httpClient.DeleteAsync(RequestUris.DeleteUri, cancellationToken); + return response.IsSuccessStatusCode; + } + + /// + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + /// + protected virtual void Dispose(bool disposing) + { + if (!_isDisposed) + { + if (disposing) + { + if (_disposeClient) + { + _httpClient.Dispose(); + } + _emptyContent.Dispose(); + } + + _isDisposed = true; + } + } +} diff --git a/Chickensoft.SaveFileBuilder/src/IO/IIOStreamProvider.cs b/Chickensoft.SaveFileBuilder/src/IO/IIOStreamProvider.cs new file mode 100644 index 0000000..99f8839 --- /dev/null +++ b/Chickensoft.SaveFileBuilder/src/IO/IIOStreamProvider.cs @@ -0,0 +1,49 @@ +namespace Chickensoft.SaveFileBuilder.IO; + +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +/// Provides a read- and write from an input / output source. +public interface IIOStreamProvider +{ + /// Returns a read-only from the io source. + /// A new read-only object from the io source. + Stream Read(); + + /// Returns a write-only from the io source. + /// A new write-only object from the io source. + Stream Write(); + + /// Determines whether the io source exists. + /// if the io source exists; otherwise, . + bool Exists(); + + /// Permanently deletes the io source. + void Delete(); +} + +/// Provides a read from- and requests a write for an input / output source asynchronously. +public interface IAsyncIOStreamProvider +{ + /// Asynchronously reads the underlying data and returns a read-only from the io source. + /// A cancellation token that can be used to cancel the asynchronous read operation. + /// A task that represents the asynchronous read operation. The value of the task is a read-only from the io source. + Task ReadAsync(CancellationToken cancellationToken = default); + + /// Requests a write stream to write data to the underlying source asynchronously. + /// The stream to write to the io source. + /// A cancellation token that can be used to cancel the asynchronous write operation. + /// A task that represents the asynchronous write operation. + Task WriteAsync(Stream stream, CancellationToken cancellationToken = default); + + /// Asynchronously determines whether the io source exists. + /// A cancellation token that can be used to cancel the asynchronous exists operation. + /// A task that represents the asynchronous exists operation. The value of the task is if the io source exists; otherwise, . + Task ExistsAsync(CancellationToken cancellationToken = default); + + /// Asynchronously deletes the io source. + /// A cancellation token that can be used to cancel the asynchronous delete operation. + /// A task that represents the asynchronous delete operation. The value of the task is if the io source was deleted; otherwise, . 
+ Task DeleteAsync(CancellationToken cancellationToken = default); +} diff --git a/Chickensoft.SaveFileBuilder/src/SaveFile.cs b/Chickensoft.SaveFileBuilder/src/SaveFile.cs index a746c35..ac081a7 100644 --- a/Chickensoft.SaveFileBuilder/src/SaveFile.cs +++ b/Chickensoft.SaveFileBuilder/src/SaveFile.cs @@ -1,42 +1,64 @@ namespace Chickensoft.SaveFileBuilder; using System; +using System.IO; +using System.IO.Compression; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Text.Json.Serialization.Metadata; +using System.Threading; using System.Threading.Tasks; +using Chickensoft.SaveFileBuilder.Compression; +using Chickensoft.SaveFileBuilder.IO; +using Chickensoft.SaveFileBuilder.Serialization; -/// -/// Represents a save file composed of one or more save chunks. -/// -/// Type of data represented by the save file. -/// +/// Represents a save file composed of one or more save chunks. +/// Type of data represented by the save file. public interface ISaveFile where TData : class { - /// - /// Callback that saves the data to the save file. - /// - Func OnSave { get; } - - /// - /// Callback that loads the data from the save file. - /// - /// Save data. - Func> OnLoad { get; } - - /// - /// Root save chunk from which the save file contents are composed. - /// + /// Root save chunk from which the save file contents are composed. ISaveChunk Root { get; } - /// - /// Collects save data from the save file chunk tree and saves it. - /// - /// Asynchronous task. - Task Save(); - - /// - /// Loads save data and restores the save file chunk tree. - /// - /// Asynchronous task. - Task Load(); + /// Gets a value indicating whether the content can be saved using a synchronous operation. + /// If , attempts to save synchronously may not be supported and could result in an exception or undefined behavior. Check this property before invoking synchronous save methods to ensure compatibility. + bool CanSaveSynchronously { get; } + + /// + /// + void Save(CompressionLevel compressionLevel = default); + + /// + /// + void Load(); + + /// if the save file exists; otherwise, . + /// + bool Exists(); + + /// + /// + void Delete(); + + /// Collects save data from the chunk tree and saves it. + /// Compression level whether to emphasize speed or efficiency when compressing. + /// A cancellation token that can be used to cancel the asynchronous save operation. + /// A task that represents the asynchronous save operation. + ValueTask SaveAsync(CompressionLevel compressionLevel = default, CancellationToken cancellationToken = default); + + /// Loads save data and restores the chunk tree. + /// A cancellation token that can be used to cancel the asynchronous load operation. + /// A task that represents the asynchronous load operation. + ValueTask LoadAsync(CancellationToken cancellationToken = default); + + /// Determines whether the save file exists. + /// A cancellation token that can be used to cancel the asynchronous exists operation. + /// A task that represents the asynchronous exists operation. The value of the task is if the save file exists; otherwise, . + ValueTask ExistsAsync(CancellationToken cancellationToken = default); + + /// Deletes the save file. + /// A cancellation token that can be used to cancel the asynchronous delete operation. + /// A task that represents the asynchronous delete operation. The value of the task is if the io source was deleted; otherwise, . 
+ ValueTask DeleteAsync(CancellationToken cancellationToken = default); } /// @@ -45,45 +67,230 @@ public class SaveFile : ISaveFile where TData : class /// public ISaveChunk Root { get; } - /// - public Func OnSave { get; } + /// +#if NET5_0_OR_GREATER + [System.Diagnostics.CodeAnalysisMemberNotNullWhen(true, nameof(_io), nameof(_serializer))] +#endif + public bool CanSaveSynchronously => _io is not null && _serializer is not null; - /// - public Func> OnLoad { get; } + private static InvalidOperationException SynchronousOperationNotAllowedException() + => new($"Synchronous operation is not allowed because either the {nameof(IIOStreamProvider)} or the {nameof(IStreamSerializer)} of the {nameof(SaveFile<>)} is null."); + + private readonly IIOStreamProvider? _io; + private readonly IAsyncIOStreamProvider? _asyncIO; + private readonly IStreamSerializer? _serializer; + private readonly IAsyncStreamSerializer? _asyncSerializer; + private readonly ICompressionStreamProvider? _compressor; /// - /// - /// - /// - /// Function that saves the data. - /// Function that loads the data. - public SaveFile( + /// + /// Input/output source which the save file reads from and writes to. + /// Input/output source which the save file reads from and writes to asynchronously. + /// Serializer which the save file uses to serialize and deserialize data. + /// Serializer which the save file uses to serialize and deserialize data asynchronously. + /// Compressor which the save file uses to compress and decompress data. + private SaveFile( ISaveChunk root, - Func onSave, - Func> onLoad + IIOStreamProvider? io, + IAsyncIOStreamProvider? asyncIO, + IStreamSerializer? serializer, + IAsyncStreamSerializer? asyncSerializer, + ICompressionStreamProvider? compressor ) { Root = root; - OnSave = onSave; - OnLoad = onLoad; + _io = io; + _asyncIO = asyncIO; + _serializer = serializer; + _asyncSerializer = asyncSerializer; + _compressor = compressor; + } + + /// + public SaveFile( + ISaveChunk root, + IIOStreamProvider io, + IStreamSerializer serializer, + ICompressionStreamProvider? compressor = null + ) : this(root, io, io as IAsyncIOStreamProvider, serializer, serializer as IAsyncStreamSerializer, compressor) + { } + + /// + public SaveFile( + ISaveChunk root, + IIOStreamProvider io, + IAsyncStreamSerializer asyncSerializer, + ICompressionStreamProvider? compressor = null + ) : this(root, io, io as IAsyncIOStreamProvider, asyncSerializer as IStreamSerializer, asyncSerializer, compressor) + { } + + /// + public SaveFile( + ISaveChunk root, + IAsyncIOStreamProvider asyncIO, + IStreamSerializer serializer, + ICompressionStreamProvider? compressor = null + ) : this(root, asyncIO as IIOStreamProvider, asyncIO, serializer, serializer as IAsyncStreamSerializer, compressor) + { } + + /// + public SaveFile( + ISaveChunk root, + IAsyncIOStreamProvider asyncIO, + IAsyncStreamSerializer asyncSerializer, + ICompressionStreamProvider? compressor = null + ) : this(root, asyncIO as IIOStreamProvider, asyncIO, asyncSerializer as IStreamSerializer, asyncSerializer, compressor) + { } + + /// + public void Save(CompressionLevel compressionLevel = default) + { + if (!CanSaveSynchronously) + { + throw SynchronousOperationNotAllowedException(); + } + + using var ioStream = _io!.Write(); + using var compressionStream = _compressor?.CompressionStream(ioStream, compressionLevel); + _serializer!.Serialize(compressionStream ?? 
ioStream, Root.GetSaveData()); + } + + /// + public void Load() + { + if (!CanSaveSynchronously) + { + throw SynchronousOperationNotAllowedException(); + } + + using var ioStream = _io!.Read(); + using var decompressionStream = _compressor?.DecompressionStream(ioStream); + var data = _serializer!.Deserialize(decompressionStream ?? ioStream); + if (data is null) + { + return; + } + + Root.LoadSaveData(data); + } + + /// + public bool Exists() => _io is not null ? _io.Exists() : throw SynchronousOperationNotAllowedException(); + + /// + public void Delete() + { + if (_io is null) + { + throw SynchronousOperationNotAllowedException(); + } + + _io.Delete(); } - /// - public Task Save() => OnSave(Root.GetSaveData()); + /// + public async ValueTask SaveAsync(CompressionLevel compressionLevel = default, CancellationToken cancellationToken = default) + { + if (_asyncIO is null) + { + await using var ioStream = _io!.Write(); + await using var compressionStream = _compressor?.CompressionStream(ioStream, compressionLevel); + await serialize(compressionStream ?? ioStream); + } + else + { + await using var writeStream = new MemoryStream(); + await using (var compressionStream = _compressor?.CompressionStream(writeStream, compressionLevel, true)) + { + await serialize(compressionStream ?? writeStream); + } + writeStream.Position = 0; + + await _asyncIO.WriteAsync(writeStream, cancellationToken); + } + + async Task serialize(Stream stream) + { + if (_asyncSerializer is not null) + { + await _asyncSerializer.SerializeAsync(stream, Root.GetSaveData(), cancellationToken); + } + else + { + _serializer!.Serialize(stream, Root.GetSaveData()); + } + } + } - /// - public async Task Load() + /// + public async ValueTask LoadAsync(CancellationToken cancellationToken = default) { - // Loading save data is asynchronous since it's usually coming from - // the disk or network. - var data = await OnLoad(); + await using var ioStream = _asyncIO is not null + ? await _asyncIO.ReadAsync(cancellationToken) + : _io!.Read(); + + await using var decompressionStream = _compressor?.DecompressionStream(ioStream); + + var data = _asyncSerializer is not null + ? await _asyncSerializer.DeserializeAsync(decompressionStream ?? ioStream, cancellationToken) + : _serializer!.Deserialize(decompressionStream ?? ioStream); if (data is null) { return; } - // Actually restoring the loaded data is synchronous. Root.LoadSaveData(data); } + + /// + public async ValueTask ExistsAsync(CancellationToken cancellationToken = default) + { + if (_asyncIO is not null) + { + return await _asyncIO.ExistsAsync(cancellationToken); + } + + return _io!.Exists(); + } + + /// + public async ValueTask DeleteAsync(CancellationToken cancellationToken = default) + { + if (_asyncIO is not null) + { + return await _asyncIO.DeleteAsync(cancellationToken); + } + + _io!.Delete(); + return true; + } +} + +/// Provides factory methods for creating common save file configurations. +public static class SaveFile +{ + /// Creates a new that uses JSON serialization and GZip compression. + public static SaveFile CreateGZipJsonFile(ISaveChunk root, string filePath, JsonSerializerOptions? 
options = null) where TData : class => new( + root: root, + io: new FileIO(filePath), + serializer: new JsonStreamSerializer(options), + compressor: new GZipCompression() + ); + + /// + public static SaveFile CreateGZipJsonFile(ISaveChunk root, string filePath, JsonSerializerContext context) where TData : class => new( + root: root, + io: new FileIO(filePath), + serializer: new JsonStreamSerializer(context), + compressor: new GZipCompression() + ); + + /// + public static SaveFile CreateGZipJsonFile(ISaveChunk root, string filePath, JsonTypeInfo jsonTypeInfo) where TData : class => new( + root: root, + io: new FileIO(filePath), + serializer: new JsonStreamSerializer(jsonTypeInfo), + compressor: new GZipCompression() + ); } diff --git a/Chickensoft.SaveFileBuilder/src/Serialization/IStreamSerializer.cs b/Chickensoft.SaveFileBuilder/src/Serialization/IStreamSerializer.cs new file mode 100644 index 0000000..822356d --- /dev/null +++ b/Chickensoft.SaveFileBuilder/src/Serialization/IStreamSerializer.cs @@ -0,0 +1,54 @@ +namespace Chickensoft.SaveFileBuilder.Serialization; + +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +/// Provides functionality to serialize from- and deserialize to objects or value types. +public interface IStreamSerializer +{ + /// + void Serialize(Stream stream, object? value, Type inputType); + + /// + object? Deserialize(Stream stream, Type returnType); +} + +/// Provides functionality to serialize from- and deserialize to objects or value types asynchronously. +public interface IAsyncStreamSerializer +{ + /// Serializes the specified value to the stream. + /// The stream to serialize to. + /// The object to serialize. + /// The type of the object to serialize. + /// The that can be used to cancel the serialization operation. + Task SerializeAsync(Stream stream, object? value, Type inputType, CancellationToken cancellationToken = default); + + /// Deserializes the stream to the specified type. + /// The stream to deserialize from. + /// The type of the object to deserialize. + /// The that can be used to cancel the deserialization operation. + /// The deserialized object. + ValueTask DeserializeAsync(Stream stream, Type returnType, CancellationToken cancellationToken = default); +} + +/// Provides extension methods for and . +public static class IStreamSerializerExtensions +{ + /// + /// The type of the object to serialize. + public static void Serialize(this IStreamSerializer serializer, Stream stream, TValue value) => serializer.Serialize(stream, value, typeof(TValue)); + + /// + /// The type of the object to deserialize. + public static TValue? Deserialize(this IStreamSerializer serializer, Stream stream) => (TValue?)serializer.Deserialize(stream, typeof(TValue)); + + /// + /// The type of the object to serialize. + public static Task SerializeAsync(this IAsyncStreamSerializer serializer, Stream stream, TValue value, CancellationToken cancellationToken = default) => serializer.SerializeAsync(stream, value, typeof(TValue), cancellationToken); + + /// + /// The type of the object to deserialize. 
+ public static async ValueTask DeserializeAsync(this IAsyncStreamSerializer serializer, Stream stream, CancellationToken cancellationToken = default) => (TValue?)await serializer.DeserializeAsync(stream, typeof(TValue), cancellationToken); +} diff --git a/Chickensoft.SaveFileBuilder/src/Serialization/JsonStreamSerializer.cs b/Chickensoft.SaveFileBuilder/src/Serialization/JsonStreamSerializer.cs new file mode 100644 index 0000000..424fff6 --- /dev/null +++ b/Chickensoft.SaveFileBuilder/src/Serialization/JsonStreamSerializer.cs @@ -0,0 +1,136 @@ +namespace Chickensoft.SaveFileBuilder.Serialization; + +using System; +using System.Diagnostics.CodeAnalysis; +using System.IO; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Text.Json.Serialization.Metadata; +using System.Threading; +using System.Threading.Tasks; + +/// Provides functionality to serialize from- and deserialize to objects or value types using the . +public class JsonStreamSerializer : IStreamSerializer, IAsyncStreamSerializer +{ + private static class DynamicCodeSuppress + { + public static class IL2026 + { + public const string CATEGORY = "Trimming"; + public const string CHECK_ID = "IL2026:Members annotated with 'RequiresUnreferencedCodeAttribute' require dynamic access otherwise can break functionality when trimming application code"; + } + + public static class IL3050 + { + public const string CATEGORY = "AOT"; + public const string CHECK_ID = "IL3050:Calling members annotated with 'RequiresDynamicCodeAttribute' may break functionality when AOT compiling."; + } + + public const string JUSTIFICATION = "Members annotated with the 'RequiresUnreferencedCodeAttribute' and 'RequiresDynamicCodeAttribute' will not be called because the initialization steps required for these members are already decorated with these attributes."; + } + + private readonly JsonTypeInfo? _jsonTypeInfo; + private readonly JsonSerializerOptions? _options; + private readonly JsonSerializerContext? _context; + + /// Initializes a new instance of the class. + /// Metadata about the type to convert. + public JsonStreamSerializer(JsonTypeInfo jsonTypeInfo) + { + _jsonTypeInfo = jsonTypeInfo; + } + + /// Initializes a new instance of the class. + /// Options to control serialization behavior. +#if NET8_0_OR_GREATER + [RequiresUnreferencedCode("Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [RequiresDynamicCode("Use System.Text.Json source generation for native AOT applications.")] +#endif + public JsonStreamSerializer(JsonSerializerOptions? options = null) + { + _options = options; + } + + /// Initializes a new instance of the class. + /// A metadata provider for serializable types. + public JsonStreamSerializer(JsonSerializerContext context) + { + _context = context; + } + + /// + [SuppressMessage(DynamicCodeSuppress.IL2026.CATEGORY, DynamicCodeSuppress.IL2026.CHECK_ID, Justification = DynamicCodeSuppress.JUSTIFICATION)] + [SuppressMessage(DynamicCodeSuppress.IL3050.CATEGORY, DynamicCodeSuppress.IL3050.CHECK_ID, Justification = DynamicCodeSuppress.JUSTIFICATION)] + public void Serialize(Stream stream, object? 
value, Type inputType) + { + if (_jsonTypeInfo != null) + { + JsonSerializer.Serialize(stream, value, _jsonTypeInfo); + } + else if (_context != null) + { + JsonSerializer.Serialize(stream, value, inputType, _context); + } + else + { + JsonSerializer.Serialize(stream, value, inputType, _options); + } + } + + /// + [SuppressMessage(DynamicCodeSuppress.IL2026.CATEGORY, DynamicCodeSuppress.IL2026.CHECK_ID, Justification = DynamicCodeSuppress.JUSTIFICATION)] + [SuppressMessage(DynamicCodeSuppress.IL3050.CATEGORY, DynamicCodeSuppress.IL3050.CHECK_ID, Justification = DynamicCodeSuppress.JUSTIFICATION)] + public Task SerializeAsync(Stream stream, object? value, Type inputType, CancellationToken cancellationToken = default) + { + if (_jsonTypeInfo != null) + { + return JsonSerializer.SerializeAsync(stream, value, _jsonTypeInfo, cancellationToken); + } + else if (_context != null) + { + return JsonSerializer.SerializeAsync(stream, value, inputType, _context, cancellationToken); + } + else + { + return JsonSerializer.SerializeAsync(stream, value, inputType, _options, cancellationToken); + } + } + + /// + [SuppressMessage(DynamicCodeSuppress.IL2026.CATEGORY, DynamicCodeSuppress.IL2026.CHECK_ID, Justification = DynamicCodeSuppress.JUSTIFICATION)] + [SuppressMessage(DynamicCodeSuppress.IL3050.CATEGORY, DynamicCodeSuppress.IL3050.CHECK_ID, Justification = DynamicCodeSuppress.JUSTIFICATION)] + public object? Deserialize(Stream stream, Type returnType) + { + if (_jsonTypeInfo != null) + { + return JsonSerializer.Deserialize(stream, _jsonTypeInfo); + } + else if (_context != null) + { + return JsonSerializer.Deserialize(stream, returnType, _context); + } + else + { + return JsonSerializer.Deserialize(stream, returnType, _options); + } + } + + /// + [SuppressMessage(DynamicCodeSuppress.IL2026.CATEGORY, DynamicCodeSuppress.IL2026.CHECK_ID, Justification = DynamicCodeSuppress.JUSTIFICATION)] + [SuppressMessage(DynamicCodeSuppress.IL3050.CATEGORY, DynamicCodeSuppress.IL3050.CHECK_ID, Justification = DynamicCodeSuppress.JUSTIFICATION)] + public ValueTask DeserializeAsync(Stream stream, Type returnType, CancellationToken cancellationToken = default) + { + if (_jsonTypeInfo != null) + { + return JsonSerializer.DeserializeAsync(stream, _jsonTypeInfo, cancellationToken); + } + else if (_context != null) + { + return JsonSerializer.DeserializeAsync(stream, returnType, _context, cancellationToken); + } + else + { + return JsonSerializer.DeserializeAsync(stream, returnType, _options, cancellationToken); + } + } +} From 2d7e65162850caf5bbfbf64651405d750c809974 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Wed, 24 Dec 2025 16:49:20 +0100 Subject: [PATCH 02/19] Rewrite Godot tests as unit tests The dependency on Godot for running tests is unnecessary, as all of the functionality in this library is based on native C#. All of the tests have been rewritten to use xunit for quicker testing. Moq has been used for testing the SaveFile class and its many possible configurations based on the given interfaces. 
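
To make the testing approach concrete, a minimal version of one such test looks roughly like the sketch below. This is only an illustration of the pattern: the class name SaveFileSmokeTest is hypothetical, and the real coverage lives in SaveFileTest.cs and SaveFileAsyncTest.cs in this patch. The sketch mocks the chunk, IO, and serializer interfaces and verifies that Save() routes the chunk data through the serializer into the write stream:

using System.IO;
using Chickensoft.SaveFileBuilder;
using Chickensoft.SaveFileBuilder.IO;
using Chickensoft.SaveFileBuilder.Serialization;
using Moq;
using Xunit;

public class SaveFileSmokeTest
{
  [Fact]
  public void Save_SerializesChunkDataIntoTheWriteStream()
  {
    // Arrange: mock the chunk, the IO source, and the serializer (no compressor).
    var chunk = new Mock<ISaveChunk<string>>();
    var io = new Mock<IIOStreamProvider>();
    var serializer = new Mock<IStreamSerializer>();
    using var stream = new MemoryStream();

    chunk.Setup(c => c.GetSaveData()).Returns("test");
    io.Setup(i => i.Write()).Returns(stream);

    var saveFile = new SaveFile<string>(chunk.Object, io.Object, serializer.Object);

    // Act
    saveFile.Save();

    // Assert: the chunk data is handed to the serializer, targeting the write stream.
    serializer.Verify(s => s.Serialize(stream, "test", typeof(string)), Times.Once());
  }
}

The asynchronous configurations are exercised the same way, with Mock<IAsyncIOStreamProvider> and Mock<IAsyncStreamSerializer> standing in for the async interfaces.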
--- .../Chickensoft.SaveFileBuilder.Tests.csproj | 17 +- .../badges/.gdignore | 0 .../coverage/.gdignore | 0 .../project.godot | 24 -- .../test/Tests.cs | 13 - .../test/Tests.tscn | 6 - .../test/src/SaveChunkTest.cs | 142 ++++++----- .../test/src/SaveFileAsyncTest.cs | 187 ++++++++++++++ .../test/src/SaveFileTest.cs | 236 ++++++++++++++---- 9 files changed, 460 insertions(+), 165 deletions(-) delete mode 100644 Chickensoft.SaveFileBuilder.Tests/badges/.gdignore delete mode 100644 Chickensoft.SaveFileBuilder.Tests/coverage/.gdignore delete mode 100644 Chickensoft.SaveFileBuilder.Tests/project.godot delete mode 100644 Chickensoft.SaveFileBuilder.Tests/test/Tests.cs delete mode 100644 Chickensoft.SaveFileBuilder.Tests/test/Tests.tscn create mode 100644 Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs diff --git a/Chickensoft.SaveFileBuilder.Tests/Chickensoft.SaveFileBuilder.Tests.csproj b/Chickensoft.SaveFileBuilder.Tests/Chickensoft.SaveFileBuilder.Tests.csproj index 73090ea..45d3b6d 100644 --- a/Chickensoft.SaveFileBuilder.Tests/Chickensoft.SaveFileBuilder.Tests.csproj +++ b/Chickensoft.SaveFileBuilder.Tests/Chickensoft.SaveFileBuilder.Tests.csproj @@ -1,7 +1,7 @@ - + net8.0 - disable + enable enable true preview @@ -20,13 +20,18 @@ - + - - - + + + + + + + + diff --git a/Chickensoft.SaveFileBuilder.Tests/badges/.gdignore b/Chickensoft.SaveFileBuilder.Tests/badges/.gdignore deleted file mode 100644 index e69de29..0000000 diff --git a/Chickensoft.SaveFileBuilder.Tests/coverage/.gdignore b/Chickensoft.SaveFileBuilder.Tests/coverage/.gdignore deleted file mode 100644 index e69de29..0000000 diff --git a/Chickensoft.SaveFileBuilder.Tests/project.godot b/Chickensoft.SaveFileBuilder.Tests/project.godot deleted file mode 100644 index 82ce87f..0000000 --- a/Chickensoft.SaveFileBuilder.Tests/project.godot +++ /dev/null @@ -1,24 +0,0 @@ -; Engine configuration file. -; It's best edited using the editor UI and not directly, -; since the parameters that go here are not all obvious. 
-; -; Format: -; [section] ; section goes between [] -; param=value ; assign values to parameters - -config_version=5 - -[application] - -config/name="Chickensoft.SaveFileBuilder.Tests" -run/main_scene="res://test/Tests.tscn" -config/features=PackedStringArray("4.2", "C#", "Mobile") -config/icon="res://icon.svg" - -[dotnet] - -project/assembly_name="Chickensoft.SaveFileBuilder.Tests" - -[rendering] - -renderer/rendering_method="mobile" diff --git a/Chickensoft.SaveFileBuilder.Tests/test/Tests.cs b/Chickensoft.SaveFileBuilder.Tests/test/Tests.cs deleted file mode 100644 index f62161c..0000000 --- a/Chickensoft.SaveFileBuilder.Tests/test/Tests.cs +++ /dev/null @@ -1,13 +0,0 @@ -namespace Chickensoft.SaveFileBuilder.Tests; - -using System.Reflection; -using Chickensoft.GoDotTest; -using Godot; - -public partial class Tests : Node2D -{ - public override void _Ready() => CallDeferred(MethodName.RunTests); - - public void RunTests() => - GoTest.RunTests(Assembly.GetExecutingAssembly(), this); -} diff --git a/Chickensoft.SaveFileBuilder.Tests/test/Tests.tscn b/Chickensoft.SaveFileBuilder.Tests/test/Tests.tscn deleted file mode 100644 index 14ebf60..0000000 --- a/Chickensoft.SaveFileBuilder.Tests/test/Tests.tscn +++ /dev/null @@ -1,6 +0,0 @@ -[gd_scene load_steps=2 format=3 uid="uid://bv5dxd8hrc5g4"] - -[ext_resource type="Script" path="res://test/Tests.cs" id="1_310o6"] - -[node name="Node2D" type="Node2D"] -script = ExtResource("1_310o6") diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveChunkTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveChunkTest.cs index 5fe255c..9b6fbbe 100644 --- a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveChunkTest.cs +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveChunkTest.cs @@ -1,95 +1,93 @@ namespace Chickensoft.SaveFileBuilder.Tests; -using System.Threading.Tasks; -using Chickensoft.GoDotTest; -using Godot; -using Shouldly; - -public class SaveChunkTest(Node testScene) : TestClass(testScene) +public class SaveChunkTest { - private sealed record SaveData { } + public string ChunkData { get; set; } + public SaveChunk Chunk { get; set; } + public string ChildChunkData { get; set; } + public SaveChunk ChildChunk { get; set; } - [Test] - public void SavesAndLoads() + public SaveChunkTest() { - var onSave = Task.CompletedTask; - var data = new SaveData(); - - var loaded = false; - - var saveChunk = new SaveChunk( - onSave: (chunk) => data, - onLoad: (chunk, data) => loaded = true + ChunkData = string.Empty; + Chunk = new SaveChunk( + onSave: (chunk) => ChunkData, + onLoad: (chunk, data) => ChunkData = data ); - - saveChunk.ShouldNotBeNull(); - - saveChunk.GetSaveData().ShouldBeSameAs(data); - saveChunk.LoadSaveData(data); - loaded.ShouldBeTrue(); + ChildChunkData = string.Empty; + ChildChunk = new SaveChunk( + onSave: (chunk) => ChildChunkData, + onLoad: (chunk, data) => ChildChunkData = data + ); } - [Test] - public void AddsAndGetsChunk() + [Fact] + public void GetSaveData_ReturnsChunkData() { - var onSave = Task.CompletedTask; - var data = new SaveData(); - - var saveChunk = new SaveChunk( - onSave: (chunk) => data, - onLoad: (chunk, data) => { } - ); - - var childLoaded = false; - var childData = new SaveData(); - var child = new SaveChunk( - onSave: (chunk) => childData, - onLoad: (chunk, data) => childLoaded = true - ); - - saveChunk.AddChunk(child); - - var childChunk = saveChunk.GetChunk(); + ChunkData = "test"; + Assert.Equal("test", Chunk.GetSaveData()); + } - childChunk.ShouldBeSameAs(child); + [Fact] + public void 
LoadSaveData_SetsChunkData() + { + Chunk.LoadSaveData("test"); + Assert.Equal("test", ChunkData); + } - saveChunk.GetChunkSaveData().ShouldBeSameAs(childData); - saveChunk.LoadChunkSaveData(childData); - childLoaded.ShouldBeTrue(); + [Fact] + public void AddChunk_DoesNotThrow() + { + var exception = Record.Exception(() => Chunk.AddChunk(ChildChunk)); + Assert.Null(exception); } - [Test] - public void OverwritesAndGetsChunk() + [Fact] + public void GetChunk_ReturnsAddedChunk() { - var onSave = Task.CompletedTask; - var data = new SaveData(); + Chunk.AddChunk(ChildChunk); + Assert.True(ReferenceEquals(ChildChunk, Chunk.GetChunk())); + } - var saveChunk = new SaveChunk( - onSave: (chunk) => data, - onLoad: (chunk, data) => { } - ); + [Fact] + public void GetChunkSaveData_ReturnsChildChunkData() + { + Chunk.AddChunk(ChildChunk); + ChildChunkData = "child test"; + Assert.Equal("child test", Chunk.GetChunkSaveData()); + } - var childData = new SaveData(); - var child = new SaveChunk( - onSave: (chunk) => childData, - onLoad: (chunk, data) => { } - ); + [Fact] + public void LoadChunkSaveData_SetsChildChunkData() + { + Chunk.AddChunk(ChildChunk); + Chunk.LoadChunkSaveData("child test"); + Assert.Equal("child test", ChildChunkData); + } - var otherChildData = new SaveData(); - var otherChild = new SaveChunk( - onSave: (chunk) => otherChildData, - onLoad: (chunk, data) => { } - ); + [Fact] + public void AddDuplicateChunk_ThrowsException() + { + Chunk.AddChunk(ChildChunk); + var exception = Record.Exception(() => Chunk.AddChunk(It.IsAny>())); + Assert.NotNull(exception); + } - saveChunk.AddChunk(child); - saveChunk.OverwriteChunk(otherChild); + [Fact] + public void OverwriteChunk_WithoutExistingChunk_AddsChunk() + { + Chunk.OverwriteChunk(ChildChunk); + Assert.True(ReferenceEquals(ChildChunk, Chunk.GetChunk())); + } - var childChunk = saveChunk.GetChunk(); + [Fact] + public void OverwriteChunk_WithExistingChunk_UpdatesExistingChunk() + { + var mockChunk = It.IsAny>(); - childChunk.ShouldNotBeSameAs(child); - childChunk.ShouldBeSameAs(otherChild); + Chunk.AddChunk(ChildChunk); + Chunk.OverwriteChunk(mockChunk); - saveChunk.GetChunkSaveData().ShouldNotBeSameAs(childData); - saveChunk.GetChunkSaveData().ShouldBeSameAs(otherChildData); + Assert.True(ReferenceEquals(mockChunk, Chunk.GetChunk())); } } diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs new file mode 100644 index 0000000..5e7e535 --- /dev/null +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs @@ -0,0 +1,187 @@ +namespace Chickensoft.SaveFileBuilder.Tests; + +using System.IO.Compression; +using Chickensoft.SaveFileBuilder.Compression; +using Chickensoft.SaveFileBuilder.IO; +using Chickensoft.SaveFileBuilder.Serialization; + +public class SaveFileAsyncTest +{ + private CancellationToken CancellationToken { get; } + + public Mock MockAsyncIO { get; set; } + public Mock MockAsyncSerializer { get; set; } + public Mock MockCompresser { get; set; } + + public Mock> MockChunk { get; set; } + + public SaveFile SaveFile { get; set; } + + public SaveFileAsyncTest(ITestContextAccessor testContextAccessor) + { + CancellationToken = testContextAccessor.Current.CancellationToken; + + MockAsyncIO = new Mock(); + MockAsyncSerializer = new Mock(); + MockCompresser = new Mock(); + + MockChunk = new Mock>(); + + SaveFile = new SaveFile(MockChunk.Object, MockAsyncIO.Object, MockAsyncSerializer.Object, MockCompresser.Object); + } + + [Fact] + public 
void CanSaveSynchronously_IsFalse() => Assert.False(SaveFile.CanSaveSynchronously); + + [Fact] + public void Save_ThrowsInvalidOperationException() => Assert.Throws(() => SaveFile.Save()); + + [Fact] + public void Load_ThrowsInvalidOperationException() => Assert.Throws(SaveFile.Load); + + [Fact] + public void Exists_ThrowsInvalidOperationException() => Assert.Throws(() => SaveFile.Exists()); + + [Fact] + public void Delete_ThrowsInvalidOperationException() => Assert.Throws(SaveFile.Delete); + + [Fact] + public async Task SaveAsync_WritesCompressesAndSerializes() + { + // Arrange + MemoryStream? ioStream = null; + var compressionStream = new MemoryStream(); + + MockChunk.Setup(chunk => chunk.GetSaveData()).Returns("test").Verifiable(); + MockCompresser.Setup(compresser => compresser.CompressionStream(It.IsAny(), It.IsAny(), true)).Callback((stream, _, _) => ioStream = (MemoryStream)stream).Returns(compressionStream).Verifiable(); + MockAsyncSerializer.Setup(serializer => serializer.SerializeAsync(compressionStream, "test", typeof(string), CancellationToken)).Verifiable(); + MockAsyncIO.Setup(io => io.WriteAsync(It.Is(stream => ioStream == stream), CancellationToken)).Verifiable(); + + // Act + await SaveFile.SaveAsync(cancellationToken: CancellationToken); + + // Assert + MockChunk.Verify(); + MockCompresser.Verify(); + MockAsyncSerializer.Verify(); + MockAsyncIO.Verify(); + } + + [Fact] + public async Task SaveAsync_CompressorIsNull_WritesAndSerializesWithoutCompressing() + { + // Arrange + SaveFile = new SaveFile(MockChunk.Object, MockAsyncIO.Object, MockAsyncSerializer.Object, null); + + MemoryStream? ioStream = null; + MockChunk.Setup(chunk => chunk.GetSaveData()).Returns("test").Verifiable(); + MockAsyncSerializer.Setup(serializer => serializer.SerializeAsync(It.IsAny(), "test", typeof(string), CancellationToken)).Callback((stream, _, _, _) => ioStream = (MemoryStream)stream).Returns(Task.CompletedTask).Verifiable(); + MockAsyncIO.Setup(io => io.WriteAsync(It.Is(stream => ioStream == stream), CancellationToken)).Verifiable(); + + // Act + await SaveFile.SaveAsync(cancellationToken: CancellationToken); + + // Assert + MockChunk.Verify(); + MockAsyncSerializer.Verify(); + MockAsyncIO.Verify(); + } + + [Fact] + public async Task SaveAsync_CompressionLevel_UsedByCompressor() + { + // Arrange + MockCompresser.Setup(compressor => compressor.CompressionStream(It.IsAny(), CompressionLevel.Fastest, true)).Verifiable(); + + // Act + await SaveFile.SaveAsync(CompressionLevel.Fastest, CancellationToken); + + // Assert + MockCompresser.Verify(); + } + + [Fact] + public async Task LoadAsync_ReadsDecompressesAndDeserializes() + { + // Arrange + var ioStream = new MemoryStream(); + var decompressionStream = new MemoryStream(); + + MockAsyncIO.Setup(io => io.ReadAsync(CancellationToken)).ReturnsAsync(ioStream).Verifiable(); + MockCompresser.Setup(compresser => compresser.DecompressionStream(ioStream)).Returns(decompressionStream).Verifiable(); + MockAsyncSerializer.Setup(serializer => serializer.DeserializeAsync(decompressionStream, typeof(string), CancellationToken)).ReturnsAsync("test").Verifiable(); + MockChunk.Setup(chunk => chunk.LoadSaveData("test")).Verifiable(); + + // Act + await SaveFile.LoadAsync(CancellationToken); + + // Assert + MockAsyncIO.Verify(); + MockCompresser.Verify(); + MockAsyncSerializer.Verify(); + MockChunk.Verify(); + } + + [Fact] + public async Task LoadAsync_CompressorIsNull_ReadsAndDeserializesWithoutDecompressing() + { + // Arrange + SaveFile = new 
SaveFile(MockChunk.Object, MockAsyncIO.Object, MockAsyncSerializer.Object, null); + + var ioStream = new MemoryStream(); + MockAsyncIO.Setup(io => io.ReadAsync(CancellationToken)).ReturnsAsync(ioStream).Verifiable(); + MockAsyncSerializer.Setup(serializer => serializer.DeserializeAsync(ioStream, typeof(string), CancellationToken)).ReturnsAsync("test").Verifiable(); + MockChunk.Setup(chunk => chunk.LoadSaveData("test")).Verifiable(); + + // Act + await SaveFile.LoadAsync(CancellationToken); + + // Assert + MockAsyncIO.Verify(); + MockAsyncSerializer.Verify(); + MockChunk.Verify(); + } + + [Fact] + public async Task LoadAsync_DataIsNull_DoesNotSetChunkData() + { + // Arrange + MockAsyncSerializer.Setup(serializer => serializer.DeserializeAsync(It.IsAny(), It.IsAny(), CancellationToken)).ReturnsAsync((string?)null).Verifiable(); + MockChunk.Setup(chunk => chunk.LoadSaveData(It.IsAny())).Verifiable(Times.Never); + + // Act + await SaveFile.LoadAsync(CancellationToken); + + // Assert + MockAsyncSerializer.Verify(); + MockChunk.Verify(); + } + + [Fact] + public async Task ExistsAsync_ReturnsIOStreamExistsAsyncResult() + { + // Arrange + MockAsyncIO.Setup(io => io.ExistsAsync(CancellationToken)).ReturnsAsync(true).Verifiable(); + + // Act + var result = await SaveFile.ExistsAsync(cancellationToken: CancellationToken); + + // Assert + MockAsyncIO.Verify(); + Assert.True(result); + } + + [Fact] + public async Task DeleteAsync_CallsIOStreamDeleteAsync() + { + // Arrange + MockAsyncIO.Setup(io => io.DeleteAsync(CancellationToken)).ReturnsAsync(true).Verifiable(); + + // Act + var result = await SaveFile.DeleteAsync(cancellationToken: CancellationToken); + + // Assert + MockAsyncIO.Verify(); + Assert.True(result); + } +} diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs index d0f8d19..61c3f18 100644 --- a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs @@ -1,50 +1,198 @@ namespace Chickensoft.SaveFileBuilder.Tests; -using System.Threading.Tasks; -using Chickensoft.GoDotTest; -using Godot; +using System.IO.Compression; +using Chickensoft.SaveFileBuilder.Compression; +using Chickensoft.SaveFileBuilder.IO; +using Chickensoft.SaveFileBuilder.Serialization; -public class SaveFileTest(Node testScene) : TestClass(testScene) +public class SaveFileTest { - private sealed record SaveData { } - - [Test] - public async Task SavesAndLoads() - { - //var onSave = Task.CompletedTask; - //var data = new SaveData(); - - //var saveFile = new SaveFile( - // root: new SaveChunk( - // onSave: (chunk) => new SaveData(), - // onLoad: (chunk, data) => { } - // ), - // onSave: _ => onSave, - // onLoad: () => Task.FromResult(data) - //); - - //await Should.NotThrowAsync(async () => - //{ - // await saveFile.Load(); - // await saveFile.Save(); - //}); - } - - [Test] - public async Task DoesNotLoadIfNull() - { - //var onSave = Task.CompletedTask; - //var data = new SaveData(); - - //var saveFile = new SaveFile( - // root: new SaveChunk( - // onSave: (chunk) => new SaveData(), - // onLoad: (chunk, data) => { } - // ), - // onSave: _ => onSave, - // onLoad: () => Task.FromResult(null) - //); - - //await Should.NotThrowAsync(saveFile.Load); + public Mock MockIO { get; set; } + public Mock MockSerializer { get; set; } + public Mock MockCompresser { get; set; } + + public Mock> MockChunk { get; set; } + + public SaveFile SaveFile { get; set; } + + public SaveFileTest() + { + 
MockIO = new Mock(); + MockSerializer = new Mock(); + MockCompresser = new Mock(); + + MockChunk = new Mock>(); + + SaveFile = new SaveFile(MockChunk.Object, MockIO.Object, MockSerializer.Object, MockCompresser.Object); + } + + [Fact] + public void CanSaveSynchronously_IsTrue() => Assert.True(SaveFile.CanSaveSynchronously); + + [Fact] + public void Save_WritesCompressesAndSerializes() + { + // Arrange + var io = new MemoryStream(); + var compressionStream = new MemoryStream(); + + MockChunk.Setup(chunk => chunk.GetSaveData()).Returns("test").Verifiable(); + MockIO.Setup(io => io.Write()).Returns(io).Verifiable(); + MockCompresser.Setup(compresser => compresser.CompressionStream(io)).Returns(compressionStream).Verifiable(); + MockSerializer.Setup(serializer => serializer.Serialize(compressionStream, "test", typeof(string))).Verifiable(); + + // Act + SaveFile.Save(); + + // Assert + MockChunk.Verify(); + MockIO.Verify(); + MockCompresser.Verify(); + MockSerializer.Verify(); + } + + [Fact] + public void Save_CompressorIsNull_WritesAndSerializesWithoutCompressing() + { + // Arrange + SaveFile = new SaveFile(MockChunk.Object, MockIO.Object, MockSerializer.Object, null); + + var ioStream = new MemoryStream(); + MockChunk.Setup(chunk => chunk.GetSaveData()).Returns("test").Verifiable(); + MockIO.Setup(io => io.Write()).Returns(ioStream).Verifiable(); + MockSerializer.Setup(serializer => serializer.Serialize(ioStream, "test", typeof(string))).Verifiable(); + + // Act + SaveFile.Save(); + + // Assert + MockChunk.Verify(); + MockIO.Verify(); + MockSerializer.Verify(); + } + + [Fact] + public void Save_CompressionLevel_UsedByCompressor() + { + // Arrange + MockCompresser.Setup(compressor => compressor.CompressionStream(It.IsAny(), CompressionLevel.Fastest)).Verifiable(); + + // Act + SaveFile.Save(CompressionLevel.Fastest); + + // Assert + MockCompresser.Verify(); + } + + [Fact] + public void Load_ReadsDecompressesAndDeserializes() + { + // Arrange + var ioStream = new MemoryStream(); + var compressionStream = new MemoryStream(); + + MockIO.Setup(io => io.Read()).Returns(ioStream).Verifiable(); + MockCompresser.Setup(compresser => compresser.DecompressionStream(ioStream)).Returns(compressionStream).Verifiable(); + MockSerializer.Setup(serializer => serializer.Deserialize(compressionStream, typeof(string))).Returns("test").Verifiable(); + MockChunk.Setup(chunk => chunk.LoadSaveData("test")).Verifiable(); + + // Act + SaveFile.Load(); + + // Assert + MockIO.Verify(); + MockCompresser.Verify(); + MockSerializer.Verify(); + MockChunk.Verify(); + } + + [Fact] + public void Load_CompressorIsNull_ReadsAndDeserializesWithoutDecompressing() + { + // Arrange + SaveFile = new SaveFile(MockChunk.Object, MockIO.Object, MockSerializer.Object, null); + + var ioStream = new MemoryStream(); + MockIO.Setup(io => io.Read()).Returns(ioStream).Verifiable(); + MockSerializer.Setup(serializer => serializer.Deserialize(ioStream, typeof(string))).Returns("test").Verifiable(); + MockChunk.Setup(chunk => chunk.LoadSaveData("test")).Verifiable(); + + // Act + SaveFile.Load(); + + // Assert + MockIO.Verify(); + MockSerializer.Verify(); + MockChunk.Verify(); + } + + [Fact] + public void Load_DataIsNull_DoesNotSetChunkData() + { + // Arrange + MockSerializer.Setup(serializer => serializer.Deserialize(It.IsAny(), It.IsAny())).Returns((string?)null).Verifiable(); + MockChunk.Setup(chunk => chunk.LoadSaveData(It.IsAny())).Verifiable(Times.Never); + + // Act + SaveFile.Load(); + + // Assert + MockSerializer.Verify(); + 
MockChunk.Verify(); + } + + [Fact] + public void Exists_ReturnsIOExists() + { + // Arrange + MockIO.Setup(io => io.Exists()).Returns(true).Verifiable(); + + // Act + var result = SaveFile.Exists(); + + // Assert + MockIO.Verify(); + Assert.True(result); + } + + [Fact] + public void Delete_CallsIODelete() + { + // Arrange + MockIO.Setup(io => io.Delete()).Verifiable(); + + // Act + SaveFile.Delete(); + + // Assert + MockIO.Verify(); + } + + [Fact] + public void SaveAsync_CompletedSynchronously() + { + var task = SaveFile.SaveAsync(cancellationToken: TestContext.Current.CancellationToken); + Assert.True(task.IsCompletedSuccessfully); + } + + [Fact] + public void LoadAsync_CompletedSynchronously() + { + var task = SaveFile.LoadAsync(TestContext.Current.CancellationToken); + Assert.True(task.IsCompletedSuccessfully); + } + + [Fact] + public void ExistsAsync_CompletedSynchronously() + { + var task = SaveFile.ExistsAsync(TestContext.Current.CancellationToken); + Assert.True(task.IsCompletedSuccessfully); + } + + [Fact] + public void DeleteAsync_CompletedSynchronously() + { + var task = SaveFile.DeleteAsync(TestContext.Current.CancellationToken); + Assert.True(task.IsCompletedSuccessfully); } } From f9a62712a21285a179771692a8ecea539d7c241f Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Thu, 1 Jan 2026 11:52:55 +0100 Subject: [PATCH 03/19] Change interface naming IIOStreamProvider -> IStreamIO IAsyncIOStreamProvider -> IAsyncStreamIO ICompressionStreamProvider -> IStreamCompressor These namechanges make it easier to form a connection between IStreamIO, IStreamCompresor, and IStreamSerializer as part of a bigger whole. --- .../test/src/SaveFileAsyncTest.cs | 14 +-- .../test/src/SaveFileTest.cs | 14 +-- .../src/Compression/BrotliCompression.cs | 17 --- .../src/Compression/BrotliStreamCompressor.cs | 17 +++ ...pression.cs => DeflateStreamCompressor.cs} | 6 +- ...Compression.cs => GZipStreamCompressor.cs} | 6 +- ...StreamProvider.cs => IStreamCompressor.cs} | 6 +- .../src/IO/{FileIO.cs => FileStreamIO.cs} | 10 +- .../src/IO/{HttpIO.cs => HttpStreamIO.cs} | 36 +++--- .../IO/{IIOStreamProvider.cs => IStreamIO.cs} | 4 +- Chickensoft.SaveFileBuilder/src/SaveFile.cs | 116 +++++++++++++----- 11 files changed, 147 insertions(+), 99 deletions(-) delete mode 100644 Chickensoft.SaveFileBuilder/src/Compression/BrotliCompression.cs create mode 100644 Chickensoft.SaveFileBuilder/src/Compression/BrotliStreamCompressor.cs rename Chickensoft.SaveFileBuilder/src/Compression/{DeflateCompression.cs => DeflateStreamCompressor.cs} (51%) rename Chickensoft.SaveFileBuilder/src/Compression/{GZipCompression.cs => GZipStreamCompressor.cs} (52%) rename Chickensoft.SaveFileBuilder/src/Compression/{ICompressionStreamProvider.cs => IStreamCompressor.cs} (83%) rename Chickensoft.SaveFileBuilder/src/IO/{FileIO.cs => FileStreamIO.cs} (87%) rename Chickensoft.SaveFileBuilder/src/IO/{HttpIO.cs => HttpStreamIO.cs} (85%) rename Chickensoft.SaveFileBuilder/src/IO/{IIOStreamProvider.cs => IStreamIO.cs} (97%) diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs index 5e7e535..faf1b0b 100644 --- a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs @@ -9,9 +9,9 @@ public class SaveFileAsyncTest { private CancellationToken CancellationToken { get; } - public Mock MockAsyncIO { get; set; } + public Mock MockAsyncIO { get; set; } public Mock MockAsyncSerializer { get; set; } - 
public Mock MockCompresser { get; set; } + public Mock MockCompresser { get; set; } public Mock> MockChunk { get; set; } @@ -21,9 +21,9 @@ public SaveFileAsyncTest(ITestContextAccessor testContextAccessor) { CancellationToken = testContextAccessor.Current.CancellationToken; - MockAsyncIO = new Mock(); + MockAsyncIO = new Mock(); MockAsyncSerializer = new Mock(); - MockCompresser = new Mock(); + MockCompresser = new Mock(); MockChunk = new Mock>(); @@ -53,7 +53,7 @@ public async Task SaveAsync_WritesCompressesAndSerializes() var compressionStream = new MemoryStream(); MockChunk.Setup(chunk => chunk.GetSaveData()).Returns("test").Verifiable(); - MockCompresser.Setup(compresser => compresser.CompressionStream(It.IsAny(), It.IsAny(), true)).Callback((stream, _, _) => ioStream = (MemoryStream)stream).Returns(compressionStream).Verifiable(); + MockCompresser.Setup(compresser => compresser.Compress(It.IsAny(), It.IsAny(), true)).Callback((stream, _, _) => ioStream = (MemoryStream)stream).Returns(compressionStream).Verifiable(); MockAsyncSerializer.Setup(serializer => serializer.SerializeAsync(compressionStream, "test", typeof(string), CancellationToken)).Verifiable(); MockAsyncIO.Setup(io => io.WriteAsync(It.Is(stream => ioStream == stream), CancellationToken)).Verifiable(); @@ -91,7 +91,7 @@ public async Task SaveAsync_CompressorIsNull_WritesAndSerializesWithoutCompressi public async Task SaveAsync_CompressionLevel_UsedByCompressor() { // Arrange - MockCompresser.Setup(compressor => compressor.CompressionStream(It.IsAny(), CompressionLevel.Fastest, true)).Verifiable(); + MockCompresser.Setup(compressor => compressor.Compress(It.IsAny(), CompressionLevel.Fastest, true)).Verifiable(); // Act await SaveFile.SaveAsync(CompressionLevel.Fastest, CancellationToken); @@ -108,7 +108,7 @@ public async Task LoadAsync_ReadsDecompressesAndDeserializes() var decompressionStream = new MemoryStream(); MockAsyncIO.Setup(io => io.ReadAsync(CancellationToken)).ReturnsAsync(ioStream).Verifiable(); - MockCompresser.Setup(compresser => compresser.DecompressionStream(ioStream)).Returns(decompressionStream).Verifiable(); + MockCompresser.Setup(compresser => compresser.Decompress(ioStream)).Returns(decompressionStream).Verifiable(); MockAsyncSerializer.Setup(serializer => serializer.DeserializeAsync(decompressionStream, typeof(string), CancellationToken)).ReturnsAsync("test").Verifiable(); MockChunk.Setup(chunk => chunk.LoadSaveData("test")).Verifiable(); diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs index 61c3f18..c9b7ea2 100644 --- a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs @@ -7,9 +7,9 @@ namespace Chickensoft.SaveFileBuilder.Tests; public class SaveFileTest { - public Mock MockIO { get; set; } + public Mock MockIO { get; set; } public Mock MockSerializer { get; set; } - public Mock MockCompresser { get; set; } + public Mock MockCompresser { get; set; } public Mock> MockChunk { get; set; } @@ -17,9 +17,9 @@ public class SaveFileTest public SaveFileTest() { - MockIO = new Mock(); + MockIO = new Mock(); MockSerializer = new Mock(); - MockCompresser = new Mock(); + MockCompresser = new Mock(); MockChunk = new Mock>(); @@ -38,7 +38,7 @@ public void Save_WritesCompressesAndSerializes() MockChunk.Setup(chunk => chunk.GetSaveData()).Returns("test").Verifiable(); MockIO.Setup(io => io.Write()).Returns(io).Verifiable(); - MockCompresser.Setup(compresser => 
compresser.CompressionStream(io)).Returns(compressionStream).Verifiable(); + MockCompresser.Setup(compresser => compresser.Compress(io)).Returns(compressionStream).Verifiable(); MockSerializer.Setup(serializer => serializer.Serialize(compressionStream, "test", typeof(string))).Verifiable(); // Act @@ -75,7 +75,7 @@ public void Save_CompressorIsNull_WritesAndSerializesWithoutCompressing() public void Save_CompressionLevel_UsedByCompressor() { // Arrange - MockCompresser.Setup(compressor => compressor.CompressionStream(It.IsAny(), CompressionLevel.Fastest)).Verifiable(); + MockCompresser.Setup(compressor => compressor.Compress(It.IsAny(), CompressionLevel.Fastest)).Verifiable(); // Act SaveFile.Save(CompressionLevel.Fastest); @@ -92,7 +92,7 @@ public void Load_ReadsDecompressesAndDeserializes() var compressionStream = new MemoryStream(); MockIO.Setup(io => io.Read()).Returns(ioStream).Verifiable(); - MockCompresser.Setup(compresser => compresser.DecompressionStream(ioStream)).Returns(compressionStream).Verifiable(); + MockCompresser.Setup(compresser => compresser.Decompress(ioStream)).Returns(compressionStream).Verifiable(); MockSerializer.Setup(serializer => serializer.Deserialize(compressionStream, typeof(string))).Returns("test").Verifiable(); MockChunk.Setup(chunk => chunk.LoadSaveData("test")).Verifiable(); diff --git a/Chickensoft.SaveFileBuilder/src/Compression/BrotliCompression.cs b/Chickensoft.SaveFileBuilder/src/Compression/BrotliCompression.cs deleted file mode 100644 index aa401d9..0000000 --- a/Chickensoft.SaveFileBuilder/src/Compression/BrotliCompression.cs +++ /dev/null @@ -1,17 +0,0 @@ -namespace Chickensoft.SaveFileBuilder.Compression; - -using System; -using System.IO; -using System.IO.Compression; - -/// Provides a Brotli compression and decompression stream. -public readonly struct BrotliCompression : ICompressionStreamProvider -{ - /// - /// - public Stream CompressionStream(Stream stream, CompressionLevel compressionLevel = default, bool leaveOpen = default) => new BrotliStream(stream, compressionLevel, leaveOpen); - - /// - public Stream DecompressionStream(Stream stream, bool leaveOpen = default) => new BrotliStream(stream, CompressionMode.Decompress, leaveOpen); -} - diff --git a/Chickensoft.SaveFileBuilder/src/Compression/BrotliStreamCompressor.cs b/Chickensoft.SaveFileBuilder/src/Compression/BrotliStreamCompressor.cs new file mode 100644 index 0000000..ab01783 --- /dev/null +++ b/Chickensoft.SaveFileBuilder/src/Compression/BrotliStreamCompressor.cs @@ -0,0 +1,17 @@ +namespace Chickensoft.SaveFileBuilder.Compression; + +using System; +using System.IO; +using System.IO.Compression; + +/// Provides a Brotli compression and decompression stream. 
+public readonly struct BrotliStreamCompressor : IStreamCompressor +{ + /// + /// + public Stream Compress(Stream stream, CompressionLevel compressionLevel = default, bool leaveOpen = default) => new BrotliStream(stream, compressionLevel, leaveOpen); + + /// + public Stream Decompress(Stream stream, bool leaveOpen = default) => new BrotliStream(stream, CompressionMode.Decompress, leaveOpen); +} + diff --git a/Chickensoft.SaveFileBuilder/src/Compression/DeflateCompression.cs b/Chickensoft.SaveFileBuilder/src/Compression/DeflateStreamCompressor.cs similarity index 51% rename from Chickensoft.SaveFileBuilder/src/Compression/DeflateCompression.cs rename to Chickensoft.SaveFileBuilder/src/Compression/DeflateStreamCompressor.cs index 7e3bcd6..56d8ee2 100644 --- a/Chickensoft.SaveFileBuilder/src/Compression/DeflateCompression.cs +++ b/Chickensoft.SaveFileBuilder/src/Compression/DeflateStreamCompressor.cs @@ -5,15 +5,15 @@ namespace Chickensoft.SaveFileBuilder.Compression; using System.IO.Compression; /// Provides a Deflate compression and decompression stream. -public readonly struct DeflateCompression : ICompressionStreamProvider +public readonly struct DeflateStreamCompressor : IStreamCompressor { /// /// /// - public Stream CompressionStream(Stream stream, CompressionLevel compressionLevel = default, bool leaveOpen = default) => new DeflateStream(stream, compressionLevel, leaveOpen); + public Stream Compress(Stream stream, CompressionLevel compressionLevel = default, bool leaveOpen = default) => new DeflateStream(stream, compressionLevel, leaveOpen); /// /// /// - public Stream DecompressionStream(Stream stream, bool leaveOpen = default) => new DeflateStream(stream, CompressionMode.Decompress, leaveOpen); + public Stream Decompress(Stream stream, bool leaveOpen = default) => new DeflateStream(stream, CompressionMode.Decompress, leaveOpen); } diff --git a/Chickensoft.SaveFileBuilder/src/Compression/GZipCompression.cs b/Chickensoft.SaveFileBuilder/src/Compression/GZipStreamCompressor.cs similarity index 52% rename from Chickensoft.SaveFileBuilder/src/Compression/GZipCompression.cs rename to Chickensoft.SaveFileBuilder/src/Compression/GZipStreamCompressor.cs index 6f34e26..b99ece2 100644 --- a/Chickensoft.SaveFileBuilder/src/Compression/GZipCompression.cs +++ b/Chickensoft.SaveFileBuilder/src/Compression/GZipStreamCompressor.cs @@ -5,15 +5,15 @@ namespace Chickensoft.SaveFileBuilder.Compression; using System.IO.Compression; /// Provides a GZip compression and decompression stream. 
-public readonly struct GZipCompression : ICompressionStreamProvider +public readonly struct GZipStreamCompressor : IStreamCompressor { /// /// /// - public Stream CompressionStream(Stream stream, CompressionLevel compressionLevel = default, bool leaveOpen = default) => new GZipStream(stream, compressionLevel, leaveOpen); + public Stream Compress(Stream stream, CompressionLevel compressionLevel = default, bool leaveOpen = default) => new GZipStream(stream, compressionLevel, leaveOpen); /// /// /// - public Stream DecompressionStream(Stream stream, bool leaveOpen = default) => new GZipStream(stream, CompressionMode.Decompress, leaveOpen); + public Stream Decompress(Stream stream, bool leaveOpen = default) => new GZipStream(stream, CompressionMode.Decompress, leaveOpen); } diff --git a/Chickensoft.SaveFileBuilder/src/Compression/ICompressionStreamProvider.cs b/Chickensoft.SaveFileBuilder/src/Compression/IStreamCompressor.cs similarity index 83% rename from Chickensoft.SaveFileBuilder/src/Compression/ICompressionStreamProvider.cs rename to Chickensoft.SaveFileBuilder/src/Compression/IStreamCompressor.cs index e31eb42..ca3c4e3 100644 --- a/Chickensoft.SaveFileBuilder/src/Compression/ICompressionStreamProvider.cs +++ b/Chickensoft.SaveFileBuilder/src/Compression/IStreamCompressor.cs @@ -4,18 +4,18 @@ namespace Chickensoft.SaveFileBuilder.Compression; using System.IO.Compression; /// Provides a compression- and decompression based on the base that can be written to or -read from. -public interface ICompressionStreamProvider +public interface IStreamCompressor { /// Provide a compression stream using the compression level, and optionally leaves the base stream open. /// The base stream. /// Compression level whether to emphasize speed or efficiency. /// to leave open after disposing the compression stream; otherwise . /// The compression stream. - Stream CompressionStream(Stream stream, CompressionLevel compressionLevel = default, bool leaveOpen = default); + Stream Compress(Stream stream, CompressionLevel compressionLevel = default, bool leaveOpen = default); /// Provide a decompression stream, and optionally leaves the base stream open. /// The base stream. /// to leave open after disposing the decompression stream; otherwise . /// The decompressed stream. - Stream DecompressionStream(Stream stream, bool leaveOpen = default); + Stream Decompress(Stream stream, bool leaveOpen = default); } diff --git a/Chickensoft.SaveFileBuilder/src/IO/FileIO.cs b/Chickensoft.SaveFileBuilder/src/IO/FileStreamIO.cs similarity index 87% rename from Chickensoft.SaveFileBuilder/src/IO/FileIO.cs rename to Chickensoft.SaveFileBuilder/src/IO/FileStreamIO.cs index 98d993b..f06f85f 100644 --- a/Chickensoft.SaveFileBuilder/src/IO/FileIO.cs +++ b/Chickensoft.SaveFileBuilder/src/IO/FileStreamIO.cs @@ -3,21 +3,21 @@ namespace Chickensoft.SaveFileBuilder.IO; using System.IO; /// Provides a read- and write from a file. -public class FileIO : IIOStreamProvider +public class FileStreamIO : IStreamIO { /// The of the file. public FileInfo FileInfo { get; } - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// The of the file. - public FileIO(FileInfo fileInfo) + public FileStreamIO(FileInfo fileInfo) { FileInfo = fileInfo; } - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// The filename of the file. 
- public FileIO(string fileName) + public FileStreamIO(string fileName) { FileInfo = new FileInfo(fileName); } diff --git a/Chickensoft.SaveFileBuilder/src/IO/HttpIO.cs b/Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs similarity index 85% rename from Chickensoft.SaveFileBuilder/src/IO/HttpIO.cs rename to Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs index f5a8e48..6223658 100644 --- a/Chickensoft.SaveFileBuilder/src/IO/HttpIO.cs +++ b/Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs @@ -8,7 +8,7 @@ namespace Chickensoft.SaveFileBuilder.IO; using System.Threading; using System.Threading.Tasks; -/// Defines the relative 's used for specific HTTP requests by the . +/// Defines the relative 's used for specific HTTP requests by the . /// The relative used for read requests. /// The relative used for write requests. /// The relative used for exists requests. @@ -40,7 +40,7 @@ public HttpIORequestUris( } /// Provides a read from- and requests a write for an Http address. -public class HttpIO : IAsyncIOStreamProvider, IDisposable +public class HttpStreamIO : IAsyncStreamIO, IDisposable { private bool _isDisposed; @@ -64,30 +64,30 @@ public class HttpIO : IAsyncIOStreamProvider, IDisposable /// If the is left null, it will be set to the length of the stream being written. In most cases, this is the desired behavior. public HttpContentHeaders WriteHeaders => _emptyContent.Headers; - /// Initializes a new instance of the class. - public HttpIO() + /// Initializes a new instance of the class. + public HttpStreamIO() : this(new HttpClient()) { } - /// Initializes a new instance of the class with the specified timeout. - /// - public HttpIO(TimeSpan timeout) + /// Initializes a new instance of the class with the specified timeout. + /// + public HttpStreamIO(TimeSpan timeout) : this(new HttpClient() { Timeout = timeout }) { } - /// - public HttpIO(Uri baseAddress) + /// + public HttpStreamIO(Uri baseAddress) : this(new HttpClient() { BaseAddress = baseAddress, }) { } - /// - public HttpIO(Uri baseAddress, TimeSpan timeout) + /// + public HttpStreamIO(Uri baseAddress, TimeSpan timeout) : this(new HttpClient() { BaseAddress = baseAddress, @@ -95,23 +95,23 @@ public HttpIO(Uri baseAddress, TimeSpan timeout) }) { } - /// Initializes a new instance of the class with the specified address. - /// - public HttpIO(string baseAddress) + /// Initializes a new instance of the class with the specified address. + /// + public HttpStreamIO(string baseAddress) : this(new Uri(baseAddress)) { } - /// Initializes a new instance of the class with the specified address and timeout. + /// Initializes a new instance of the class with the specified address and timeout. /// The base address used when sending requests. /// The timespan to wait before a request times out. - public HttpIO(string baseAddress, TimeSpan timeout) + public HttpStreamIO(string baseAddress, TimeSpan timeout) : this(new Uri(baseAddress), timeout) { } - /// Initializes a new instance of the class with the specified client, and specifies whether that client should be disposed when this instance is disposed. + /// Initializes a new instance of the class with the specified client, and specifies whether that client should be disposed when this instance is disposed. /// The to use for requests. /// if the inner client should be disposed of by ; if you intend to reuse the client. 
- public HttpIO(HttpClient client, bool disposeClient = true) + public HttpStreamIO(HttpClient client, bool disposeClient = true) { _httpClient = client; _disposeClient = disposeClient; diff --git a/Chickensoft.SaveFileBuilder/src/IO/IIOStreamProvider.cs b/Chickensoft.SaveFileBuilder/src/IO/IStreamIO.cs similarity index 97% rename from Chickensoft.SaveFileBuilder/src/IO/IIOStreamProvider.cs rename to Chickensoft.SaveFileBuilder/src/IO/IStreamIO.cs index 99f8839..c4a23de 100644 --- a/Chickensoft.SaveFileBuilder/src/IO/IIOStreamProvider.cs +++ b/Chickensoft.SaveFileBuilder/src/IO/IStreamIO.cs @@ -5,7 +5,7 @@ namespace Chickensoft.SaveFileBuilder.IO; using System.Threading.Tasks; /// Provides a read- and write from an input / output source. -public interface IIOStreamProvider +public interface IStreamIO { /// Returns a read-only from the io source. /// A new read-only object from the io source. @@ -24,7 +24,7 @@ public interface IIOStreamProvider } /// Provides a read from- and requests a write for an input / output source asynchronously. -public interface IAsyncIOStreamProvider +public interface IAsyncStreamIO { /// Asynchronously reads the underlying data and returns a read-only from the io source. /// A cancellation token that can be used to cancel the asynchronous read operation. diff --git a/Chickensoft.SaveFileBuilder/src/SaveFile.cs b/Chickensoft.SaveFileBuilder/src/SaveFile.cs index ac081a7..7e40b66 100644 --- a/Chickensoft.SaveFileBuilder/src/SaveFile.cs +++ b/Chickensoft.SaveFileBuilder/src/SaveFile.cs @@ -74,13 +74,13 @@ public class SaveFile : ISaveFile where TData : class public bool CanSaveSynchronously => _io is not null && _serializer is not null; private static InvalidOperationException SynchronousOperationNotAllowedException() - => new($"Synchronous operation is not allowed because either the {nameof(IIOStreamProvider)} or the {nameof(IStreamSerializer)} of the {nameof(SaveFile<>)} is null."); + => new($"Synchronous operation is not allowed because either the {nameof(IStreamIO)} or the {nameof(IStreamSerializer)} of the {nameof(SaveFile<>)} is null."); - private readonly IIOStreamProvider? _io; - private readonly IAsyncIOStreamProvider? _asyncIO; + private readonly IStreamIO? _io; + private readonly IAsyncStreamIO? _asyncIO; private readonly IStreamSerializer? _serializer; private readonly IAsyncStreamSerializer? _asyncSerializer; - private readonly ICompressionStreamProvider? _compressor; + private readonly IStreamCompressor? _compressor; /// /// @@ -91,11 +91,11 @@ private static InvalidOperationException SynchronousOperationNotAllowedException /// Compressor which the save file uses to compress and decompress data. private SaveFile( ISaveChunk root, - IIOStreamProvider? io, - IAsyncIOStreamProvider? asyncIO, + IStreamIO? io, + IAsyncStreamIO? asyncIO, IStreamSerializer? serializer, IAsyncStreamSerializer? asyncSerializer, - ICompressionStreamProvider? compressor + IStreamCompressor? compressor ) { Root = root; @@ -106,40 +106,40 @@ private SaveFile( _compressor = compressor; } - /// + /// public SaveFile( ISaveChunk root, - IIOStreamProvider io, + IStreamIO io, IStreamSerializer serializer, - ICompressionStreamProvider? compressor = null - ) : this(root, io, io as IAsyncIOStreamProvider, serializer, serializer as IAsyncStreamSerializer, compressor) + IStreamCompressor? 
compressor = null + ) : this(root, io, io as IAsyncStreamIO, serializer, serializer as IAsyncStreamSerializer, compressor) { } - /// + /// public SaveFile( ISaveChunk root, - IIOStreamProvider io, + IStreamIO io, IAsyncStreamSerializer asyncSerializer, - ICompressionStreamProvider? compressor = null - ) : this(root, io, io as IAsyncIOStreamProvider, asyncSerializer as IStreamSerializer, asyncSerializer, compressor) + IStreamCompressor? compressor = null + ) : this(root, io, io as IAsyncStreamIO, asyncSerializer as IStreamSerializer, asyncSerializer, compressor) { } - /// + /// public SaveFile( ISaveChunk root, - IAsyncIOStreamProvider asyncIO, + IAsyncStreamIO asyncIO, IStreamSerializer serializer, - ICompressionStreamProvider? compressor = null - ) : this(root, asyncIO as IIOStreamProvider, asyncIO, serializer, serializer as IAsyncStreamSerializer, compressor) + IStreamCompressor? compressor = null + ) : this(root, asyncIO as IStreamIO, asyncIO, serializer, serializer as IAsyncStreamSerializer, compressor) { } - /// + /// public SaveFile( ISaveChunk root, - IAsyncIOStreamProvider asyncIO, + IAsyncStreamIO asyncIO, IAsyncStreamSerializer asyncSerializer, - ICompressionStreamProvider? compressor = null - ) : this(root, asyncIO as IIOStreamProvider, asyncIO, asyncSerializer as IStreamSerializer, asyncSerializer, compressor) + IStreamCompressor? compressor = null + ) : this(root, asyncIO as IStreamIO, asyncIO, asyncSerializer as IStreamSerializer, asyncSerializer, compressor) { } /// @@ -151,7 +151,7 @@ public void Save(CompressionLevel compressionLevel = default) } using var ioStream = _io!.Write(); - using var compressionStream = _compressor?.CompressionStream(ioStream, compressionLevel); + using var compressionStream = _compressor?.Compress(ioStream, compressionLevel); _serializer!.Serialize(compressionStream ?? ioStream, Root.GetSaveData()); } @@ -164,7 +164,7 @@ public void Load() } using var ioStream = _io!.Read(); - using var decompressionStream = _compressor?.DecompressionStream(ioStream); + using var decompressionStream = _compressor?.Decompress(ioStream); var data = _serializer!.Deserialize(decompressionStream ?? ioStream); if (data is null) { @@ -194,13 +194,13 @@ public async ValueTask SaveAsync(CompressionLevel compressionLevel = default, Ca if (_asyncIO is null) { await using var ioStream = _io!.Write(); - await using var compressionStream = _compressor?.CompressionStream(ioStream, compressionLevel); + await using var compressionStream = _compressor?.Compress(ioStream, compressionLevel); await serialize(compressionStream ?? ioStream); } else { await using var writeStream = new MemoryStream(); - await using (var compressionStream = _compressor?.CompressionStream(writeStream, compressionLevel, true)) + await using (var compressionStream = _compressor?.Compress(writeStream, compressionLevel, true)) { await serialize(compressionStream ?? writeStream); } @@ -229,7 +229,7 @@ public async ValueTask LoadAsync(CancellationToken cancellationToken = default) ? await _asyncIO.ReadAsync(cancellationToken) : _io!.Read(); - await using var decompressionStream = _compressor?.DecompressionStream(ioStream); + await using var decompressionStream = _compressor?.Decompress(ioStream); var data = _asyncSerializer is not null ? await _asyncSerializer.DeserializeAsync(decompressionStream ?? ioStream, cancellationToken) @@ -273,24 +273,72 @@ public static class SaveFile /// Creates a new that uses JSON serialization and GZip compression. 
public static SaveFile CreateGZipJsonFile(ISaveChunk root, string filePath, JsonSerializerOptions? options = null) where TData : class => new( root: root, - io: new FileIO(filePath), + io: new FileStreamIO(filePath), serializer: new JsonStreamSerializer(options), - compressor: new GZipCompression() + compressor: new GZipStreamCompressor() ); /// public static SaveFile CreateGZipJsonFile(ISaveChunk root, string filePath, JsonSerializerContext context) where TData : class => new( root: root, - io: new FileIO(filePath), + io: new FileStreamIO(filePath), serializer: new JsonStreamSerializer(context), - compressor: new GZipCompression() + compressor: new GZipStreamCompressor() ); /// public static SaveFile CreateGZipJsonFile(ISaveChunk root, string filePath, JsonTypeInfo jsonTypeInfo) where TData : class => new( root: root, - io: new FileIO(filePath), + io: new FileStreamIO(filePath), serializer: new JsonStreamSerializer(jsonTypeInfo), - compressor: new GZipCompression() + compressor: new GZipStreamCompressor() + ); + + /// Creates a new that uses the specified io, JSON serialization and GZip compression. + public static SaveFile CreateGZipJsonFile(ISaveChunk root, IStreamIO io, JsonSerializerOptions? options = null) where TData : class => new( + root: root, + io: io, + serializer: new JsonStreamSerializer(options), + compressor: new GZipStreamCompressor() + ); + + /// + public static SaveFile CreateGZipJsonFile(ISaveChunk root, IStreamIO io, JsonSerializerContext context) where TData : class => new( + root: root, + io: io, + serializer: new JsonStreamSerializer(context), + compressor: new GZipStreamCompressor() + ); + + /// + public static SaveFile CreateGZipJsonFile(ISaveChunk root, IStreamIO io, JsonTypeInfo jsonTypeInfo) where TData : class => new( + root: root, + io: io, + serializer: new JsonStreamSerializer(jsonTypeInfo), + compressor: new GZipStreamCompressor() + ); + + /// Creates a new that uses the specified io, JSON serialization and GZip compression. + public static SaveFile CreateGZipJsonFile(ISaveChunk root, IAsyncStreamIO asyncIO, JsonSerializerOptions? 
options = null) where TData : class => new( + root: root, + asyncIO: asyncIO, + serializer: new JsonStreamSerializer(options), + compressor: new GZipStreamCompressor() + ); + + /// + public static SaveFile CreateGZipJsonFile(ISaveChunk root, IAsyncStreamIO asyncIO, JsonSerializerContext context) where TData : class => new( + root: root, + asyncIO: asyncIO, + serializer: new JsonStreamSerializer(context), + compressor: new GZipStreamCompressor() + ); + + /// + public static SaveFile CreateGZipJsonFile(ISaveChunk root, IAsyncStreamIO asyncIO, JsonTypeInfo jsonTypeInfo) where TData : class => new( + root: root, + asyncIO: asyncIO, + serializer: new JsonStreamSerializer(jsonTypeInfo), + compressor: new GZipStreamCompressor() ); } From b0e3dfa338900ac35485b53de8d65f83fed102d2 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Thu, 1 Jan 2026 11:53:37 +0100 Subject: [PATCH 04/19] Update System.Text.Json package --- Chickensoft.SaveFileBuilder/Chickensoft.SaveFileBuilder.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Chickensoft.SaveFileBuilder/Chickensoft.SaveFileBuilder.csproj b/Chickensoft.SaveFileBuilder/Chickensoft.SaveFileBuilder.csproj index 7d9963c..a1e8a1c 100644 --- a/Chickensoft.SaveFileBuilder/Chickensoft.SaveFileBuilder.csproj +++ b/Chickensoft.SaveFileBuilder/Chickensoft.SaveFileBuilder.csproj @@ -48,6 +48,6 @@ runtime; build; native; contentfiles; analyzers; buildtransitive all - + From fcd410565994c9d099026767d9e1793b8b928c98 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Thu, 1 Jan 2026 12:01:58 +0100 Subject: [PATCH 05/19] Rewrite the documentation with a bigger focus on standalone applications This makes the documentation more generally applicable: SaveFile can be implemented in standard C# applications more easily, without needing to understand the rest of the Chickensoft Ecosystem. --- Chickensoft.SaveFileBuilder/godot-icon.png | 3 + README.md | 289 ++++++++++++++++----- 2 files changed, 228 insertions(+), 64 deletions(-) create mode 100644 Chickensoft.SaveFileBuilder/godot-icon.png diff --git a/Chickensoft.SaveFileBuilder/godot-icon.png b/Chickensoft.SaveFileBuilder/godot-icon.png new file mode 100644 index 0000000..27b9c64 --- /dev/null +++ b/Chickensoft.SaveFileBuilder/godot-icon.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:758bf83313bf2e634ee1b5de06c12e7cab609e944f14c6de200c6e7186dd9b46 +size 1242 diff --git a/README.md b/README.md index ed056c7..88a44d2 100644 --- a/README.md +++ b/README.md @@ -2,15 +2,13 @@ [![Chickensoft Badge][chickensoft-badge]][chickensoft-website] [![Discord][discord-badge]][discord] [![Read the docs][read-the-docs-badge]][docs] ![line coverage][line-coverage] ![branch coverage][branch-coverage] -Compose chunks of save data into a single data type by creating loosely coupled save chunks at various points in the scene tree. - ---- +Compose chunks of save data into a single data type by creating loosely coupled save chunks at various points in your application.

Chickensoft.SaveFileBuilder

-## 🥚 Getting Started +## 🥚 Installation Find the latest version of [`Chickensoft.SaveFileBuilder`][nuget] on nuget. @@ -18,9 +16,217 @@ Find the latest version of [`Chickensoft.SaveFileBuilder`][nuget] on nuget. dotnet add package Chickensoft.SaveFileBuilder ``` -## 📄 SaveFile and Root SaveChunk +## :hatching_chick: Quick Start + +```csharp +// Define your (serializable!) save data +public class UserData +{ + public string Name { get; set; } + public DateTime Birthday { get; set; } +} + +// Define your class responsible for saving and loading. +public class User +{ + public string Name { get; set; } + public DateTime Birthday { get; set; } + + public SaveFile SaveFile { get; } + public ISaveChunk SaveChunk { get; } + + public User() + { + // Define your saving and loading behavior at the start, and never again! + SaveChunk = new SaveChunk( + onSave: (chunk) => new UserData() + { + Name = Name, + Birthday = Birthday + }, + onLoad: (chunk, data) => + { + Name = data.Name; + Birthday = data.Birthday; + } + ); + + // Let SaveFile take care of the rest. + SaveFile = SaveFile.CreateGZipJsonFile(SaveChunk, "savefile.json.gz"); + } + + public ValueTask OnSave() => SaveFile.SaveAsync(); + public ValueTask OnLoad() => SaveFile.LoadAsync(); +} +``` + +> [!TIP] +> You can define easily serializable types with [Chickensoft.Serialization]. + +## 🍪 Save Chunks & Modularity + +SaveChunks are smaller pieces of save data that are composed together into the overall save file. + +```csharp +// User data contains preferences data separately. +public class UserData +{ + public string Name { get; set; } + public DateTime Birthday { get; set; } + public PreferencesData Preferences { get; set; } +} + +// This allows us to keep our save data and logic modular. +public class PreferencesData +{ + public bool IsDarkMode { get; set; } + public string Language { get; set; } +} +``` + +This modularity allows us to separate concerns when saving and loading data. The `User` class is only concerned with user data, while the `UserPreferences` class is only concerned with preferences data. + +We can link our save chunks together using: +- `GetChunkSaveData` to retrieve child chunk data during save. +- `LoadChunkSaveData` to load child chunk data during load. +- `AddChunk` to compose our save data. + +```csharp +// Handle user logic. +public class User +{ + public string Name { get; set; } + public DateTime Birthday { get; set; } + + public ISaveChunk SaveChunk { get; } + + public User() + { + // Define our user chunk with a nested preferences chunk. + SaveChunk = new SaveChunk( + onSave: (chunk) => new UserData() + { + Name = Name, + Birthday = Birthday, + Preferences = chunk.GetChunkSaveData() + }, + onLoad: (chunk, data) => + { + Name = data.Name; + Birthday = data.Birthday; + chunk.LoadChunkSaveData(data.Preferences); + } + ); + } +} + +// Handle preferences logic. +public class UserPreferences +{ + public bool IsDarkMode { get; set; } + public string Language { get; set; } + + public ISaveChunk SaveChunk { get; } + + public UserPreferences(User user) + { + // Define our preferences chunk. + SaveChunk = new SaveChunk( + onSave: (chunk) => new PreferencesData() + { + IsDarkMode = IsDarkMode, + Language = Language + }, + onLoad: (chunk, data) => + { + IsDarkMode = data.IsDarkMode; + Language = data.Language; + } + ); + + // Add our preferences chunk as a child of the user chunk.
+ user.SaveChunk.AddChunk(SaveChunk); + } +} +``` + +## :floppy_disk: SaveFile & Flexibility + +> [!TIP] +> If you just want to save some data to a file, call the following: `SaveFile.CreateGZipJsonFile(Root, "savefile.json.gz");` + +Saving a file involves 2 to 3 steps: +- input / output (io) +- (preferably) compression +- serialization + +SaveFile handles these steps for you, and optimally at that! By using [Streams] under the hood, SaveFile can efficiently save and load data without unnecessary memory allocations. + +But the :zap: REAL POWER :zap: of SaveFile comes from its flexibility. You can define your own IO providers, compression algorithms, and serialization formats by implementing the relevant interfaces: +- IStreamIO / IAsyncStreamIO for io +- IStreamCompressor for compression +- IStreamSerializer / IAsyncStreamSerializer for serialization + +```csharp +public class AzureStreamIO : IAsyncIOStreamProvider +{ + public Stream ReadAsync() => //... + public void WriteAsync(Stream stream) => //... + public bool ExistsAsync() => //... + public bool DeleteAsync() => //... +} + +public class SnappyStreamCompressor : IStreamCompressor +{ + public Stream Compress(Stream stream, CompressionLevel compressionLevel, bool leaveOpen) => //... + public Stream Decompress(Stream stream, bool leaveOpen) => //... +} + +public class YamlStreamSerializer : IStreamSerializer +{ + public void Serialize(Stream stream, object? value, Type inputType) => //... + public object? Deserialize(Stream stream, Type returnType) => //... +} +``` + +You can then provide them to your SaveFile and mix- and match them with existing types. + +```csharp +public class App +{ + SaveFile AzureSaveFile { get; set; } + SaveFile LocalSaveFile { get; set; } + + public void Save() + { + // Define a SaveChunk AzureChunk + // Define a SaveChunk LocalChunk + + AzureSaveFile = new + ( + root: AzureChunk, + asyncIO: new AzureStreamIO(), + serializer: new JsonStreamSerializer(), + compressor: new SnappyStreamCompressor() + ); + + LocalSaveFile = new + ( + root: LocalChunk, + io: new FileStreamIO(), + serializer: new YamlStreamSerializer(), + compressor: new BrotliStreamCompressor() + ); + } +} +``` + +> [!NOTE] +> If you write your own implementations of these interfaces, consider contributing them back to the Chickensoft community by opening a PR! + +## Usage in Godot -Find the highest node in your scene tree that needs to be concerned with save data to use as the root of your save file. Use [AutoInject] to provide the root save chunk to all its descendant nodes. +Using [Introspection] and [AutoInject], you can link chunks together in Godot by providing- and accessing dependencies in your scene tree. Mark the relevant nodes as `IAutoNode`'s, provide dependencies from parent nodes, and access them in child nodes. > [!TIP] > Check out the Chickensoft [Game Demo] for a complete, working example of using SaveFileBuilder to save composed states of everything that needs to be persisted in a game. @@ -31,6 +237,7 @@ using Chickensoft.AutoInject; using Chickensoft.SaveFileBuilder; using Godot; +// Game is the root node in the scene. It provides the dependency to descendant nodes. [Meta(typeof(IAutoNode))] public partial class Game : Node3D { @@ -41,58 +248,12 @@ public partial class Game : Node3D public void Setup() { - SaveFile = new SaveFile( - root: new SaveChunk( - onSave: (chunk) => { - // Use root chunk to get child chunks that were added to us - // lower in the scene tree. 
- var gameData = new GameData() - { - MapData = chunk.GetChunkSaveData(), - PlayerData = chunk.GetChunkSaveData(), - PlayerCameraData = chunk.GetChunkSaveData() - }; - - return gameData; - }, - onLoad: (chunk, data) => - { - // Break up the game data and send it to the child chunks so that - // they can load the data into the nodes they belong to. - chunk.LoadChunkSaveData(data.MapData); - chunk.LoadChunkSaveData(data.PlayerData); - chunk.LoadChunkSaveData(data.PlayerCameraData); - } - ), - onSave: async (GameData data) => - { - // Save the game data to disk. - var json = JsonSerializer.Serialize(data, JsonOptions); - await FileSystem.File.WriteAllTextAsync(SaveFilePath, json); - }, - onLoad: async () => - { - // Load the game data from disk. - if (!FileSystem.File.Exists(SaveFilePath)) { - GD.Print("No save file to load :'("); - return null; - } - - var json = await FileSystem.File.ReadAllTextAsync(SaveFilePath); - return JsonSerializer.Deserialize(json, JsonOptions); - } - ); - - ... + var root = new SaveChunk(onSave: ..., onLoad: ...); + SaveFile = SaveFile.CreateGZipJsonFile(root, SaveFilePath, JsonOptions); } } -``` - -## 🍪 Defining Save Chunks -SaveChunks are smaller pieces of save data that are composed together into the overall save file's data. Simply add a chunk to a descendant node of the scene with the root SaveChunk and register it with the root save chunk once you've resolved dependencies with AutoInject. - -```csharp +// Player is a child node of the Game node. It accesses the dependency provided by the Game class. [Meta(typeof(IAutoNode))] public partial class Player : CharacterBody3D { @@ -100,15 +261,18 @@ public partial class Player : CharacterBody3D public ISaveChunk GameChunk => this.DependOn>(); public ISaveChunk PlayerChunk { get; set; } = default!; + // Player uses a StateMachine, or LogicBlock, to handle its state. + public IPlayerLogic PlayerLogic { get; set; } = default!; + public void Setup() { - ... + PlayerLogic = new PlayerLogic(); PlayerChunk = new SaveChunk( onSave: (chunk) => new PlayerData() { GlobalTransform = GlobalTransform, - StateMachine = (PlayerLogic)PlayerLogic, + StateMachine = PlayerLogic, Velocity = Velocity }, onLoad: (chunk, data) => @@ -119,8 +283,6 @@ public partial class Player : CharacterBody3D PlayerLogic.Start(); } ); - - ... } public void OnResolved() @@ -128,16 +290,12 @@ public partial class Player : CharacterBody3D // Add a child to our parent save chunk (the game chunk) so that it can // look up the player chunk when loading and saving the game. GameChunk.AddChunk(PlayerChunk); - - ... } } ``` -Once a save chunk has been added to a parent save chunk, the parent save chunk can access it from the callbacks specified by `onSave` and `onLoad`, querying its data or forcing it load data into its node. - > [!TIP] -> You can define easily serializable types, as well as serialize entire [LogicBlocks] with [Chickensoft.Serialization]. +> You can easily serialize entire [LogicBlocks] with [Chickensoft.Serialization]. 
--- @@ -152,8 +310,11 @@ Once a save chunk has been added to a parent save chunk, the parent save chunk c [line-coverage]: Chickensoft.SaveFileBuilder.Tests/badges/line_coverage.svg [branch-coverage]: Chickensoft.SaveFileBuilder.Tests/badges/branch_coverage.svg +[Introspection]: https://github.com/chickensoft-games/Introspection [AutoInject]: https://github.com/chickensoft-games/AutoInject [Game Demo]: https://github.com/chickensoft-games/GameDemo [LogicBlocks]: https://github.com/chickensoft-games/LogicBlocks [Chickensoft.Serialization]: https://github.com/chickensoft-games/Serialization [nuget]: https://www.nuget.org/packages/Chickensoft.SaveFileBuilder + +[Streams]: https://learn.microsoft.com/en-us/dotnet/api/system.io.stream From 7be1508cbb82a911ea282752e12cbc78394fb6d8 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Thu, 1 Jan 2026 12:05:52 +0100 Subject: [PATCH 06/19] Change the position of a Tip in the documentation --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 88a44d2..47edf0f 100644 --- a/README.md +++ b/README.md @@ -228,9 +228,6 @@ public class App Using [Introspection] and [AutoInject], you can link chunks together in Godot by providing- and accessing dependencies in your scene tree. Mark the relevant nodes as `IAutoNode`'s, provide dependencies from parent nodes, and access them in child nodes. -> [!TIP] -> Check out the Chickensoft [Game Demo] for a complete, working example of using SaveFileBuilder to save composed states of everything that needs to be persisted in a game. - ```csharp using Chickensoft.Introspection; using Chickensoft.AutoInject; @@ -297,6 +294,9 @@ public partial class Player : CharacterBody3D > [!TIP] > You can easily serialize entire [LogicBlocks] with [Chickensoft.Serialization]. +> [!TIP] +> Check out the Chickensoft [Game Demo] for a complete, working example of using SaveFileBuilder to save composed states of everything that needs to be persisted in a game. + --- 🐣 Package generated from a 🐤 Chickensoft Template — From 8066b499f7221bafc397166e5d6f3e183edad159 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Thu, 1 Jan 2026 12:08:06 +0100 Subject: [PATCH 07/19] Fix a mistake in the documentation --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 47edf0f..22a19ed 100644 --- a/README.md +++ b/README.md @@ -168,7 +168,7 @@ But the :zap: REAL POWER :zap: of SaveFile comes from its flexibility. You can d -public class AzureStreamIO : IAsyncIOStreamProvider +public class AzureStreamIO : IAsyncStreamIO { public Stream ReadAsync() => //... public void WriteAsync(Stream stream) => //... From 1e6a43441939aaf4854cc4452b0519ece775dd64 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Thu, 1 Jan 2026 12:14:21 +0100 Subject: [PATCH 08/19] Change order of saving steps in documentation In reality, the order of saving goes io, compression, serialization. But from a user-perspective, the order of priority goes io, serialization, compression. In order to make the documentation come across a little more naturally, the order of priority is applied in the documentation.
--- README.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 22a19ed..ceaf33f 100644 --- a/README.md +++ b/README.md @@ -157,15 +157,15 @@ public class UserPreferences Saving a file involves 2 to 3 steps: - input / output (io) -- (preferably) compression - serialization +- (preferably) compression SaveFile handles these steps for you, and optimally at that! By using [Streams] under the hood, SaveFile can efficiently save and load data without unnecessary memory allocations. But the :zap: REAL POWER :zap: of SaveFile comes from its flexibility. You can define your own IO providers, compression algorithms, and serialization formats by implementing the relevant interfaces: - IStreamIO / IAsyncStreamIO for io -- IStreamCompressor for compression - IStreamSerializer / IAsyncStreamSerializer for serialization +- IStreamCompressor for compression ```csharp public class AzureStreamIO : IAsyncStreamIO @@ -176,17 +176,17 @@ public class AzureStreamIO : IAsyncStreamIO public bool DeleteAsync() => //... } -public class SnappyStreamCompressor : IStreamCompressor -{ - public Stream Compress(Stream stream, CompressionLevel compressionLevel, bool leaveOpen) => //... - public Stream Decompress(Stream stream, bool leaveOpen) => //... -} - public class YamlStreamSerializer : IStreamSerializer { public void Serialize(Stream stream, object? value, Type inputType) => //... public object? Deserialize(Stream stream, Type returnType) => //... } + +public class SnappyStreamCompressor : IStreamCompressor +{ + public Stream Compress(Stream stream, CompressionLevel compressionLevel, bool leaveOpen) => //... + public Stream Decompress(Stream stream, bool leaveOpen) => //... +} ``` You can then provide them to your SaveFile and mix- and match them with existing types. From a12ec1da4791e264ce4a9b5e8edcdfbdb8da78d3 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Fri, 2 Jan 2026 13:47:20 +0100 Subject: [PATCH 09/19] Fix spelling In multiple places, "compressor" was written as "compresser". 
--- .../test/src/SaveFileAsyncTest.cs | 18 +++++++++--------- .../test/src/SaveFileTest.cs | 18 +++++++++--------- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs index faf1b0b..da96826 100644 --- a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs @@ -11,7 +11,7 @@ public class SaveFileAsyncTest public Mock MockAsyncIO { get; set; } public Mock MockAsyncSerializer { get; set; } - public Mock MockCompresser { get; set; } + public Mock MockCompressor { get; set; } public Mock> MockChunk { get; set; } @@ -23,11 +23,11 @@ public SaveFileAsyncTest(ITestContextAccessor testContextAccessor) MockAsyncIO = new Mock(); MockAsyncSerializer = new Mock(); - MockCompresser = new Mock(); + MockCompressor = new Mock(); MockChunk = new Mock>(); - SaveFile = new SaveFile(MockChunk.Object, MockAsyncIO.Object, MockAsyncSerializer.Object, MockCompresser.Object); + SaveFile = new SaveFile(MockChunk.Object, MockAsyncIO.Object, MockAsyncSerializer.Object, MockCompressor.Object); } [Fact] @@ -53,7 +53,7 @@ public async Task SaveAsync_WritesCompressesAndSerializes() var compressionStream = new MemoryStream(); MockChunk.Setup(chunk => chunk.GetSaveData()).Returns("test").Verifiable(); - MockCompresser.Setup(compresser => compresser.Compress(It.IsAny(), It.IsAny(), true)).Callback((stream, _, _) => ioStream = (MemoryStream)stream).Returns(compressionStream).Verifiable(); + MockCompressor.Setup(compressor => compressor.Compress(It.IsAny(), It.IsAny(), true)).Callback((stream, _, _) => ioStream = (MemoryStream)stream).Returns(compressionStream).Verifiable(); MockAsyncSerializer.Setup(serializer => serializer.SerializeAsync(compressionStream, "test", typeof(string), CancellationToken)).Verifiable(); MockAsyncIO.Setup(io => io.WriteAsync(It.Is(stream => ioStream == stream), CancellationToken)).Verifiable(); @@ -62,7 +62,7 @@ public async Task SaveAsync_WritesCompressesAndSerializes() // Assert MockChunk.Verify(); - MockCompresser.Verify(); + MockCompressor.Verify(); MockAsyncSerializer.Verify(); MockAsyncIO.Verify(); } @@ -91,13 +91,13 @@ public async Task SaveAsync_CompressorIsNull_WritesAndSerializesWithoutCompressi public async Task SaveAsync_CompressionLevel_UsedByCompressor() { // Arrange - MockCompresser.Setup(compressor => compressor.Compress(It.IsAny(), CompressionLevel.Fastest, true)).Verifiable(); + MockCompressor.Setup(compressor => compressor.Compress(It.IsAny(), CompressionLevel.Fastest, true)).Verifiable(); // Act await SaveFile.SaveAsync(CompressionLevel.Fastest, CancellationToken); // Assert - MockCompresser.Verify(); + MockCompressor.Verify(); } [Fact] @@ -108,7 +108,7 @@ public async Task LoadAsync_ReadsDecompressesAndDeserializes() var decompressionStream = new MemoryStream(); MockAsyncIO.Setup(io => io.ReadAsync(CancellationToken)).ReturnsAsync(ioStream).Verifiable(); - MockCompresser.Setup(compresser => compresser.Decompress(ioStream)).Returns(decompressionStream).Verifiable(); + MockCompressor.Setup(compressor => compressor.Decompress(ioStream)).Returns(decompressionStream).Verifiable(); MockAsyncSerializer.Setup(serializer => serializer.DeserializeAsync(decompressionStream, typeof(string), CancellationToken)).ReturnsAsync("test").Verifiable(); MockChunk.Setup(chunk => chunk.LoadSaveData("test")).Verifiable(); @@ -117,7 +117,7 @@ public async Task 
LoadAsync_ReadsDecompressesAndDeserializes() // Assert MockAsyncIO.Verify(); - MockCompresser.Verify(); + MockCompressor.Verify(); MockAsyncSerializer.Verify(); MockChunk.Verify(); } diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs index c9b7ea2..61bfa08 100644 --- a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs @@ -9,7 +9,7 @@ public class SaveFileTest { public Mock MockIO { get; set; } public Mock MockSerializer { get; set; } - public Mock MockCompresser { get; set; } + public Mock MockCompressor { get; set; } public Mock> MockChunk { get; set; } @@ -19,11 +19,11 @@ public SaveFileTest() { MockIO = new Mock(); MockSerializer = new Mock(); - MockCompresser = new Mock(); + MockCompressor = new Mock(); MockChunk = new Mock>(); - SaveFile = new SaveFile(MockChunk.Object, MockIO.Object, MockSerializer.Object, MockCompresser.Object); + SaveFile = new SaveFile(MockChunk.Object, MockIO.Object, MockSerializer.Object, MockCompressor.Object); } [Fact] @@ -38,7 +38,7 @@ public void Save_WritesCompressesAndSerializes() MockChunk.Setup(chunk => chunk.GetSaveData()).Returns("test").Verifiable(); MockIO.Setup(io => io.Write()).Returns(io).Verifiable(); - MockCompresser.Setup(compresser => compresser.Compress(io)).Returns(compressionStream).Verifiable(); + MockCompressor.Setup(compressor => compressor.Compress(io)).Returns(compressionStream).Verifiable(); MockSerializer.Setup(serializer => serializer.Serialize(compressionStream, "test", typeof(string))).Verifiable(); // Act @@ -47,7 +47,7 @@ public void Save_WritesCompressesAndSerializes() // Assert MockChunk.Verify(); MockIO.Verify(); - MockCompresser.Verify(); + MockCompressor.Verify(); MockSerializer.Verify(); } @@ -75,13 +75,13 @@ public void Save_CompressorIsNull_WritesAndSerializesWithoutCompressing() public void Save_CompressionLevel_UsedByCompressor() { // Arrange - MockCompresser.Setup(compressor => compressor.Compress(It.IsAny(), CompressionLevel.Fastest)).Verifiable(); + MockCompressor.Setup(compressor => compressor.Compress(It.IsAny(), CompressionLevel.Fastest)).Verifiable(); // Act SaveFile.Save(CompressionLevel.Fastest); // Assert - MockCompresser.Verify(); + MockCompressor.Verify(); } [Fact] @@ -92,7 +92,7 @@ public void Load_ReadsDecompressesAndDeserializes() var compressionStream = new MemoryStream(); MockIO.Setup(io => io.Read()).Returns(ioStream).Verifiable(); - MockCompresser.Setup(compresser => compresser.Decompress(ioStream)).Returns(compressionStream).Verifiable(); + MockCompressor.Setup(compressor => compressor.Decompress(ioStream)).Returns(compressionStream).Verifiable(); MockSerializer.Setup(serializer => serializer.Deserialize(compressionStream, typeof(string))).Returns("test").Verifiable(); MockChunk.Setup(chunk => chunk.LoadSaveData("test")).Verifiable(); @@ -101,7 +101,7 @@ public void Load_ReadsDecompressesAndDeserializes() // Assert MockIO.Verify(); - MockCompresser.Verify(); + MockCompressor.Verify(); MockSerializer.Verify(); MockChunk.Verify(); } From cc1b38a886a7e374dd0debd0e4fdfca2baac8877 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Sat, 3 Jan 2026 10:20:20 +0100 Subject: [PATCH 10/19] Regress syntax to C# 13 The ci pipeline does not support C# 14 out of the box. We could look into fixing this, but it might be fixed by itself in the future. For now, these small syntax changes are much easier to rely upon. 
--- .../test/src/SaveFileAsyncTest.cs | 2 +- Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs | 6 +++--- Chickensoft.SaveFileBuilder/src/SaveFile.cs | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs index da96826..967a6aa 100644 --- a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs @@ -108,7 +108,7 @@ public async Task LoadAsync_ReadsDecompressesAndDeserializes() var decompressionStream = new MemoryStream(); MockAsyncIO.Setup(io => io.ReadAsync(CancellationToken)).ReturnsAsync(ioStream).Verifiable(); - MockCompressor.Setup(compressor => compressor.Decompress(ioStream)).Returns(decompressionStream).Verifiable(); + MockCompressor.Setup(compressor => compressor.Decompress(ioStream, false)).Returns(decompressionStream).Verifiable(); MockAsyncSerializer.Setup(serializer => serializer.DeserializeAsync(decompressionStream, typeof(string), CancellationToken)).ReturnsAsync("test").Verifiable(); MockChunk.Setup(chunk => chunk.LoadSaveData("test")).Verifiable(); diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs index 61bfa08..8688bea 100644 --- a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs @@ -38,7 +38,7 @@ public void Save_WritesCompressesAndSerializes() MockChunk.Setup(chunk => chunk.GetSaveData()).Returns("test").Verifiable(); MockIO.Setup(io => io.Write()).Returns(io).Verifiable(); - MockCompressor.Setup(compressor => compressor.Compress(io)).Returns(compressionStream).Verifiable(); + MockCompressor.Setup(compressor => compressor.Compress(io, default, false)).Returns(compressionStream).Verifiable(); MockSerializer.Setup(serializer => serializer.Serialize(compressionStream, "test", typeof(string))).Verifiable(); // Act @@ -75,7 +75,7 @@ public void Save_CompressorIsNull_WritesAndSerializesWithoutCompressing() public void Save_CompressionLevel_UsedByCompressor() { // Arrange - MockCompressor.Setup(compressor => compressor.Compress(It.IsAny(), CompressionLevel.Fastest)).Verifiable(); + MockCompressor.Setup(compressor => compressor.Compress(It.IsAny(), CompressionLevel.Fastest, false)).Verifiable(); // Act SaveFile.Save(CompressionLevel.Fastest); @@ -92,7 +92,7 @@ public void Load_ReadsDecompressesAndDeserializes() var compressionStream = new MemoryStream(); MockIO.Setup(io => io.Read()).Returns(ioStream).Verifiable(); - MockCompressor.Setup(compressor => compressor.Decompress(ioStream)).Returns(compressionStream).Verifiable(); + MockCompressor.Setup(compressor => compressor.Decompress(ioStream, false)).Returns(compressionStream).Verifiable(); MockSerializer.Setup(serializer => serializer.Deserialize(compressionStream, typeof(string))).Returns("test").Verifiable(); MockChunk.Setup(chunk => chunk.LoadSaveData("test")).Verifiable(); diff --git a/Chickensoft.SaveFileBuilder/src/SaveFile.cs b/Chickensoft.SaveFileBuilder/src/SaveFile.cs index 7e40b66..c9d1802 100644 --- a/Chickensoft.SaveFileBuilder/src/SaveFile.cs +++ b/Chickensoft.SaveFileBuilder/src/SaveFile.cs @@ -74,7 +74,7 @@ public class SaveFile : ISaveFile where TData : class public bool CanSaveSynchronously => _io is not null && _serializer is not null; private static InvalidOperationException SynchronousOperationNotAllowedException() - => 
new($"Synchronous operation is not allowed because either the {nameof(IStreamIO)} or the {nameof(IStreamSerializer)} of the {nameof(SaveFile<>)} is null."); + => new($"Synchronous operation is not allowed because either the {nameof(IStreamIO)} or the {nameof(IStreamSerializer)} of the {nameof(SaveFile)} is null."); private readonly IStreamIO? _io; private readonly IAsyncStreamIO? _asyncIO; From cbd655b00c26569cce0828ffd6c1fda6a3b5d489 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Mon, 5 Jan 2026 09:10:48 +0100 Subject: [PATCH 11/19] Silly little "not a word"-mistake That should be the last one. --- Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs b/Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs index 6223658..b5e2c7e 100644 --- a/Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs +++ b/Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs @@ -103,7 +103,7 @@ public HttpStreamIO(string baseAddress) /// Initializes a new instance of the class with the specified address and timeout. /// The base address used when sending requests. - /// The timespan to wait before a request times out. + /// The time to wait before a request times out. public HttpStreamIO(string baseAddress, TimeSpan timeout) : this(new Uri(baseAddress), timeout) { } From 915a8b6c303ed249b69e3281500e460f403c71e6 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Mon, 5 Jan 2026 09:12:23 +0100 Subject: [PATCH 12/19] Rename CreateGZipJsonFile to CreateGZipJsonIO where applicable This is more true to the nature of these methods. --- Chickensoft.SaveFileBuilder/src/SaveFile.cs | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/Chickensoft.SaveFileBuilder/src/SaveFile.cs b/Chickensoft.SaveFileBuilder/src/SaveFile.cs index c9d1802..94dca23 100644 --- a/Chickensoft.SaveFileBuilder/src/SaveFile.cs +++ b/Chickensoft.SaveFileBuilder/src/SaveFile.cs @@ -295,23 +295,23 @@ public static class SaveFile ); /// Creates a new that uses the specified io, JSON serialization and GZip compression. - public static SaveFile CreateGZipJsonFile(ISaveChunk root, IStreamIO io, JsonSerializerOptions? options = null) where TData : class => new( + public static SaveFile CreateGZipJsonIO(ISaveChunk root, IStreamIO io, JsonSerializerOptions? options = null) where TData : class => new( root: root, io: io, serializer: new JsonStreamSerializer(options), compressor: new GZipStreamCompressor() ); - /// - public static SaveFile CreateGZipJsonFile(ISaveChunk root, IStreamIO io, JsonSerializerContext context) where TData : class => new( + /// + public static SaveFile CreateGZipJsonIO(ISaveChunk root, IStreamIO io, JsonSerializerContext context) where TData : class => new( root: root, io: io, serializer: new JsonStreamSerializer(context), compressor: new GZipStreamCompressor() ); - /// - public static SaveFile CreateGZipJsonFile(ISaveChunk root, IStreamIO io, JsonTypeInfo jsonTypeInfo) where TData : class => new( + /// + public static SaveFile CreateGZipJsonIO(ISaveChunk root, IStreamIO io, JsonTypeInfo jsonTypeInfo) where TData : class => new( root: root, io: io, serializer: new JsonStreamSerializer(jsonTypeInfo), @@ -319,23 +319,23 @@ public static class SaveFile ); /// Creates a new that uses the specified io, JSON serialization and GZip compression. - public static SaveFile CreateGZipJsonFile(ISaveChunk root, IAsyncStreamIO asyncIO, JsonSerializerOptions? 
options = null) where TData : class => new( + public static SaveFile CreateGZipJsonIO(ISaveChunk root, IAsyncStreamIO asyncIO, JsonSerializerOptions? options = null) where TData : class => new( root: root, asyncIO: asyncIO, serializer: new JsonStreamSerializer(options), compressor: new GZipStreamCompressor() ); - /// - public static SaveFile CreateGZipJsonFile(ISaveChunk root, IAsyncStreamIO asyncIO, JsonSerializerContext context) where TData : class => new( + /// + public static SaveFile CreateGZipJsonIO(ISaveChunk root, IAsyncStreamIO asyncIO, JsonSerializerContext context) where TData : class => new( root: root, asyncIO: asyncIO, serializer: new JsonStreamSerializer(context), compressor: new GZipStreamCompressor() ); - /// - public static SaveFile CreateGZipJsonFile(ISaveChunk root, IAsyncStreamIO asyncIO, JsonTypeInfo jsonTypeInfo) where TData : class => new( + /// + public static SaveFile CreateGZipJsonIO(ISaveChunk root, IAsyncStreamIO asyncIO, JsonTypeInfo jsonTypeInfo) where TData : class => new( root: root, asyncIO: asyncIO, serializer: new JsonStreamSerializer(jsonTypeInfo), compressor: new GZipStreamCompressor() ); From f1b862de9f9d33fa07235d0cc35aae8c0c39cf58 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Tue, 6 Jan 2026 23:45:50 +0100 Subject: [PATCH 13/19] Swap godot-testing for dotnet-testing Didn't think about changing the workflow before: mistakes creep in. --- .github/workflows/tests.yaml | 20 ++------------------ 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index f5c59a7..a618e4b 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -40,25 +40,9 @@ jobs: - name: 📦 Restore Dependencies run: dotnet restore - - name: 🤖 Setup Godot - uses: chickensoft-games/setup-godot@v2 - with: - # Version must include major, minor, and patch, and be >= 4.0.0 - # Pre-release label is optional. - # - # In this case, we are using the version from global.json. - # - # This allows checks on renovatebot PR's to succeed whenever - # renovatebot updates the Godot SDK version. - version: global.json - - - name: 🧑‍🔬 Generate .NET Bindings - working-directory: Chickensoft.SaveFileBuilder.Tests - run: godot --headless --build-solutions --quit || exit 0 - - name: 🦺 Build Projects - run: dotnet build + run: dotnet build --no-restore - name: 🧪 Run Tests working-directory: Chickensoft.SaveFileBuilder.Tests - run: godot --headless --run-tests --quit-on-finish + run: dotnet test --no-build --verbosity normal From 282f31092600430a8cae3de1a2a400ba9a951096 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Sat, 10 Jan 2026 14:22:34 +0100 Subject: [PATCH 14/19] Fix indentation for 2 HttpStreamIO constructors --- Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs b/Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs index b5e2c7e..50aac90 100644 --- a/Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs +++ b/Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs @@ -72,10 +72,10 @@ public HttpStreamIO() /// Initializes a new instance of the class with the specified timeout. /// public HttpStreamIO(TimeSpan timeout) - : this(new HttpClient() - { - Timeout = timeout - }) + : this(new HttpClient() + { + Timeout = timeout + }) { } /// @@ -98,7 +98,7 @@ public HttpStreamIO(Uri baseAddress, TimeSpan timeout) /// Initializes a new instance of the class with the specified address.
/// public HttpStreamIO(string baseAddress) - : this(new Uri(baseAddress)) + : this(new Uri(baseAddress)) { } /// Initializes a new instance of the class with the specified address and timeout. From 7c10c572b6dc03a193cda75431f86e3ba714f58c Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Sat, 10 Jan 2026 14:23:30 +0100 Subject: [PATCH 15/19] Add tests for Compression, IO and Serialization implementations --- .../Compression/BrotliStreamCompressorTest.cs | 282 +++++++++ .../DeflateStreamCompressorTest.cs | 282 +++++++++ .../Compression/GZipStreamCompressorTest.cs | 282 +++++++++ .../test/src/IO/FileStreamIOTest.cs | 245 ++++++++ .../test/src/IO/HttpStreamIOTest.cs | 552 ++++++++++++++++ .../Serialization/JsonStreamSerializerTest.cs | 588 ++++++++++++++++++ 6 files changed, 2231 insertions(+) create mode 100644 Chickensoft.SaveFileBuilder.Tests/test/src/Compression/BrotliStreamCompressorTest.cs create mode 100644 Chickensoft.SaveFileBuilder.Tests/test/src/Compression/DeflateStreamCompressorTest.cs create mode 100644 Chickensoft.SaveFileBuilder.Tests/test/src/Compression/GZipStreamCompressorTest.cs create mode 100644 Chickensoft.SaveFileBuilder.Tests/test/src/IO/FileStreamIOTest.cs create mode 100644 Chickensoft.SaveFileBuilder.Tests/test/src/IO/HttpStreamIOTest.cs create mode 100644 Chickensoft.SaveFileBuilder.Tests/test/src/Serialization/JsonStreamSerializerTest.cs diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/Compression/BrotliStreamCompressorTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/Compression/BrotliStreamCompressorTest.cs new file mode 100644 index 0000000..53db37c --- /dev/null +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/Compression/BrotliStreamCompressorTest.cs @@ -0,0 +1,282 @@ +namespace Chickensoft.SaveFileBuilder.Tests.Compression; + +using System.IO.Compression; +using System.Text; +using Chickensoft.SaveFileBuilder.Compression; + +public class BrotliStreamCompressorTest +{ + private readonly BrotliStreamCompressor _compressor; + + public BrotliStreamCompressorTest() + { + _compressor = new BrotliStreamCompressor(); + } + + [Fact] + public void Compress_WithDefaultParameters_ReturnsBrotliStream() + { + using var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream); + + Assert.NotNull(compressedStream); + Assert.IsType(compressedStream); + Assert.True(compressedStream.CanWrite); + } + + [Fact] + public void Compress_WithOptimalCompression_ReturnsBrotliStream() + { + using var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, CompressionLevel.Optimal); + + Assert.NotNull(compressedStream); + Assert.IsType(compressedStream); + } + + [Fact] + public void Compress_WithFastestCompression_ReturnsBrotliStream() + { + using var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, CompressionLevel.Fastest); + + Assert.NotNull(compressedStream); + Assert.IsType(compressedStream); + } + + [Fact] + public void Compress_WithSmallestSizeCompression_ReturnsBrotliStream() + { + using var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, CompressionLevel.SmallestSize); + + Assert.NotNull(compressedStream); + Assert.IsType(compressedStream); + } + + [Fact] + public void Compress_WithNoCompression_ReturnsBrotliStream() + { + using var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, CompressionLevel.NoCompression); + + Assert.NotNull(compressedStream); + 
Assert.IsType(compressedStream); + } + + [Fact] + public void Compress_WithLeaveOpenTrue_KeepsBaseStreamOpen() + { + var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, leaveOpen: true); + + compressedStream.Dispose(); + + // BaseStream should still be accessible if leaveOpen was true + Assert.True(baseStream.CanRead); + baseStream.Dispose(); + } + + [Fact] + public void Compress_WithLeaveOpenFalse_ClosesBaseStream() + { + var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, leaveOpen: false); + + compressedStream.Dispose(); + + // BaseStream should be closed if leaveOpen was false + Assert.False(baseStream.CanRead); + } + + [Fact] + public void Compress_WithNullStream_ThrowsArgumentNullException() + { + var exception = Record.Exception(() => _compressor.Compress(null!)); + + Assert.NotNull(exception); + Assert.IsType(exception); + } + + [Fact] + public void Compress_WithNonWritableStream_ThrowsArgumentException() + { + using var readOnlyStream = new MemoryStream(new byte[10], writable: false); + var exception = Record.Exception(() => _compressor.Compress(readOnlyStream)); + + Assert.NotNull(exception); + Assert.IsType(exception); + } + + [Fact] + public void Decompress_WithValidStream_ReturnsBrotliStream() + { + // Create a compressed stream first + using var compressedData = new MemoryStream(); + using (var brotliStream = new BrotliStream(compressedData, CompressionMode.Compress, leaveOpen: true)) + { + var data = Encoding.UTF8.GetBytes("test data"); + brotliStream.Write(data, 0, data.Length); + } + compressedData.Position = 0; + + var decompressedStream = _compressor.Decompress(compressedData); + + Assert.NotNull(decompressedStream); + Assert.IsType(decompressedStream); + Assert.True(decompressedStream.CanRead); + } + + [Fact] + public void Decompress_WithLeaveOpenTrue_KeepsBaseStreamOpen() + { + // Create a compressed stream first + var compressedData = new MemoryStream(); + using (var brotliStream = new BrotliStream(compressedData, CompressionMode.Compress, leaveOpen: true)) + { + var data = Encoding.UTF8.GetBytes("test data"); + brotliStream.Write(data, 0, data.Length); + } + compressedData.Position = 0; + + var decompressedStream = _compressor.Decompress(compressedData, leaveOpen: true); + decompressedStream.Dispose(); + + // BaseStream should still be accessible if leaveOpen was true + Assert.True(compressedData.CanRead); + compressedData.Dispose(); + } + + [Fact] + public void Decompress_WithLeaveOpenFalse_ClosesBaseStream() + { + // Create a compressed stream first + var compressedData = new MemoryStream(); + using (var brotliStream = new BrotliStream(compressedData, CompressionMode.Compress, leaveOpen: true)) + { + var data = Encoding.UTF8.GetBytes("test data"); + brotliStream.Write(data, 0, data.Length); + } + compressedData.Position = 0; + + var decompressedStream = _compressor.Decompress(compressedData, leaveOpen: false); + decompressedStream.Dispose(); + + // BaseStream should be closed if leaveOpen was false + Assert.False(compressedData.CanRead); + } + + [Fact] + public void Decompress_WithNullStream_ThrowsArgumentNullException() + { + var exception = Record.Exception(() => _compressor.Decompress(null!)); + + Assert.NotNull(exception); + Assert.IsType(exception); + } + + [Fact] + public void CompressAndDecompress_RoundTrip_PreservesData() + { + var originalData = "This is test data for compression and decompression round trip!"; + var originalBytes = Encoding.UTF8.GetBytes(originalData); + 
+ // Compress + using var compressedStream = new MemoryStream(); + using (var brotliCompress = _compressor.Compress(compressedStream, leaveOpen: true)) + { + brotliCompress.Write(originalBytes, 0, originalBytes.Length); + } + + // Decompress + compressedStream.Position = 0; + using var decompressedStream = new MemoryStream(); + using (var brotliDecompress = _compressor.Decompress(compressedStream, leaveOpen: true)) + { + brotliDecompress.CopyTo(decompressedStream); + } + + // Verify + var decompressedData = Encoding.UTF8.GetString(decompressedStream.ToArray()); + Assert.Equal(originalData, decompressedData); + } + + [Fact] + public void CompressAndDecompress_WithLargeData_PreservesData() + { + // Create a larger test data set + var originalData = string.Join("", Enumerable.Repeat("Large test data for compression! ", 1000)); + var originalBytes = Encoding.UTF8.GetBytes(originalData); + + // Compress + using var compressedStream = new MemoryStream(); + using (var brotliCompress = _compressor.Compress(compressedStream, CompressionLevel.Optimal, leaveOpen: true)) + { + brotliCompress.Write(originalBytes, 0, originalBytes.Length); + } + + // Verify compression actually occurred + Assert.True(compressedStream.Length < originalBytes.Length); + + // Decompress + compressedStream.Position = 0; + using var decompressedStream = new MemoryStream(); + using (var brotliDecompress = _compressor.Decompress(compressedStream, leaveOpen: true)) + { + brotliDecompress.CopyTo(decompressedStream); + } + + // Verify + var decompressedData = Encoding.UTF8.GetString(decompressedStream.ToArray()); + Assert.Equal(originalData, decompressedData); + Assert.Equal(originalBytes.Length, decompressedStream.Length); + } + + [Fact] + public void CompressAndDecompress_WithEmptyData_PreservesEmptyData() + { + var originalBytes = Array.Empty(); + + // Compress + using var compressedStream = new MemoryStream(); + using (var brotliCompress = _compressor.Compress(compressedStream, leaveOpen: true)) + { + brotliCompress.Write(originalBytes, 0, originalBytes.Length); + } + + // Decompress + compressedStream.Position = 0; + using var decompressedStream = new MemoryStream(); + using (var brotliDecompress = _compressor.Decompress(compressedStream, leaveOpen: true)) + { + brotliDecompress.CopyTo(decompressedStream); + } + + // Verify + Assert.Empty(decompressedStream.ToArray()); + } + + [Fact] + public void Compress_WithDifferentCompressionLevels_ProducesDifferentSizes() + { + var testData = string.Join("", Enumerable.Repeat("Compression test data! 
", 100)); + var testBytes = Encoding.UTF8.GetBytes(testData); + + // Compress with Fastest + using var fastestStream = new MemoryStream(); + using (var brotliFastest = _compressor.Compress(fastestStream, CompressionLevel.Fastest, leaveOpen: true)) + { + brotliFastest.Write(testBytes, 0, testBytes.Length); + } + + // Compress with SmallestSize + using var smallestStream = new MemoryStream(); + using (var brotliSmallest = _compressor.Compress(smallestStream, CompressionLevel.SmallestSize, leaveOpen: true)) + { + brotliSmallest.Write(testBytes, 0, testBytes.Length); + } + + // SmallestSize should produce smaller or equal output than Fastest + Assert.True(smallestStream.Length <= fastestStream.Length); + } +} diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/Compression/DeflateStreamCompressorTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/Compression/DeflateStreamCompressorTest.cs new file mode 100644 index 0000000..24d2cd1 --- /dev/null +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/Compression/DeflateStreamCompressorTest.cs @@ -0,0 +1,282 @@ +namespace Chickensoft.SaveFileBuilder.Tests.Compression; + +using System.IO.Compression; +using System.Text; +using Chickensoft.SaveFileBuilder.Compression; + +public class DeflateStreamCompressorTest +{ + private readonly DeflateStreamCompressor _compressor; + + public DeflateStreamCompressorTest() + { + _compressor = new DeflateStreamCompressor(); + } + + [Fact] + public void Compress_WithDefaultParameters_ReturnsDeflateStream() + { + using var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream); + + Assert.NotNull(compressedStream); + Assert.IsType(compressedStream); + Assert.True(compressedStream.CanWrite); + } + + [Fact] + public void Compress_WithOptimalCompression_ReturnsDeflateStream() + { + using var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, CompressionLevel.Optimal); + + Assert.NotNull(compressedStream); + Assert.IsType(compressedStream); + } + + [Fact] + public void Compress_WithFastestCompression_ReturnsDeflateStream() + { + using var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, CompressionLevel.Fastest); + + Assert.NotNull(compressedStream); + Assert.IsType(compressedStream); + } + + [Fact] + public void Compress_WithSmallestSizeCompression_ReturnsDeflateStream() + { + using var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, CompressionLevel.SmallestSize); + + Assert.NotNull(compressedStream); + Assert.IsType(compressedStream); + } + + [Fact] + public void Compress_WithNoCompression_ReturnsDeflateStream() + { + using var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, CompressionLevel.NoCompression); + + Assert.NotNull(compressedStream); + Assert.IsType(compressedStream); + } + + [Fact] + public void Compress_WithLeaveOpenTrue_KeepsBaseStreamOpen() + { + var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, leaveOpen: true); + + compressedStream.Dispose(); + + // BaseStream should still be accessible if leaveOpen was true + Assert.True(baseStream.CanRead); + baseStream.Dispose(); + } + + [Fact] + public void Compress_WithLeaveOpenFalse_ClosesBaseStream() + { + var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, leaveOpen: false); + + compressedStream.Dispose(); + + // BaseStream should be closed if leaveOpen was false + 
Assert.False(baseStream.CanRead); + } + + [Fact] + public void Compress_WithNullStream_ThrowsArgumentNullException() + { + var exception = Record.Exception(() => _compressor.Compress(null!)); + + Assert.NotNull(exception); + Assert.IsType(exception); + } + + [Fact] + public void Compress_WithNonWritableStream_ThrowsArgumentException() + { + using var readOnlyStream = new MemoryStream(new byte[10], writable: false); + var exception = Record.Exception(() => _compressor.Compress(readOnlyStream)); + + Assert.NotNull(exception); + Assert.IsType(exception); + } + + [Fact] + public void Decompress_WithValidStream_ReturnsDeflateStream() + { + // Create a compressed stream first + using var compressedData = new MemoryStream(); + using (var deflateStream = new DeflateStream(compressedData, CompressionMode.Compress, leaveOpen: true)) + { + var data = Encoding.UTF8.GetBytes("test data"); + deflateStream.Write(data, 0, data.Length); + } + compressedData.Position = 0; + + var decompressedStream = _compressor.Decompress(compressedData); + + Assert.NotNull(decompressedStream); + Assert.IsType(decompressedStream); + Assert.True(decompressedStream.CanRead); + } + + [Fact] + public void Decompress_WithLeaveOpenTrue_KeepsBaseStreamOpen() + { + // Create a compressed stream first + var compressedData = new MemoryStream(); + using (var deflateStream = new DeflateStream(compressedData, CompressionMode.Compress, leaveOpen: true)) + { + var data = Encoding.UTF8.GetBytes("test data"); + deflateStream.Write(data, 0, data.Length); + } + compressedData.Position = 0; + + var decompressedStream = _compressor.Decompress(compressedData, leaveOpen: true); + decompressedStream.Dispose(); + + // BaseStream should still be accessible if leaveOpen was true + Assert.True(compressedData.CanRead); + compressedData.Dispose(); + } + + [Fact] + public void Decompress_WithLeaveOpenFalse_ClosesBaseStream() + { + // Create a compressed stream first + var compressedData = new MemoryStream(); + using (var deflateStream = new DeflateStream(compressedData, CompressionMode.Compress, leaveOpen: true)) + { + var data = Encoding.UTF8.GetBytes("test data"); + deflateStream.Write(data, 0, data.Length); + } + compressedData.Position = 0; + + var decompressedStream = _compressor.Decompress(compressedData, leaveOpen: false); + decompressedStream.Dispose(); + + // BaseStream should be closed if leaveOpen was false + Assert.False(compressedData.CanRead); + } + + [Fact] + public void Decompress_WithNullStream_ThrowsArgumentNullException() + { + var exception = Record.Exception(() => _compressor.Decompress(null!)); + + Assert.NotNull(exception); + Assert.IsType(exception); + } + + [Fact] + public void CompressAndDecompress_RoundTrip_PreservesData() + { + var originalData = "This is test data for compression and decompression round trip!"; + var originalBytes = Encoding.UTF8.GetBytes(originalData); + + // Compress + using var compressedStream = new MemoryStream(); + using (var deflateCompress = _compressor.Compress(compressedStream, leaveOpen: true)) + { + deflateCompress.Write(originalBytes, 0, originalBytes.Length); + } + + // Decompress + compressedStream.Position = 0; + using var decompressedStream = new MemoryStream(); + using (var deflateDecompress = _compressor.Decompress(compressedStream, leaveOpen: true)) + { + deflateDecompress.CopyTo(decompressedStream); + } + + // Verify + var decompressedData = Encoding.UTF8.GetString(decompressedStream.ToArray()); + Assert.Equal(originalData, decompressedData); + } + + [Fact] + public void 
CompressAndDecompress_WithLargeData_PreservesData() + { + // Create a larger test data set + var originalData = string.Join("", Enumerable.Repeat("Large test data for compression! ", 1000)); + var originalBytes = Encoding.UTF8.GetBytes(originalData); + + // Compress + using var compressedStream = new MemoryStream(); + using (var deflateCompress = _compressor.Compress(compressedStream, CompressionLevel.Optimal, leaveOpen: true)) + { + deflateCompress.Write(originalBytes, 0, originalBytes.Length); + } + + // Verify compression actually occurred + Assert.True(compressedStream.Length < originalBytes.Length); + + // Decompress + compressedStream.Position = 0; + using var decompressedStream = new MemoryStream(); + using (var deflateDecompress = _compressor.Decompress(compressedStream, leaveOpen: true)) + { + deflateDecompress.CopyTo(decompressedStream); + } + + // Verify + var decompressedData = Encoding.UTF8.GetString(decompressedStream.ToArray()); + Assert.Equal(originalData, decompressedData); + Assert.Equal(originalBytes.Length, decompressedStream.Length); + } + + [Fact] + public void CompressAndDecompress_WithEmptyData_PreservesEmptyData() + { + var originalBytes = Array.Empty(); + + // Compress + using var compressedStream = new MemoryStream(); + using (var deflateCompress = _compressor.Compress(compressedStream, leaveOpen: true)) + { + deflateCompress.Write(originalBytes, 0, originalBytes.Length); + } + + // Decompress + compressedStream.Position = 0; + using var decompressedStream = new MemoryStream(); + using (var deflateDecompress = _compressor.Decompress(compressedStream, leaveOpen: true)) + { + deflateDecompress.CopyTo(decompressedStream); + } + + // Verify + Assert.Empty(decompressedStream.ToArray()); + } + + [Fact] + public void Compress_WithDifferentCompressionLevels_ProducesDifferentSizes() + { + var testData = string.Join("", Enumerable.Repeat("Compression test data! 
", 100)); + var testBytes = Encoding.UTF8.GetBytes(testData); + + // Compress with Fastest + using var fastestStream = new MemoryStream(); + using (var deflateFastest = _compressor.Compress(fastestStream, CompressionLevel.Fastest, leaveOpen: true)) + { + deflateFastest.Write(testBytes, 0, testBytes.Length); + } + + // Compress with SmallestSize + using var smallestStream = new MemoryStream(); + using (var deflateSmallest = _compressor.Compress(smallestStream, CompressionLevel.SmallestSize, leaveOpen: true)) + { + deflateSmallest.Write(testBytes, 0, testBytes.Length); + } + + // SmallestSize should produce smaller or equal output than Fastest + Assert.True(smallestStream.Length <= fastestStream.Length); + } +} diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/Compression/GZipStreamCompressorTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/Compression/GZipStreamCompressorTest.cs new file mode 100644 index 0000000..2c46148 --- /dev/null +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/Compression/GZipStreamCompressorTest.cs @@ -0,0 +1,282 @@ +namespace Chickensoft.SaveFileBuilder.Tests.Compression; + +using System.IO.Compression; +using System.Text; +using Chickensoft.SaveFileBuilder.Compression; + +public class GZipStreamCompressorTest +{ + private readonly GZipStreamCompressor _compressor; + + public GZipStreamCompressorTest() + { + _compressor = new GZipStreamCompressor(); + } + + [Fact] + public void Compress_WithDefaultParameters_ReturnsGZipStream() + { + using var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream); + + Assert.NotNull(compressedStream); + Assert.IsType(compressedStream); + Assert.True(compressedStream.CanWrite); + } + + [Fact] + public void Compress_WithOptimalCompression_ReturnsGZipStream() + { + using var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, CompressionLevel.Optimal); + + Assert.NotNull(compressedStream); + Assert.IsType(compressedStream); + } + + [Fact] + public void Compress_WithFastestCompression_ReturnsGZipStream() + { + using var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, CompressionLevel.Fastest); + + Assert.NotNull(compressedStream); + Assert.IsType(compressedStream); + } + + [Fact] + public void Compress_WithSmallestSizeCompression_ReturnsGZipStream() + { + using var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, CompressionLevel.SmallestSize); + + Assert.NotNull(compressedStream); + Assert.IsType(compressedStream); + } + + [Fact] + public void Compress_WithNoCompression_ReturnsGZipStream() + { + using var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, CompressionLevel.NoCompression); + + Assert.NotNull(compressedStream); + Assert.IsType(compressedStream); + } + + [Fact] + public void Compress_WithLeaveOpenTrue_KeepsBaseStreamOpen() + { + var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, leaveOpen: true); + + compressedStream.Dispose(); + + // BaseStream should still be accessible if leaveOpen was true + Assert.True(baseStream.CanRead); + baseStream.Dispose(); + } + + [Fact] + public void Compress_WithLeaveOpenFalse_ClosesBaseStream() + { + var baseStream = new MemoryStream(); + var compressedStream = _compressor.Compress(baseStream, leaveOpen: false); + + compressedStream.Dispose(); + + // BaseStream should be closed if leaveOpen was false + Assert.False(baseStream.CanRead); + } + 
+ [Fact] + public void Compress_WithNullStream_ThrowsArgumentNullException() + { + var exception = Record.Exception(() => _compressor.Compress(null!)); + + Assert.NotNull(exception); + Assert.IsType(exception); + } + + [Fact] + public void Compress_WithNonWritableStream_ThrowsArgumentException() + { + using var readOnlyStream = new MemoryStream(new byte[10], writable: false); + var exception = Record.Exception(() => _compressor.Compress(readOnlyStream)); + + Assert.NotNull(exception); + Assert.IsType(exception); + } + + [Fact] + public void Decompress_WithValidStream_ReturnsGZipStream() + { + // Create a compressed stream first + using var compressedData = new MemoryStream(); + using (var gzipStream = new GZipStream(compressedData, CompressionMode.Compress, leaveOpen: true)) + { + var data = Encoding.UTF8.GetBytes("test data"); + gzipStream.Write(data, 0, data.Length); + } + compressedData.Position = 0; + + var decompressedStream = _compressor.Decompress(compressedData); + + Assert.NotNull(decompressedStream); + Assert.IsType(decompressedStream); + Assert.True(decompressedStream.CanRead); + } + + [Fact] + public void Decompress_WithLeaveOpenTrue_KeepsBaseStreamOpen() + { + // Create a compressed stream first + var compressedData = new MemoryStream(); + using (var gzipStream = new GZipStream(compressedData, CompressionMode.Compress, leaveOpen: true)) + { + var data = Encoding.UTF8.GetBytes("test data"); + gzipStream.Write(data, 0, data.Length); + } + compressedData.Position = 0; + + var decompressedStream = _compressor.Decompress(compressedData, leaveOpen: true); + decompressedStream.Dispose(); + + // BaseStream should still be accessible if leaveOpen was true + Assert.True(compressedData.CanRead); + compressedData.Dispose(); + } + + [Fact] + public void Decompress_WithLeaveOpenFalse_ClosesBaseStream() + { + // Create a compressed stream first + var compressedData = new MemoryStream(); + using (var gzipStream = new GZipStream(compressedData, CompressionMode.Compress, leaveOpen: true)) + { + var data = Encoding.UTF8.GetBytes("test data"); + gzipStream.Write(data, 0, data.Length); + } + compressedData.Position = 0; + + var decompressedStream = _compressor.Decompress(compressedData, leaveOpen: false); + decompressedStream.Dispose(); + + // BaseStream should be closed if leaveOpen was false + Assert.False(compressedData.CanRead); + } + + [Fact] + public void Decompress_WithNullStream_ThrowsArgumentNullException() + { + var exception = Record.Exception(() => _compressor.Decompress(null!)); + + Assert.NotNull(exception); + Assert.IsType(exception); + } + + [Fact] + public void CompressAndDecompress_RoundTrip_PreservesData() + { + var originalData = "This is test data for compression and decompression round trip!"; + var originalBytes = Encoding.UTF8.GetBytes(originalData); + + // Compress + using var compressedStream = new MemoryStream(); + using (var gzipCompress = _compressor.Compress(compressedStream, leaveOpen: true)) + { + gzipCompress.Write(originalBytes, 0, originalBytes.Length); + } + + // Decompress + compressedStream.Position = 0; + using var decompressedStream = new MemoryStream(); + using (var gzipDecompress = _compressor.Decompress(compressedStream, leaveOpen: true)) + { + gzipDecompress.CopyTo(decompressedStream); + } + + // Verify + var decompressedData = Encoding.UTF8.GetString(decompressedStream.ToArray()); + Assert.Equal(originalData, decompressedData); + } + + [Fact] + public void CompressAndDecompress_WithLargeData_PreservesData() + { + // Create a larger test data set + var 
originalData = string.Join("", Enumerable.Repeat("Large test data for compression! ", 1000)); + var originalBytes = Encoding.UTF8.GetBytes(originalData); + + // Compress + using var compressedStream = new MemoryStream(); + using (var gzipCompress = _compressor.Compress(compressedStream, CompressionLevel.Optimal, leaveOpen: true)) + { + gzipCompress.Write(originalBytes, 0, originalBytes.Length); + } + + // Verify compression actually occurred + Assert.True(compressedStream.Length < originalBytes.Length); + + // Decompress + compressedStream.Position = 0; + using var decompressedStream = new MemoryStream(); + using (var gzipDecompress = _compressor.Decompress(compressedStream, leaveOpen: true)) + { + gzipDecompress.CopyTo(decompressedStream); + } + + // Verify + var decompressedData = Encoding.UTF8.GetString(decompressedStream.ToArray()); + Assert.Equal(originalData, decompressedData); + Assert.Equal(originalBytes.Length, decompressedStream.Length); + } + + [Fact] + public void CompressAndDecompress_WithEmptyData_PreservesEmptyData() + { + var originalBytes = Array.Empty(); + + // Compress + using var compressedStream = new MemoryStream(); + using (var gzipCompress = _compressor.Compress(compressedStream, leaveOpen: true)) + { + gzipCompress.Write(originalBytes, 0, originalBytes.Length); + } + + // Decompress + compressedStream.Position = 0; + using var decompressedStream = new MemoryStream(); + using (var gzipDecompress = _compressor.Decompress(compressedStream, leaveOpen: true)) + { + gzipDecompress.CopyTo(decompressedStream); + } + + // Verify + Assert.Empty(decompressedStream.ToArray()); + } + + [Fact] + public void Compress_WithDifferentCompressionLevels_ProducesDifferentSizes() + { + var testData = string.Join("", Enumerable.Repeat("Compression test data! 
", 100)); + var testBytes = Encoding.UTF8.GetBytes(testData); + + // Compress with Fastest + using var fastestStream = new MemoryStream(); + using (var gzipFastest = _compressor.Compress(fastestStream, CompressionLevel.Fastest, leaveOpen: true)) + { + gzipFastest.Write(testBytes, 0, testBytes.Length); + } + + // Compress with SmallestSize + using var smallestStream = new MemoryStream(); + using (var gzipSmallest = _compressor.Compress(smallestStream, CompressionLevel.SmallestSize, leaveOpen: true)) + { + gzipSmallest.Write(testBytes, 0, testBytes.Length); + } + + // SmallestSize should produce smaller or equal output than Fastest + Assert.True(smallestStream.Length <= fastestStream.Length); + } +} diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/IO/FileStreamIOTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/IO/FileStreamIOTest.cs new file mode 100644 index 0000000..8388ce8 --- /dev/null +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/IO/FileStreamIOTest.cs @@ -0,0 +1,245 @@ +namespace Chickensoft.SaveFileBuilder.Tests.IO; + +using System.IO; +using Chickensoft.SaveFileBuilder.IO; + +public class FileStreamIOTest : IDisposable +{ + private readonly string _testDirectory = Path.Combine(Path.GetTempPath(), "FileStreamIOTest"); + private readonly string _testFileName = "test.txt"; + + public FileStreamIOTest() + { + Directory.CreateDirectory(_testDirectory); + } + + public void Dispose() + { + // Clean up test directory after tests + if (Directory.Exists(_testDirectory)) + { + Directory.Delete(_testDirectory, true); + } + GC.SuppressFinalize(this); + } + + private string GetTestFilePath() => Path.Combine(_testDirectory, _testFileName); + + [Fact] + public void Constructor_WithFileInfo_SetsFileInfo() + { + // Arrange + var fileInfo = new FileInfo(GetTestFilePath()); + + // Act + var streamIO = new FileStreamIO(fileInfo); + + // Assert + Assert.Equal(fileInfo.FullName, streamIO.FileInfo.FullName); + } + + [Fact] + public void Constructor_WithFileName_SetsFileInfo() + { + // Arrange + var fileName = GetTestFilePath(); + + // Act + var streamIO = new FileStreamIO(fileName); + + // Assert + Assert.Equal(fileName, streamIO.FileInfo.FullName); + } + + [Fact] + public void Read_ExistingFile_ReturnsReadableStream() + { + // Arrange + var filePath = GetTestFilePath(); + File.WriteAllText(filePath, "test content"); + var streamIO = new FileStreamIO(filePath); + + // Act + using var stream = streamIO.Read(); + + // Assert + Assert.NotNull(stream); + Assert.True(stream.CanRead); + using var reader = new StreamReader(stream); + var content = reader.ReadToEnd(); + Assert.Equal("test content", content); + } + + [Fact] + public void Read_NonExistingFile_ThrowsFileNotFoundException() + { + // Arrange + var filePath = GetTestFilePath(); + var streamIO = new FileStreamIO(filePath); + + // Act & Assert + Assert.Throws(streamIO.Read); + } + + [Fact] + public void Write_NonExistingFile_CreatesFileAndReturnsWritableStream() + { + // Arrange + var filePath = GetTestFilePath(); + var streamIO = new FileStreamIO(filePath); + + // Act + using var stream = streamIO.Write(); + + // Assert + Assert.NotNull(stream); + Assert.True(stream.CanWrite); + Assert.True(File.Exists(filePath)); + } + + [Fact] + public void Write_ExistingFile_ReturnsWritableStream() + { + // Arrange + var filePath = GetTestFilePath(); + File.WriteAllText(filePath, "existing content"); + var streamIO = new FileStreamIO(filePath); + + // Act + using var stream = streamIO.Write(); + + // Assert + Assert.NotNull(stream); + 
Assert.True(stream.CanWrite); + } + + [Fact] + public void Write_NonExistingDirectory_CreatesDirectoryAndReturnsStream() + { + // Arrange + var subdirectory = Path.Combine(_testDirectory, "subdir1", "subdir2"); + var filePath = Path.Combine(subdirectory, _testFileName); + var streamIO = new FileStreamIO(filePath); + + // Act + using var stream = streamIO.Write(); + + // Assert + Assert.NotNull(stream); + Assert.True(stream.CanWrite); + Assert.True(Directory.Exists(subdirectory)); + } + + [Fact] + public void Write_AllowsWritingContent() + { + // Arrange + var filePath = GetTestFilePath(); + var streamIO = new FileStreamIO(filePath); + var testContent = "test write content"; + + // Act + using (var stream = streamIO.Write()) + using (var writer = new StreamWriter(stream)) + { + writer.Write(testContent); + } + + // Assert + var actualContent = File.ReadAllText(filePath); + Assert.Equal(testContent, actualContent); + } + + [Fact] + public void Exists_ExistingFile_ReturnsTrue() + { + // Arrange + var filePath = GetTestFilePath(); + File.WriteAllText(filePath, "content"); + var streamIO = new FileStreamIO(filePath); + + // Act + var exists = streamIO.Exists(); + + // Assert + Assert.True(exists); + } + + [Fact] + public void Exists_NonExistingFile_ReturnsFalse() + { + // Arrange + var filePath = GetTestFilePath(); + var streamIO = new FileStreamIO(filePath); + + // Act + var exists = streamIO.Exists(); + + // Assert + Assert.False(exists); + } + + [Fact] + public void Exists_AfterFileCreation_ReturnsTrue() + { + // Arrange + var filePath = GetTestFilePath(); + var streamIO = new FileStreamIO(filePath); + + // Act - Initially doesn't exist + var existsBefore = streamIO.Exists(); + + // Create the file + File.WriteAllText(filePath, "content"); + + // Act - Check after creation + var existsAfter = streamIO.Exists(); + + // Assert + Assert.False(existsBefore); + Assert.True(existsAfter); + } + + [Fact] + public void Delete_ExistingFile_DeletesFile() + { + // Arrange + var filePath = GetTestFilePath(); + File.WriteAllText(filePath, "content"); + var streamIO = new FileStreamIO(filePath); + + // Act + streamIO.Delete(); + + // Assert + Assert.False(File.Exists(filePath)); + } + + [Fact] + public void Delete_NonExistingFile_DoesNotThrow() + { + // Arrange + var filePath = GetTestFilePath(); + var streamIO = new FileStreamIO(filePath); + + // Act & Assert + var exception = Record.Exception(streamIO.Delete); + Assert.Null(exception); + } + + [Fact] + public void Delete_AfterDeletion_ExistsReturnsFalse() + { + // Arrange + var filePath = GetTestFilePath(); + File.WriteAllText(filePath, "content"); + var streamIO = new FileStreamIO(filePath); + + // Act + streamIO.Delete(); + var exists = streamIO.Exists(); + + // Assert + Assert.False(exists); + } +} diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/IO/HttpStreamIOTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/IO/HttpStreamIOTest.cs new file mode 100644 index 0000000..ff3211a --- /dev/null +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/IO/HttpStreamIOTest.cs @@ -0,0 +1,552 @@ +namespace Chickensoft.SaveFileBuilder.Tests.IO; + +using System.Diagnostics.CodeAnalysis; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Text; +using Chickensoft.SaveFileBuilder.IO; + +public class HttpStreamIOTest : IDisposable +{ + private CancellationToken CancellationToken { get; } + + private readonly MockHttpMessageHandler _mockHandler; + private readonly HttpClient _httpClient; + + public 
HttpStreamIOTest(ITestContextAccessor testContextAccessor) + { + CancellationToken = testContextAccessor.Current.CancellationToken; + + _mockHandler = new MockHttpMessageHandler(); + _httpClient = new HttpClient(_mockHandler) + { + BaseAddress = new Uri("http://localhost:8080") + }; + } + + public void Dispose() + { + _httpClient?.Dispose(); + GC.SuppressFinalize(this); + } + + #region Constructor Tests + + [Fact] + public async Task Constructor_WithHttpClientDisposeTrue_DisposesClientOnDispose() + { + // Arrange + var handler = new MockHttpMessageHandler(); + var client = new HttpClient(handler); + var streamIO = new HttpStreamIO(client, disposeClient: true); + + // Act + streamIO.Dispose(); + + // Assert - Verify client is disposed by trying to send a request + await Assert.ThrowsAsync(async () => + await client.GetAsync("http://test.com", CancellationToken) + ); + } + + [Fact] + public void Constructor_WithHttpClientDisposeFalse_DoesNotDisposeClientOnDispose() + { + // Arrange + var handler = new MockHttpMessageHandler(); + var client = new HttpClient(handler); + var streamIO = new HttpStreamIO(client, disposeClient: false); + + // Act + streamIO.Dispose(); + + // Assert - client should still be usable + var exception = Record.Exception(() => _ = client.BaseAddress); + Assert.Null(exception); + client.Dispose(); + } + + #endregion + + #region Headers Tests + + [Fact] + public void ReadHeaders_CanAddHeaders() + { + // Arrange + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false); + streamIO.ReadHeaders.Add("X-Custom-Header", "test-value"); + + // Act + var headerExists = streamIO.ReadHeaders.Contains("X-Custom-Header"); + + // Assert + Assert.True(headerExists); + } + + [Fact] + public void WriteHeaders_CanAddHeaders() + { + // Arrange + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false); + streamIO.WriteHeaders.Add("X-Custom-Header", "test-value"); + + // Act + var headerExists = streamIO.WriteHeaders.Contains("X-Custom-Header"); + + // Assert + Assert.True(headerExists); + } + + [Fact] + public void WriteHeaders_CanSetContentLength() + { + // Arrange + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false); + + // Act + streamIO.WriteHeaders.ContentLength = 1024; + + // Assert + Assert.Equal(1024, streamIO.WriteHeaders.ContentLength); + } + + #endregion + + #region ReadAsync Tests + + [Fact] + public async Task ReadAsync_SuccessfulResponse_ReturnsStreamWithContent() + { + // Arrange + var expectedContent = "test data"; + _mockHandler.SetupResponse(HttpStatusCode.OK, expectedContent); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(ReadUri: new Uri("api/read", UriKind.Relative)) + }; + + // Act + using var stream = await streamIO.ReadAsync(CancellationToken); + + // Assert + Assert.NotNull(stream); + stream.Position = 0; + using var reader = new StreamReader(stream); + var actualContent = await reader.ReadToEndAsync(CancellationToken); + Assert.Equal(expectedContent, actualContent); + } + + [Fact] + public async Task ReadAsync_NotFoundResponse_ReturnsEmptyStream() + { + // Arrange + _mockHandler.SetupResponse(HttpStatusCode.NotFound, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(ReadUri: new Uri("api/read", UriKind.Relative)) + }; + + // Act + using var stream = await streamIO.ReadAsync(CancellationToken); + + // Assert + Assert.NotNull(stream); + Assert.Equal(0, stream.Length); + } + 
+ [Fact] + public async Task ReadAsync_CancelledToken_ThrowsOperationCanceledException() + { + // Arrange + var cts = CancellationTokenSource.CreateLinkedTokenSource(CancellationToken); + cts.Cancel(); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(ReadUri: new Uri("api/read", UriKind.Relative)) + }; + + // Act & Assert + await Assert.ThrowsAsync( + async () => await streamIO.ReadAsync(cts.Token) + ); + } + + [Fact] + public async Task ReadAsync_ServerError_ThrowsHttpRequestException() + { + // Arrange + _mockHandler.SetupResponse(HttpStatusCode.InternalServerError, "Server Error"); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(ReadUri: new Uri("api/read", UriKind.Relative)) + }; + + // Act & Assert + await Assert.ThrowsAsync( + async () => await streamIO.ReadAsync(CancellationToken) + ); + } + + #endregion + + #region WriteAsync Tests + + [Fact] + public async Task WriteAsync_ValidStream_PostsDataSuccessfully() + { + // Arrange + var testData = "test write data"; + var stream = new MemoryStream(Encoding.UTF8.GetBytes(testData)); + _mockHandler.SetupResponse(HttpStatusCode.OK, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(WriteUri: new Uri("api/write", UriKind.Relative)) + }; + + // Act + await streamIO.WriteAsync(stream, CancellationToken); + + // Assert + Assert.True(_mockHandler.RequestReceived); + Assert.Equal(HttpMethod.Post, _mockHandler.LastRequest?.Method); + } + + [Fact] + public async Task WriteAsync_UsesStreamLength_WhenContentLengthIsNull() + { + // Arrange + var testData = "test data"; + var stream = new MemoryStream(Encoding.UTF8.GetBytes(testData)); + var streamLength = stream.Length; + _mockHandler.SetupResponse(HttpStatusCode.OK, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(WriteUri: new Uri("api/write", UriKind.Relative)) + }; + streamIO.WriteHeaders.ContentLength = null; + + // Act + await streamIO.WriteAsync(stream, CancellationToken); + + // Assert + Assert.Equal(streamLength, _mockHandler.LastRequest?.Content?.Headers.ContentLength); + + } + + [Fact] + public async Task WriteAsync_UsesCustomContentLength_WhenSet() + { + // Arrange + var testData = "test data"; + var stream = new MemoryStream(Encoding.UTF8.GetBytes(testData)); + _mockHandler.SetupResponse(HttpStatusCode.OK, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(WriteUri: new Uri("api/write", UriKind.Relative)) + }; + streamIO.WriteHeaders.ContentLength = 5; + + // Act + await streamIO.WriteAsync(stream, CancellationToken); + + // Assert + Assert.Equal(5, _mockHandler.LastRequest?.Content?.Headers.ContentLength); + } + + [Fact] + public async Task WriteAsync_CopiesAllHeadersExceptContentLength() + { + // Arrange + var stream = new MemoryStream(Encoding.UTF8.GetBytes("test")); + _mockHandler.SetupResponse(HttpStatusCode.OK, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(WriteUri: new Uri("api/write", UriKind.Relative)) + }; + streamIO.WriteHeaders.Add("X-Custom-Header", "custom-value"); + streamIO.WriteHeaders.ContentType = new MediaTypeHeaderValue("application/json"); + + // Act + await streamIO.WriteAsync(stream, CancellationToken); + + // Assert + IEnumerable? 
contentTypeValues = []; + Assert.True(_mockHandler.LastRequest?.Content?.Headers.TryGetValues("X-Custom-Header", out contentTypeValues)); + Assert.NotNull(contentTypeValues); + Assert.Single(contentTypeValues); + Assert.Equal("custom-value", contentTypeValues.First()); + + Assert.Equal("application/json", _mockHandler.LastRequest?.Content?.Headers.ContentType?.MediaType); + } + + [Fact] + public async Task WriteAsync_CancelledToken_ThrowsOperationCanceledException() + { + // Arrange + var cts = CancellationTokenSource.CreateLinkedTokenSource(CancellationToken); + cts.Cancel(); + var stream = new MemoryStream(Encoding.UTF8.GetBytes("test")); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(WriteUri: new Uri("api/write", UriKind.Relative)) + }; + + // Act & Assert + await Assert.ThrowsAsync( + async () => await streamIO.WriteAsync(stream, cts.Token) + ); + } + + #endregion + + #region ExistsAsync Tests + + [Fact] + public async Task ExistsAsync_SuccessStatusCode_ReturnsTrue() + { + // Arrange + _mockHandler.SetupResponse(HttpStatusCode.OK, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(ExistsUri: new Uri("api/exists", UriKind.Relative)) + }; + + // Act + var exists = await streamIO.ExistsAsync(CancellationToken); + + // Assert + Assert.True(exists); + } + + [Fact] + public async Task ExistsAsync_NotFoundStatusCode_ReturnsFalse() + { + // Arrange + _mockHandler.SetupResponse(HttpStatusCode.NotFound, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(ExistsUri: new Uri("api/exists", UriKind.Relative)) + }; + + // Act + var exists = await streamIO.ExistsAsync(CancellationToken); + + // Assert + Assert.False(exists); + } + + [Fact] + public async Task ExistsAsync_OtherErrorStatusCode_ReturnsFalse() + { + // Arrange + _mockHandler.SetupResponse(HttpStatusCode.InternalServerError, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(ExistsUri: new Uri("api/exists", UriKind.Relative)) + }; + + // Act + var exists = await streamIO.ExistsAsync(CancellationToken); + + // Assert + Assert.False(exists); + } + + [Fact] + public async Task ExistsAsync_CancelledToken_ThrowsOperationCanceledException() + { + // Arrange + var cts = CancellationTokenSource.CreateLinkedTokenSource(CancellationToken); + cts.Cancel(); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(ExistsUri: new Uri("api/exists", UriKind.Relative)) + }; + + // Act & Assert + await Assert.ThrowsAsync( + async () => await streamIO.ExistsAsync(cts.Token) + ); + } + + #endregion + + #region DeleteAsync Tests + + [Fact] + public async Task DeleteAsync_SuccessStatusCode_ReturnsTrue() + { + // Arrange + _mockHandler.SetupResponse(HttpStatusCode.OK, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(DeleteUri: new Uri("api/delete", UriKind.Relative)) + }; + + // Act + var deleted = await streamIO.DeleteAsync(CancellationToken); + + // Assert + Assert.True(deleted); + } + + [Fact] + public async Task DeleteAsync_NoContentStatusCode_ReturnsTrue() + { + // Arrange + _mockHandler.SetupResponse(HttpStatusCode.NoContent, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new 
HttpIORequestUris(DeleteUri: new Uri("api/delete", UriKind.Relative)) + }; + + // Act + var deleted = await streamIO.DeleteAsync(CancellationToken); + + // Assert + Assert.True(deleted); + } + + [Fact] + public async Task DeleteAsync_NotFoundStatusCode_ReturnsFalse() + { + // Arrange + _mockHandler.SetupResponse(HttpStatusCode.NotFound, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(DeleteUri: new Uri("api/delete", UriKind.Relative)) + }; + + // Act + var deleted = await streamIO.DeleteAsync(CancellationToken); + + // Assert + Assert.False(deleted); + } + + [Fact] + public async Task DeleteAsync_ErrorStatusCode_ReturnsFalse() + { + // Arrange + _mockHandler.SetupResponse(HttpStatusCode.InternalServerError, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(DeleteUri: new Uri("api/delete", UriKind.Relative)) + }; + + // Act + var deleted = await streamIO.DeleteAsync(CancellationToken); + + // Assert + Assert.False(deleted); + } + + [Fact] + public async Task DeleteAsync_CancelledToken_ThrowsOperationCanceledException() + { + // Arrange + var cts = CancellationTokenSource.CreateLinkedTokenSource(CancellationToken); + cts.Cancel(); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(DeleteUri: new Uri("api/delete", UriKind.Relative)) + }; + + // Act & Assert + await Assert.ThrowsAsync( + async () => await streamIO.DeleteAsync(cts.Token) + ); + } + + [Fact] + public async Task DeleteAsync_UsesDeleteHttpMethod() + { + // Arrange + _mockHandler.SetupResponse(HttpStatusCode.OK, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(DeleteUri: new Uri("api/delete", UriKind.Relative)) + }; + + // Act + await streamIO.DeleteAsync(CancellationToken); + + // Assert + Assert.True(_mockHandler.RequestReceived); + Assert.Equal(HttpMethod.Delete, _mockHandler.LastRequest?.Method); + } + + #endregion + + #region Dispose Tests + + [Fact] + public void Dispose_CalledMultipleTimes_DoesNotThrow() + { + // Arrange + var handler = new MockHttpMessageHandler(); + var client = new HttpClient(handler); + var streamIO = new HttpStreamIO(client, disposeClient: true); + + // Act & Assert + streamIO.Dispose(); + var exception = Record.Exception(streamIO.Dispose); + Assert.Null(exception); + } + + [Fact] + public void Dispose_DisposeClientFalse_DoesNotDisposeHttpClient() + { + // Arrange + var handler = new MockHttpMessageHandler(); + var client = new HttpClient(handler); + var streamIO = new HttpStreamIO(client, disposeClient: false); + + // Act + streamIO.Dispose(); + + // Assert - client should still be usable + var exception = Record.Exception(() => _ = client.BaseAddress); + Assert.Null(exception); + client.Dispose(); + } + + #endregion +} + +/// +/// Mock HttpMessageHandler for testing HTTP requests without actual network calls. +/// +internal class MockHttpMessageHandler : HttpMessageHandler +{ + private HttpStatusCode _statusCode = HttpStatusCode.OK; + private string _content = ""; + + [MemberNotNullWhen(true, nameof(LastRequest))] + public bool RequestReceived { get; private set; } + + public HttpRequestMessage? 
LastRequest { get; private set; } + + public void SetupResponse(HttpStatusCode statusCode, string content) + { + _statusCode = statusCode; + _content = content; + } + + protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + RequestReceived = true; + LastRequest = request; + + var response = new HttpResponseMessage(_statusCode) + { + Content = new StringContent(_content, Encoding.UTF8, "application/json") + }; + + return Task.FromResult(response); + } +} diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/Serialization/JsonStreamSerializerTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/Serialization/JsonStreamSerializerTest.cs new file mode 100644 index 0000000..bff9ac7 --- /dev/null +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/Serialization/JsonStreamSerializerTest.cs @@ -0,0 +1,588 @@ +namespace Chickensoft.SaveFileBuilder.Tests.Serialization; + +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using Chickensoft.SaveFileBuilder.Serialization; + +public partial class JsonStreamSerializerTest(ITestContextAccessor testContextAccessor) +{ + private CancellationToken CancellationToken { get; } = testContextAccessor.Current.CancellationToken; + + #region Test Models and Context + + private class TestData + { + public string Name { get; set; } = string.Empty; + public int Value { get; set; } + } + + [JsonSerializable(typeof(TestData))] + private partial class TestJsonContext : JsonSerializerContext; + + #endregion + + #region Serialize Tests + + [Fact] + public void Serialize_WithJsonTypeInfo_SerializesObject() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.TestData; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + var testData = new TestData { Name = "Test", Value = 42 }; + using var stream = new MemoryStream(); + + // Act + serializer.Serialize(stream, testData); + + // Assert + stream.Position = 0; + var json = Encoding.UTF8.GetString(stream.ToArray()); + Assert.Contains("\"Name\":\"Test\"", json); + Assert.Contains("\"Value\":42", json); + } + + [Fact] + public void Serialize_WithJsonSerializerOptions_SerializesObject() + { + // Arrange + var options = new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + var serializer = new JsonStreamSerializer(options); + var testData = new TestData { Name = "Test", Value = 42 }; + using var stream = new MemoryStream(); + + // Act + serializer.Serialize(stream, testData); + + // Assert + stream.Position = 0; + var json = Encoding.UTF8.GetString(stream.ToArray()); + Assert.Contains("\"name\":", json); + Assert.Contains("\"value\":", json); + } + + [Fact] + public void Serialize_WithJsonSerializerContext_SerializesObject() + { + // Arrange + var context = TestJsonContext.Default; + var serializer = new JsonStreamSerializer(context); + var testData = new TestData { Name = "Test", Value = 42 }; + using var stream = new MemoryStream(); + + // Act + serializer.Serialize(stream, testData); + + // Assert + stream.Position = 0; + var json = Encoding.UTF8.GetString(stream.ToArray()); + Assert.Contains("\"Name\":\"Test\"", json); + Assert.Contains("\"Value\":42", json); + } + + [Fact] + public void Serialize_WithNullValue_SerializesNull() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.String; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + using var stream = new MemoryStream(); + + // Act + serializer.Serialize(stream, null, 
typeof(string)); + + // Assert + stream.Position = 0; + var json = Encoding.UTF8.GetString(stream.ToArray()); + Assert.Equal("null", json); + } + + [Fact] + public void Serialize_WithComplexObject_SerializesCorrectly() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.TestData; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + var testData = new TestData + { + Name = "Complex Test with \"quotes\" and \n newlines", + Value = int.MaxValue + }; + using var stream = new MemoryStream(); + + // Act + serializer.Serialize(stream, testData); + + // Assert + stream.Position = 0; + var json = Encoding.UTF8.GetString(stream.ToArray()); + Assert.Contains("\"Name\":\"Complex Test with \\u0022quotes\\u0022 and \\n newlines\"", json); + Assert.Contains("\"Value\":2147483647", json); + } + + #endregion + + #region SerializeAsync Tests + + [Fact] + public async Task SerializeAsync_WithJsonTypeInfo_SerializesObject() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.TestData; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + var testData = new TestData { Name = "AsyncTest", Value = 123 }; + using var stream = new MemoryStream(); + + // Act + await serializer.SerializeAsync(stream, testData, CancellationToken); + + // Assert + stream.Position = 0; + var json = Encoding.UTF8.GetString(stream.ToArray()); + Assert.Contains("\"Name\":\"AsyncTest\"", json); + Assert.Contains("\"Value\":123", json); + } + + [Fact] + public async Task SerializeAsync_WithJsonSerializerOptions_SerializesObject() + { + // Arrange + var options = new JsonSerializerOptions + { + WriteIndented = true + }; + var serializer = new JsonStreamSerializer(options); + var testData = new TestData { Name = "AsyncTest", Value = 123 }; + using var stream = new MemoryStream(); + + // Act + await serializer.SerializeAsync(stream, testData, CancellationToken); + + // Assert + stream.Position = 0; + var json = Encoding.UTF8.GetString(stream.ToArray()); + Assert.Contains("\n", json); // Indented JSON contains newlines + } + + [Fact] + public async Task SerializeAsync_WithJsonSerializerContext_SerializesObject() + { + // Arrange + var context = TestJsonContext.Default; + var serializer = new JsonStreamSerializer(context); + var testData = new TestData { Name = "AsyncTest", Value = 123 }; + using var stream = new MemoryStream(); + + // Act + await serializer.SerializeAsync(stream, testData, CancellationToken); + + // Assert + stream.Position = 0; + var json = Encoding.UTF8.GetString(stream.ToArray()); + Assert.Contains("\"Name\":\"AsyncTest\"", json); + Assert.Contains("\"Value\":123", json); + } + + [Fact] + public async Task SerializeAsync_WithCancelledToken_ThrowsOperationCanceledException() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.TestData; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + var testData = new TestData { Name = "CancelTest", Value = 456 }; + using var stream = new MemoryStream(); + using var cts = CancellationTokenSource.CreateLinkedTokenSource(CancellationToken); + cts.Cancel(); + + // Act & Assert + await Assert.ThrowsAsync( + async () => await serializer.SerializeAsync(stream, testData, cts.Token) + ); + } + + [Fact] + public async Task SerializeAsync_WithNullValue_SerializesNull() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.String; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + using var stream = new MemoryStream(); + + // Act + await serializer.SerializeAsync(stream, null, typeof(string), CancellationToken); + + // Assert + 
stream.Position = 0; + var json = Encoding.UTF8.GetString(stream.ToArray()); + Assert.Equal("null", json); + } + + #endregion + + #region Deserialize Tests + + [Fact] + public void Deserialize_WithJsonTypeInfo_DeserializesObject() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.TestData; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + var json = /*lang=json,strict*/ "{\"Name\":\"DeserializeTest\",\"Value\":789}"; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var result = serializer.Deserialize(stream); + + // Assert + Assert.NotNull(result); + Assert.Equal("DeserializeTest", result.Name); + Assert.Equal(789, result.Value); + } + + [Fact] + public void Deserialize_WithJsonSerializerOptions_DeserializesObject() + { + // Arrange + var options = new JsonSerializerOptions + { + PropertyNameCaseInsensitive = true + }; + var serializer = new JsonStreamSerializer(options); + var json = /*lang=json,strict*/ "{\"name\":\"DeserializeTest\",\"value\":789}"; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var result = serializer.Deserialize(stream); + + // Assert + Assert.NotNull(result); + Assert.Equal("DeserializeTest", result.Name); + Assert.Equal(789, result.Value); + } + + [Fact] + public void Deserialize_WithJsonSerializerContext_DeserializesObject() + { + // Arrange + var context = TestJsonContext.Default; + var serializer = new JsonStreamSerializer(context); + var json = /*lang=json,strict*/ "{\"Name\":\"DeserializeTest\",\"Value\":789}"; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var result = serializer.Deserialize(stream); + + // Assert + Assert.NotNull(result); + Assert.Equal("DeserializeTest", result.Name); + Assert.Equal(789, result.Value); + } + + [Fact] + public void Deserialize_WithNullJson_ReturnsNull() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.String; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + var json = "null"; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var result = serializer.Deserialize(stream); + + // Assert + Assert.Null(result); + } + + [Fact] + public void Deserialize_WithInvalidJson_ThrowsJsonException() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.TestData; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + var invalidJson = "{\"Name\":\"Test\",\"Value\":}"; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(invalidJson)); + + // Act & Assert + Assert.Throws( + () => serializer.Deserialize(stream) + ); + } + + [Fact] + public void Deserialize_WithEmptyStream_ThrowsJsonException() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.TestData; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + using var stream = new MemoryStream(); + + // Act & Assert + Assert.Throws( + () => serializer.Deserialize(stream) + ); + } + + #endregion + + #region DeserializeAsync Tests + + [Fact] + public async Task DeserializeAsync_WithJsonTypeInfo_DeserializesObject() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.TestData; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + var json = /*lang=json,strict*/ "{\"Name\":\"AsyncDeserializeTest\",\"Value\":999}"; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var result = await serializer.DeserializeAsync(stream, CancellationToken); + + // Assert + Assert.NotNull(result); + Assert.Equal("AsyncDeserializeTest", result.Name); + 
Assert.Equal(999, result.Value); + } + + [Fact] + public async Task DeserializeAsync_WithJsonSerializerOptions_DeserializesObject() + { + // Arrange + var options = new JsonSerializerOptions + { + PropertyNameCaseInsensitive = true + }; + var serializer = new JsonStreamSerializer(options); + var json = /*lang=json,strict*/ "{\"name\":\"AsyncDeserializeTest\",\"value\":999}"; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var result = await serializer.DeserializeAsync(stream, CancellationToken); + + // Assert + Assert.NotNull(result); + Assert.Equal("AsyncDeserializeTest", result.Name); + Assert.Equal(999, result.Value); + } + + [Fact] + public async Task DeserializeAsync_WithJsonSerializerContext_DeserializesObject() + { + // Arrange + var context = TestJsonContext.Default; + var serializer = new JsonStreamSerializer(context); + var json = /*lang=json,strict*/ "{\"Name\":\"AsyncDeserializeTest\",\"Value\":999}"; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var result = await serializer.DeserializeAsync(stream, CancellationToken); + + // Assert + Assert.NotNull(result); + Assert.Equal("AsyncDeserializeTest", result.Name); + Assert.Equal(999, result.Value); + } + + [Fact] + public async Task DeserializeAsync_WithCancelledToken_ThrowsOperationCanceledException() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.TestData; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + var json = /*lang=json,strict*/ "{\"Name\":\"CancelTest\",\"Value\":777}"; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + using var cts = CancellationTokenSource.CreateLinkedTokenSource(CancellationToken); + cts.Cancel(); + + // Act & Assert + await Assert.ThrowsAsync( + async () => await serializer.DeserializeAsync(stream, cts.Token) + ); + } + + [Fact] + public async Task DeserializeAsync_WithNullJson_ReturnsNull() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.String; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + var json = "null"; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var result = await serializer.DeserializeAsync(stream, CancellationToken); + + // Assert + Assert.Null(result); + } + + [Fact] + public async Task DeserializeAsync_WithInvalidJson_ThrowsJsonException() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.TestData; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + var invalidJson = "{\"Name\":\"Test\",\"Value\":}"; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(invalidJson)); + + // Act & Assert + await Assert.ThrowsAsync( + async () => await serializer.DeserializeAsync(stream, CancellationToken) + ); + } + + [Fact] + public async Task DeserializeAsync_WithEmptyStream_ThrowsJsonException() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.TestData; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + using var stream = new MemoryStream(); + + // Act & Assert + await Assert.ThrowsAsync( + async () => await serializer.DeserializeAsync(stream, CancellationToken) + ); + } + + #endregion + + #region Round-Trip Tests + + [Fact] + public void SerializeAndDeserialize_RoundTrip_PreservesData() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.TestData; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + var originalData = new TestData + { + Name = "RoundTripTest", + Value = 12345 + }; + + // Act - Serialize + using var stream = new MemoryStream(); + 
serializer.Serialize(stream, originalData); + + // Act - Deserialize + stream.Position = 0; + var deserializedData = serializer.Deserialize(stream); + + // Assert + Assert.NotNull(deserializedData); + Assert.Equal(originalData.Name, deserializedData.Name); + Assert.Equal(originalData.Value, deserializedData.Value); + } + + [Fact] + public async Task SerializeAsyncAndDeserializeAsync_RoundTrip_PreservesData() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.TestData; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + var originalData = new TestData + { + Name = "AsyncRoundTripTest", + Value = 54321 + }; + + // Act - Serialize + using var stream = new MemoryStream(); + await serializer.SerializeAsync(stream, originalData, CancellationToken); + + // Act - Deserialize + stream.Position = 0; + var deserializedData = await serializer.DeserializeAsync(stream, CancellationToken); + + // Assert + Assert.NotNull(deserializedData); + Assert.Equal(originalData.Name, deserializedData.Name); + Assert.Equal(originalData.Value, deserializedData.Value); + } + + [Fact] + public void SerializeAndDeserialize_WithDifferentSerializers_PreservesData() + { + // Arrange + var serializeSerializer = new JsonStreamSerializer(TestJsonContext.Default.TestData); + var deserializeSerializer = new JsonStreamSerializer(); + var originalData = new TestData + { + Name = "CrossSerializerTest", + Value = 99999 + }; + + // Act - Serialize with one serializer + using var stream = new MemoryStream(); + serializeSerializer.Serialize(stream, originalData); + + // Act - Deserialize with another serializer + stream.Position = 0; + var deserializedData = deserializeSerializer.Deserialize(stream); + + // Assert + Assert.NotNull(deserializedData); + Assert.Equal(originalData.Name, deserializedData.Name); + Assert.Equal(originalData.Value, deserializedData.Value); + } + + [Fact] + public void SerializeAndDeserialize_WithSpecialCharacters_PreservesData() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.TestData; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + var originalData = new TestData + { + Name = "Test with \"quotes\", \n newlines, \t tabs, and \\ backslashes", + Value = 0 + }; + + // Act - Serialize + using var stream = new MemoryStream(); + serializer.Serialize(stream, originalData); + + // Act - Deserialize + stream.Position = 0; + var deserializedData = serializer.Deserialize(stream); + + // Assert + Assert.NotNull(deserializedData); + Assert.Equal(originalData.Name, deserializedData.Name); + Assert.Equal(originalData.Value, deserializedData.Value); + } + + [Fact] + public void SerializeAndDeserialize_WithMaxValues_PreservesData() + { + // Arrange + var jsonTypeInfo = TestJsonContext.Default.TestData; + var serializer = new JsonStreamSerializer(jsonTypeInfo); + var originalData = new TestData + { + Name = new string('X', 10000), // Long string + Value = int.MaxValue + }; + + // Act - Serialize + using var stream = new MemoryStream(); + serializer.Serialize(stream, originalData); + + // Act - Deserialize + stream.Position = 0; + var deserializedData = serializer.Deserialize(stream); + + // Assert + Assert.NotNull(deserializedData); + Assert.Equal(originalData.Name, deserializedData.Name); + Assert.Equal(originalData.Value, deserializedData.Value); + } + + #endregion +} From 625c19c83aacb2bbb8fc7425abf6cb0b883cdf4f Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Tue, 20 Jan 2026 20:55:06 +0100 Subject: [PATCH 16/19] Explicitly set asyncSerializer in CreateGZipJsonIO(... 
IAsyncStreamIO ...) Using the named asyncSerializer argument makes it explicit that these IAsyncStreamIO overloads configure the asynchronous serializer. --- Chickensoft.SaveFileBuilder/src/SaveFile.cs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Chickensoft.SaveFileBuilder/src/SaveFile.cs b/Chickensoft.SaveFileBuilder/src/SaveFile.cs index 94dca23..50e2005 100644 --- a/Chickensoft.SaveFileBuilder/src/SaveFile.cs +++ b/Chickensoft.SaveFileBuilder/src/SaveFile.cs @@ -322,7 +322,7 @@ public static class SaveFile public static SaveFile CreateGZipJsonIO(ISaveChunk root, IAsyncStreamIO asyncIO, JsonSerializerOptions? options = null) where TData : class => new( root: root, asyncIO: asyncIO, - serializer: new JsonStreamSerializer(options), + asyncSerializer: new JsonStreamSerializer(options), compressor: new GZipStreamCompressor() ); @@ -330,7 +330,7 @@ public static class SaveFile public static SaveFile CreateGZipJsonIO(ISaveChunk root, IAsyncStreamIO asyncIO, JsonSerializerContext context) where TData : class => new( root: root, asyncIO: asyncIO, - serializer: new JsonStreamSerializer(context), + asyncSerializer: new JsonStreamSerializer(context), compressor: new GZipStreamCompressor() ); @@ -338,7 +338,7 @@ public static class SaveFile public static SaveFile CreateGZipJsonFIO(ISaveChunk root, IAsyncStreamIO asyncIO, JsonTypeInfo jsonTypeInfo) where TData : class => new( root: root, asyncIO: asyncIO, - serializer: new JsonStreamSerializer(jsonTypeInfo), + asyncSerializer: new JsonStreamSerializer(jsonTypeInfo), compressor: new GZipStreamCompressor() ); } From e759e96430ff82b99c4830a3a8847e0dcc520170 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Tue, 20 Jan 2026 20:55:33 +0100 Subject: [PATCH 17/19] Add package project url & repository url inside csproj --- .../Chickensoft.SaveFileBuilder.csproj | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Chickensoft.SaveFileBuilder/Chickensoft.SaveFileBuilder.csproj b/Chickensoft.SaveFileBuilder/Chickensoft.SaveFileBuilder.csproj index a1e8a1c..b503c13 100644 --- a/Chickensoft.SaveFileBuilder/Chickensoft.SaveFileBuilder.csproj +++ b/Chickensoft.SaveFileBuilder/Chickensoft.SaveFileBuilder.csproj @@ -25,10 +25,10 @@ README.md LICENSE - + https://www.nuget.org/packages/Chickensoft.SaveFileBuilder git - + https://github.com/chickensoft-games/SaveFileBuilder From d54c2ee9329ed3a1627b9425190252a28d356705 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Tue, 20 Jan 2026 21:01:05 +0100 Subject: [PATCH 18/19] Add coverlet coverage and complete said coverage This commit is not very structured, but it switches coverage collection to coverlet and brings line and branch coverage to 100%.
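For orientation, here is a minimal usage sketch of the GZip + JSON factory methods these tests exercise; it mirrors the integration test added in SaveFileFactoryTest below. It is not part of the patch: the generic type arguments, the TestData model, the "save.dat" path, and the cancellation token are illustrative.

  // Minimal sketch, assuming the SaveChunk/SaveFile factory API shown in the diffs below.
  var ct = CancellationToken.None;
  var data = new TestData { Name = "Hello, World!", Value = 42 };
  var chunk = new SaveChunk<TestData>(
    onSave: _ => data,
    onLoad: (_, loaded) => { data.Name = loaded.Name; data.Value = loaded.Value; }
  );
  // CreateGZipJsonFile wires up file IO, JSON serialization, and GZip compression.
  var saveFile = SaveFile.CreateGZipJsonFile(chunk, "save.dat");
  await saveFile.SaveAsync(cancellationToken: ct);   // serialize -> gzip -> write to the file path
  if (await saveFile.ExistsAsync(ct)) { await saveFile.LoadAsync(ct); }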
--- .../Chickensoft.SaveFileBuilder.Tests.csproj | 8 + .../badges/branch_coverage.svg | 75 +++-- .../badges/line_coverage.svg | 75 +++-- Chickensoft.SaveFileBuilder.Tests/coverage.sh | 66 +--- .../test/src/IO/HttpStreamIOTest.cs | 311 ++++++++++++++++++ .../test/src/SaveFileAsyncTest.cs | 28 ++ .../test/src/SaveFileFactoryTest.cs | 309 +++++++++++++++++ .../test/src/SaveFileTest.cs | 8 + .../Serialization/JsonStreamSerializerTest.cs | 16 +- .../test/src/TestData.cs | 12 + .../src/IO/FileStreamIO.cs | 5 - .../src/IO/HttpStreamIO.cs | 8 +- 12 files changed, 796 insertions(+), 125 deletions(-) create mode 100644 Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileFactoryTest.cs create mode 100644 Chickensoft.SaveFileBuilder.Tests/test/src/TestData.cs diff --git a/Chickensoft.SaveFileBuilder.Tests/Chickensoft.SaveFileBuilder.Tests.csproj b/Chickensoft.SaveFileBuilder.Tests/Chickensoft.SaveFileBuilder.Tests.csproj index 45d3b6d..a2a1d2a 100644 --- a/Chickensoft.SaveFileBuilder.Tests/Chickensoft.SaveFileBuilder.Tests.csproj +++ b/Chickensoft.SaveFileBuilder.Tests/Chickensoft.SaveFileBuilder.Tests.csproj @@ -20,6 +20,14 @@ + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + diff --git a/Chickensoft.SaveFileBuilder.Tests/badges/branch_coverage.svg b/Chickensoft.SaveFileBuilder.Tests/badges/branch_coverage.svg index fca46b2..920f7cc 100644 --- a/Chickensoft.SaveFileBuilder.Tests/badges/branch_coverage.svg +++ b/Chickensoft.SaveFileBuilder.Tests/badges/branch_coverage.svg [SVG badge markup omitted: badge regenerated by ReportGenerator 5.4.8.0 (was 5.1.26.0); reports 100% branch coverage.] diff --git a/Chickensoft.SaveFileBuilder.Tests/badges/line_coverage.svg b/Chickensoft.SaveFileBuilder.Tests/badges/line_coverage.svg index 28d1811..790601a 100644 --- a/Chickensoft.SaveFileBuilder.Tests/badges/line_coverage.svg +++ b/Chickensoft.SaveFileBuilder.Tests/badges/line_coverage.svg [SVG badge markup omitted: badge regenerated by ReportGenerator 5.4.8.0 (was 5.1.26.0); reports 100% line coverage.] diff --git a/Chickensoft.SaveFileBuilder.Tests/coverage.sh b/Chickensoft.SaveFileBuilder.Tests/coverage.sh index e7eeb1d..7e0b862 100755 --- a/Chickensoft.SaveFileBuilder.Tests/coverage.sh +++ b/Chickensoft.SaveFileBuilder.Tests/coverage.sh @@ -1,52 +1,16 @@ #!/bin/bash -# To collect code coverage, you will need the following environment setup: -# -# - A "GODOT" environment variable pointing to the Godot executable -# - ReportGenerator installed -# -# dotnet tool install -g dotnet-reportgenerator-globaltool -# -# - A version of coverlet > 3.2.0. -# -# As of Jan 2023, this is not yet released. -# -# The included `nuget.config` file will allow you to install a nightly -# version of coverlet from the coverlet nightly nuget feed. -# -# dotnet tool install --global coverlet.console --prerelease. -# -# You can build coverlet yourself, but you will need to edit the path to -# coverlet below to point to your local build of the coverlet dll. -# -# If you need help with coverage, feel free to join the Chickensoft Discord.
-# https://chickensoft.games +# dotnet build-server shutdown; dotnet build --no-incremental +dotnet build -dotnet build --no-restore - -coverlet \ - "./.godot/mono/temp/bin/Debug" --verbosity detailed \ - --target $GODOT \ - --targetargs "--run-tests --coverage --quit-on-finish" \ - --format "opencover" \ - --output "./coverage/coverage.xml" \ - --exclude-by-file "**/test/**/*.cs" \ - --exclude-by-file "**/*Microsoft.NET.Test.Sdk.Program.cs" \ - --exclude-by-file "**/Godot.SourceGenerators/**/*.cs" \ - --exclude-assemblies-without-sources "missingall" \ - --skipautoprops - -# Projects included via will be collected in code coverage. -# If you want to exclude them, replace the string below with the names of -# the assemblies to ignore. e.g., -# ASSEMBLIES_TO_REMOVE="-AssemblyToRemove1;-AssemblyToRemove2" -ASSEMBLIES_TO_REMOVE="-Chickensoft.SaveFileBuilder.Tests" +dotnet test \ + -p:CollectCoverage=true \ + -p:CoverletOutputFormat="opencover" \ + -p:CoverletOutput=./coverage/ reportgenerator \ - -reports:"./coverage/coverage.xml" \ + -reports:"./coverage/coverage.opencover.xml" \ -targetdir:"./coverage/report" \ - "-assemblyfilters:$ASSEMBLIES_TO_REMOVE" \ - "-classfilters:-GodotPlugins.Game.Main" \ -reporttypes:"Html;Badges" # Copy badges into their own folder. The badges folder should be included in @@ -61,21 +25,21 @@ mv ./coverage/report/badge_linecoverage.svg ./badges/line_coverage.svg case "$(uname -s)" in Darwin) - echo 'Mac OS X' - open coverage/report/index.htm + echo 'Mac OS X' + open coverage/report/index.htm ;; Linux) - echo 'Linux' - open coverage/report/index.htm + echo 'Linux' + xdg-open coverage/report/index.htm ;; CYGWIN*|MINGW32*|MSYS*|MINGW*) - echo 'MS Windows' - start coverage/report/index.htm + echo 'MS Windows' + start coverage/report/index.htm ;; *) - echo 'Other OS' - ;; + echo 'Other OS' + ;; esac diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/IO/HttpStreamIOTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/IO/HttpStreamIOTest.cs index ff3211a..fc5672f 100644 --- a/Chickensoft.SaveFileBuilder.Tests/test/src/IO/HttpStreamIOTest.cs +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/IO/HttpStreamIOTest.cs @@ -31,8 +31,211 @@ public void Dispose() GC.SuppressFinalize(this); } + #region HttpIORequestUris Tests + + [Fact] + public void HttpIORequestUris_DefaultConstructor_AllUrisAreNull() + { + // Arrange & Act + var uris = new HttpIORequestUris(); + + // Assert + Assert.Null(uris.ReadUri); + Assert.Null(uris.WriteUri); + Assert.Null(uris.ExistsUri); + Assert.Null(uris.DeleteUri); + } + + [Fact] + public void HttpIORequestUris_WithUriParameters_SetsUrisCorrectly() + { + // Arrange + var readUri = new Uri("api/read", UriKind.Relative); + var writeUri = new Uri("api/write", UriKind.Relative); + var existsUri = new Uri("api/exists", UriKind.Relative); + var deleteUri = new Uri("api/delete", UriKind.Relative); + + // Act + var uris = new HttpIORequestUris(readUri, writeUri, existsUri, deleteUri); + + // Assert + Assert.Equal(readUri, uris.ReadUri); + Assert.Equal(writeUri, uris.WriteUri); + Assert.Equal(existsUri, uris.ExistsUri); + Assert.Equal(deleteUri, uris.DeleteUri); + } + + [Fact] + public void HttpIORequestUris_WithStringParameters_SetsUrisCorrectly() + { + // Arrange & Act + var uris = new HttpIORequestUris( + readUri: "api/read", + writeUri: "api/write", + existsUri: "api/exists", + deleteUri: "api/delete" + ); + + // Assert + Assert.NotNull(uris.ReadUri); + Assert.Equal("api/read", uris.ReadUri.ToString()); + Assert.NotNull(uris.WriteUri); + 
Assert.Equal("api/write", uris.WriteUri.ToString()); + Assert.NotNull(uris.ExistsUri); + Assert.Equal("api/exists", uris.ExistsUri.ToString()); + Assert.NotNull(uris.DeleteUri); + Assert.Equal("api/delete", uris.DeleteUri.ToString()); + } + + [Fact] + public void HttpIORequestUris_WithNullStringParameters_SetsUrisToNull() + { + // Arrange & Act + var uris = new HttpIORequestUris( + readUri: null, + writeUri: null, + existsUri: null, + deleteUri: null + ); + + // Assert + Assert.Null(uris.ReadUri); + Assert.Null(uris.WriteUri); + Assert.Null(uris.ExistsUri); + Assert.Null(uris.DeleteUri); + } + + [Fact] + public void HttpIORequestUris_WithPartialStringParameters_SetsSomeUrisCorrectly() + { + // Arrange & Act + var uris = new HttpIORequestUris( + readUri: "api/read", + writeUri: null, + existsUri: "api/exists", + deleteUri: null + ); + + // Assert + Assert.NotNull(uris.ReadUri); + Assert.Equal("api/read", uris.ReadUri.ToString()); + Assert.Null(uris.WriteUri); + Assert.NotNull(uris.ExistsUri); + Assert.Equal("api/exists", uris.ExistsUri.ToString()); + Assert.Null(uris.DeleteUri); + } + + [Fact] + public void HttpIORequestUris_CanBeSet_UsingInitSyntax() + { + // Arrange + var readUri = new Uri("api/read", UriKind.Relative); + var writeUri = new Uri("api/write", UriKind.Relative); + var existsUri = new Uri("api/exists", UriKind.Relative); + var deleteUri = new Uri("api/delete", UriKind.Relative); + // Act + var uris = new HttpIORequestUris( + readUri: "api/ignored", + writeUri: "api/ignored", + existsUri: "api/ignored", + deleteUri: "api/ignored" + ) + { + ReadUri = readUri, + WriteUri = writeUri, + ExistsUri = existsUri, + DeleteUri = deleteUri + }; + // Assert + Assert.Equal(readUri, uris.ReadUri); + Assert.Equal(writeUri, uris.WriteUri); + Assert.Equal(existsUri, uris.ExistsUri); + Assert.Equal(deleteUri, uris.DeleteUri); + } + + #endregion + #region Constructor Tests + [Fact] + public void Constructor_Default_CreatesInstanceSuccessfully() + { + // Arrange & Act + using var streamIO = new HttpStreamIO(); + + // Assert + Assert.NotNull(streamIO); + Assert.NotNull(streamIO.ReadHeaders); + Assert.NotNull(streamIO.WriteHeaders); + } + + [Fact] + public void Constructor_WithTimeout_SetsTimeoutCorrectly() + { + // Arrange + var timeout = TimeSpan.FromSeconds(30); + + // Act + using var streamIO = new HttpStreamIO(timeout); + + // Assert + Assert.NotNull(streamIO); + } + + [Fact] + public void Constructor_WithUriBaseAddress_SetsBaseAddressCorrectly() + { + // Arrange + var baseAddress = new Uri("http://example.com"); + + // Act + using var streamIO = new HttpStreamIO(baseAddress); + + // Assert + Assert.NotNull(streamIO); + } + + [Fact] + public void Constructor_WithUriBaseAddressAndTimeout_SetsPropertiesCorrectly() + { + // Arrange + var baseAddress = new Uri("http://example.com"); + var timeout = TimeSpan.FromSeconds(45); + + // Act + using var streamIO = new HttpStreamIO(baseAddress, timeout); + + // Assert + Assert.NotNull(streamIO); + } + + [Fact] + public void Constructor_WithStringBaseAddress_SetsBaseAddressCorrectly() + { + // Arrange + var baseAddress = "http://example.com"; + + // Act + using var streamIO = new HttpStreamIO(baseAddress); + + // Assert + Assert.NotNull(streamIO); + } + + [Fact] + public void Constructor_WithStringBaseAddressAndTimeout_SetsPropertiesCorrectly() + { + // Arrange + var baseAddress = "http://example.com"; + var timeout = TimeSpan.FromSeconds(60); + + // Act + using var streamIO = new HttpStreamIO(baseAddress, timeout); + + // Assert + 
Assert.NotNull(streamIO); + } + [Fact] public async Task Constructor_WithHttpClientDisposeTrue_DisposesClientOnDispose() { @@ -67,6 +270,24 @@ public void Constructor_WithHttpClientDisposeFalse_DoesNotDisposeClientOnDispose client.Dispose(); } + [Fact] + public void RequestUris_CanBeSet_UsingInitSyntax() + { + // Arrange + var readUri = new Uri("api/custom-read", UriKind.Relative); + var writeUri = new Uri("api/custom-write", UriKind.Relative); + + // Act + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(ReadUri: readUri, WriteUri: writeUri) + }; + + // Assert + Assert.Equal(readUri, streamIO.RequestUris.ReadUri); + Assert.Equal(writeUri, streamIO.RequestUris.WriteUri); + } + #endregion #region Headers Tests @@ -112,6 +333,23 @@ public void WriteHeaders_CanSetContentLength() Assert.Equal(1024, streamIO.WriteHeaders.ContentLength); } + [Fact] + public void WriteHeaders_WithMultipleCustomHeaders_AllHeadersAreSet() + { + // Arrange + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false); + + // Act + streamIO.WriteHeaders.Add("X-Header-1", "value1"); + streamIO.WriteHeaders.Add("X-Header-2", "value2"); + streamIO.WriteHeaders.Add("X-Header-3", "value3"); + + // Assert + Assert.True(streamIO.WriteHeaders.Contains("X-Header-1")); + Assert.True(streamIO.WriteHeaders.Contains("X-Header-2")); + Assert.True(streamIO.WriteHeaders.Contains("X-Header-3")); + } + #endregion #region ReadAsync Tests @@ -189,6 +427,41 @@ await Assert.ThrowsAsync( ); } + [Fact] + public async Task ReadAsync_WithEmptyResponse_ReturnsEmptyStream() + { + // Arrange + _mockHandler.SetupResponse(HttpStatusCode.OK, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(ReadUri: new Uri("api/read", UriKind.Relative)) + }; + + // Act + using var stream = await streamIO.ReadAsync(CancellationToken); + + // Assert + Assert.Equal(0, stream.Length); + } + + [Fact] + public async Task ReadAsync_WithLargeResponse_ReturnsFullContent() + { + // Arrange + var largeContent = new string('X', 1024 * 1024); // 1MB of data + _mockHandler.SetupResponse(HttpStatusCode.OK, largeContent); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(ReadUri: new Uri("api/read", UriKind.Relative)) + }; + + // Act + using var stream = await streamIO.ReadAsync(CancellationToken); + + // Assert + Assert.Equal(largeContent.Length, stream.Length); + } + #endregion #region WriteAsync Tests @@ -299,6 +572,44 @@ await Assert.ThrowsAsync( ); } + [Fact] + public async Task WriteAsync_WithEmptyStream_PostsSuccessfully() + { + // Arrange + var stream = new MemoryStream(); + _mockHandler.SetupResponse(HttpStatusCode.OK, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(WriteUri: new Uri("api/write", UriKind.Relative)) + }; + + // Act + await streamIO.WriteAsync(stream, CancellationToken); + + // Assert + Assert.Equal(0, _mockHandler.LastRequest?.Content?.Headers.ContentLength); + } + + [Fact] + public async Task WriteAsync_WithLargeStream_PostsSuccessfully() + { + // Arrange + var largeData = new byte[1024 * 1024]; // 1MB + Array.Fill(largeData, (byte)'A'); + var stream = new MemoryStream(largeData); + _mockHandler.SetupResponse(HttpStatusCode.OK, ""); + using var streamIO = new HttpStreamIO(_httpClient, disposeClient: false) + { + RequestUris = new HttpIORequestUris(WriteUri: new 
Uri("api/write", UriKind.Relative)) + }; + + // Act + await streamIO.WriteAsync(stream, CancellationToken); + + // Assert + Assert.Equal(largeData.Length, _mockHandler.LastRequest?.Content?.Headers.ContentLength); + } + #endregion #region ExistsAsync Tests diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs index 967a6aa..658b3d7 100644 --- a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileAsyncTest.cs @@ -45,6 +45,34 @@ public SaveFileAsyncTest(ITestContextAccessor testContextAccessor) [Fact] public void Delete_ThrowsInvalidOperationException() => Assert.Throws(SaveFile.Delete); + [Fact] + public void Constructor_WithIStreamIOAndIAsyncStreamSerializer_CreatesInstanceSuccessfully() + { + // Arrange + var mockStreamIO = new Mock(); + var mockAsyncSerializer = new Mock(); + + // Act + var saveFile = new SaveFile(MockChunk.Object, mockStreamIO.Object, mockAsyncSerializer.Object, MockCompressor.Object); + + // Assert + Assert.NotNull(saveFile); + } + + [Fact] + public void Constructor_WithIAsyncStreamIOAndIStreamSerializer_CreatesInstanceSuccessfully() + { + // Arrange + var mockAsyncStreamIO = new Mock(); + var mockSerializer = new Mock(); + + // Act + var saveFile = new SaveFile(MockChunk.Object, mockAsyncStreamIO.Object, mockSerializer.Object, MockCompressor.Object); + + // Assert + Assert.NotNull(saveFile); + } + [Fact] public async Task SaveAsync_WritesCompressesAndSerializes() { diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileFactoryTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileFactoryTest.cs new file mode 100644 index 0000000..3fcee50 --- /dev/null +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileFactoryTest.cs @@ -0,0 +1,309 @@ +namespace Chickensoft.SaveFileBuilder.Tests; + +using System.Text.Json; +using Chickensoft.SaveFileBuilder; +using Chickensoft.SaveFileBuilder.IO; + +public partial class SaveFileFactoryTest +{ + private CancellationToken CancellationToken { get; } + + private Mock> MockChunk { get; } + private const string FILE_PATH = "test_save.dat"; + + public SaveFileFactoryTest(ITestContextAccessor testContextAccessor) + { + CancellationToken = testContextAccessor.Current.CancellationToken; + MockChunk = new Mock>(); + } + + [Fact] + public void CreateGZipJsonFile_WithOptions_CreatesValidInstance() + { + // Arrange + var options = new JsonSerializerOptions { WriteIndented = true }; + + // Act + var saveFile = SaveFile.CreateGZipJsonFile(MockChunk.Object, FILE_PATH, options); + + // Assert + Assert.NotNull(saveFile); + Assert.NotNull(saveFile.Root); + Assert.Equal(MockChunk.Object, saveFile.Root); + Assert.True(saveFile.CanSaveSynchronously); + } + + [Fact] + public void CreateGZipJsonFile_WithNullOptions_CreatesValidInstance() + { + // Act + var saveFile = SaveFile.CreateGZipJsonFile(MockChunk.Object, FILE_PATH); + + // Assert + Assert.NotNull(saveFile); + Assert.NotNull(saveFile.Root); + Assert.Equal(MockChunk.Object, saveFile.Root); + Assert.True(saveFile.CanSaveSynchronously); + } + + [Fact] + public void CreateGZipJsonFile_WithContext_CreatesValidInstance() + { + // Arrange + var context = TestJsonContext.Default; + + // Act + var saveFile = SaveFile.CreateGZipJsonFile(MockChunk.Object, FILE_PATH, context); + + // Assert + Assert.NotNull(saveFile); + Assert.NotNull(saveFile.Root); + Assert.Equal(MockChunk.Object, saveFile.Root); + 
Assert.True(saveFile.CanSaveSynchronously); + } + + [Fact] + public void CreateGZipJsonFile_WithJsonTypeInfo_CreatesValidInstance() + { + // Arrange + var typeInfo = TestJsonContext.Default.TestData; + + // Act + var saveFile = SaveFile.CreateGZipJsonFile(MockChunk.Object, FILE_PATH, typeInfo); + + // Assert + Assert.NotNull(saveFile); + Assert.NotNull(saveFile.Root); + Assert.Equal(MockChunk.Object, saveFile.Root); + Assert.True(saveFile.CanSaveSynchronously); + } + + [Fact] + public void CreateGZipJsonIO_WithIStreamIOAndOptions_CreatesValidInstance() + { + // Arrange + var mockIO = new Mock(); + var options = new JsonSerializerOptions { WriteIndented = true }; + + // Act + var saveFile = SaveFile.CreateGZipJsonIO(MockChunk.Object, mockIO.Object, options); + + // Assert + Assert.NotNull(saveFile); + Assert.NotNull(saveFile.Root); + Assert.Equal(MockChunk.Object, saveFile.Root); + Assert.True(saveFile.CanSaveSynchronously); + } + + [Fact] + public void CreateGZipJsonIO_WithIStreamIOAndNullOptions_CreatesValidInstance() + { + // Arrange + var mockIO = new Mock(); + + // Act + var saveFile = SaveFile.CreateGZipJsonIO(MockChunk.Object, mockIO.Object); + + // Assert + Assert.NotNull(saveFile); + Assert.NotNull(saveFile.Root); + Assert.Equal(MockChunk.Object, saveFile.Root); + Assert.True(saveFile.CanSaveSynchronously); + } + + [Fact] + public void CreateGZipJsonIO_WithIStreamIOAndContext_CreatesValidInstance() + { + // Arrange + var mockIO = new Mock(); + var context = TestJsonContext.Default; + + // Act + var saveFile = SaveFile.CreateGZipJsonIO(MockChunk.Object, mockIO.Object, context); + + // Assert + Assert.NotNull(saveFile); + Assert.NotNull(saveFile.Root); + Assert.Equal(MockChunk.Object, saveFile.Root); + Assert.True(saveFile.CanSaveSynchronously); + } + + [Fact] + public void CreateGZipJsonIO_WithIStreamIOAndJsonTypeInfo_CreatesValidInstance() + { + // Arrange + var mockIO = new Mock(); + var typeInfo = TestJsonContext.Default.TestData; + + // Act + var saveFile = SaveFile.CreateGZipJsonIO(MockChunk.Object, mockIO.Object, typeInfo); + + // Assert + Assert.NotNull(saveFile); + Assert.NotNull(saveFile.Root); + Assert.Equal(MockChunk.Object, saveFile.Root); + Assert.True(saveFile.CanSaveSynchronously); + } + + [Fact] + public void CreateGZipJsonIO_WithIAsyncStreamIOAndOptions_CreatesValidInstance() + { + // Arrange + var mockAsyncIO = new Mock(); + var options = new JsonSerializerOptions { WriteIndented = true }; + + // Act + var saveFile = SaveFile.CreateGZipJsonIO(MockChunk.Object, mockAsyncIO.Object, options); + + // Assert + Assert.NotNull(saveFile); + Assert.NotNull(saveFile.Root); + Assert.Equal(MockChunk.Object, saveFile.Root); + Assert.False(saveFile.CanSaveSynchronously); + } + + [Fact] + public void CreateGZipJsonIO_WithIAsyncStreamIOAndNullOptions_CreatesValidInstance() + { + // Arrange + var mockAsyncIO = new Mock(); + + // Act + var saveFile = SaveFile.CreateGZipJsonIO(MockChunk.Object, mockAsyncIO.Object); + + // Assert + Assert.NotNull(saveFile); + Assert.NotNull(saveFile.Root); + Assert.Equal(MockChunk.Object, saveFile.Root); + Assert.False(saveFile.CanSaveSynchronously); + } + + [Fact] + public void CreateGZipJsonIO_WithIAsyncStreamIOAndContext_CreatesValidInstance() + { + // Arrange + var mockAsyncIO = new Mock(); + var context = TestJsonContext.Default; + + // Act + var saveFile = SaveFile.CreateGZipJsonIO(MockChunk.Object, mockAsyncIO.Object, context); + + // Assert + Assert.NotNull(saveFile); + Assert.NotNull(saveFile.Root); + Assert.Equal(MockChunk.Object, 
saveFile.Root); + Assert.False(saveFile.CanSaveSynchronously); + } + + [Fact] + public void CreateGZipJsonFIO_WithIAsyncStreamIOAndJsonTypeInfo_CreatesValidInstance() + { + // Arrange + var mockAsyncIO = new Mock(); + var typeInfo = TestJsonContext.Default.TestData; + + // Act + var saveFile = SaveFile.CreateGZipJsonFIO(MockChunk.Object, mockAsyncIO.Object, typeInfo); + + // Assert + Assert.NotNull(saveFile); + Assert.NotNull(saveFile.Root); + Assert.Equal(MockChunk.Object, saveFile.Root); + Assert.False(saveFile.CanSaveSynchronously); + } + + [Fact] + public void CreateGZipJsonFile_IntegrationTest_CanSaveAndLoad() + { + // Arrange + var tempFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString() + ".dat"); + var data = new TestData { Name = "Hello, World!", Value = 42 }; + var chunk = new SaveChunk( + onSave: _ => data, + onLoad: (_, loadedData) => + { + data.Name = loadedData.Name; + data.Value = loadedData.Value; + } + ); + + try + { + var saveFile = SaveFile.CreateGZipJsonFile(chunk, tempFile); + + // Act - Save + saveFile.Save(); + + // Assert - File exists + Assert.True(File.Exists(tempFile)); + + // Act - Load + data.Name = "Modified"; + data.Value = 0; + saveFile.Load(); + + // Assert - Data restored + Assert.Equal("Hello, World!", data.Name); + Assert.Equal(42, data.Value); + } + finally + { + // Cleanup + if (File.Exists(tempFile)) + { + File.Delete(tempFile); + } + } + } + + [Fact] + public async Task CreateGZipJsonFile_IntegrationTest_CanSaveAndLoadAsync() + { + // Arrange + var tempFile = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString() + ".dat"); + var data = new TestData { Name = "Async Test", Value = 99 }; + var chunk = new SaveChunk( + onSave: _ => data, + onLoad: (_, loadedData) => + { + data.Name = loadedData.Name; + data.Value = loadedData.Value; + } + ); + + try + { + var saveFile = SaveFile.CreateGZipJsonFile(chunk, tempFile); + + // Act - Save + await saveFile.SaveAsync(cancellationToken: CancellationToken); + + // Assert - File exists + Assert.True(await saveFile.ExistsAsync(CancellationToken)); + + // Act - Load + data.Name = "Modified"; + data.Value = 0; + await saveFile.LoadAsync(CancellationToken); + + // Assert - Data restored + Assert.Equal("Async Test", data.Name); + Assert.Equal(99, data.Value); + + // Act - Delete + var deleted = await saveFile.DeleteAsync(CancellationToken); + + // Assert - File deleted + Assert.True(deleted); + Assert.False(await saveFile.ExistsAsync(CancellationToken)); + } + finally + { + // Cleanup + if (File.Exists(tempFile)) + { + File.Delete(tempFile); + } + } + } +} diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs index 8688bea..a6eae43 100644 --- a/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/SaveFileTest.cs @@ -175,6 +175,14 @@ public void SaveAsync_CompletedSynchronously() Assert.True(task.IsCompletedSuccessfully); } + [Fact] + public void SaveAsync_CompressorIsNull_CompletedSynchronously() + { + SaveFile = new SaveFile(MockChunk.Object, MockIO.Object, MockSerializer.Object, null); + var task = SaveFile.SaveAsync(cancellationToken: TestContext.Current.CancellationToken); + Assert.True(task.IsCompletedSuccessfully); + } + [Fact] public void LoadAsync_CompletedSynchronously() { diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/Serialization/JsonStreamSerializerTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/Serialization/JsonStreamSerializerTest.cs index 
bff9ac7..2138dd7 100644 --- a/Chickensoft.SaveFileBuilder.Tests/test/src/Serialization/JsonStreamSerializerTest.cs +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/Serialization/JsonStreamSerializerTest.cs @@ -2,26 +2,12 @@ namespace Chickensoft.SaveFileBuilder.Tests.Serialization; using System.Text; using System.Text.Json; -using System.Text.Json.Serialization; using Chickensoft.SaveFileBuilder.Serialization; -public partial class JsonStreamSerializerTest(ITestContextAccessor testContextAccessor) +public class JsonStreamSerializerTest(ITestContextAccessor testContextAccessor) { private CancellationToken CancellationToken { get; } = testContextAccessor.Current.CancellationToken; - #region Test Models and Context - - private class TestData - { - public string Name { get; set; } = string.Empty; - public int Value { get; set; } - } - - [JsonSerializable(typeof(TestData))] - private partial class TestJsonContext : JsonSerializerContext; - - #endregion - #region Serialize Tests [Fact] diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/TestData.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/TestData.cs new file mode 100644 index 0000000..c18315b --- /dev/null +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/TestData.cs @@ -0,0 +1,12 @@ +namespace Chickensoft.SaveFileBuilder.Tests; + +using System.Text.Json.Serialization; + +public class TestData +{ + public string Name { get; set; } = string.Empty; + public int Value { get; set; } +} + +[JsonSerializable(typeof(TestData))] +internal partial class TestJsonContext : JsonSerializerContext; diff --git a/Chickensoft.SaveFileBuilder/src/IO/FileStreamIO.cs b/Chickensoft.SaveFileBuilder/src/IO/FileStreamIO.cs index f06f85f..1d9b4b0 100644 --- a/Chickensoft.SaveFileBuilder/src/IO/FileStreamIO.cs +++ b/Chickensoft.SaveFileBuilder/src/IO/FileStreamIO.cs @@ -29,11 +29,6 @@ public FileStreamIO(string fileName) public Stream Write() { FileInfo.Refresh(); - if (FileInfo.DirectoryName == null) - { - throw new DirectoryNotFoundException("The directory of the file does not exist."); - } - Directory.CreateDirectory(FileInfo.DirectoryName); return FileInfo.Open(FileMode.OpenOrCreate, FileAccess.Write); } diff --git a/Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs b/Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs index 50aac90..5acec88 100644 --- a/Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs +++ b/Chickensoft.SaveFileBuilder/src/IO/HttpStreamIO.cs @@ -31,10 +31,10 @@ public HttpIORequestUris( string? existsUri = null, string? deleteUri = null ) : this( - readUri is not null ? new Uri(readUri) : null, - writeUri is not null ? new Uri(writeUri) : null, - existsUri is not null ? new Uri(existsUri) : null, - deleteUri is not null ? new Uri(deleteUri) : null + readUri is not null ? new Uri(readUri, UriKind.RelativeOrAbsolute) : null, + writeUri is not null ? new Uri(writeUri, UriKind.RelativeOrAbsolute) : null, + existsUri is not null ? new Uri(existsUri, UriKind.RelativeOrAbsolute) : null, + deleteUri is not null ? 
new Uri(deleteUri, UriKind.RelativeOrAbsolute) : null ) { } } From 2576bf6bbd68afedefff224c1a3e8e3666bf0d50 Mon Sep 17 00:00:00 2001 From: Mosakaas Date: Tue, 20 Jan 2026 21:20:55 +0100 Subject: [PATCH 19/19] Fix can-be-sealed suggestion More precisely: apply the analyzer's "type can be sealed" suggestion by sealing MockHttpMessageHandler. --- .../test/src/IO/HttpStreamIOTest.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Chickensoft.SaveFileBuilder.Tests/test/src/IO/HttpStreamIOTest.cs b/Chickensoft.SaveFileBuilder.Tests/test/src/IO/HttpStreamIOTest.cs index fc5672f..634bfa2 100644 --- a/Chickensoft.SaveFileBuilder.Tests/test/src/IO/HttpStreamIOTest.cs +++ b/Chickensoft.SaveFileBuilder.Tests/test/src/IO/HttpStreamIOTest.cs @@ -830,7 +830,7 @@ public void Dispose_DisposeClientFalse_DoesNotDisposeHttpClient() /// /// Mock HttpMessageHandler for testing HTTP requests without actual network calls. /// -internal class MockHttpMessageHandler : HttpMessageHandler +internal sealed class MockHttpMessageHandler : HttpMessageHandler { private HttpStatusCode _statusCode = HttpStatusCode.OK; private string _content = "";