diff --git a/eng/pipelines/libraries/fuzzing/deploy-to-onefuzz.yml b/eng/pipelines/libraries/fuzzing/deploy-to-onefuzz.yml index 1f7a53fe649409..98fe3510da28af 100644 --- a/eng/pipelines/libraries/fuzzing/deploy-to-onefuzz.yml +++ b/eng/pipelines/libraries/fuzzing/deploy-to-onefuzz.yml @@ -169,4 +169,12 @@ extends: onefuzzDropDirectory: $(fuzzerProject)/deployment/Utf8JsonWriterFuzzer SYSTEM_ACCESSTOKEN: $(System.AccessToken) displayName: Send Utf8JsonWriterFuzzer to OneFuzz + + - task: onefuzz-task@0 + inputs: + onefuzzOSes: 'Windows' + env: + onefuzzDropDirectory: $(fuzzerProject)/deployment/ZipArchiveFuzzer + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + displayName: Send ZipArchiveFuzzer to OneFuzz # ONEFUZZ_TASK_WORKAROUND_END diff --git a/src/libraries/Common/tests/System/IO/Compression/CompressionStreamUnitTestBase.cs b/src/libraries/Common/tests/System/IO/Compression/CompressionStreamUnitTestBase.cs index a6635777f85832..5d2e194fe60b1f 100644 --- a/src/libraries/Common/tests/System/IO/Compression/CompressionStreamUnitTestBase.cs +++ b/src/libraries/Common/tests/System/IO/Compression/CompressionStreamUnitTestBase.cs @@ -107,7 +107,7 @@ public async Task FlushAsync_DuringFlushAsync() { byte[] buffer = null; string testFilePath = CompressedTestFile(UncompressedTestFile()); - using (var origStream = await LocalMemoryStream.readAppFileAsync(testFilePath)) + using (var origStream = await LocalMemoryStream.ReadAppFileAsync(testFilePath)) { buffer = origStream.ToArray(); } @@ -164,8 +164,8 @@ public virtual async Task Dispose_WithUnfinishedReadAsync() [MemberData(nameof(UncompressedTestFiles))] public async Task Read(string testFile) { - var uncompressedStream = await LocalMemoryStream.readAppFileAsync(testFile); - var compressedStream = await LocalMemoryStream.readAppFileAsync(CompressedTestFile(testFile)); + var uncompressedStream = await LocalMemoryStream.ReadAppFileAsync(testFile); + var compressedStream = await LocalMemoryStream.ReadAppFileAsync(CompressedTestFile(testFile)); using var decompressor = CreateStream(compressedStream, CompressionMode.Decompress); var decompressorOutput = new MemoryStream(); @@ -199,7 +199,7 @@ public async Task Read(string testFile) [Fact] public async Task Read_EndOfStreamPosition() { - var compressedStream = await LocalMemoryStream.readAppFileAsync(CompressedTestFile(UncompressedTestFile())); + var compressedStream = await LocalMemoryStream.ReadAppFileAsync(CompressedTestFile(UncompressedTestFile())); int compressedEndPosition = (int)compressedStream.Length; var rand = new Random(1024); int _bufferSize = BufferSize * 2 - 568; @@ -219,7 +219,7 @@ public async Task Read_EndOfStreamPosition() public async Task Read_BaseStreamSlowly() { string testFile = UncompressedTestFile(); - var uncompressedStream = await LocalMemoryStream.readAppFileAsync(testFile); + var uncompressedStream = await LocalMemoryStream.ReadAppFileAsync(testFile); var compressedStream = new BadWrappedStream(BadWrappedStream.Mode.ReadSlowly, File.ReadAllBytes(CompressedTestFile(testFile))); using var decompressor = CreateStream(compressedStream, CompressionMode.Decompress); var decompressorOutput = new MemoryStream(); @@ -354,7 +354,7 @@ public async Task TestLeaveOpenAfterValidDecompress() //Create the Stream int _bufferSize = 1024; var bytes = new byte[_bufferSize]; - Stream compressedStream = await LocalMemoryStream.readAppFileAsync(CompressedTestFile(UncompressedTestFile())); + Stream compressedStream = await LocalMemoryStream.ReadAppFileAsync(CompressedTestFile(UncompressedTestFile())); 
Stream decompressor = CreateStream(compressedStream, CompressionMode.Decompress, leaveOpen: false); //Read some data and Close the stream @@ -426,7 +426,7 @@ public void BaseStreamTest(CompressionMode mode) [InlineData(CompressionMode.Decompress)] public async Task BaseStream_Modify(CompressionMode mode) { - using (var baseStream = await LocalMemoryStream.readAppFileAsync(CompressedTestFile(UncompressedTestFile()))) + using (var baseStream = await LocalMemoryStream.ReadAppFileAsync(CompressedTestFile(UncompressedTestFile()))) using (var compressor = CreateStream(baseStream, mode)) { int size = 1024; @@ -457,7 +457,7 @@ public void BaseStream_NullAfterDisposeWithFalseLeaveOpen(CompressionMode mode) [InlineData(CompressionMode.Decompress)] public async Task BaseStream_ValidAfterDisposeWithTrueLeaveOpen(CompressionMode mode) { - var ms = await LocalMemoryStream.readAppFileAsync(CompressedTestFile(UncompressedTestFile())); + var ms = await LocalMemoryStream.ReadAppFileAsync(CompressedTestFile(UncompressedTestFile())); using var decompressor = CreateStream(ms, mode, leaveOpen: true); var baseStream = BaseStream(decompressor); Assert.Same(ms, baseStream); @@ -475,7 +475,7 @@ public async Task BaseStream_ValidAfterDisposeWithTrueLeaveOpen(CompressionMode [MemberData(nameof(UncompressedTestFilesZLib))] public async Task CompressionLevel_SizeInOrder(string testFile) { - using var uncompressedStream = await LocalMemoryStream.readAppFileAsync(testFile); + using var uncompressedStream = await LocalMemoryStream.ReadAppFileAsync(testFile); async Task GetLengthAsync(CompressionLevel compressionLevel) { @@ -501,7 +501,7 @@ async Task GetLengthAsync(CompressionLevel compressionLevel) [MemberData(nameof(UncompressedTestFilesZLib))] public async Task ZLibCompressionOptions_SizeInOrder(string testFile) { - using var uncompressedStream = await LocalMemoryStream.readAppFileAsync(testFile); + using var uncompressedStream = await LocalMemoryStream.ReadAppFileAsync(testFile); async Task GetLengthAsync(int compressionLevel) { @@ -512,7 +512,7 @@ async Task GetLengthAsync(int compressionLevel) await compressor.FlushAsync(); return mms.Length; } - + long fastestLength = await GetLengthAsync(1); long optimalLength = await GetLengthAsync(5); long smallestLength = await GetLengthAsync(9); @@ -525,7 +525,7 @@ async Task GetLengthAsync(int compressionLevel) [MemberData(nameof(ZLibOptionsRoundTripTestData))] public async Task RoundTripWithZLibCompressionOptions(string testFile, ZLibCompressionOptions options) { - using var uncompressedStream = await LocalMemoryStream.readAppFileAsync(testFile); + using var uncompressedStream = await LocalMemoryStream.ReadAppFileAsync(testFile); var compressedStream = await CompressTestFile(uncompressedStream, options); using var decompressor = CreateStream(compressedStream, mode: CompressionMode.Decompress); using var decompressorOutput = new MemoryStream(); diff --git a/src/libraries/Common/tests/System/IO/Compression/LocalMemoryStream.cs b/src/libraries/Common/tests/System/IO/Compression/LocalMemoryStream.cs index 549dafa7ef6dd3..82c35fe4718ba8 100644 --- a/src/libraries/Common/tests/System/IO/Compression/LocalMemoryStream.cs +++ b/src/libraries/Common/tests/System/IO/Compression/LocalMemoryStream.cs @@ -16,7 +16,7 @@ public LocalMemoryStream Clone() return local; } - public static async Task readAppFileAsync(string testFile) + public static async Task ReadAppFileAsync(string testFile) { var baseStream = await StreamHelpers.CreateTempCopyStream(testFile); var ms = new 
LocalMemoryStream(); diff --git a/src/libraries/Common/tests/System/IO/Compression/NoAsyncCallsStream.cs b/src/libraries/Common/tests/System/IO/Compression/NoAsyncCallsStream.cs new file mode 100644 index 00000000000000..592a94bc9b4ab0 --- /dev/null +++ b/src/libraries/Common/tests/System/IO/Compression/NoAsyncCallsStream.cs @@ -0,0 +1,64 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Threading; +using System.Threading.Tasks; + +namespace System.IO.Compression.Tests; + +// A stream meant to be used for testing that an implementation's sync methods do not accidentally call any async methods. +internal sealed class NoAsyncCallsStream : Stream +{ + private readonly Stream _s; + + public NoAsyncCallsStream(Stream stream) => _s = stream; + + // Allows temporarily disabling the current stream's async API usage restriction. + public bool IsRestrictionEnabled { get; set; } + + public override bool CanRead => _s.CanRead; + public override bool CanSeek => _s.CanSeek; + public override bool CanTimeout => _s.CanTimeout; + public override bool CanWrite => _s.CanWrite; + public override long Length => _s.Length; + public override long Position { get => _s.Position; set => _s.Position = value; } + public override int ReadTimeout { get => _s.ReadTimeout; set => _s.ReadTimeout = value; } + public override int WriteTimeout { get => _s.WriteTimeout; set => _s.WriteTimeout = value; } + public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback? callback, object? state) => _s.BeginRead(buffer, offset, count, callback, state); + public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback? callback, object? state) => _s.BeginWrite(buffer, offset, count, callback, state); + public override void Close() => _s.Close(); + public override int EndRead(IAsyncResult asyncResult) => _s.EndRead(asyncResult); + public override void EndWrite(IAsyncResult asyncResult) => _s.EndWrite(asyncResult); + public override bool Equals(object? obj) => _s.Equals(obj); + public override int GetHashCode() => _s.GetHashCode(); + public override int ReadByte() => _s.ReadByte(); + public override long Seek(long offset, SeekOrigin origin) => _s.Seek(offset, origin); + public override void SetLength(long value) => _s.SetLength(value); + public override string? ToString() => _s.ToString(); + + // Sync + public override void CopyTo(Stream destination, int bufferSize) => _s.CopyTo(destination, bufferSize); + protected override void Dispose(bool disposing) => _s.Dispose(); + public override void Flush() => _s.Flush(); + public override int Read(byte[] buffer, int offset, int count) => _s.Read(buffer, offset, count); + public override int Read(Span<byte> buffer) => _s.Read(buffer); + public override void Write(byte[] buffer, int offset, int count) => _s.Write(buffer, offset, count); + public override void Write(ReadOnlySpan<byte> buffer) => _s.Write(buffer); + public override void WriteByte(byte value) => _s.WriteByte(value); + + // Async + public override Task CopyToAsync(Stream destination, int bufferSize, CancellationToken cancellationToken) => + IsRestrictionEnabled ? throw new InvalidOperationException() : _s.CopyToAsync(destination, bufferSize, cancellationToken); + public override ValueTask DisposeAsync() => + IsRestrictionEnabled ?
throw new InvalidOperationException() : _s.DisposeAsync(); + public override Task FlushAsync(CancellationToken cancellationToken) => + IsRestrictionEnabled ? throw new InvalidOperationException() : _s.FlushAsync(); + public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) => + IsRestrictionEnabled ? throw new InvalidOperationException() : _s.ReadAsync(buffer, offset, count, cancellationToken); + public override ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default) => + IsRestrictionEnabled ? throw new InvalidOperationException() : _s.ReadAsync(buffer, cancellationToken); + public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) => + IsRestrictionEnabled ? throw new InvalidOperationException() : _s.WriteAsync(buffer, offset, count, cancellationToken); + public override ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = default) => + IsRestrictionEnabled ? throw new InvalidOperationException() : _s.WriteAsync(buffer, cancellationToken); +} diff --git a/src/libraries/Common/tests/System/IO/Compression/NoSyncCallsStream.cs b/src/libraries/Common/tests/System/IO/Compression/NoSyncCallsStream.cs new file mode 100644 index 00000000000000..6adf468d0a12a7 --- /dev/null +++ b/src/libraries/Common/tests/System/IO/Compression/NoSyncCallsStream.cs @@ -0,0 +1,132 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Threading; +using System.Threading.Tasks; + +namespace System.IO.Compression.Tests; + +// A stream meant to be used for testing that an implementation's async methods do not accidentally call any sync methods. +internal sealed class NoSyncCallsStream : Stream +{ + private readonly Stream _s; + + public NoSyncCallsStream(Stream stream) => _s = stream; + + // Allows temporarily disabling the current stream's sync API usage restriction. + public bool IsRestrictionEnabled { get; set; } + + public override bool CanRead => _s.CanRead; + public override bool CanSeek => _s.CanSeek; + public override bool CanTimeout => _s.CanTimeout; + public override bool CanWrite => _s.CanWrite; + public override long Length => _s.Length; + public override long Position { get => _s.Position; set => _s.Position = value; } + public override int ReadTimeout { get => _s.ReadTimeout; set => _s.ReadTimeout = value; } + public override int WriteTimeout { get => _s.WriteTimeout; set => _s.WriteTimeout = value; } + public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback? callback, object? state) => _s.BeginRead(buffer, offset, count, callback, state); + public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback? callback, object? state) => _s.BeginWrite(buffer, offset, count, callback, state); + public override void Close() => _s.Close(); + public override int EndRead(IAsyncResult asyncResult) => _s.EndRead(asyncResult); + public override void EndWrite(IAsyncResult asyncResult) => _s.EndWrite(asyncResult); + public override bool Equals(object? obj) => _s.Equals(obj); + public override int GetHashCode() => _s.GetHashCode(); + public override int ReadByte() => _s.ReadByte(); + public override long Seek(long offset, SeekOrigin origin) => _s.Seek(offset, origin); + public override void SetLength(long value) => _s.SetLength(value); + public override string?
ToString() => _s.ToString(); + + // Sync + public override void CopyTo(Stream destination, int bufferSize) + { + if (IsRestrictionEnabled) + { + throw new InvalidOperationException(); + } + else + { + _s.CopyTo(destination, bufferSize); + } + } + protected override void Dispose(bool disposing) + { + if (IsRestrictionEnabled) + { + throw new InvalidOperationException(); + } + else + { + _s.Dispose(); + } + } + public override void Flush() + { + if (IsRestrictionEnabled) + { + throw new InvalidOperationException(); + } + else + { + _s.Flush(); + } + } + public override int Read(byte[] buffer, int offset, int count) => + IsRestrictionEnabled ? throw new InvalidOperationException() : _s.Read(buffer, offset, count); + public override int Read(Span<byte> buffer) => + IsRestrictionEnabled ? throw new InvalidOperationException() : _s.Read(buffer); + public override void Write(byte[] buffer, int offset, int count) + { + bool isDeflateStream = false; + + // Get the stack trace to determine the calling method + var stackTrace = new System.Diagnostics.StackTrace(); + var callingMethod = stackTrace.GetFrame(1)?.GetMethod(); + + // Check if the calling method belongs to the DeflateStream class + if (callingMethod?.DeclaringType == typeof(System.IO.Compression.DeflateStream)) + { + isDeflateStream = true; + } + + if (!isDeflateStream && IsRestrictionEnabled) + { + throw new InvalidOperationException($"Parent class is {callingMethod?.DeclaringType}"); + } + else + { + _s.Write(buffer, offset, count); + } + } + public override void Write(ReadOnlySpan<byte> buffer) + { + if (IsRestrictionEnabled) + { + throw new InvalidOperationException(); + } + else + { + _s.Write(buffer); + } + } + public override void WriteByte(byte value) + { + if (IsRestrictionEnabled) + { + throw new InvalidOperationException(); + } + else + { + _s.WriteByte(value); + } + } + + // Async + public override Task CopyToAsync(Stream destination, int bufferSize, CancellationToken cancellationToken) => _s.CopyToAsync(destination, bufferSize, cancellationToken); + public override ValueTask DisposeAsync() => _s.DisposeAsync(); + public override Task FlushAsync(CancellationToken cancellationToken) => _s.FlushAsync(cancellationToken); + public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) => _s.ReadAsync(buffer, offset, count, cancellationToken); + public override ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default) => _s.ReadAsync(buffer, cancellationToken); + public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) => _s.WriteAsync(buffer, offset, count, cancellationToken); + public override ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = default) => _s.WriteAsync(buffer, cancellationToken); +} diff --git a/src/libraries/Common/tests/System/IO/Compression/StreamHelpers.cs b/src/libraries/Common/tests/System/IO/Compression/StreamHelpers.cs index 474de992235e77..5ddafa7887589c --- a/src/libraries/Common/tests/System/IO/Compression/StreamHelpers.cs +++ b/src/libraries/Common/tests/System/IO/Compression/StreamHelpers.cs @@ -9,7 +9,7 @@ public static partial class StreamHelpers { public static async Task<MemoryStream> CreateTempCopyStream(string path) { - var bytes = File.ReadAllBytes(path); + var bytes = await File.ReadAllBytesAsync(path); var ms = new MemoryStream(); await ms.WriteAsync(bytes, 0, bytes.Length);
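The two wrapper streams above are the enforcement mechanism for the sync/async split exercised by the new tests: NoAsyncCallsStream throws if a synchronous ZipArchive code path slips into an async stream API, and NoSyncCallsStream throws if the async path falls back to a sync one. A minimal sketch of how a test might use the latter follows; the test name, payload, and use of the CreateZipFile helper are illustrative, not part of this diff.

```csharp
// Illustrative sketch only: wrap the archive's backing stream so that any
// synchronous stream call made while reading asynchronously throws.
[Fact]
public async Task AsyncRead_DoesNotUseSyncStreamApis()
{
    // CreateZipFile is the test helper added later in this PR; the payload is arbitrary.
    byte[] zipBytes = await CreateZipFile(entryCount: 1, entryContents: new byte[] { 1, 2, 3 }, async: true);

    var source = new NoSyncCallsStream(new MemoryStream(zipBytes)) { IsRestrictionEnabled = true };

    await using (ZipArchive archive = await ZipArchive.CreateAsync(source, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null))
    {
        foreach (ZipArchiveEntry entry in archive.Entries)
        {
            // Any sync Read/Flush/Dispose on 'source' during this loop fails the test.
            await using Stream entryStream = await entry.OpenAsync();
            await entryStream.CopyToAsync(Stream.Null);
        }
    }
}
```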
diff --git a/src/libraries/Common/tests/System/IO/Compression/ZipTestHelper.ZipFile.cs b/src/libraries/Common/tests/System/IO/Compression/ZipTestHelper.ZipFile.cs new file mode 100644 index 00000000000000..f5e1db167b95b1 --- /dev/null +++ b/src/libraries/Common/tests/System/IO/Compression/ZipTestHelper.ZipFile.cs @@ -0,0 +1,319 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; + +namespace System.IO.Compression.Tests; + +public partial class ZipFileTestBase : FileCleanupTestBase +{ + protected Task<ZipArchive> CallZipFileOpen(bool async, string archiveFileName, ZipArchiveMode mode) + { + return async ? + ZipFile.OpenAsync(archiveFileName, mode) : + Task.FromResult(ZipFile.Open(archiveFileName, mode)); + } + + protected Task<ZipArchive> CallZipFileOpen(bool async, string archiveFileName, ZipArchiveMode mode, Encoding? entryNameEncoding) + { + return async ? + ZipFile.OpenAsync(archiveFileName, mode, entryNameEncoding) : + Task.FromResult(ZipFile.Open(archiveFileName, mode, entryNameEncoding)); + } + + protected Task<ZipArchive> CallZipFileOpenRead(bool async, string archiveFileName) + { + return async ? + ZipFile.OpenReadAsync(archiveFileName) : + Task.FromResult(ZipFile.OpenRead(archiveFileName)); + } + + protected Task<ZipArchiveEntry> CallZipFileExtensionsCreateEntryFromFile(bool async, ZipArchive archive, string fileName, string entryName) + { + return async ? + archive.CreateEntryFromFileAsync(fileName, entryName) : + Task.FromResult(archive.CreateEntryFromFile(fileName, entryName)); + } + + protected Task<ZipArchiveEntry> CallZipFileExtensionsCreateEntryFromFile(bool async, ZipArchive archive, string fileName, string entryName, CompressionLevel compressionLevel) + { + return async ?
+ archive.CreateEntryFromFileAsync(fileName, entryName, compressionLevel) : + Task.FromResult(archive.CreateEntryFromFile(fileName, entryName, compressionLevel)); + } + + protected Task CallExtractToFile(bool async, ZipArchiveEntry entry, string destinationFileName) + { + if (async) + { + return entry.ExtractToFileAsync(destinationFileName, overwrite: false); + } + else + { + entry.ExtractToFile(destinationFileName); + return Task.CompletedTask; + } + } + + protected Task CallExtractToFile(bool async, ZipArchiveEntry entry, string destinationFileName, bool overwrite) + { + if (async) + { + return entry.ExtractToFileAsync(destinationFileName, overwrite); + } + else + { + entry.ExtractToFile(destinationFileName, overwrite); + return Task.CompletedTask; + } + } + + protected Task CallZipFileCreateFromDirectory(bool async, string sourceDirectoryName, Stream destination) + { + if (async) + { + return ZipFile.CreateFromDirectoryAsync(sourceDirectoryName, destination); + } + else + { + ZipFile.CreateFromDirectory(sourceDirectoryName, destination); + return Task.CompletedTask; + } + } + + protected Task CallZipFileCreateFromDirectory(bool async, string sourceDirectoryName, string destinationArchiveFileName) + { + if (async) + { + return ZipFile.CreateFromDirectoryAsync(sourceDirectoryName, destinationArchiveFileName); + } + else + { + ZipFile.CreateFromDirectory(sourceDirectoryName, destinationArchiveFileName); + return Task.CompletedTask; + } + } + + protected Task CallZipFileCreateFromDirectory(bool async, string sourceDirectoryName, string destinationArchiveFileName, CompressionLevel compressionLevel, bool includeBaseDirectory) + { + if (async) + { + return ZipFile.CreateFromDirectoryAsync(sourceDirectoryName, destinationArchiveFileName, compressionLevel, includeBaseDirectory); + } + else + { + ZipFile.CreateFromDirectory(sourceDirectoryName, destinationArchiveFileName, compressionLevel, includeBaseDirectory); + return Task.CompletedTask; + } + } + + protected Task CallZipFileCreateFromDirectory(bool async, string sourceDirectoryName, string destinationArchiveFileName, CompressionLevel compressionLevel, bool includeBaseDirectory, Encoding? entryNameEncoding) + { + if (async) + { + return ZipFile.CreateFromDirectoryAsync(sourceDirectoryName, destinationArchiveFileName, compressionLevel, includeBaseDirectory, entryNameEncoding); + } + else + { + ZipFile.CreateFromDirectory(sourceDirectoryName, destinationArchiveFileName, compressionLevel, includeBaseDirectory, entryNameEncoding); + return Task.CompletedTask; + } + } + + protected Task CallZipFileCreateFromDirectory(bool async, string sourceDirectoryName, Stream destination, CompressionLevel compressionLevel, bool includeBaseDirectory) + { + if (async) + { + return ZipFile.CreateFromDirectoryAsync(sourceDirectoryName, destination, compressionLevel, includeBaseDirectory); + } + else + { + ZipFile.CreateFromDirectory(sourceDirectoryName, destination, compressionLevel, includeBaseDirectory); + return Task.CompletedTask; + } + } + + protected Task CallZipFileCreateFromDirectory(bool async, string sourceDirectoryName, Stream destination, CompressionLevel compressionLevel, bool includeBaseDirectory, Encoding? 
entryNameEncoding) + { + if (async) + { + return ZipFile.CreateFromDirectoryAsync(sourceDirectoryName, destination, compressionLevel, includeBaseDirectory, entryNameEncoding); + } + else + { + ZipFile.CreateFromDirectory(sourceDirectoryName, destination, compressionLevel, includeBaseDirectory, entryNameEncoding); + return Task.CompletedTask; + } + } + + protected Task CallZipFileExtractToDirectory(bool async, Stream source, string destinationDirectoryName) + { + if (async) + { + return ZipFile.ExtractToDirectoryAsync(source, destinationDirectoryName); + } + else + { + ZipFile.ExtractToDirectory(source, destinationDirectoryName); + return Task.CompletedTask; + } + } + + protected Task CallZipFileExtractToDirectory(bool async, Stream source, string destinationDirectoryName, bool overwriteFiles) + { + if (async) + { + return ZipFile.ExtractToDirectoryAsync(source, destinationDirectoryName, overwriteFiles); + } + else + { + ZipFile.ExtractToDirectory(source, destinationDirectoryName, overwriteFiles); + return Task.CompletedTask; + } + } + + protected Task CallZipFileExtractToDirectory(bool async, Stream source, string destinationDirectoryName, Encoding? entryNameEncoding) + { + if (async) + { + return ZipFile.ExtractToDirectoryAsync(source, destinationDirectoryName, entryNameEncoding); + } + else + { + ZipFile.ExtractToDirectory(source, destinationDirectoryName, entryNameEncoding); + return Task.CompletedTask; + } + } + + protected Task CallZipFileExtractToDirectory(bool async, Stream source, string destinationDirectoryName, Encoding? entryNameEncoding, bool overwriteFiles) + { + if (async) + { + return ZipFile.ExtractToDirectoryAsync(source, destinationDirectoryName, entryNameEncoding, overwriteFiles); + } + else + { + ZipFile.ExtractToDirectory(source, destinationDirectoryName, entryNameEncoding, overwriteFiles); + return Task.CompletedTask; + } + } + + protected Task CallZipFileExtractToDirectory(bool async, string sourceArchiveFileName, string destinationDirectoryName) + { + if (async) + { + return ZipFile.ExtractToDirectoryAsync(sourceArchiveFileName, destinationDirectoryName); + } + else + { + ZipFile.ExtractToDirectory(sourceArchiveFileName, destinationDirectoryName); + return Task.CompletedTask; + } + } + + protected Task CallZipFileExtractToDirectory(bool async, string sourceArchiveFileName, string destinationDirectoryName, bool overwriteFiles) + { + if (async) + { + return ZipFile.ExtractToDirectoryAsync(sourceArchiveFileName, destinationDirectoryName, overwriteFiles); + } + else + { + ZipFile.ExtractToDirectory(sourceArchiveFileName, destinationDirectoryName, overwriteFiles); + return Task.CompletedTask; + } + } + + protected Task CallZipFileExtractToDirectory(bool async, string sourceArchiveFileName, string destinationDirectoryName, Encoding? entryNameEncoding) + { + if (async) + { + return ZipFile.ExtractToDirectoryAsync(sourceArchiveFileName, destinationDirectoryName, entryNameEncoding); + } + else + { + ZipFile.ExtractToDirectory(sourceArchiveFileName, destinationDirectoryName, entryNameEncoding); + return Task.CompletedTask; + } + } + + protected Task CallZipFileExtractToDirectory(bool async, string sourceArchiveFileName, string destinationDirectoryName, Encoding? 
entryNameEncoding, bool overwriteFiles) + { + if (async) + { + return ZipFile.ExtractToDirectoryAsync(sourceArchiveFileName, destinationDirectoryName, entryNameEncoding, overwriteFiles); + } + else + { + ZipFile.ExtractToDirectory(sourceArchiveFileName, destinationDirectoryName, entryNameEncoding, overwriteFiles); + return Task.CompletedTask; + } + } + + protected static Task CallZipFileExtensionsExtractToDirectory(bool async, ZipArchive archive, string destinationDirectoryName) + { + if (async) + { + return archive.ExtractToDirectoryAsync(destinationDirectoryName); + } + else + { + archive.ExtractToDirectory(destinationDirectoryName); + return Task.CompletedTask; + } + } + + public static IEnumerable Get_Unix_ZipWithInvalidFileNames_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { "NullCharFileName_FromWindows", async }; + yield return new object[] { "NullCharFileName_FromUnix", async }; + } + } + + public static IEnumerable Get_Unix_ZipWithOSSpecificFileNames_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { "backslashes_FromUnix", "aa\\bb\\cc\\dd", async }; + yield return new object[] { "backslashes_FromWindows", "aa\\bb\\cc\\dd", async }; + yield return new object[] { "WindowsInvalid_FromUnix", "aad", async }; + yield return new object[] { "WindowsInvalid_FromWindows", "aad", async }; + } + } + + public static IEnumerable Get_Windows_ZipWithOSSpecificFileNames_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { "backslashes_FromUnix", "dd", async }; + yield return new object[] { "backslashes_FromWindows", "dd", async }; + } + } + + /// + /// This test checks whether or not ZipFile.ExtractToDirectory() is capable of handling filenames + /// which contain invalid path characters in Windows. 
+ /// Archive: InvalidWindowsFileNameChars.zip + /// Test/ + /// Test/normalText.txt + /// Test"<>|^A^B^C^D^E^F^G^H^I^J^K^L^M^N^O^P^Q^R^S^T^U^V^W^X^Y^Z^[^\^]^^^_/ + /// Test"<>|^A^B^C^D^E^F^G^H^I^J^K^L^M^N^O^P^Q^R^S^T^U^V^W^X^Y^Z^[^\^]^^^_/TestText1"<>|^A^B^C^D^E^F^G^H^I^J^K^L^M^N^O^P^Q^R^S^T^U^V^W^X^Y^Z^[^\^]^^^_.txt + /// TestEmpty/ + /// TestText"<>|^A^B^C^D^E^F^G^H^I^J^K^L^M^N^O^P^Q^R^S^T^U^V^W^X^Y^Z^[^\^]^^^_.txt + /// + public static IEnumerable Get_Windows_ZipWithInvalidFileNames_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { "InvalidWindowsFileNameChars.zip", new string[] { "TestText______________________________________.txt", "Test______________________________________/TestText1______________________________________.txt", "Test/normalText.txt" }, async }; + yield return new object[] { "NullCharFileName_FromWindows.zip", new string[] { "a_6b6d" }, async }; + yield return new object[] { "NullCharFileName_FromUnix.zip", new string[] { "a_6b6d" }, async }; + yield return new object[] { "WindowsInvalid_FromUnix.zip", new string[] { "aa_b_d" }, async }; + yield return new object[] { "WindowsInvalid_FromWindows.zip", new string[] { "aa_b_d" }, async }; + } + } +} diff --git a/src/libraries/Common/tests/System/IO/Compression/ZipTestHelper.cs b/src/libraries/Common/tests/System/IO/Compression/ZipTestHelper.cs index 3bd1a5113da156..5d610bb461cb4b 100644 --- a/src/libraries/Common/tests/System/IO/Compression/ZipTestHelper.cs +++ b/src/libraries/Common/tests/System/IO/Compression/ZipTestHelper.cs @@ -26,7 +26,7 @@ protected TempFile CreateTempCopyFile(string path, string newPath) return newfile; } - public static long LengthOfUnseekableStream(Stream s) + public static async Task LengthOfUnseekableStream(Stream s) { long totalBytes = 0; const int bufSize = 4096; @@ -35,7 +35,7 @@ public static long LengthOfUnseekableStream(Stream s) do { - bytesRead = s.Read(buf, 0, bufSize); + bytesRead = await s.ReadAsync(buf, 0, bufSize); totalBytes += bytesRead; } while (bytesRead > 0); @@ -43,7 +43,7 @@ public static long LengthOfUnseekableStream(Stream s) } // reads exactly bytesToRead out of stream, unless it is out of bytes - public static void ReadBytes(Stream stream, byte[] buffer, long bytesToRead) + public static async Task ReadBytes(Stream stream, byte[] buffer, long bytesToRead, bool async) { int bytesLeftToRead; if (bytesToRead > int.MaxValue) @@ -58,13 +58,17 @@ public static void ReadBytes(Stream stream, byte[] buffer, long bytesToRead) while (bytesLeftToRead > 0) { - int bytesRead = stream.Read(buffer, totalBytesRead, bytesLeftToRead); + int bytesRead = async ? 
+ await stream.ReadAsync(buffer, totalBytesRead, bytesLeftToRead) : + stream.Read(buffer, totalBytesRead, bytesLeftToRead); + if (bytesRead == 0) throw new IOException("Unexpected end of stream"); totalBytesRead += bytesRead; bytesLeftToRead -= bytesRead; } } + public static async Task ReadAllBytesAsync(Stream stream, byte[] buffer, int offset, int count) { int bytesRead; @@ -111,11 +115,6 @@ public static void StreamsEqual(Stream ast, Stream bst) StreamsEqual(ast, bst, -1); } - public static async Task StreamsEqualAsync(Stream ast, Stream bst) - { - await StreamsEqualAsync(ast, bst, -1); - } - public static void StreamsEqual(Stream ast, Stream bst, int blocksToRead) { if (ast.CanSeek) @@ -188,15 +187,13 @@ public static async Task StreamsEqualAsync(Stream ast, Stream bst, int blocksToR } while (ac == bufSize); } - public static async Task IsZipSameAsDirAsync(string archiveFile, string directory, ZipArchiveMode mode) - { - await IsZipSameAsDirAsync(archiveFile, directory, mode, requireExplicit: false, checkTimes: false); - } + public static async Task IsZipSameAsDir(string archiveFile, string directory, ZipArchiveMode mode, bool async) => + await IsZipSameAsDir(archiveFile, directory, mode, requireExplicit: false, checkTimes: false, async); - public static async Task IsZipSameAsDirAsync(string archiveFile, string directory, ZipArchiveMode mode, bool requireExplicit, bool checkTimes) + public static async Task IsZipSameAsDir(string archiveFile, string directory, ZipArchiveMode mode, bool requireExplicit, bool checkTimes, bool async) { var s = await StreamHelpers.CreateTempCopyStream(archiveFile); - IsZipSameAsDir(s, directory, mode, requireExplicit, checkTimes); + await IsZipSameAsDir(s, directory, mode, requireExplicit, checkTimes, async); } public static byte[] NormalizeLineEndings(byte[] str) @@ -207,104 +204,110 @@ public static byte[] NormalizeLineEndings(byte[] str) return Text.Encoding.Default.GetBytes(rep); } - public static void IsZipSameAsDir(Stream archiveFile, string directory, ZipArchiveMode mode, bool requireExplicit, bool checkTimes) + public static async Task IsZipSameAsDir(Stream archiveFile, string directory, ZipArchiveMode mode, bool requireExplicit, bool checkTimes, bool async) { - int count = 0; + ZipArchive archive = await CreateZipArchive(async, archiveFile, mode); + + List files = FileData.InPath(directory); - using (ZipArchive archive = new ZipArchive(archiveFile, mode)) + int count = 0; + foreach (FileData file in files) { - List files = FileData.InPath(directory); - Assert.All(files, (file) => { - count++; - string entryName = file.FullName; - if (file.IsFolder) - entryName += Path.DirectorySeparatorChar; - ZipArchiveEntry entry = archive.GetEntry(entryName); - if (entry == null) - { - entryName = FlipSlashes(entryName); - entry = archive.GetEntry(entryName); - } - if (file.IsFile) - { - Assert.NotNull(entry); - long givenLength = entry.Length; + count++; + string entryName = file.FullName; + if (file.IsFolder) + entryName += Path.DirectorySeparatorChar; + ZipArchiveEntry entry = archive.GetEntry(entryName); + if (entry == null) + { + entryName = FlipSlashes(entryName); + entry = archive.GetEntry(entryName); + } + if (file.IsFile) + { + Assert.NotNull(entry); - var buffer = new byte[entry.Length]; - using (Stream entrystream = entry.Open()) - { - ReadAllBytes(entrystream, buffer, 0, buffer.Length); + // IMPORTANT: Call Length before opening the entry in Update mode + long givenLength = entry.Length; + var buffer = new byte[entry.Length]; + + Stream entryStream = 
await OpenEntryStream(async, entry); + + ReadAllBytes(entryStream, buffer, 0, buffer.Length); #if NET - uint zipcrc = entry.Crc32; - Assert.Equal(CRC.CalculateCRC(buffer), zipcrc); + uint zipcrc = entry.Crc32; + Assert.Equal(CRC.CalculateCRC(buffer), zipcrc); #endif - if (file.Length != givenLength) - { - buffer = NormalizeLineEndings(buffer); - } + if (file.Length != givenLength) + { + buffer = NormalizeLineEndings(buffer); + } - Assert.Equal(file.Length, buffer.Length); - ulong crc = CRC.CalculateCRC(buffer); - Assert.Equal(file.CRC, crc.ToString()); - } + Assert.Equal(file.Length, buffer.Length); + ulong crc = CRC.CalculateCRC(buffer); + Assert.Equal(file.CRC, crc.ToString()); + + await DisposeStream(async, entryStream); + + if (checkTimes) + { + const int zipTimestampResolution = 2; // Zip follows the FAT timestamp resolution of two seconds for file records + DateTime lower = file.LastModifiedDate.AddSeconds(-zipTimestampResolution); + DateTime upper = file.LastModifiedDate.AddSeconds(zipTimestampResolution); + Assert.InRange(entry.LastWriteTime.Ticks, lower.Ticks, upper.Ticks); + } - if (checkTimes) + Assert.Equal(file.Name, entry.Name); + Assert.Equal(entryName, entry.FullName); + Assert.Equal(entryName, entry.ToString()); + Assert.Equal(archive, entry.Archive); + } + else if (file.IsFolder) + { + if (entry == null) //entry not found + { + string entryNameOtherSlash = FlipSlashes(entryName); + bool isEmpty = !files.Any( + f => f.IsFile && + (f.FullName.StartsWith(entryName, StringComparison.OrdinalIgnoreCase) || + f.FullName.StartsWith(entryNameOtherSlash, StringComparison.OrdinalIgnoreCase))); + if (requireExplicit || isEmpty) { - const int zipTimestampResolution = 2; // Zip follows the FAT timestamp resolution of two seconds for file records - DateTime lower = file.LastModifiedDate.AddSeconds(-zipTimestampResolution); - DateTime upper = file.LastModifiedDate.AddSeconds(zipTimestampResolution); - Assert.InRange(entry.LastWriteTime.Ticks, lower.Ticks, upper.Ticks); + Assert.Contains("emptydir", entryName); } - Assert.Equal(file.Name, entry.Name); - Assert.Equal(entryName, entry.FullName); - Assert.Equal(entryName, entry.ToString()); - Assert.Equal(archive, entry.Archive); + if ((!requireExplicit && !isEmpty) || entryName.Contains("emptydir")) + count--; //discount this entry } - else if (file.IsFolder) + else { - if (entry == null) //entry not found - { - string entryNameOtherSlash = FlipSlashes(entryName); - bool isEmpty = !files.Any( - f => f.IsFile && - (f.FullName.StartsWith(entryName, StringComparison.OrdinalIgnoreCase) || - f.FullName.StartsWith(entryNameOtherSlash, StringComparison.OrdinalIgnoreCase))); - if (requireExplicit || isEmpty) - { - Assert.Contains("emptydir", entryName); - } + Stream es = await OpenEntryStream(async, entry); - if ((!requireExplicit && !isEmpty) || entryName.Contains("emptydir")) - count--; //discount this entry + try + { + Assert.Equal(0, es.Length); } - else + catch (NotSupportedException) { - using (Stream es = entry.Open()) + try { - try - { - Assert.Equal(0, es.Length); - } - catch (NotSupportedException) - { - try - { - Assert.Equal(-1, es.ReadByte()); - } - catch (Exception) - { - Console.WriteLine("Didn't return EOF"); - throw; - } - } + Assert.Equal(-1, es.ReadByte()); + } + catch (Exception) + { + Console.WriteLine("Didn't return EOF"); + throw; } } + + await DisposeStream(async, es); } - }); - Assert.Equal(count, archive.Entries.Count); + } } + Assert.Equal(count, archive.Entries.Count); + + await DisposeZipArchive(async, archive); } 
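IsZipSameAsDir above now threads a bool async flag through every archive operation via the CreateZipArchive, OpenEntryStream, DisposeStream, and DisposeZipArchive helpers defined later in this file. A hedged sketch of the calling pattern a typical [Theory] driven by Get_Booleans_Data would follow; the test body and assertions are illustrative, not part of this diff.

```csharp
// Illustrative sketch: one [Theory] body exercises both the synchronous and
// asynchronous ZipArchive code paths, selected by the bool from Get_Booleans_Data.
[Theory]
[MemberData(nameof(Get_Booleans_Data))]
public async Task RoundTripEntry_BothModes(bool async)
{
    byte[] payload = { 1, 2, 3, 4 };
    using var ms = new MemoryStream();

    // Create an archive with a single entry, using either Open()/Dispose() or OpenAsync()/DisposeAsync().
    ZipArchive archive = await CreateZipArchive(async, ms, ZipArchiveMode.Create, leaveOpen: true);
    Stream writeStream = await OpenEntryStream(async, archive.CreateEntry("file.bin"));
    if (async)
    {
        await writeStream.WriteAsync(payload);
    }
    else
    {
        writeStream.Write(payload);
    }
    await DisposeStream(async, writeStream);
    await DisposeZipArchive(async, archive);

    // Read the entry back the same way and verify the round trip.
    ms.Position = 0;
    archive = await CreateZipArchive(async, ms, ZipArchiveMode.Read, leaveOpen: true);
    Stream readStream = await OpenEntryStream(async, archive.GetEntry("file.bin"));
    byte[] roundTripped = new byte[payload.Length];
    await ReadBytes(readStream, roundTripped, payload.Length, async);
    Assert.Equal(payload, roundTripped);
    await DisposeStream(async, readStream);
    await DisposeZipArchive(async, archive);
}
```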
private static string FlipSlashes(string name) @@ -316,6 +319,9 @@ private static string FlipSlashes(string name) name; } + public static FileStream CreateFileStreamRead(bool async, string fileName) => + new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read, bufferSize: 4096, useAsync: async); + public static void DirsEqual(string actual, string expected) { var expectedList = FileData.InPath(expected); @@ -358,107 +364,149 @@ private static void ItemEqual(string[] actualList, List expectedList, { Stream sa = StreamHelpers.CreateTempCopyStream(aEntry).Result; Stream sb = StreamHelpers.CreateTempCopyStream(bEntry).Result; - StreamsEqual(sa, sb); + StreamsEqual(sa, sb); // Not testing zip features, can always be async } } } - /// Tests the Span overloads of Write + /// Tests the Span and Memory overloads of Write and WriteAsync. /// Writes in chunks of 5 to test Write with a nonzero offset - public static async Task CreateFromDir(string directory, Stream archiveStream, ZipArchiveMode mode, bool useSpansForWriting = false, bool writeInChunks = false) + public static async Task CreateFromDir(string directory, Stream archiveStream, bool async, ZipArchiveMode mode, bool useSpansForWriting = false, bool writeInChunks = false) { var files = FileData.InPath(directory); - using (ZipArchive archive = new ZipArchive(archiveStream, mode, true)) + + ZipArchive archive = await CreateZipArchive(async, archiveStream, mode, leaveOpen: true); + + foreach (var i in files) { - foreach (var i in files) + if (i.IsFolder) { - if (i.IsFolder) - { - string entryName = i.FullName; + string entryName = i.FullName; - ZipArchiveEntry e = archive.CreateEntry(entryName.Replace('\\', '/') + "/"); - e.LastWriteTime = i.LastModifiedDate; - } + ZipArchiveEntry e = archive.CreateEntry(entryName.Replace('\\', '/') + "/"); + e.LastWriteTime = i.LastModifiedDate; } + } - foreach (var i in files) + foreach (var i in files) + { + if (i.IsFile) { - if (i.IsFile) + string entryName = i.FullName; + + MemoryStream installStream = await StreamHelpers.CreateTempCopyStream(Path.Combine(i.OrigFolder, i.FullName)); + + if (installStream != null) { - string entryName = i.FullName; + ZipArchiveEntry e = archive.CreateEntry(entryName.Replace('\\', '/')); + e.LastWriteTime = i.LastModifiedDate; - var installStream = await StreamHelpers.CreateTempCopyStream(Path.Combine(i.OrigFolder, i.FullName)); + Stream entryStream = await OpenEntryStream(async, e); - if (installStream != null) + int bytesRead; + var buffer = new byte[1024]; + if (useSpansForWriting) { - ZipArchiveEntry e = archive.CreateEntry(entryName.Replace('\\', '/')); - e.LastWriteTime = i.LastModifiedDate; - using (Stream entryStream = e.Open()) + while ((bytesRead = await installStream.ReadAsync(buffer)) != 0) { - int bytesRead; - var buffer = new byte[1024]; - if (useSpansForWriting) + if (async) { - while ((bytesRead = installStream.Read(new Span(buffer))) != 0) - { - entryStream.Write(new ReadOnlySpan(buffer, 0, bytesRead)); - } + await entryStream.WriteAsync(buffer.AsMemory(0, bytesRead)); } - else if (writeInChunks) + else { - while ((bytesRead = installStream.Read(buffer, 0, buffer.Length)) != 0) + entryStream.Write(buffer.AsSpan(0, bytesRead)); + } + } + } + else if (writeInChunks) + { + while ((bytesRead = await installStream.ReadAsync(buffer, 0, buffer.Length)) != 0) + { + for (int k = 0; k < bytesRead; k += 5) + { + int count = Math.Min(5, bytesRead - k); + if (async) + { + await entryStream.WriteAsync(buffer, k, count); + } + else { - for (int k = 0; k < 
bytesRead; k += 5) - entryStream.Write(buffer, k, Math.Min(5, bytesRead - k)); + entryStream.Write(buffer, k, count); } } + } + } + else + { + while ((bytesRead = await installStream.ReadAsync(buffer, 0, buffer.Length)) != 0) + { + if (async) + { + await entryStream.WriteAsync(buffer, 0, bytesRead); + } else { - while ((bytesRead = installStream.Read(buffer, 0, buffer.Length)) != 0) - { - entryStream.Write(buffer, 0, bytesRead); - } + entryStream.Write(buffer, 0, bytesRead); } } } + + await DisposeStream(async, entryStream); } } } + + await DisposeZipArchive(async, archive); } - internal static void AddEntry(ZipArchive archive, string name, string contents, DateTimeOffset lastWrite) + internal static async Task AddEntry(ZipArchive archive, string name, string contents, DateTimeOffset lastWrite, bool async) { ZipArchiveEntry e = archive.CreateEntry(name); e.LastWriteTime = lastWrite; - using (StreamWriter w = new StreamWriter(e.Open())) - { - w.WriteLine(contents); - } + + Stream entryStream = await OpenEntryStream(async, e); + + // Leave the streamopen so we can test the entry stream's disposing + StreamWriter w = new StreamWriter(entryStream, encoding: null, bufferSize: 2048, leaveOpen: true); + await w.WriteLineAsync(contents); + + await DisposeStream(async, entryStream); } - public static byte[] CreateZipFile(int entryCount, byte[] entryContents) + public static async Task CreateZipFile(int entryCount, byte[] entryContents, bool async) { - using (MemoryStream ms = new()) + MemoryStream ms = new(); + + ZipArchive createdArchive = await CreateZipArchive(async, ms, ZipArchiveMode.Create, leaveOpen: true); + + for (int i = 0; i < entryCount; i++) { - using (ZipArchive createdArchive = new(ms, ZipArchiveMode.Create, true)) - { - for (int i = 0; i < entryCount; i++) - { - string fileName = $"dummydata/{i}.bin"; - ZipArchiveEntry newEntry = createdArchive.CreateEntry(fileName); + string fileName = $"dummydata/{i}.bin"; + ZipArchiveEntry newEntry = createdArchive.CreateEntry(fileName); - newEntry.LastWriteTime = DateTimeOffset.Now.AddHours(-1.0); - using (Stream entryWriteStream = newEntry.Open()) - { - entryWriteStream.Write(entryContents); - entryWriteStream.WriteByte((byte)(i % byte.MaxValue)); - } - } + newEntry.LastWriteTime = DateTimeOffset.Now.AddHours(-1.0); + + Stream entryWriteStream = await OpenEntryStream(async, newEntry); + if (async) + { + await entryWriteStream.WriteAsync(entryContents); + } + else + { + entryWriteStream.Write(entryContents); } - ms.Flush(); + entryWriteStream.WriteByte((byte)(i % byte.MaxValue)); - return ms.ToArray(); + await DisposeStream(async, entryWriteStream); } + + await DisposeZipArchive(async, createdArchive); + + await ms.FlushAsync(); + await ms.DisposeAsync(); + + return ms.ToArray(); } protected const string Utf8SmileyEmoji = "\ud83d\ude04"; @@ -475,14 +523,56 @@ public static byte[] CreateZipFile(int entryCount, byte[] entryContents) protected static readonly string ALettersUShortMaxValueMinusOneAndCopyRightChar = ALettersUShortMaxValueMinusOne + Utf8CopyrightChar; protected static readonly string ALettersUShortMaxValueMinusOneAndTwoCopyRightChars = ALettersUShortMaxValueMinusOneAndCopyRightChar + Utf8CopyrightChar; + protected static readonly bool[] _bools = [false, true]; + + public static async Task CreateZipArchive(bool async, Stream stream, ZipArchiveMode mode, bool leaveOpen = false, Encoding entryNameEncoding = null) + { + return async ? 
+ await ZipArchive.CreateAsync(stream, mode, leaveOpen, entryNameEncoding) : + new ZipArchive(stream, mode, leaveOpen, entryNameEncoding); + } + + public static async Task DisposeZipArchive(bool async, ZipArchive archive) + { + if (async) + { + await archive.DisposeAsync(); + } + else + { + archive.Dispose(); + } + } + + public static async Task OpenEntryStream(bool async, ZipArchiveEntry entry) + { + return async ? await entry.OpenAsync() : entry.Open(); + } + + public static async Task DisposeStream(bool async, Stream stream) + { + if (async) + { + await stream.DisposeAsync(); + } + else + { + stream.Dispose(); + } + } + public static IEnumerable Get_Booleans_Data() => _bools.Select(b => new object[] { b }); + // Returns pairs that are returned the same way by Utf8 and Latin1 // Returns: originalComment, expectedComment private static IEnumerable SharedComment_Data() { - yield return new object[] { null, string.Empty }; - yield return new object[] { string.Empty, string.Empty }; - yield return new object[] { "a", "a" }; - yield return new object[] { Utf8LowerCaseOUmlautChar, Utf8LowerCaseOUmlautChar }; + foreach (bool async in _bools) + { + yield return new object[] { null, string.Empty, async }; + yield return new object[] { string.Empty, string.Empty, async }; + yield return new object[] { "a", "a", async }; + yield return new object[] { Utf8LowerCaseOUmlautChar, Utf8LowerCaseOUmlautChar, async }; + } } // Returns pairs as expected by Utf8 @@ -499,14 +589,17 @@ public static IEnumerable Utf8Comment_Data() // so it should not be truncated if it's the last character and the total length is not over the limit. string utf8OriginalALettersAndOneEmojiFits = "aaaaa" + Utf8SmileyEmoji; - yield return new object[] { asciiOriginalOverMaxLength, ALettersUShortMaxValue }; - yield return new object[] { utf8OriginalALettersAndOneEmojiDoesNotFit, ALettersUShortMaxValueMinusOne }; - yield return new object[] { utf8OriginalALettersAndOneEmojiFits, utf8OriginalALettersAndOneEmojiFits }; - - foreach (object[] e in SharedComment_Data()) + foreach (bool async in _bools) { - yield return e; + yield return new object[] { asciiOriginalOverMaxLength, ALettersUShortMaxValue, async }; + yield return new object[] { utf8OriginalALettersAndOneEmojiDoesNotFit, ALettersUShortMaxValueMinusOne, async }; + yield return new object[] { utf8OriginalALettersAndOneEmojiFits, utf8OriginalALettersAndOneEmojiFits, async }; } + + foreach (object[] e in SharedComment_Data()) + { + yield return e; + } } // Returns pairs as expected by Latin1 @@ -518,23 +611,29 @@ public static IEnumerable Latin1Comment_Data() string latin1ExpectedALettersAndOneOUmlaut = ALettersUShortMaxValueMinusOne + Utf8LowerCaseOUmlautChar; string latin1OriginalALettersAndTwoOUmlauts = latin1ExpectedALettersAndOneOUmlaut + Utf8LowerCaseOUmlautChar; - yield return new object[] { latin1OriginalALettersAndTwoOUmlauts, latin1ExpectedALettersAndOneOUmlaut }; - - foreach (object[] e in SharedComment_Data()) + foreach (bool async in _bools) { - yield return e; + yield return new object[] { latin1OriginalALettersAndTwoOUmlauts, latin1ExpectedALettersAndOneOUmlaut, async }; } + + foreach (object[] e in SharedComment_Data()) + { + yield return e; + } } // Returns pairs encoded with Latin1, but decoded with UTF8. 
// Returns: originalComment, expectedComment, transcoded expectedComment public static IEnumerable MismatchingEncodingComment_Data() { - foreach (object[] e in Latin1Comment_Data()) + foreach (bool async in _bools) { - byte[] expectedBytes = Encoding.Latin1.GetBytes(e[1] as string); - - yield return new object[] { e[0], e[1], Encoding.UTF8.GetString(expectedBytes) }; + foreach (object[] e in Latin1Comment_Data()) + { + byte[] expectedBytes = Encoding.Latin1.GetBytes(e[1] as string); + + yield return new object[] { e[0], e[1], Encoding.UTF8.GetString(expectedBytes), async }; + } } } } diff --git a/src/libraries/Fuzzing/DotnetFuzzing/Dictionaries/ziparchive.dict b/src/libraries/Fuzzing/DotnetFuzzing/Dictionaries/ziparchive.dict new file mode 100644 index 00000000000000..584672d47db7f2 --- /dev/null +++ b/src/libraries/Fuzzing/DotnetFuzzing/Dictionaries/ziparchive.dict @@ -0,0 +1,12 @@ + # empty.zip +"\x50\x4B\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" +# emptydir.zip +"\x50\x4B\x03\x04\x0A\x00\x00\x00\x00\x00\x50\x8D\xEC\x3C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\x65\x6D\x70\x74\x79\x2F\x50\x4B\x01\x02\x14\x00\x0A\x00\x00\x00\x00\x00\x50\x8D\xEC\x3C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x65\x6D\x70\x74\x79\x2F\x50\x4B\x05\x06\x00\x00\x00\x00\x01\x00\x01\x00\x34\x00\x00\x00\x24\x00\x00\x00\x00\x00" +# large.zip +"\x50\x4B\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" +# unicode.zip +"\x50\x4B\x03\x04\x14\x00\x00\x08\x08\x00\xC5\x6A\xE6\x3C\x3E\xF3\x58\x6F\xF8\x02\x00\x00\x8C\x05\x00\x00\x0A\x00\x00\x00\xED\x95\x9C\xEA\xB8\x80\x2E\x74\x78\x74\x6D\x54\xDB\x4E\xDB\x40\x10\x7D\xAF\xC4\x3F\xEC\x63\x90\x22\xA4\xB6\x3C\xB4\xDF\x82\xF8\x17\x27\x35\x90\x92\x20\x12\xE5\x66\xC0\x0E\xA6\x04\x92\xD2\x50\x36\x24\x04\xA7\x18\xF1\x2F\x48\xA1\xAD\x67\xAC\xFE\x42\xCF\xEC\x3A\x58\x20\x9E\x92\xDD\x9D\xCB\x99\x73\xCE\xF8\xDF\xDD\xC3\xA3\xE3\xA5\x6D\x3F\x89\x9C\x47\xE7\x80\x83\x29\x05\x31\xED\xB6\x94\xFC\x3B\x77\x39\x70\x14\x9F\xC6\x5C\xF5\x93\xEB\xFB\xC2\xA2\x31\x7C\x3A\x28\xFF\x9E\x35\x57\xF1\xAA\x78\xE0\xC8\x73\x72\xFD\x85\xFD\x98\x4E\x7C\x25\x85\x4A\x23\xAA\xF6\x51\xA8\x28\x27\xD2\x2D\x3A\x9B\x2E\x2F\x4C\x1F\x8F\xCA\x1E\x4E\x74\x16\x2B\xF2\xEF\x70\x96\x5E\xC9\x24\x64\x3F\xE4\xCE\xD4\xC6\xF8\x02\xA4\x5B\x67\xD7\x57\x34\x77\xE9\xE4\x8A\xEA\x1E\x77\x2B\xA8\xB3\xA6\x92\xDB\x48\x12\xA5\x71\x5A\xD2\x2A\x89\x00\x40\x3B\x7C\x38\xA5\x3D\xAD\xD8\xEB\xF3\xA0\xA4\x6C\x17\x7A\xA8\x73\x10\x29\xFE\x56\x45\xB0\x04\x16\xF1\x3E\x04\xDC\x21\x85\x15\xE4\x20\x0C\x43\xE9\x44\x1F\x98\x61\xF6\x86\x78\x35\x61\xAF\x8A\x9A\x21\x22\xB9\x2C\x90\xD7\xB0\xA7\xFC\x19\x83\x48\xA1\xFC\x1C\xC4\xA6\x7F\xF3\x0A\x55\x57\x8B\x8A\x2E\x86\x54\xF6\x95\x41\xD7\xDD\x96\x4E\xD4\x38\x4B\x6E\x63\x53\x8F\x7B\x5B\x98\x62\x94\x44\x1A\xE3\x2A\x6E\xD7\x68\xD0\x4B\xAE\x6B\x8A\xF5\xC8\xD0\x1A\xB8\x52\x5B\x21\x0C\x89\x2F\x2B\xCB\x45\xCF\x41\xCD\x11\x1D\x8F\x84\x91\x2C\x18\x8D\xA4\x67\x26\x4C\x9E\x86\x1E\xB4\x6F\x22\x84\xC6\x95\x77\x2B\xEF\xAC\xE6\xE8\x75\xCF\x21\x00\x56\xE7\x10\x19\x30\xA0\x87\x95\x6E\xCC\xEE\x18\x45\x65\x54\x88\xC8\x73\xA3\x54\x36\x3E\x6F\xD7\xE8\x7C\x04\x99\xAC\x59\x4C\xFF\xDC\x03\x47\x15\xBA\x8A\x39\x6C\x73\x50\x43\x48\x8E\x81\x3D\x07\x4A\x43\x86\xBE\x0B\xDC\xEC\x9A\x68\x1B\x57\x78\xBA\x3C\xFD\x7B\x3C\x59\x7D\xD3\x82\xD3\xA5\xF8\x7C\xA2\xD9\x0D\x15\x80\x52\xCD\xB1\x58\x8D\x0B\xAB\x01\x47\x37\x16\x00\xCD\x9C\xD4\xD5\x98\xFD\x5E\xA2\xEA\x1E\x7D\xD7\x10\x61\x12\x51\xCF\xE
3\xC0\xCB\x4A\x66\x58\xB9\xA9\x91\x0D\xDE\x77\x01\x4B\xD1\xE0\x2B\x77\x75\xDA\xAB\x18\x54\x9D\x88\x46\x51\xE1\xCF\x8F\x5F\x4F\x9D\x1D\x41\xB5\xF1\x61\xB3\x98\x3D\x00\xB8\x79\x38\x8A\xCC\xC3\xC7\xCD\xCC\xEB\x18\xA5\x52\x58\xEC\xEF\x2D\x2A\x26\x43\x6D\xAC\x6F\x4A\x17\x9A\x55\xC0\x55\x32\xD6\xF0\x64\x51\xA5\xDE\x1C\xDA\x98\x4A\xED\x56\x5A\x9A\x16\x0C\x13\x63\x6A\xF6\x8D\x37\xB4\xE6\x43\x2D\x63\x41\x1A\x29\x52\xB4\x91\x6E\xDA\xD9\x31\x86\xE9\x00\xBE\x46\xE4\x5B\xF1\x39\x79\x66\x9C\xF2\x4F\x9E\xB5\x90\x60\x00\x84\x0E\x08\xC4\xB5\x31\x59\xD8\xE7\x10\x1A\xF6\x2C\xA9\x05\xAA\x0E\x8B\xCA\xA2\xC1\xEF\xB2\x57\x2E\x43\xDA\xF0\x53\x4F\xBE\x04\x66\xF9\x78\xE6\x73\x88\x93\xAD\x3B\x73\xE9\x62\x0B\xD2\x4A\xA5\x55\xF5\xFE\xD3\xE7\x75\xDA\x12\xCB\x34\xB0\x8A\x89\x46\x5E\x49\x28\x4F\x8F\x5C\x99\x23\xB9\xB9\x14\xEE\x16\x7E\x35\xE3\xAE\x98\xDD\x82\x6A\x73\xDB\xD9\x79\x31\x43\x86\x5C\xFE\xE5\xB4\x59\x0F\xD8\xAF\x95\xA5\x57\xEC\x69\xED\x9D\x99\x0C\xA0\xB1\x06\xD2\xD8\xAC\x80\xD9\x74\x74\x11\xFD\x5F\x0C\x46\x93\x69\xBA\x7D\xF3\x6C\xB3\x58\x99\x4F\xC6\x54\x2C\x93\x7D\x92\x30\x1C\x97\x47\xD2\xC6\x6E\xCB\x2B\x6F\x5A\x1A\xDC\x3A\xF4\x35\x66\x0A\x04\x87\x02\x3D\xF9\x1E\x61\x7B\x12\x1D\x2D\x67\x7D\x5E\x0A\x45\xCD\xB1\x75\x61\x2E\xCF\xDA\x7F\x50\x4B\x01\x02\x14\x00\x14\x00\x00\x08\x08\x00\xC5\x6A\xE6\x3C\x3E\xF3\x58\x6F\xF8\x02\x00\x00\x8C\x05\x00\x00\x0A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\xED\x95\x9C\xEA\xB8\x80\x2E\x74\x78\x74\x50\x4B\x05\x06\x00\x00\x00\x00\x01\x00\x01\x00\x38\x00\x00\x00\x20\x03\x00\x00\x00\x00" +# fake64.zip +"\x50\x4B\x03\x04\x14\x00\x00\x00\x08\x00\x45\x5F\xE6\x3C\xA8\x9E\xD7\x9F\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x08\x00\x14\x00\x74\x65\x78\x74\x2E\x74\x78\x74\x01\x00\x10\x00\xFD\x04\x00\x00\x00\x00\x00\x00\xC8\x02\x00\x00\x00\x00\x00\x00\x55\x54\xCB\x6E\xDB\x30\x10\xBC\x17\xC8\x3F\xEC\x07\x38\xFA\x80\xDE\x92\x22\x48\x72\x68\x03\x24\x6E\x83\x1E\x57\xE2\x4A\x22\x4C\x71\x55\x92\xB2\xA0\x7C\x7D\x67\x29\xC7\x68\x2F\x86\xC9\x7D\xCD\xCC\x0E\xF5\x5C\x68\xE5\x4C\x4C\x6D\xF2\xC3\x58\xA8\xD3\xE0\xC8\xF1\x46\x3E\xD2\xDD\x9C\x7C\x38\x10\x47\x47\x65\x14\xEA\x82\x76\xA7\x4C\xAB\x24\xA1\x5C\x92\x3F\xF9\x38\x20\xE0\x53\x11\x89\x0D\xBD\xFB\x98\x8B\x46\x7A\x9B\x7C\x19\x0F\x34\xFA\x4C\xDD\x88\x2E\x71\xF9\xF8\x08\xE2\xD0\xB0\xA8\xDD\x62\x92\x70\x2E\x38\xA3\x33\x49\xDF\x6B\x2A\x84\x90\xE4\x8E\x67\xA9\x93\xCE\x3E\x08\xAD\x3E\xBA\x03\xE5\xE0\xE7\x19\xD5\x7F\x16\xDF\x9D\xC2\x86\x70\xD2\x65\x18\x6B\xDA\x10\x38\x67\x72\xAA\x29\x93\xF6\xF4\xCB\x77\x45\xD3\x46\xDF\x39\x66\xAF\x31\x1F\x90\x54\x73\xA3\x96\x6B\xBD\x44\xBB\xB2\x79\x73\x92\xB3\xC4\x02\xEA\x79\xF5\x29\x58\x87\x21\xF9\x52\x36\x72\x0B\xE0\xF5\x49\x27\x42\x5C\x92\xD1\xE4\xA0\xF8\x5D\xC1\x0C\x14\xA6\xE6\xE6\xCB\xCD\x97\x23\x10\x8C\x1C\xC2\x0A\xB1\xF2\x24\xA1\x58\x87\x56\xBD\x71\xED\xB8\x6D\x79\x90\x2A\x9D\x09\x9A\x78\xA0\x89\x4B\x6E\xE8\x0E\x69\x51\xD0\xD8\x59\xBA\xC7\x78\x93\x5C\x97\x84\xAA\x59\x33\xC6\x1D\x00\x4E\x29\x70\x42\x3D\xB4\x81\x4E\xC6\x90\x9C\xCF\x73\xE0\xED\x80\x99\x8E\x5A\x28\x4E\x85\xBB\x93\x38\x64\x57\x31\x56\x40\x69\x08\xEB\x74\x32\x43\x08\x04\xB2\x9F\xE6\xB0\x01\x83\x91\x4E\x93\x2E\x99\x7A\xEE\xE4\x40\x93\x26\xD3\x19\x01\xA6\x49\x4A\x32\xAD\x9D\x7C\xB5\x36\x35\x03\xC8\x2C\x82\xB8\xFD\x69\x75\x29\x06\xA4\x6C\xB7\xBD\x3F\xA3\xDC\x54\x40\x7C\x14\x3E\x6F\xD4\x06\xA0\x20\x6B\x0E\x38\xE3\xCE\x38\x2D\xC3\x20\x0E\xA3\x47\x9C\xB2\x4E\xE8\x2A\x5C\x40\x31\x5F\x5D\x82\xEE\xAE\xD2\xAB\x43\x51\xEB\x13\x82\x17\x33\x46\xA5\x25\x03\x61\xDA\x20\x7D\x4D\x08\xBE\x2F\x0D\x3D\x9C\x05\x90\x4B\xBD\x69\x25\x57\xC1\x8B\x9F\x24\x93\xDF\x0B\xB3\x04
\x87\xAD\xAD\x9A\xCC\x9B\xD5\xB9\x96\x3F\x63\xB2\xC4\xBD\x4E\x82\x74\xF0\x6E\x47\xDD\x92\x12\x2E\xAD\x0E\xFF\xAD\x57\x8F\xC5\xDB\xB2\xCD\xFE\xA1\x3E\x06\xD8\xE7\x1F\x58\x33\x27\x4B\xDB\xDB\x74\x1A\x75\x82\x55\x12\x24\xC1\x8A\x6C\x06\xE2\x5C\x60\xBC\x4A\xEB\x89\x8B\xD0\xBB\xC8\xA9\xA1\xA3\xC9\x1A\xF8\x13\xA2\x91\xE8\x6B\xFF\x4C\xCB\x5C\x41\x7E\xAA\x02\x6D\x47\xB5\xB4\xFD\x59\x6D\xB7\xD1\xC7\x2A\x69\x5D\x3A\xD3\x99\x81\x5C\x21\xCD\x12\x3A\x49\xB6\x1A\x0C\xB7\x27\x55\xDF\x2E\x12\x4F\xC1\xF6\x63\xB4\x72\xD0\x35\xC0\x2D\xA0\x5E\xC0\xA9\xCE\x4D\x1C\x2E\x7A\x69\xBC\x78\x66\x6B\xE8\x05\xFE\xC0\xEA\x28\x60\x4E\x55\x4D\x67\x18\xD1\x17\xB9\x2A\x7F\x9B\x47\xEE\xCB\xC1\xCE\x17\x8F\x56\x0F\xD8\xF9\x7F\x6B\xD1\xC0\x1F\xE2\xEA\xD3\xB9\x7A\xF2\x53\x3E\xB3\xFD\xAE\x9E\x11\x80\x45\xAB\x21\x40\xD8\x63\x36\xDB\x27\x45\x09\xA2\x16\x13\xD4\x99\x3B\x2F\xFB\xDA\x90\xD4\x6B\x00\x1D\xDA\x74\xD9\xED\x88\x2A\x89\xF5\x38\x41\x81\x86\xEE\x9F\x1F\xE9\xFE\xF5\xE5\xF8\xF4\xF0\x4A\xCF\x6F\xF4\x7E\x77\xFC\xF6\xF4\xFC\xE3\x91\x7E\xBF\xFC\xDC\x51\xE3\xCB\x52\x37\xD3\x4A\x84\x13\x47\xF3\x4B\xE2\xD8\xFC\x05\x50\x4B\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\x45\x5F\xE6\x3C\xA8\x9E\xD7\x9F\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x08\x00\x14\x00\x00\x00\x00\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x74\x65\x78\x74\x2E\x74\x78\x74\x01\x00\x10\x00\xFD\x04\x00\x00\x00\x00\x00\x00\xC8\x02\x00\x00\x00\x00\x00\x00\x50\x4B\x06\x06\x2C\x00\x00\x00\x00\x00\x00\x00\x2D\x00\x2D\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x4A\x00\x00\x00\x00\x00\x00\x00\x02\x03\x00\x00\x00\x00\x00\x00\x50\x4B\x06\x07\x00\x00\x00\x00\x4C\x03\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x50\x4B\x05\x06\x00\x00\x00\x00\x01\x00\x01\x00\x4A\x00\x00\x00\xFF\xFF\xFF\xFF\x00\x00" +# encrypted_entries_weak.zip 
+"\x50\x4B\x03\x04\x14\x00\x01\x00\x00\x00\xFA\x89\xBC\x54\x0A\x2C\x47\xD2\x29\x00\x00\x00\x1D\x00\x00\x00\x13\x00\x00\x00\x66\x69\x6C\x65\x31\x2D\x65\x6E\x63\x72\x79\x70\x74\x65\x64\x2E\x74\x78\x74\xA5\x85\x59\x1B\x10\x63\x11\xAE\x32\x12\x71\x57\x67\x49\xAA\x39\x1F\x58\xA6\x6F\xF8\x68\x17\xE4\x28\x42\xEC\x01\x07\x4C\x49\x83\xEB\x51\x85\x93\xC6\xAB\xEE\x74\xC5\x50\x4B\x03\x04\x0A\x00\x00\x00\x00\x00\x06\x8A\xBC\x54\x96\x8C\x10\x86\x1F\x00\x00\x00\x1F\x00\x00\x00\x15\x00\x00\x00\x66\x69\x6C\x65\x32\x2D\x75\x6E\x65\x6E\x63\x72\x79\x70\x74\x65\x64\x2E\x74\x78\x74\x46\x69\x6C\x65\x6E\x61\x6D\x65\x3A\x20\x66\x69\x6C\x65\x32\x2D\x75\x6E\x65\x6E\x63\x72\x79\x70\x74\x65\x64\x2E\x74\x78\x74\x50\x4B\x03\x04\x14\x00\x01\x00\x00\x00\x07\x8A\xBC\x54\x9B\x9D\xC1\x7A\x29\x00\x00\x00\x1D\x00\x00\x00\x13\x00\x00\x00\x66\x69\x6C\x65\x33\x2D\x65\x6E\x63\x72\x79\x70\x74\x65\x64\x2E\x74\x78\x74\x59\xFC\x2E\xE1\xA3\x36\x26\x13\xFE\xA5\x31\xC7\x2B\x65\x13\xD6\x62\xEE\xF6\xE1\xDE\x9F\x04\x09\x6A\xC8\x98\x16\xCA\xB0\x23\x2E\x91\x59\xC4\x41\xE9\xE9\xA6\x26\x08\x50\x4B\x03\x04\x0A\x00\x00\x00\x00\x00\x0A\x8A\xBC\x54\x9E\x3B\x24\x21\x1F\x00\x00\x00\x1F\x00\x00\x00\x15\x00\x00\x00\x66\x69\x6C\x65\x34\x2D\x75\x6E\x65\x6E\x63\x72\x79\x70\x74\x65\x64\x2E\x74\x78\x74\x46\x69\x6C\x65\x6E\x61\x6D\x65\x3A\x20\x66\x69\x6C\x65\x34\x2D\x75\x6E\x65\x6E\x63\x72\x79\x70\x74\x65\x64\x2E\x74\x78\x74\x50\x4B\x01\x02\x3F\x00\x14\x00\x01\x00\x00\x00\xFA\x89\xBC\x54\x0A\x2C\x47\xD2\x29\x00\x00\x00\x1D\x00\x00\x00\x13\x00\x24\x00\x16\x00\x00\x00\x00\x00\x20\x08\x00\x00\x00\x00\x00\x00\x66\x69\x6C\x65\x31\x2D\x65\x6E\x63\x72\x79\x70\x74\x65\x64\x2E\x74\x78\x74\x0A\x00\x20\x00\x00\x00\x00\x00\x01\x00\x18\x00\x49\x33\x56\x41\xF1\x72\xD8\x01\x12\x55\x66\x41\xF1\x72\xD8\x01\x13\x35\x9F\x9A\x67\x60\xD8\x01\x65\x6E\x63\x72\x79\x70\x74\x65\x64\x5F\x65\x6E\x74\x72\x69\x65\x73\x5F\x77\x65\x61\x6B\x50\x4B\x01\x02\x3F\x00\x0A\x00\x00\x00\x00\x00\x06\x8A\xBC\x54\x96\x8C\x10\x86\x1F\x00\x00\x00\x1F\x00\x00\x00\x15\x00\x24\x00\x00\x00\x00\x00\x00\x00\x20\x08\x00\x00\x5A\x00\x00\x00\x66\x69\x6C\x65\x32\x2D\x75\x6E\x65\x6E\x63\x72\x79\x70\x74\x65\x64\x2E\x74\x78\x74\x0A\x00\x20\x00\x00\x00\x00\x00\x01\x00\x18\x00\xF8\x62\x65\x4C\xF1\x72\xD8\x01\x36\xDC\x6B\x4C\xF1\x72\xD8\x01\xEF\x3C\xE9\xA2\x67\x60\xD8\x01\x50\x4B\x01\x02\x3F\x00\x14\x00\x01\x00\x00\x00\x07\x8A\xBC\x54\x9B\x9D\xC1\x7A\x29\x00\x00\x00\x1D\x00\x00\x00\x13\x00\x24\x00\x16\x00\x00\x00\x00\x00\x20\x08\x00\x00\xAC\x00\x00\x00\x66\x69\x6C\x65\x33\x2D\x65\x6E\x63\x72\x79\x70\x74\x65\x64\x2E\x74\x78\x74\x0A\x00\x20\x00\x00\x00\x00\x00\x01\x00\x18\x00\x12\xDD\x79\x4E\xF1\x72\xD8\x01\x95\x9C\x32\xA5\xF1\x72\xD8\x01\x47\xF5\x27\xA9\x67\x60\xD8\x01\x65\x6E\x63\x72\x79\x70\x74\x65\x64\x5F\x65\x6E\x74\x72\x69\x65\x73\x5F\x77\x65\x61\x6B\x50\x4B\x01\x02\x3F\x00\x0A\x00\x00\x00\x00\x00\x0A\x8A\xBC\x54\x9E\x3B\x24\x21\x1F\x00\x00\x00\x1F\x00\x00\x00\x15\x00\x24\x00\x00\x00\x00\x00\x00\x00\x20\x08\x00\x00\x06\x01\x00\x00\x66\x69\x6C\x65\x34\x2D\x75\x6E\x65\x6E\x63\x72\x79\x70\x74\x65\x64\x2E\x74\x78\x74\x0A\x00\x20\x00\x00\x00\x00\x00\x01\x00\x18\x00\x31\x4C\x9C\x51\xF1\x72\xD8\x01\x31\x4C\x9C\x51\xF1\x72\xD8\x01\x80\xBF\x6F\xAD\x67\x60\xD8\x01\x50\x4B\x05\x06\x00\x00\x00\x00\x04\x00\x04\x00\xC4\x01\x00\x00\x58\x01\x00\x00\x00\x00" diff --git a/src/libraries/Fuzzing/DotnetFuzzing/DotnetFuzzing.csproj b/src/libraries/Fuzzing/DotnetFuzzing/DotnetFuzzing.csproj index 195dc8d04ff993..91af70b17c3c1d 100644 --- a/src/libraries/Fuzzing/DotnetFuzzing/DotnetFuzzing.csproj +++ 
b/src/libraries/Fuzzing/DotnetFuzzing/DotnetFuzzing.csproj @@ -31,6 +31,7 @@ + diff --git a/src/libraries/Fuzzing/DotnetFuzzing/Fuzzers/ZipArchiveFuzzer.cs b/src/libraries/Fuzzing/DotnetFuzzing/Fuzzers/ZipArchiveFuzzer.cs new file mode 100644 index 00000000000000..f8119f5fabc494 --- /dev/null +++ b/src/libraries/Fuzzing/DotnetFuzzing/Fuzzers/ZipArchiveFuzzer.cs @@ -0,0 +1,70 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Buffers; +using System.IO.Compression; +using System.Threading.Tasks; + +namespace DotnetFuzzing.Fuzzers; + +internal sealed class ZipArchiveFuzzer : IFuzzer +{ + public string[] TargetAssemblies { get; } = ["System.IO.Compression"]; + public string[] TargetCoreLibPrefixes => []; + public string Dictionary => "ziparchive.dict"; + + public void FuzzTarget(ReadOnlySpan bytes) + { + + if (bytes.IsEmpty) + { + return; + } + + try + { + using var stream = new MemoryStream(bytes.ToArray()); + + Task sync_test = TestArchive(stream, async: false); + Task async_test = TestArchive(stream, async: true); + + Task.WaitAll(sync_test, async_test); + } + catch (Exception) { } + } + + private async Task TestArchive(Stream stream, bool async) + { + stream.Position = 0; + + ZipArchive archive; + + if (async) + { + archive = await ZipArchive.CreateAsync(stream, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null); + } + else + { + archive = new ZipArchive(stream, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null); + } + + foreach (var entry in archive.Entries) + { + // Access entry properties to simulate usage + _ = entry.FullName; + _ = entry.Length; + _ = entry.Comment; + _ = entry.LastWriteTime; + _ = entry.Crc32; + } + + if (async) + { + await archive.DisposeAsync(); + } + else + { + archive.Dispose(); + } + } +} diff --git a/src/libraries/System.IO.Compression.Brotli/tests/CompressionStreamUnitTests.Brotli.cs b/src/libraries/System.IO.Compression.Brotli/tests/CompressionStreamUnitTests.Brotli.cs index 5b992cfcb246af..2fb9eacc9dfc3f 100644 --- a/src/libraries/System.IO.Compression.Brotli/tests/CompressionStreamUnitTests.Brotli.cs +++ b/src/libraries/System.IO.Compression.Brotli/tests/CompressionStreamUnitTests.Brotli.cs @@ -111,7 +111,7 @@ public void InvalidBrotliCompressionQuality() [MemberData(nameof(UncompressedTestFilesBrotli))] public async void BrotliCompressionQuality_SizeInOrder(string testFile) { - using var uncompressedStream = await LocalMemoryStream.readAppFileAsync(testFile); + using var uncompressedStream = await LocalMemoryStream.ReadAppFileAsync(testFile); async Task GetLengthAsync(int compressionQuality) { diff --git a/src/libraries/System.IO.Compression.ZipFile/ref/System.IO.Compression.ZipFile.cs b/src/libraries/System.IO.Compression.ZipFile/ref/System.IO.Compression.ZipFile.cs index bc0e2079bfd8bf..f0948420b9eaf9 100644 --- a/src/libraries/System.IO.Compression.ZipFile/ref/System.IO.Compression.ZipFile.cs +++ b/src/libraries/System.IO.Compression.ZipFile/ref/System.IO.Compression.ZipFile.cs @@ -14,6 +14,12 @@ public static void CreateFromDirectory(string sourceDirectoryName, System.IO.Str public static void CreateFromDirectory(string sourceDirectoryName, string destinationArchiveFileName) { } public static void CreateFromDirectory(string sourceDirectoryName, string destinationArchiveFileName, System.IO.Compression.CompressionLevel compressionLevel, bool includeBaseDirectory) { } public static void CreateFromDirectory(string 
sourceDirectoryName, string destinationArchiveFileName, System.IO.Compression.CompressionLevel compressionLevel, bool includeBaseDirectory, System.Text.Encoding? entryNameEncoding) { } + public static System.Threading.Tasks.Task CreateFromDirectoryAsync(string sourceDirectoryName, System.IO.Stream destination, System.IO.Compression.CompressionLevel compressionLevel, bool includeBaseDirectory, System.Text.Encoding? entryNameEncoding, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task CreateFromDirectoryAsync(string sourceDirectoryName, System.IO.Stream destination, System.IO.Compression.CompressionLevel compressionLevel, bool includeBaseDirectory, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task CreateFromDirectoryAsync(string sourceDirectoryName, System.IO.Stream destination, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task CreateFromDirectoryAsync(string sourceDirectoryName, string destinationArchiveFileName, System.IO.Compression.CompressionLevel compressionLevel, bool includeBaseDirectory, System.Text.Encoding? entryNameEncoding, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task CreateFromDirectoryAsync(string sourceDirectoryName, string destinationArchiveFileName, System.IO.Compression.CompressionLevel compressionLevel, bool includeBaseDirectory, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task CreateFromDirectoryAsync(string sourceDirectoryName, string destinationArchiveFileName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public static void ExtractToDirectory(System.IO.Stream source, string destinationDirectoryName) { } public static void ExtractToDirectory(System.IO.Stream source, string destinationDirectoryName, bool overwriteFiles) { } public static void ExtractToDirectory(System.IO.Stream source, string destinationDirectoryName, System.Text.Encoding? entryNameEncoding) { } @@ -22,18 +28,35 @@ public static void ExtractToDirectory(string sourceArchiveFileName, string desti public static void ExtractToDirectory(string sourceArchiveFileName, string destinationDirectoryName, bool overwriteFiles) { } public static void ExtractToDirectory(string sourceArchiveFileName, string destinationDirectoryName, System.Text.Encoding? entryNameEncoding) { } public static void ExtractToDirectory(string sourceArchiveFileName, string destinationDirectoryName, System.Text.Encoding? entryNameEncoding, bool overwriteFiles) { } + public static System.Threading.Tasks.Task ExtractToDirectoryAsync(System.IO.Stream source, string destinationDirectoryName, bool overwriteFiles, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task ExtractToDirectoryAsync(System.IO.Stream source, string destinationDirectoryName, System.Text.Encoding? 
entryNameEncoding, bool overwriteFiles, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task ExtractToDirectoryAsync(System.IO.Stream source, string destinationDirectoryName, System.Text.Encoding? entryNameEncoding, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task ExtractToDirectoryAsync(System.IO.Stream source, string destinationDirectoryName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task ExtractToDirectoryAsync(string sourceArchiveFileName, string destinationDirectoryName, bool overwriteFiles, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task ExtractToDirectoryAsync(string sourceArchiveFileName, string destinationDirectoryName, System.Text.Encoding? entryNameEncoding, bool overwriteFiles, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task ExtractToDirectoryAsync(string sourceArchiveFileName, string destinationDirectoryName, System.Text.Encoding? entryNameEncoding, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task ExtractToDirectoryAsync(string sourceArchiveFileName, string destinationDirectoryName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public static System.IO.Compression.ZipArchive Open(string archiveFileName, System.IO.Compression.ZipArchiveMode mode) { throw null; } public static System.IO.Compression.ZipArchive Open(string archiveFileName, System.IO.Compression.ZipArchiveMode mode, System.Text.Encoding? entryNameEncoding) { throw null; } + public static System.Threading.Tasks.Task OpenAsync(string archiveFileName, System.IO.Compression.ZipArchiveMode mode, System.Text.Encoding? 
entryNameEncoding, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task OpenAsync(string archiveFileName, System.IO.Compression.ZipArchiveMode mode, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public static System.IO.Compression.ZipArchive OpenRead(string archiveFileName) { throw null; } + public static System.Threading.Tasks.Task OpenReadAsync(string archiveFileName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } } [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] public static partial class ZipFileExtensions { public static System.IO.Compression.ZipArchiveEntry CreateEntryFromFile(this System.IO.Compression.ZipArchive destination, string sourceFileName, string entryName) { throw null; } public static System.IO.Compression.ZipArchiveEntry CreateEntryFromFile(this System.IO.Compression.ZipArchive destination, string sourceFileName, string entryName, System.IO.Compression.CompressionLevel compressionLevel) { throw null; } + public static System.Threading.Tasks.Task CreateEntryFromFileAsync(this System.IO.Compression.ZipArchive destination, string sourceFileName, string entryName, System.IO.Compression.CompressionLevel compressionLevel, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task CreateEntryFromFileAsync(this System.IO.Compression.ZipArchive destination, string sourceFileName, string entryName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public static void ExtractToDirectory(this System.IO.Compression.ZipArchive source, string destinationDirectoryName) { } public static void ExtractToDirectory(this System.IO.Compression.ZipArchive source, string destinationDirectoryName, bool overwriteFiles) { } + public static System.Threading.Tasks.Task ExtractToDirectoryAsync(this System.IO.Compression.ZipArchive source, string destinationDirectoryName, bool overwriteFiles, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task ExtractToDirectoryAsync(this System.IO.Compression.ZipArchive source, string destinationDirectoryName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public static void ExtractToFile(this System.IO.Compression.ZipArchiveEntry source, string destinationFileName) { } public static void ExtractToFile(this System.IO.Compression.ZipArchiveEntry source, string destinationFileName, bool overwrite) { } + public static System.Threading.Tasks.Task ExtractToFileAsync(this System.IO.Compression.ZipArchiveEntry source, string destinationFileName, bool overwrite, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public static System.Threading.Tasks.Task ExtractToFileAsync(this System.IO.Compression.ZipArchiveEntry source, string destinationFileName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } } } diff --git a/src/libraries/System.IO.Compression.ZipFile/src/System.IO.Compression.ZipFile.csproj 
b/src/libraries/System.IO.Compression.ZipFile/src/System.IO.Compression.ZipFile.csproj index 0613ab5e4848a0..298815a2a9a9c8 100644 --- a/src/libraries/System.IO.Compression.ZipFile/src/System.IO.Compression.ZipFile.csproj +++ b/src/libraries/System.IO.Compression.ZipFile/src/System.IO.Compression.ZipFile.csproj @@ -8,10 +8,15 @@ + + + + + + /// Asynchronously opens a ZipArchive on the specified path for reading. The specified file is opened with FileMode.Open. + /// + /// + /// archiveFileName is a zero-length string, contains only whitespace, or contains one + /// or more invalid characters as defined by InvalidPathChars. + /// archiveFileName is null. + /// The specified archiveFileName exceeds the system-defined maximum length. + /// For example, on Windows-based platforms, paths must be less than 248 characters, + /// and file names must be less than 260 characters. + /// The specified archiveFileName is invalid, (for example, it is on an unmapped drive). + /// An unspecified I/O error occurred while opening the file. + /// archiveFileName specified a directory. + /// -OR- The caller does not have the required permission. + /// The file specified in archiveFileName was not found. + /// archiveFileName is in an invalid format. + /// The specified file could not be interpreted as a Zip file. + /// An asynchronous operation is cancelled. + /// + /// A string specifying the path on the filesystem to open the archive on. The path is permitted + /// to specify relative or absolute path information. Relative path information is interpreted as relative to the current working directory. + /// The cancellation token to monitor for cancellation requests. + public static Task OpenReadAsync(string archiveFileName, CancellationToken cancellationToken = default) => OpenAsync(archiveFileName, ZipArchiveMode.Read, cancellationToken); + + /// + /// Asynchronously opens a ZipArchive on the specified archiveFileName in the specified ZipArchiveMode mode. + /// + /// + /// archiveFileName is a zero-length string, contains only whitespace, + /// or contains one or more invalid characters as defined by InvalidPathChars. + /// path is null. + /// The specified archiveFileName exceeds the system-defined maximum length. + /// For example, on Windows-based platforms, paths must be less than 248 characters, + /// and file names must be less than 260 characters. + /// The specified archiveFileName is invalid, (for example, it is on an unmapped drive). + /// An unspecified I/O error occurred while opening the file. + /// archiveFileName specified a directory. + /// -OR- The caller does not have the required permission. + /// mode specified an invalid value. + /// The file specified in archiveFileName was not found. + /// archiveFileName is in an invalid format. + /// The specified file could not be interpreted as a Zip file. + /// -OR- mode is Update and an entry is missing from the archive or + /// is corrupt and cannot be read. + /// -OR- mode is Update and an entry is too large to fit into memory. + /// An asynchronous operation is cancelled. + /// + /// A string specifying the path on the filesystem to open the archive on. + /// The path is permitted to specify relative or absolute path information. + /// Relative path information is interpreted as relative to the current working directory. + /// See the description of the ZipArchiveMode enum. + /// If Read is specified, the file is opened with System.IO.FileMode.Open, and will throw + /// a FileNotFoundException if the file does not exist. 
+ /// If Create is specified, the file is opened with System.IO.FileMode.CreateNew, and will throw + /// a System.IO.IOException if the file already exists. + /// If Update is specified, the file is opened with System.IO.FileMode.OpenOrCreate. + /// If the file exists and is a Zip file, its entries will become accessible, and may be modified, and new entries may be created. + /// If the file exists and is not a Zip file, a ZipArchiveException will be thrown. + /// If the file exists and is empty or does not exist, a new Zip file will be created. + /// Note that creating a Zip file with the ZipArchiveMode.Create mode is more efficient when creating a new Zip file. + /// The cancellation token to monitor for cancellation requests. + public static Task OpenAsync(string archiveFileName, ZipArchiveMode mode, CancellationToken cancellationToken = default) => OpenAsync(archiveFileName, mode, entryNameEncoding: null, cancellationToken); + + /// + /// Asynchronously opens a ZipArchive on the specified archiveFileName in the specified ZipArchiveMode mode. + /// + /// + /// archiveFileName is a zero-length string, contains only whitespace, + /// or contains one or more invalid characters as defined by InvalidPathChars. + /// path is null. + /// The specified archiveFileName exceeds the system-defined maximum length. + /// For example, on Windows-based platforms, paths must be less than 248 characters, + /// and file names must be less than 260 characters. + /// The specified archiveFileName is invalid, (for example, it is on an unmapped drive). + /// An unspecified I/O error occurred while opening the file. + /// archiveFileName specified a directory. + /// -OR- The caller does not have the required permission. + /// mode specified an invalid value. + /// The file specified in archiveFileName was not found. + /// archiveFileName is in an invalid format. + /// The specified file could not be interpreted as a Zip file. + /// -OR- mode is Update and an entry is missing from the archive or + /// is corrupt and cannot be read. + /// -OR- mode is Update and an entry is too large to fit into memory. + /// An asynchronous operation is cancelled. + /// + /// A string specifying the path on the filesystem to open the archive on. + /// The path is permitted to specify relative or absolute path information. + /// Relative path information is interpreted as relative to the current working directory. + /// See the description of the ZipArchiveMode enum. + /// If Read is specified, the file is opened with System.IO.FileMode.Open, and will throw + /// a FileNotFoundException if the file does not exist. + /// If Create is specified, the file is opened with System.IO.FileMode.CreateNew, and will throw + /// a System.IO.IOException if the file already exists. + /// If Update is specified, the file is opened with System.IO.FileMode.OpenOrCreate. + /// If the file exists and is a Zip file, its entries will become accessible, and may be modified, and new entries may be created. + /// If the file exists and is not a Zip file, a ZipArchiveException will be thrown. + /// If the file exists and is empty or does not exist, a new Zip file will be created. + /// Note that creating a Zip file with the ZipArchiveMode.Create mode is more efficient when creating a new Zip file. + /// The encoding to use when reading or writing entry names and comments in this ZipArchive. + /// /// NOTE: Specifying this parameter to values other than null is discouraged. 
+ /// However, this may be necessary for interoperability with ZIP archive tools and libraries that do not correctly support + /// UTF-8 encoding for entry names or comments.
+ /// This value is used as follows:
+ /// Reading (opening) ZIP archive files: + /// If entryNameEncoding is not specified (== null): + /// + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is not set, + /// use the current system default code page (Encoding.Default) in order to decode the entry name and comment. + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is set, + /// use UTF-8 (Encoding.UTF8) in order to decode the entry name and comment. + /// + /// If entryNameEncoding is specified (!= null): + /// + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is not set, + /// use the specified entryNameEncoding in order to decode the entry name and comment. + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is set, + /// use UTF-8 (Encoding.UTF8) in order to decode the entry name and comment. + /// + /// Writing (saving) ZIP archive files: + /// If entryNameEncoding is not specified (== null): + /// + /// For entry names or comments that contain characters outside the ASCII range, + /// the language encoding flag (EFS) will be set in the general purpose bit flag of the local file header, + /// and UTF-8 (Encoding.UTF8) will be used in order to encode the entry name and comment into bytes. + /// For entry names or comments that do not contain characters outside the ASCII range, + /// the language encoding flag (EFS) will not be set in the general purpose bit flag of the local file header, + /// and the current system default code page (Encoding.Default) will be used to encode the entry names and comments into bytes. + /// + /// If entryNameEncoding is specified (!= null): + /// + /// The specified entryNameEncoding will always be used to encode the entry names and comments into bytes. + /// The language encoding flag (EFS) in the general purpose bit flag of the local file header will be set if and only + /// if the specified entryNameEncoding is a UTF-8 encoding. + /// + /// Note that Unicode encodings other than UTF-8 may not be currently used for the entryNameEncoding, + /// otherwise an is thrown. + /// + /// The cancellation token to monitor for cancellation requests. + public static async Task OpenAsync(string archiveFileName, ZipArchiveMode mode, Encoding? entryNameEncoding, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + // the FileStream gets passed to the new ZipArchive, which stores it internally. + // The stream will then be owned by the archive and be disposed when the archive is disposed. + // If the ZipArchive ctor completes without throwing, we know fs has been successfully stores in the archive; + // If the ctor throws, we need to close it in a try finally for the ZipArchive. + + FileStream fs = GetFileStreamForOpen(mode, archiveFileName, useAsync: true); + + try + { + return await ZipArchive.CreateAsync(fs, mode, leaveOpen: false, entryNameEncoding: entryNameEncoding, cancellationToken).ConfigureAwait(false); + } + catch + { + await fs.DisposeAsync().ConfigureAwait(false); + throw; + } + } + + /// + ///
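A minimal usage sketch of the async open surface above, assuming OpenReadAsync returns Task<ZipArchive> (the generic arguments of the Task return types appear to have been stripped in this listing) and using a caller-supplied archive path:

using System;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;

internal static class AsyncOpenSample
{
    // Open an existing archive for reading with asynchronous file I/O, list its entries,
    // then dispose the archive, which also disposes the FileStream it owns.
    public static async Task ListEntriesAsync(string archivePath, CancellationToken cancellationToken = default)
    {
        ZipArchive archive = await ZipFile.OpenReadAsync(archivePath, cancellationToken);
        await using (archive)
        {
            foreach (ZipArchiveEntry entry in archive.Entries)
            {
                Console.WriteLine($"{entry.FullName} ({entry.Length} bytes, CRC32 {entry.Crc32:X8})");
            }
        }
    }
}

The same pattern applies to OpenAsync with ZipArchiveMode.Update when entries need to be inspected or modified in place.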

+ /// Asynchronously creates a Zip archive at the path destinationArchiveFileName that contains the files and directories from
+ /// the directory specified by sourceDirectoryName. The directory structure is preserved in the archive, and a
+ /// recursive search is done for files to be archived. The archive must not exist. If the directory is empty, an empty
+ /// archive will be created. If a file in the directory cannot be added to the archive, the archive will be left incomplete
+ /// and invalid and the method will throw an exception. This method does not include the base directory into the archive.
+ /// If an error is encountered while adding files to the archive, this method will stop adding files and leave the archive
+ /// in an invalid state. The paths are permitted to specify relative or absolute path information. Relative path information
+ /// is interpreted as relative to the current working directory. If a file in the archive has data in the last write time
+ /// field that is not a valid Zip timestamp, an indicator value of 1980 January 1 at midnight will be used for the file's
+ /// last modified time.
+ ///
+ /// If an entry with the specified name already exists in the archive, a second entry will be created that has an identical name.
+ ///
+ /// Since no CompressionLevel is specified, the default provided by the implementation of the underlying compression
+ /// algorithm will be used; the ZipArchive will not impose its own default.
+ /// (Currently, the underlying compression algorithm is provided by the System.IO.Compression.DeflateStream class.)
+ ///
+ /// + /// sourceDirectoryName or destinationArchiveFileName is a zero-length + /// string, contains only whitespace, or contains one or more invalid characters as defined by + /// InvalidPathChars. + /// sourceDirectoryName or destinationArchiveFileName is null. + /// In sourceDirectoryName or destinationArchiveFileName, the specified + /// path, file name, or both exceed the system-defined maximum length. + /// For example, on Windows-based platforms, paths must be less than 248 characters, and file + /// names must be less than 260 characters. + /// The path specified in sourceDirectoryName or destinationArchiveFileName + /// is invalid, (for example, it is on an unmapped drive). + /// -OR- The directory specified by sourceDirectoryName does not exist. + /// destinationArchiveFileName already exists. + /// -OR- An I/O error occurred while opening a file to be archived. + /// destinationArchiveFileName specified a directory. + /// -OR- The caller does not have the required permission. + /// sourceDirectoryName or destinationArchiveFileName is + /// in an invalid format. + /// An asynchronous operation is cancelled. + /// + /// The path to the directory on the file system to be archived. + /// The name of the archive to be created. + /// The cancellation token to monitor for cancellation requests. + public static Task CreateFromDirectoryAsync(string sourceDirectoryName, string destinationArchiveFileName, CancellationToken cancellationToken = default) => + DoCreateFromDirectoryAsync(sourceDirectoryName, destinationArchiveFileName, compressionLevel: null, includeBaseDirectory: false, entryNameEncoding: null, cancellationToken); + + /// + ///
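A brief usage sketch of these CreateFromDirectoryAsync overloads, with hypothetical paths and, for the stream-based overload declared later in this file, a FileStream opened for asynchronous I/O:

using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;

internal static class CreateFromDirectorySample
{
    public static async Task ZipLogsAsync(CancellationToken cancellationToken = default)
    {
        // Simplest overload: default compression, base directory not included in the archive.
        await ZipFile.CreateFromDirectoryAsync("./logs", "./logs.zip", cancellationToken);

        // Stream-based overload: write the archive into any writable stream.
        await using FileStream destination = new FileStream(
            "./logs-stream.zip", FileMode.CreateNew, FileAccess.Write, FileShare.None,
            bufferSize: 0x1000, useAsync: true);
        await ZipFile.CreateFromDirectoryAsync(
            "./logs", destination, CompressionLevel.Optimal,
            includeBaseDirectory: true, cancellationToken);
    }
}

Both overloads return a plain Task and accept an optional CancellationToken.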

+ /// Asynchronously creates a Zip archive at the path destinationArchiveFileName that contains the files and directories in the directory
+ /// specified by sourceDirectoryName. The directory structure is preserved in the archive, and a recursive search is
+ /// done for files to be archived. The archive must not exist. If the directory is empty, an empty archive will be created.
+ /// If a file in the directory cannot be added to the archive, the archive will be left incomplete and invalid and the
+ /// method will throw an exception. This method optionally includes the base directory in the archive.
+ /// If an error is encountered while adding files to the archive, this method will stop adding files and leave the archive
+ /// in an invalid state. The paths are permitted to specify relative or absolute path information. Relative path information
+ /// is interpreted as relative to the current working directory. If a file in the archive has data in the last write time
+ /// field that is not a valid Zip timestamp, an indicator value of 1980 January 1 at midnight will be used for the file's
+ /// last modified time.
+ ///
+ /// If an entry with the specified name already exists in the archive, a second entry will be created that has an identical name.
+ ///
+ /// The specified CompressionLevel is passed down to the implementation of the underlying compression
+ /// algorithm; the ZipArchive will not impose its own default.
+ /// (Currently, the underlying compression algorithm is provided by the System.IO.Compression.DeflateStream class.)
+ ///
+ /// + /// sourceDirectoryName or destinationArchiveFileName is a zero-length + /// string, contains only whitespace, or contains one or more invalid characters as defined by + /// InvalidPathChars. + /// sourceDirectoryName or destinationArchiveFileName is null. + /// In sourceDirectoryName or destinationArchiveFileName, the + /// specified path, file name, or both exceed the system-defined maximum length. + /// For example, on Windows-based platforms, paths must be less than 248 characters, + /// and file names must be less than 260 characters. + /// The path specified in sourceDirectoryName or + /// destinationArchiveFileName is invalid, (for example, it is on an unmapped drive). + /// -OR- The directory specified by sourceDirectoryName does not exist. + /// destinationArchiveFileName already exists. + /// -OR- An I/O error occurred while opening a file to be archived. + /// destinationArchiveFileName specified a directory. + /// -OR- The caller does not have the required permission. + /// sourceDirectoryName or destinationArchiveFileName + /// is in an invalid format. + /// An asynchronous operation is cancelled. + /// + /// The path to the directory on the file system to be archived. + /// The name of the archive to be created. + /// The level of the compression (speed/memory vs. compressed size trade-off). + /// true to indicate that a directory named sourceDirectoryName should + /// be included at the root of the archive. false to indicate that the files and directories in sourceDirectoryName + /// should be included directly in the archive. + /// The cancellation token to monitor for cancellation requests. + public static Task CreateFromDirectoryAsync(string sourceDirectoryName, string destinationArchiveFileName, CompressionLevel compressionLevel, bool includeBaseDirectory, CancellationToken cancellationToken = default) => + DoCreateFromDirectoryAsync(sourceDirectoryName, destinationArchiveFileName, compressionLevel, includeBaseDirectory, entryNameEncoding: null, cancellationToken); + + /// + ///
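The ZipFileExtensions async helpers added in the ref changes earlier in this diff compose with OpenAsync and OpenReadAsync. A hedged sketch, with hypothetical file names and CreateEntryFromFileAsync assumed to return Task<ZipArchiveEntry>:

using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;

internal static class ZipFileExtensionsSample
{
    public static async Task AddThenExtractAsync(CancellationToken cancellationToken = default)
    {
        // Add one file to an existing archive opened in Update mode.
        ZipArchive updateArchive = await ZipFile.OpenAsync("./logs.zip", ZipArchiveMode.Update, cancellationToken);
        await using (updateArchive)
        {
            await updateArchive.CreateEntryFromFileAsync("./notes.txt", "notes.txt", CompressionLevel.Fastest, cancellationToken);
        }

        // Re-open for reading and extract that entry back out to a standalone file.
        ZipArchive readArchive = await ZipFile.OpenReadAsync("./logs.zip", cancellationToken);
        await using (readArchive)
        {
            ZipArchiveEntry? entry = readArchive.GetEntry("notes.txt");
            if (entry is not null)
            {
                await entry.ExtractToFileAsync("./notes-copy.txt", overwrite: true, cancellationToken);
            }
        }
    }
}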

+ /// Asynchronously creates a Zip archive at the path destinationArchiveFileName that contains the files and directories in the directory
+ /// specified by sourceDirectoryName. The directory structure is preserved in the archive, and a recursive search is
+ /// done for files to be archived. The archive must not exist. If the directory is empty, an empty archive will be created.
+ /// If a file in the directory cannot be added to the archive, the archive will be left incomplete and invalid and the
+ /// method will throw an exception. This method optionally includes the base directory in the archive.
+ /// If an error is encountered while adding files to the archive, this method will stop adding files and leave the archive
+ /// in an invalid state. The paths are permitted to specify relative or absolute path information. Relative path information
+ /// is interpreted as relative to the current working directory. If a file in the archive has data in the last write time
+ /// field that is not a valid Zip timestamp, an indicator value of 1980 January 1 at midnight will be used for the file's
+ /// last modified time.
+ ///
+ /// If an entry with the specified name already exists in the archive, a second entry will be created that has an identical name.
+ ///
+ /// The specified CompressionLevel is passed down to the implementation of the underlying compression
+ /// algorithm; the ZipArchive will not impose its own default.
+ /// (Currently, the underlying compression algorithm is provided by the System.IO.Compression.DeflateStream class.)
+ ///
+ /// + /// sourceDirectoryName or destinationArchiveFileName is a zero-length + /// string, contains only whitespace, or contains one or more invalid characters as defined by + /// InvalidPathChars. + /// sourceDirectoryName or destinationArchiveFileName is null. + /// In sourceDirectoryName or destinationArchiveFileName, the + /// specified path, file name, or both exceed the system-defined maximum length. + /// For example, on Windows-based platforms, paths must be less than 248 characters, + /// and file names must be less than 260 characters. + /// The path specified in sourceDirectoryName or + /// destinationArchiveFileName is invalid, (for example, it is on an unmapped drive). + /// -OR- The directory specified by sourceDirectoryName does not exist. + /// destinationArchiveFileName already exists. + /// -OR- An I/O error occurred while opening a file to be archived. + /// destinationArchiveFileName specified a directory. + /// -OR- The caller does not have the required permission. + /// sourceDirectoryName or destinationArchiveFileName + /// is in an invalid format. + /// An asynchronous operation is cancelled. + /// + /// The path to the directory on the file system to be archived. + /// The name of the archive to be created. + /// The level of the compression (speed/memory vs. compressed size trade-off). + /// true to indicate that a directory named sourceDirectoryName should + /// be included at the root of the archive. false to indicate that the files and directories in sourceDirectoryName + /// should be included directly in the archive. + /// The encoding to use when reading or writing entry names and comments in this ZipArchive. + /// /// NOTE: Specifying this parameter to values other than null is discouraged. + /// However, this may be necessary for interoperability with ZIP archive tools and libraries that do not correctly support + /// UTF-8 encoding for entry names or comments.
+ /// This value is used as follows while creating the archive:
+ /// If entryNameEncoding is not specified (== null):
+ ///
+ /// For file names or comments that contain characters outside the ASCII range:
+ /// The language encoding flag (EFS) will be set in the general purpose bit flag of the local file header of the corresponding entry,
+ /// and UTF-8 (Encoding.UTF8) will be used in order to encode the entry name and comment into bytes.
+ /// For file names or comments that do not contain characters outside the ASCII range:
+ /// the language encoding flag (EFS) will not be set in the general purpose bit flag of the local file header of the corresponding entry,
+ /// and the current system default code page (Encoding.Default) will be used to encode the entry names and comments into bytes.
+ ///
+ /// If entryNameEncoding is specified (!= null): + /// + /// The specified entryNameEncoding will always be used to encode the entry names and comments into bytes. + /// The language encoding flag (EFS) in the general purpose bit flag of the local file header for each entry will be set if and only + /// if the specified entryNameEncoding is a UTF-8 encoding. + /// + /// Note that Unicode encodings other than UTF-8 may not be currently used for the entryNameEncoding, + /// otherwise an is thrown. + /// + /// The cancellation token to monitor for cancellation requests. + public static Task CreateFromDirectoryAsync(string sourceDirectoryName, string destinationArchiveFileName, + CompressionLevel compressionLevel, bool includeBaseDirectory, Encoding? entryNameEncoding, CancellationToken cancellationToken = default) => + DoCreateFromDirectoryAsync(sourceDirectoryName, destinationArchiveFileName, compressionLevel, includeBaseDirectory, entryNameEncoding, cancellationToken); + + /// + /// Asynchronously creates a zip archive in the specified stream that contains the files and directories from the specified directory. + /// + /// The path to the directory to be archived, specified as a relative or absolute path. A relative path is interpreted as relative to the current working directory. + /// The stream where the zip archive is to be stored. + /// The cancellation token to monitor for cancellation requests. + /// + /// The directory structure from the file system is preserved in the archive. If the directory is empty, an empty archive is created. + /// This method overload does not include the base directory in the archive and does not allow you to specify a compression level. + /// If you want to include the base directory or specify a compression level, call the method overload. + /// If a file in the directory cannot be added to the archive, the archive is left incomplete and invalid, and the method throws an exception. + /// + /// is , contains only white space, or contains at least one invalid character. + /// -or- + /// The stream does not support writing. + /// + /// or is . + /// In the specified path, file name, or both exceed the system-defined maximum length. + /// is invalid or does not exist (for example, it is on an unmapped drive). + /// A file in the specified directory could not be opened. + ///-or- + ///An I/O error occurred while opening a file to be archived. + /// contains an invalid format. + /// An asynchronous operation is cancelled. + public static Task CreateFromDirectoryAsync(string sourceDirectoryName, Stream destination, CancellationToken cancellationToken = default) => + DoCreateFromDirectoryAsync(sourceDirectoryName, destination, compressionLevel: null, includeBaseDirectory: false, entryNameEncoding: null, cancellationToken); + + /// + /// Asynchronously creates a zip archive in the specified stream that contains the files and directories from the specified directory, uses the specified compression level, and optionally includes the base directory. + /// + /// The path to the directory to be archived, specified as a relative or absolute path. A relative path is interpreted as relative to the current working directory. + /// The stream where the zip archive is to be stored. + /// One of the enumeration values that indicates whether to emphasize speed or compression effectiveness when creating the entry. + /// to include the directory name from at the root of the archive; to include only the contents of the directory. 
+ /// The token to monitor for cancellation requests. + /// + /// The directory structure from the file system is preserved in the archive. If the directory is empty, an empty archive is created. + /// Use this method overload to specify the compression level and whether to include the base directory in the archive. + /// If a file in the directory cannot be added to the archive, the archive is left incomplete and invalid, and the method throws an exception. + /// + /// is , contains only white space, or contains at least one invalid character. + /// -or- + /// The stream does not support writing. + /// + /// or is . + /// In the specified path, file name, or both exceed the system-defined maximum length. + /// is invalid or does not exist (for example, it is on an unmapped drive). + /// A file in the specified directory could not be opened. + ///-or- + ///An I/O error occurred while opening a file to be archived. + /// contains an invalid format. + /// is not a valid value. + /// An asynchronous operation is cancelled. + public static Task CreateFromDirectoryAsync(string sourceDirectoryName, Stream destination, CompressionLevel compressionLevel, bool includeBaseDirectory, CancellationToken cancellationToken = default) => + DoCreateFromDirectoryAsync(sourceDirectoryName, destination, compressionLevel, includeBaseDirectory, entryNameEncoding: null, cancellationToken); + + /// + /// Asynchronously creates a zip archive in the specified stream that contains the files and directories from the specified directory, uses the specified compression level and character encoding for entry names, and optionally includes the base directory. + /// + /// The path to the directory to be archived, specified as a relative or absolute path. A relative path is interpreted as relative to the current working directory. + /// The stream where the zip archive is to be stored. + /// One of the enumeration values that indicates whether to emphasize speed or compression effectiveness when creating the entry. + /// to include the directory name from at the root of the archive; to include only the contents of the directory. + /// The encoding to use when reading or writing entry names in this archive. Specify a value for this parameter only when an encoding is required for interoperability with zip archive tools and libraries that do not support UTF-8 encoding for entry names or comments. + /// The cancellation token to monitor for cancellation requests. + /// + /// The directory structure from the file system is preserved in the archive. If the directory is empty, an empty archive is created. + /// Use this method overload to specify the compression level and character encoding, and whether to include the base directory in the archive. + /// If a file in the directory cannot be added to the archive, the archive is left incomplete and invalid, and the method throws an exception. + /// + /// is , contains only white space, or contains at least one invalid character. + /// -or- + /// The stream does not support writing. + /// + /// or is . + /// In the specified path, file name, or both exceed the system-defined maximum length. + /// is invalid or does not exist (for example, it is on an unmapped drive). + /// A file in the specified directory could not be opened. + ///-or- + ///An I/O error occurred while opening a file to be archived. + /// contains an invalid format. + /// is not a valid value. + /// An asynchronous operation is cancelled. 
+ public static Task CreateFromDirectoryAsync(string sourceDirectoryName, Stream destination, + CompressionLevel compressionLevel, bool includeBaseDirectory, Encoding? entryNameEncoding, CancellationToken cancellationToken = default) => + DoCreateFromDirectoryAsync(sourceDirectoryName, destination, compressionLevel, includeBaseDirectory, entryNameEncoding, cancellationToken); + + private static async Task DoCreateFromDirectoryAsync(string sourceDirectoryName, string destinationArchiveFileName, + CompressionLevel? compressionLevel, bool includeBaseDirectory, Encoding? entryNameEncoding, CancellationToken cancellationToken) + + { + cancellationToken.ThrowIfCancellationRequested(); + + // Rely on Path.GetFullPath for validation of sourceDirectoryName and destinationArchive + (sourceDirectoryName, destinationArchiveFileName) = GetFullPathsForDoCreateFromDirectory(sourceDirectoryName, destinationArchiveFileName); + + // Checking of compressionLevel is passed down to DeflateStream and the IDeflater implementation + // as it is a pluggable component that completely encapsulates the meaning of compressionLevel. + + ZipArchive archive = await OpenAsync(destinationArchiveFileName, ZipArchiveMode.Create, entryNameEncoding, cancellationToken).ConfigureAwait(false); + await using (archive) + { + await CreateZipArchiveFromDirectoryAsync(sourceDirectoryName, archive, compressionLevel, includeBaseDirectory, cancellationToken).ConfigureAwait(false); + } + } + + private static async Task DoCreateFromDirectoryAsync(string sourceDirectoryName, Stream destination, + CompressionLevel? compressionLevel, bool includeBaseDirectory, Encoding? entryNameEncoding, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + sourceDirectoryName = ValidateAndGetFullPathForDoCreateFromDirectory(sourceDirectoryName, destination, compressionLevel); + + ZipArchive archive = await ZipArchive.CreateAsync(destination, ZipArchiveMode.Create, leaveOpen: true, entryNameEncoding, cancellationToken).ConfigureAwait(false); + await using (archive) + { + await CreateZipArchiveFromDirectoryAsync(sourceDirectoryName, archive, compressionLevel, includeBaseDirectory, cancellationToken).ConfigureAwait(false); + } + } + + private static async Task CreateZipArchiveFromDirectoryAsync(string sourceDirectoryName, ZipArchive archive, + CompressionLevel? 
compressionLevel, bool includeBaseDirectory, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + (bool directoryIsEmpty, string basePath, DirectoryInfo di, FileSystemEnumerable<(string, CreateEntryType)> fse) = + InitializeCreateZipArchiveFromDirectory(sourceDirectoryName, includeBaseDirectory); + + foreach ((string fullPath, CreateEntryType type) in fse) + { + directoryIsEmpty = false; + + switch (type) + { + case CreateEntryType.File: + { + // Create entry for file: + string entryName = ArchivingUtils.EntryFromPath(fullPath.AsSpan(basePath.Length)); + await ZipFileExtensions.DoCreateEntryFromFileAsync(archive, fullPath, entryName, compressionLevel, cancellationToken).ConfigureAwait(false); + } + break; + case CreateEntryType.Directory: + if (ArchivingUtils.IsDirEmpty(fullPath)) + { + // Create entry marking an empty dir: + // FullName never returns a directory separator character on the end, + // but Zip archives require it to specify an explicit directory: + string entryName = ArchivingUtils.EntryFromPath(fullPath.AsSpan(basePath.Length), appendPathSeparator: true); + archive.CreateEntry(entryName); + } + break; + default: + throw new IOException(SR.Format(SR.ZipUnsupportedFile, fullPath)); + } + } + + FinalizeCreateZipArchiveFromDirectory(archive, di, includeBaseDirectory, directoryIsEmpty); + } +} diff --git a/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFile.Create.cs b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFile.Create.cs index 5e3a616b0e1de2..1ee4cb90a3fa6b 100644 --- a/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFile.Create.cs +++ b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFile.Create.cs @@ -143,42 +143,12 @@ public static partial class ZipFile /// public static ZipArchive Open(string archiveFileName, ZipArchiveMode mode, Encoding? entryNameEncoding) { - // Relies on FileStream's ctor for checking of archiveFileName - - FileMode fileMode; - FileAccess access; - FileShare fileShare; - - switch (mode) - { - case ZipArchiveMode.Read: - fileMode = FileMode.Open; - access = FileAccess.Read; - fileShare = FileShare.Read; - break; - - case ZipArchiveMode.Create: - fileMode = FileMode.CreateNew; - access = FileAccess.Write; - fileShare = FileShare.None; - break; - - case ZipArchiveMode.Update: - fileMode = FileMode.OpenOrCreate; - access = FileAccess.ReadWrite; - fileShare = FileShare.None; - break; - - default: - throw new ArgumentOutOfRangeException(nameof(mode)); - } - - // Suppress CA2000: fs gets passed to the new ZipArchive, which stores it internally. + // the FileStream gets passed to the new ZipArchive, which stores it internally. // The stream will then be owned by the archive and be disposed when the archive is disposed. - // If the ctor completes without throwing, we know fs has been successfully stores in the archive; - // If the ctor throws, we need to close it here. + // If the ZipArchive ctor completes without throwing, we know fs has been successfully stores in the archive; + // If the ctor throws, we need to close it in a try finally for the ZipArchive. 
- FileStream fs = new FileStream(archiveFileName, fileMode, access, fileShare, bufferSize: 0x1000, useAsync: false); + FileStream fs = GetFileStreamForOpen(mode, archiveFileName, useAsync: false); try { @@ -435,13 +405,11 @@ private static void DoCreateFromDirectory(string sourceDirectoryName, string des { // Rely on Path.GetFullPath for validation of sourceDirectoryName and destinationArchive + (sourceDirectoryName, destinationArchiveFileName) = GetFullPathsForDoCreateFromDirectory(sourceDirectoryName, destinationArchiveFileName); // Checking of compressionLevel is passed down to DeflateStream and the IDeflater implementation // as it is a pluggable component that completely encapsulates the meaning of compressionLevel. - sourceDirectoryName = Path.GetFullPath(sourceDirectoryName); - destinationArchiveFileName = Path.GetFullPath(destinationArchiveFileName); - using ZipArchive archive = Open(destinationArchiveFileName, ZipArchiveMode.Create, entryNameEncoding); CreateZipArchiveFromDirectory(sourceDirectoryName, archive, compressionLevel, includeBaseDirectory); } @@ -449,19 +417,7 @@ private static void DoCreateFromDirectory(string sourceDirectoryName, string des private static void DoCreateFromDirectory(string sourceDirectoryName, Stream destination, CompressionLevel? compressionLevel, bool includeBaseDirectory, Encoding? entryNameEncoding) { - ArgumentNullException.ThrowIfNull(destination); - if (!destination.CanWrite) - { - throw new ArgumentException(SR.UnwritableStream, nameof(destination)); - } - if (compressionLevel.HasValue && !Enum.IsDefined(compressionLevel.Value)) - { - throw new ArgumentOutOfRangeException(nameof(compressionLevel)); - } - - // Rely on Path.GetFullPath for validation of sourceDirectoryName - - sourceDirectoryName = Path.GetFullPath(sourceDirectoryName); + sourceDirectoryName = ValidateAndGetFullPathForDoCreateFromDirectory(sourceDirectoryName, destination, compressionLevel); using ZipArchive archive = new ZipArchive(destination, ZipArchiveMode.Create, leaveOpen: true, entryNameEncoding); CreateZipArchiveFromDirectory(sourceDirectoryName, archive, compressionLevel, includeBaseDirectory); @@ -470,17 +426,8 @@ private static void DoCreateFromDirectory(string sourceDirectoryName, Stream des private static void CreateZipArchiveFromDirectory(string sourceDirectoryName, ZipArchive archive, CompressionLevel? 
compressionLevel, bool includeBaseDirectory) { - bool directoryIsEmpty = true; - - //add files and directories - DirectoryInfo di = new DirectoryInfo(sourceDirectoryName); - - string basePath = di.FullName; - - if (includeBaseDirectory && di.Parent != null) - basePath = di.Parent.FullName; - - FileSystemEnumerable<(string, CreateEntryType)> fse = CreateEnumerableForCreate(di.FullName); + (bool directoryIsEmpty, string basePath, DirectoryInfo di, FileSystemEnumerable<(string, CreateEntryType)> fse) = + InitializeCreateZipArchiveFromDirectory(sourceDirectoryName, includeBaseDirectory); foreach ((string fullPath, CreateEntryType type) in fse) { @@ -511,11 +458,81 @@ private static void CreateZipArchiveFromDirectory(string sourceDirectoryName, Zi } } + FinalizeCreateZipArchiveFromDirectory(archive, di, includeBaseDirectory, directoryIsEmpty); + } + + private static FileStream GetFileStreamForOpen(ZipArchiveMode mode, string archiveFileName, bool useAsync) + { + // Relies on FileStream's ctor for checking of archiveFileName + + (FileMode fileMode, FileAccess access, FileShare fileShare) = mode switch + { + ZipArchiveMode.Read => (FileMode.Open, FileAccess.Read, FileShare.Read), + ZipArchiveMode.Create => (FileMode.CreateNew, FileAccess.Write, FileShare.None), + ZipArchiveMode.Update => (FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None), + _ => throw new ArgumentOutOfRangeException(nameof(mode)), + }; + + return new FileStream(archiveFileName, fileMode, access, fileShare, bufferSize: FileStreamBufferSize, useAsync); + } + + private static (string, string) GetFullPathsForDoCreateFromDirectory(string sourceDirectoryName, string destinationArchiveFileName) + { + // Rely on Path.GetFullPath for validation of sourceDirectoryName and destinationArchive + + sourceDirectoryName = Path.GetFullPath(sourceDirectoryName); + destinationArchiveFileName = Path.GetFullPath(destinationArchiveFileName); + + return (sourceDirectoryName, destinationArchiveFileName); + } + + private static string ValidateAndGetFullPathForDoCreateFromDirectory(string sourceDirectoryName, Stream destination, CompressionLevel? 
compressionLevel) + { + ArgumentNullException.ThrowIfNull(destination); + if (!destination.CanWrite) + { + throw new ArgumentException(SR.UnwritableStream, nameof(destination)); + } + if (compressionLevel.HasValue && !Enum.IsDefined(compressionLevel.Value)) + { + throw new ArgumentOutOfRangeException(nameof(compressionLevel)); + } + + // Rely on Path.GetFullPath for validation of sourceDirectoryName + + return Path.GetFullPath(sourceDirectoryName); + } + + private static (bool, string, DirectoryInfo, FileSystemEnumerable<(string, CreateEntryType)>) InitializeCreateZipArchiveFromDirectory(string sourceDirectoryName, bool includeBaseDirectory) + { + bool directoryIsEmpty = true; + + //add files and directories + DirectoryInfo di = new DirectoryInfo(sourceDirectoryName); + + string basePath = di.FullName; + + if (includeBaseDirectory && di.Parent != null) + { + basePath = di.Parent.FullName; + } + + FileSystemEnumerable<(string, CreateEntryType)> fse = CreateEnumerableForCreate(di.FullName); + + return (directoryIsEmpty, basePath, di, fse); + } + + private static void FinalizeCreateZipArchiveFromDirectory(ZipArchive archive, DirectoryInfo di, bool includeBaseDirectory, bool directoryIsEmpty) + { // If no entries create an empty root directory entry: if (includeBaseDirectory && directoryIsEmpty) + { archive.CreateEntry(ArchivingUtils.EntryFromPath(di.Name, appendPathSeparator: true)); + } } + internal const int FileStreamBufferSize = 0x4000; // 16K + private enum CreateEntryType { File, diff --git a/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFile.Extract.Async.cs b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFile.Extract.Async.cs new file mode 100644 index 00000000000000..7c10cf783cf757 --- /dev/null +++ b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFile.Extract.Async.cs @@ -0,0 +1,357 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace System.IO.Compression; + +public static partial class ZipFile +{ + /// + /// Asynchronously extracts all of the files in the specified archive to a directory on the file system. + /// The specified directory must not exist. This method will create all subdirectories and the specified directory. + /// If there is an error while extracting the archive, the archive will remain partially extracted. Each entry will + /// be extracted such that the extracted file has the same relative path to the destinationDirectoryName as the entry + /// has to the archive. The path is permitted to specify relative or absolute path information. Relative path information + /// is interpreted as relative to the current working directory. If a file to be archived has an invalid last modified + /// time, the first datetime representable in the Zip timestamp format (midnight on January 1, 1980) will be used. + /// + /// + /// sourceArchive or destinationDirectoryName is a zero-length string, contains only whitespace, + /// or contains one or more invalid characters as defined by InvalidPathChars. + /// sourceArchive or destinationDirectoryName is null. + /// sourceArchive or destinationDirectoryName specifies a path, file name, + /// or both exceed the system-defined maximum length. For example, on Windows-based platforms, paths must be less than 248 characters, + /// and file names must be less than 260 characters. 
+ /// The path specified by sourceArchive or destinationDirectoryName is invalid, + /// (for example, it is on an unmapped drive). + /// An I/O error has occurred. -or- An archive entry's name is zero-length, contains only whitespace, or contains one or + /// more invalid characters as defined by InvalidPathChars. -or- Extracting an archive entry would result in a file destination that is outside the destination directory (for example, because of parent directory accessors). -or- An archive entry has the same name as an already extracted entry from the same archive. + /// The caller does not have the required permission. + /// sourceArchive or destinationDirectoryName is in an invalid format. + /// sourceArchive was not found. + /// The archive specified by sourceArchive: Is not a valid ZipArchive + /// -or- An archive entry was not found or was corrupt. -or- An archive entry has been compressed using a compression method + /// that is not supported. + /// An asynchronous operation is cancelled. + /// + /// The path to the archive on the file system that is to be extracted. + /// The path to the directory in which to place the extracted files, specified as a relative or absolute path. A relative path is interpreted as relative to the current working directory. + /// The cancellation token to monitor for cancellation requests. + public static Task ExtractToDirectoryAsync(string sourceArchiveFileName, string destinationDirectoryName, CancellationToken cancellationToken = default) => + ExtractToDirectoryAsync(sourceArchiveFileName, destinationDirectoryName, entryNameEncoding: null, overwriteFiles: false, cancellationToken); + + /// + /// Asynchronously extracts all of the files in the specified archive to a directory on the file system. + /// The specified directory must not exist. This method will create all subdirectories and the specified directory. + /// If there is an error while extracting the archive, the archive will remain partially extracted. Each entry will + /// be extracted such that the extracted file has the same relative path to the destinationDirectoryName as the entry + /// has to the archive. The path is permitted to specify relative or absolute path information. Relative path information + /// is interpreted as relative to the current working directory. If a file to be archived has an invalid last modified + /// time, the first datetime representable in the Zip timestamp format (midnight on January 1, 1980) will be used. + /// + /// + /// sourceArchive or destinationDirectoryName is a zero-length string, contains only whitespace, + /// or contains one or more invalid characters as defined by InvalidPathChars. + /// sourceArchive or destinationDirectoryName is null. + /// sourceArchive or destinationDirectoryName specifies a path, file name, + /// or both exceed the system-defined maximum length. For example, on Windows-based platforms, paths must be less than 248 characters, + /// and file names must be less than 260 characters. + /// The path specified by sourceArchive or destinationDirectoryName is invalid, + /// (for example, it is on an unmapped drive). + /// An I/O error has occurred. -or- An archive entry's name is zero-length, contains only whitespace, or contains one or + /// more invalid characters as defined by InvalidPathChars. -or- Extracting an archive entry would result in a file destination that is outside the destination directory (for example, because of parent directory accessors). 
-or- An archive entry has the same name as an already extracted entry from the same archive. + /// The caller does not have the required permission. + /// sourceArchive or destinationDirectoryName is in an invalid format. + /// sourceArchive was not found. + /// The archive specified by sourceArchive: Is not a valid ZipArchive + /// -or- An archive entry was not found or was corrupt. -or- An archive entry has been compressed using a compression method + /// that is not supported. + /// An asynchronous operation is cancelled. + /// + /// The path to the archive on the file system that is to be extracted. + /// The path to the directory in which to place the extracted files, specified as a relative or absolute path. A relative path is interpreted as relative to the current working directory. + /// True to indicate overwrite. + /// The cancellation token to monitor for cancellation requests. + public static Task ExtractToDirectoryAsync(string sourceArchiveFileName, string destinationDirectoryName, bool overwriteFiles, CancellationToken cancellationToken = default) => + ExtractToDirectoryAsync(sourceArchiveFileName, destinationDirectoryName, entryNameEncoding: null, overwriteFiles: overwriteFiles, cancellationToken); + + /// + /// Asynchronously extracts all of the files in the specified archive to a directory on the file system. + /// The specified directory must not exist. This method will create all subdirectories and the specified directory. + /// If there is an error while extracting the archive, the archive will remain partially extracted. Each entry will + /// be extracted such that the extracted file has the same relative path to the destinationDirectoryName as the entry + /// has to the archive. The path is permitted to specify relative or absolute path information. Relative path information + /// is interpreted as relative to the current working directory. If a file to be archived has an invalid last modified + /// time, the first datetime representable in the Zip timestamp format (midnight on January 1, 1980) will be used. + /// + /// + /// sourceArchive or destinationDirectoryName is a zero-length string, contains only whitespace, + /// or contains one or more invalid characters as defined by InvalidPathChars. + /// sourceArchive or destinationDirectoryName is null. + /// sourceArchive or destinationDirectoryName specifies a path, file name, + /// or both exceed the system-defined maximum length. For example, on Windows-based platforms, paths must be less than 248 characters, + /// and file names must be less than 260 characters. + /// The path specified by sourceArchive or destinationDirectoryName is invalid, + /// (for example, it is on an unmapped drive). + /// An I/O error has occurred. -or- An archive entry's name is zero-length, contains only whitespace, or contains one or + /// more invalid characters as defined by InvalidPathChars. -or- Extracting an archive entry would result in a file destination that is outside the destination directory (for example, because of parent directory accessors). -or- An archive entry has the same name as an already extracted entry from the same archive. + /// The caller does not have the required permission. + /// sourceArchive or destinationDirectoryName is in an invalid format. + /// sourceArchive was not found. + /// The archive specified by sourceArchive: Is not a valid ZipArchive + /// -or- An archive entry was not found or was corrupt. -or- An archive entry has been compressed using a compression method + /// that is not supported. 
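For orientation, a minimal caller-side sketch of the two path-based overloads introduced above follows; the archive name, destination directory, and timeout are illustrative placeholders rather than part of the patch.

using System;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;

class ExtractToDirectorySample
{
    static async Task Main()
    {
        using var cts = new CancellationTokenSource(TimeSpan.FromMinutes(5));

        // Basic overload: existing files in the destination are not overwritten.
        await ZipFile.ExtractToDirectoryAsync("data.zip", "extracted", cts.Token);

        // overwriteFiles opt-in: files that already exist in the destination are replaced.
        await ZipFile.ExtractToDirectoryAsync("data.zip", "extracted", overwriteFiles: true, cts.Token);
    }
}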
+ /// An asynchronous operation is cancelled. + /// + /// The path to the archive on the file system that is to be extracted. + /// The path to the directory on the file system. The directory specified must not exist, but the directory that it is contained in must exist. + /// The encoding to use when reading or writing entry names and comments in this ZipArchive. + /// /// NOTE: Specifying this parameter to values other than null is discouraged. + /// However, this may be necessary for interoperability with ZIP archive tools and libraries that do not correctly support + /// UTF-8 encoding for entry names or comments.
+ /// This value is used as follows:
+ /// If entryNameEncoding is not specified (== null): + /// + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is not set, + /// use the current system default code page (Encoding.Default) in order to decode the entry name and comment. + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is set, + /// use UTF-8 (Encoding.UTF8) in order to decode the entry name and comment. + /// + /// If entryNameEncoding is specified (!= null): + /// + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is not set, + /// use the specified entryNameEncoding in order to decode the entry name and comment. + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is set, + /// use UTF-8 (Encoding.UTF8) in order to decode the entry name and comment. + /// + /// Note that Unicode encodings other than UTF-8 may not be currently used for the entryNameEncoding, + /// otherwise an is thrown. + /// + /// The cancellation token to monitor for cancellation requests. + public static Task ExtractToDirectoryAsync(string sourceArchiveFileName, string destinationDirectoryName, Encoding? entryNameEncoding, CancellationToken cancellationToken = default) => + ExtractToDirectoryAsync(sourceArchiveFileName, destinationDirectoryName, entryNameEncoding: entryNameEncoding, overwriteFiles: false, cancellationToken); + + /// + /// Asynchronously extracts all of the files in the specified archive to a directory on the file system. + /// The specified directory must not exist. This method will create all subdirectories and the specified directory. + /// If there is an error while extracting the archive, the archive will remain partially extracted. Each entry will + /// be extracted such that the extracted file has the same relative path to the destinationDirectoryName as the entry + /// has to the archive. The path is permitted to specify relative or absolute path information. Relative path information + /// is interpreted as relative to the current working directory. If a file to be archived has an invalid last modified + /// time, the first datetime representable in the Zip timestamp format (midnight on January 1, 1980) will be used. + /// + /// + /// sourceArchive or destinationDirectoryName is a zero-length string, contains only whitespace, + /// or contains one or more invalid characters as defined by InvalidPathChars. + /// sourceArchive or destinationDirectoryName is null. + /// sourceArchive or destinationDirectoryName specifies a path, file name, + /// or both exceed the system-defined maximum length. For example, on Windows-based platforms, paths must be less than 248 characters, + /// and file names must be less than 260 characters. + /// The path specified by sourceArchive or destinationDirectoryName is invalid, + /// (for example, it is on an unmapped drive). + /// An I/O error has occurred. -or- An archive entry's name is zero-length, contains only whitespace, or contains one or + /// more invalid characters as defined by InvalidPathChars. -or- Extracting an archive entry would result in a file destination that is outside the destination directory (for example, because of parent directory accessors). -or- An archive entry has the same name as an already extracted entry from the same archive. + /// The caller does not have the required permission. 
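As a hedged illustration of the entryNameEncoding overload declared above: Latin-1 stands in for whatever legacy code page the producing tool used, and per the remarks a Unicode encoding other than UTF-8 would be rejected.

using System.IO.Compression;
using System.Text;
using System.Threading.Tasks;

class LegacyEntryNameSample
{
    static async Task Main()
    {
        // Entries whose headers do not set the UTF-8 (EFS) flag have their names and
        // comments decoded with the supplied encoding instead of the system default code page.
        await ZipFile.ExtractToDirectoryAsync("legacy-tool.zip", "extracted", Encoding.Latin1);
    }
}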
+ /// sourceArchive or destinationDirectoryName is in an invalid format. + /// sourceArchive was not found. + /// The archive specified by sourceArchive: Is not a valid ZipArchive + /// -or- An archive entry was not found or was corrupt. -or- An archive entry has been compressed using a compression method + /// that is not supported. + /// An asynchronous operation is cancelled. + /// + /// The path to the archive on the file system that is to be extracted. + /// The path to the directory in which to place the extracted files, specified as a relative or absolute path. A relative path is interpreted as relative to the current working directory. + /// True to indicate overwrite. + /// The encoding to use when reading or writing entry names and comments in this ZipArchive. + /// /// NOTE: Specifying this parameter to values other than null is discouraged. + /// However, this may be necessary for interoperability with ZIP archive tools and libraries that do not correctly support + /// UTF-8 encoding for entry names or comments.
+ /// This value is used as follows:
+ /// If entryNameEncoding is not specified (== null): + /// + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is not set, + /// use the current system default code page (Encoding.Default) in order to decode the entry name and comment. + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is set, + /// use UTF-8 (Encoding.UTF8) in order to decode the entry name and comment. + /// + /// If entryNameEncoding is specified (!= null): + /// + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is not set, + /// use the specified entryNameEncoding in order to decode the entry name and comment. + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is set, + /// use UTF-8 (Encoding.UTF8) in order to decode the entry name and comment. + /// + /// Note that Unicode encodings other than UTF-8 may not be currently used for the entryNameEncoding, + /// otherwise an is thrown. + /// + /// The cancellation token to monitor for cancellation requests. + public static async Task ExtractToDirectoryAsync(string sourceArchiveFileName, string destinationDirectoryName, Encoding? entryNameEncoding, bool overwriteFiles, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + ArgumentNullException.ThrowIfNull(sourceArchiveFileName); + + ZipArchive archive = await OpenAsync(sourceArchiveFileName, ZipArchiveMode.Read, entryNameEncoding, cancellationToken).ConfigureAwait(false); + await using (archive) + { + await archive.ExtractToDirectoryAsync(destinationDirectoryName, overwriteFiles, cancellationToken).ConfigureAwait(false); + } + } + + /// + /// Asynchronously extracts all the files from the zip archive stored in the specified stream and places them in the specified destination directory on the file system. + /// + /// The stream from which the zip archive is to be extracted. + /// The path to the directory in which to place the extracted files, specified as a relative or absolute path. A relative path is interpreted as relative to the current working directory. + /// The cancellation token to monitor for cancellation requests. + /// This method creates the specified directory and all subdirectories. The destination directory cannot already exist. + /// Exceptions related to validating the paths in the or the files in the zip archive contained in parameters are thrown before extraction. Otherwise, if an error occurs during extraction, the archive remains partially extracted. + /// Each extracted file has the same relative path to the directory specified by as its source entry has to the root of the archive. + /// If a file to be archived has an invalid last modified time, the first date and time representable in the Zip timestamp format (midnight on January 1, 1980) will be used. + /// > is , contains only white space, or contains at least one invalid character. + /// or is . + /// The specified path in exceeds the system-defined maximum length. + /// The specified path is invalid (for example, it is on an unmapped drive). + /// The name of an entry in the archive is , contains only white space, or contains at least one invalid character. + /// -or- + /// Extracting an archive entry would create a file that is outside the directory specified by . 
(For example, this might happen if the entry name contains parent directory accessors.) + /// -or- + /// An archive entry to extract has the same name as an entry that has already been extracted or that exists in . + /// The caller does not have the required permission to access the archive or the destination directory. + /// contains an invalid format. + /// The archive contained in the stream is not a valid zip archive. + /// -or- + /// An archive entry was not found or was corrupt. + /// -or- + /// An archive entry was compressed by using a compression method that is not supported. + /// An asynchronous operation is cancelled. + public static Task ExtractToDirectoryAsync(Stream source, string destinationDirectoryName, CancellationToken cancellationToken = default) => + ExtractToDirectoryAsync(source, destinationDirectoryName, entryNameEncoding: null, overwriteFiles: false, cancellationToken); + + /// + /// Asynchronously extracts all the files from the zip archive stored in the specified stream and places them in the specified destination directory on the file system, and optionally allows choosing if the files in the destination directory should be overwritten. + /// + /// The stream from which the zip archive is to be extracted. + /// The path to the directory in which to place the extracted files, specified as a relative or absolute path. A relative path is interpreted as relative to the current working directory. + /// to overwrite files; otherwise. + /// The cancellation token to monitor for cancellation requests. + /// This method creates the specified directory and all subdirectories. The destination directory cannot already exist. + /// Exceptions related to validating the paths in the or the files in the zip archive contained in parameters are thrown before extraction. Otherwise, if an error occurs during extraction, the archive remains partially extracted. + /// Each extracted file has the same relative path to the directory specified by as its source entry has to the root of the archive. + /// If a file to be archived has an invalid last modified time, the first date and time representable in the Zip timestamp format (midnight on January 1, 1980) will be used. + /// > is , contains only white space, or contains at least one invalid character. + /// or is . + /// The specified path in exceeds the system-defined maximum length. + /// The specified path is invalid (for example, it is on an unmapped drive). + /// The name of an entry in the archive is , contains only white space, or contains at least one invalid character. + /// -or- + /// Extracting an archive entry would create a file that is outside the directory specified by . (For example, this might happen if the entry name contains parent directory accessors.) + /// -or- + /// is and an archive entry to extract has the same name as an entry that has already been extracted or that exists in . + /// The caller does not have the required permission to access the archive or the destination directory. + /// contains an invalid format. + /// The archive contained in the stream is not a valid zip archive. + /// -or- + /// An archive entry was not found or was corrupt. + /// -or- + /// An archive entry was compressed by using a compression method that is not supported. + /// An asynchronous operation is cancelled. 
+ public static Task ExtractToDirectoryAsync(Stream source, string destinationDirectoryName, bool overwriteFiles, CancellationToken cancellationToken = default) => + ExtractToDirectoryAsync(source, destinationDirectoryName, entryNameEncoding: null, overwriteFiles: overwriteFiles, cancellationToken); + + /// + /// Asynchronously extracts all the files from the zip archive stored in the specified stream and places them in the specified destination directory on the file system and uses the specified character encoding for entry names. + /// + /// The stream from which the zip archive is to be extracted. + /// The path to the directory in which to place the extracted files, specified as a relative or absolute path. A relative path is interpreted as relative to the current working directory. + /// The encoding to use when reading or writing entry names and comments in this archive. Specify a value for this parameter only when an encoding is required for interoperability with zip archive tools and libraries that do not support UTF-8 encoding for entry names or comments. + /// The cancellation token to monitor for cancellation requests. + /// This method creates the specified directory and all subdirectories. The destination directory cannot already exist. + /// Exceptions related to validating the paths in the or the files in the zip archive contained in parameters are thrown before extraction. Otherwise, if an error occurs during extraction, the archive remains partially extracted. + /// Each extracted file has the same relative path to the directory specified by as its source entry has to the root of the archive. + /// If a file to be archived has an invalid last modified time, the first date and time representable in the Zip timestamp format (midnight on January 1, 1980) will be used. + /// If is set to a value other than , entry names and comments are decoded according to the following rules: + /// - For entry names and comments where the language encoding flag (in the general-purpose bit flag of the local file header) is not set, the entry names and comments are decoded by using the specified encoding. + /// - For entries where the language encoding flag is set, the entry names and comments are decoded by using UTF-8. + /// If is set to , entry names and comments are decoded according to the following rules: + /// - For entries where the language encoding flag (in the general-purpose bit flag of the local file header) is not set, entry names and comments are decoded by using the current system default code page. + /// - For entries where the language encoding flag is set, the entry names and comments are decoded by using UTF-8. + /// > is , contains only white space, or contains at least one invalid character. + /// -or- + /// is set to a Unicode encoding other than UTF-8. + /// or is . + /// The specified path in exceeds the system-defined maximum length. + /// The specified path is invalid (for example, it is on an unmapped drive). + /// The name of an entry in the archive is , contains only white space, or contains at least one invalid character. + /// -or- + /// Extracting an archive entry would create a file that is outside the directory specified by . (For example, this might happen if the entry name contains parent directory accessors.) + /// -or- + /// An archive entry to extract has the same name as an entry that has already been extracted or that exists in . + /// The caller does not have the required permission to access the archive or the destination directory. 
+ /// contains an invalid format. + /// The archive contained in the stream is not a valid zip archive. + /// -or- + /// An archive entry was not found or was corrupt. + /// -or- + /// An archive entry was compressed by using a compression method that is not supported. + /// An asynchronous operation is cancelled. + public static Task ExtractToDirectoryAsync(Stream source, string destinationDirectoryName, Encoding? entryNameEncoding, CancellationToken cancellationToken = default) => + ExtractToDirectoryAsync(source, destinationDirectoryName, entryNameEncoding: entryNameEncoding, overwriteFiles: false, cancellationToken); + + /// + /// Asynchronously extracts all the files from the zip archive stored in the specified stream and places them in the specified destination directory on the file system, uses the specified character encoding for entry names, and optionally allows choosing if the files in the destination directory should be overwritten. + /// + /// The stream from which the zip archive is to be extracted. + /// The path to the directory in which to place the extracted files, specified as a relative or absolute path. A relative path is interpreted as relative to the current working directory. + /// The encoding to use when reading or writing entry names and comments in this archive. Specify a value for this parameter only when an encoding is required for interoperability with zip archive tools and libraries that do not support UTF-8 encoding for entry names or comments. + /// to overwrite files; otherwise. + /// The cancellation token to monitor for cancellation requests. + /// This method creates the specified directory and all subdirectories. The destination directory cannot already exist. + /// Exceptions related to validating the paths in the or the files in the zip archive contained in parameters are thrown before extraction. Otherwise, if an error occurs during extraction, the archive remains partially extracted. + /// Each extracted file has the same relative path to the directory specified by as its source entry has to the root of the archive. + /// If a file to be archived has an invalid last modified time, the first date and time representable in the Zip timestamp format (midnight on January 1, 1980) will be used. + /// If is set to a value other than , entry names and comments are decoded according to the following rules: + /// - For entry names and comments where the language encoding flag (in the general-purpose bit flag of the local file header) is not set, the entry names and comments are decoded by using the specified encoding. + /// - For entries where the language encoding flag is set, the entry names and comments are decoded by using UTF-8. + /// If is set to , entry names and comments are decoded according to the following rules: + /// - For entries where the language encoding flag (in the general-purpose bit flag of the local file header) is not set, entry names are decoded by using the current system default code page. + /// - For entries where the language encoding flag is set, the entry names and comments are decoded by using UTF-8. + /// > is , contains only white space, or contains at least one invalid character. + /// -or- + /// is set to a Unicode encoding other than UTF-8. + /// or is . + /// The specified path in exceeds the system-defined maximum length. + /// The specified path is invalid (for example, it is on an unmapped drive). + /// The name of an entry in the archive is , contains only white space, or contains at least one invalid character. 
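The stream-based overloads above only require a readable stream; the sketch below uses a FileStream opened for asynchronous I/O as a stand-in for a network response or other non-file source, with placeholder paths.

using System.IO;
using System.IO.Compression;
using System.Threading.Tasks;

class StreamExtractSample
{
    static async Task Main()
    {
        await using FileStream source = new FileStream(
            "data.zip", FileMode.Open, FileAccess.Read, FileShare.Read,
            bufferSize: 0x4000, useAsync: true);

        // Extracts every entry under "extracted"; pass overwriteFiles: true to replace
        // files that already exist there.
        await ZipFile.ExtractToDirectoryAsync(source, "extracted", overwriteFiles: false);
    }
}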
+ /// -or- + /// Extracting an archive entry would create a file that is outside the directory specified by . (For example, this might happen if the entry name contains parent directory accessors.) + /// -or- + /// is and an archive entry to extract has the same name as an entry that has already been extracted or that exists in . + /// The caller does not have the required permission to access the archive or the destination directory. + /// contains an invalid format. + /// The archive contained in the stream is not a valid zip archive. + /// -or- + /// An archive entry was not found or was corrupt. + /// -or- + /// An archive entry was compressed by using a compression method that is not supported. + /// An asynchronous operation is cancelled. + public static async Task ExtractToDirectoryAsync(Stream source, string destinationDirectoryName, Encoding? entryNameEncoding, bool overwriteFiles, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + ArgumentNullException.ThrowIfNull(source); + if (!source.CanRead) + { + throw new ArgumentException(SR.UnreadableStream, nameof(source)); + } + + ZipArchive archive = await ZipArchive.CreateAsync(source, ZipArchiveMode.Read, leaveOpen: true, entryNameEncoding, cancellationToken).ConfigureAwait(false); + await using (archive) + { + await archive.ExtractToDirectoryAsync(destinationDirectoryName, overwriteFiles, cancellationToken).ConfigureAwait(false); + } + } +} diff --git a/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchive.Create.Async.cs b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchive.Create.Async.cs new file mode 100644 index 00000000000000..189b3a9076535e --- /dev/null +++ b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchive.Create.Async.cs @@ -0,0 +1,102 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Threading; +using System.Threading.Tasks; + +namespace System.IO.Compression; + +public static partial class ZipFileExtensions +{ + /// + ///

Asynchronously adds a file from the file system to the archive under the specified entry name. + /// The new entry in the archive will contain the contents of the file. + /// The last write time of the archive entry is set to the last write time of the file on the file system. + /// If an entry with the specified name already exists in the archive, a second entry will be created that has an identical name. + /// If the specified source file has an invalid last modified time, the first datetime representable in the Zip timestamp format + /// (midnight on January 1, 1980) will be used.

+ /// + ///

If an entry with the specified name already exists in the archive, a second entry will be created that has an identical name.

+ /// + ///

Since no CompressionLevel is specified, the default provided by the implementation of the underlying compression + /// algorithm will be used; the ZipArchive will not impose its own default. + /// (Currently, the underlying compression algorithm is provided by the System.IO.Compression.DeflateStream class.)

+ ///
+ /// + /// sourceFileName is a zero-length string, contains only whitespace, or contains one or more + /// invalid characters as defined by InvalidPathChars. -or- entryName is a zero-length string. + /// sourceFileName or entryName is null. + /// In sourceFileName, the specified path, file name, or both exceed the system-defined maximum length. + /// For example, on Windows-based platforms, paths must be less than 248 characters, and file names must be less than 260 characters. + /// The specified sourceFileName is invalid, (for example, it is on an unmapped drive). + /// An I/O error occurred while opening the file specified by sourceFileName. + /// sourceFileName specified a directory. -or- The caller does not have the + /// required permission. + /// The file specified in sourceFileName was not found. + /// sourceFileName is in an invalid format or the ZipArchive does not support writing. + /// The ZipArchive has already been closed. + /// An asynchronous operation is cancelled. + /// + /// The zip archive to add the file to. + /// The path to the file on the file system to be copied from. The path is permitted to specify + /// relative or absolute path information. Relative path information is interpreted as relative to the current working directory. + /// The name of the entry to be created. + /// The cancellation token to monitor for cancellation requests. + /// A wrapper for the newly created entry. + public static Task CreateEntryFromFileAsync(this ZipArchive destination, string sourceFileName, string entryName, CancellationToken cancellationToken = default) => + DoCreateEntryFromFileAsync(destination, sourceFileName, entryName, null, cancellationToken); + + /// + ///

Asynchronously adds a file from the file system to the archive under the specified entry name. + /// The new entry in the archive will contain the contents of the file. + /// The last write time of the archive entry is set to the last write time of the file on the file system. + /// If an entry with the specified name already exists in the archive, a second entry will be created that has an identical name. + /// If the specified source file has an invalid last modified time, the first datetime representable in the Zip timestamp format + /// (midnight on January 1, 1980) will be used.

+ ///

If an entry with the specified name already exists in the archive, a second entry will be created that has an identical name.

+ ///
+ /// sourceFileName is a zero-length string, contains only whitespace, or contains one or more + /// invalid characters as defined by InvalidPathChars. -or- entryName is a zero-length string. + /// sourceFileName or entryName is null. + /// In sourceFileName, the specified path, file name, or both exceed the system-defined maximum length. + /// For example, on Windows-based platforms, paths must be less than 248 characters, and file names must be less than 260 characters. + /// The specified sourceFileName is invalid, (for example, it is on an unmapped drive). + /// An I/O error occurred while opening the file specified by sourceFileName. + /// sourceFileName specified a directory. + /// -or- The caller does not have the required permission. + /// The file specified in sourceFileName was not found. + /// sourceFileName is in an invalid format or the ZipArchive does not support writing. + /// The ZipArchive has already been closed. + /// An asynchronous operation is cancelled. + /// + /// The zip archive to add the file to. + /// The path to the file on the file system to be copied from. The path is permitted to specify relative + /// or absolute path information. Relative path information is interpreted as relative to the current working directory. + /// The name of the entry to be created. + /// The level of the compression (speed/memory vs. compressed size trade-off). + /// The cancellation token to monitor for cancellation requests. + /// A wrapper for the newly created entry. + public static Task CreateEntryFromFileAsync(this ZipArchive destination, + string sourceFileName, string entryName, CompressionLevel compressionLevel, CancellationToken cancellationToken = default) => + DoCreateEntryFromFileAsync(destination, sourceFileName, entryName, compressionLevel, cancellationToken); + + internal static async Task DoCreateEntryFromFileAsync(this ZipArchive destination, string sourceFileName, string entryName, + CompressionLevel? compressionLevel, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + (FileStream fs, ZipArchiveEntry entry) = InitializeDoCreateEntryFromFile(destination, sourceFileName, entryName, compressionLevel, useAsync: true); + + await using (fs) + { + Stream es = await entry.OpenAsync(cancellationToken).ConfigureAwait(false); + await using (es) + { + await fs.CopyToAsync(es, cancellationToken).ConfigureAwait(false); + } + } + + return entry; + } + +} diff --git a/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchive.Create.cs b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchive.Create.cs index c909d818123009..a523577247fd43 100644 --- a/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchive.Create.cs +++ b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchive.Create.cs @@ -79,6 +79,21 @@ public static ZipArchiveEntry CreateEntryFromFile(this ZipArchive destination, internal static ZipArchiveEntry DoCreateEntryFromFile(this ZipArchive destination, string sourceFileName, string entryName, CompressionLevel? 
compressionLevel) + { + (FileStream fs, ZipArchiveEntry entry) = InitializeDoCreateEntryFromFile(destination, sourceFileName, entryName, compressionLevel, useAsync: true); + + using (fs) + { + using (Stream es = entry.Open()) + { + fs.CopyTo(es); + } + } + + return entry; + } + + private static (FileStream, ZipArchiveEntry) InitializeDoCreateEntryFromFile(ZipArchive destination, string sourceFileName, string entryName, CompressionLevel? compressionLevel, bool useAsync) { ArgumentNullException.ThrowIfNull(destination); ArgumentNullException.ThrowIfNull(sourceFileName); @@ -89,28 +104,25 @@ internal static ZipArchiveEntry DoCreateEntryFromFile(this ZipArchive destinatio // Argument checking gets passed down to FileStream's ctor and CreateEntry - using (FileStream fs = new FileStream(sourceFileName, FileMode.Open, FileAccess.Read, FileShare.Read, bufferSize: 0x1000, useAsync: false)) - { - ZipArchiveEntry entry = compressionLevel.HasValue - ? destination.CreateEntry(entryName, compressionLevel.Value) - : destination.CreateEntry(entryName); - - DateTime lastWrite = File.GetLastWriteTime(sourceFileName); + FileStream fs = new FileStream(sourceFileName, FileMode.Open, FileAccess.Read, FileShare.Read, ZipFile.FileStreamBufferSize, useAsync); - // If file to be archived has an invalid last modified time, use the first datetime representable in the Zip timestamp format - // (midnight on January 1, 1980): - if (lastWrite.Year < 1980 || lastWrite.Year > 2107) - lastWrite = new DateTime(1980, 1, 1, 0, 0, 0); + ZipArchiveEntry entry = compressionLevel.HasValue ? + destination.CreateEntry(entryName, compressionLevel.Value) : + destination.CreateEntry(entryName); - entry.LastWriteTime = lastWrite; + DateTime lastWrite = File.GetLastWriteTime(sourceFileName); - SetExternalAttributes(fs, entry); + // If file to be archived has an invalid last modified time, use the first datetime representable in the Zip timestamp format + // (midnight on January 1, 1980): + if (lastWrite.Year is < 1980 or > 2107) + { + lastWrite = new DateTime(1980, 1, 1, 0, 0, 0); + } + entry.LastWriteTime = lastWrite; - using (Stream es = entry.Open()) - fs.CopyTo(es); + SetExternalAttributes(fs, entry); - return entry; - } + return (fs, entry); } static partial void SetExternalAttributes(FileStream fs, ZipArchiveEntry entry); diff --git a/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchive.Extract.Async.cs b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchive.Extract.Async.cs new file mode 100644 index 00000000000000..f4be66987a5ec5 --- /dev/null +++ b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchive.Extract.Async.cs @@ -0,0 +1,83 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Threading; +using System.Threading.Tasks; + +namespace System.IO.Compression; + +public static partial class ZipFileExtensions +{ + /// + /// Asynchronously extracts all of the files in the archive to a directory on the file system. The specified directory may already exist. + /// This method will create all subdirectories and the specified directory if necessary. + /// If there is an error while extracting the archive, the archive will remain partially extracted. 
+ /// Each entry will be extracted such that the extracted file has the same relative path to destinationDirectoryName as the + /// entry has to the root of the archive. If a file to be archived has an invalid last modified time, the first datetime + /// representable in the Zip timestamp format (midnight on January 1, 1980) will be used. + /// + /// destinationDirectoryName is a zero-length string, contains only whitespace, + /// or contains one or more invalid characters as defined by InvalidPathChars. + /// destinationDirectoryName is null. + /// The specified path, file name, or both exceed the system-defined maximum length. + /// For example, on Windows-based platforms, paths must be less than 248 characters, and file names must be less than 260 characters. + /// The specified path is invalid, (for example, it is on an unmapped drive). + /// An archive entry?s name is zero-length, contains only whitespace, or contains one or more invalid + /// characters as defined by InvalidPathChars. -or- Extracting an archive entry would have resulted in a destination + /// file that is outside destinationDirectoryName (for example, if the entry name contains parent directory accessors). + /// -or- An archive entry has the same name as an already extracted entry from the same archive. + /// The caller does not have the required permission. + /// destinationDirectoryName is in an invalid format. + /// An archive entry was not found or was corrupt. + /// -or- An archive entry has been compressed using a compression method that is not supported. + /// An asynchronous operation is cancelled. + /// The zip archive to extract files from. + /// The path to the directory on the file system. + /// The directory specified must not exist. The path is permitted to specify relative or absolute path information. + /// Relative path information is interpreted as relative to the current working directory. + /// The cancellation token to monitor for cancellation requests. + public static Task ExtractToDirectoryAsync(this ZipArchive source, string destinationDirectoryName, CancellationToken cancellationToken = default) => + ExtractToDirectoryAsync(source, destinationDirectoryName, overwriteFiles: false, cancellationToken); + + /// + /// Extracts all of the files in the archive to a directory on the file system. The specified directory may already exist. + /// This method will create all subdirectories and the specified directory if necessary. + /// If there is an error while extracting the archive, the archive will remain partially extracted. + /// Each entry will be extracted such that the extracted file has the same relative path to destinationDirectoryName as the + /// entry has to the root of the archive. If a file to be archived has an invalid last modified time, the first datetime + /// representable in the Zip timestamp format (midnight on January 1, 1980) will be used. + /// + /// + /// destinationDirectoryName is a zero-length string, contains only whitespace, + /// or contains one or more invalid characters as defined by InvalidPathChars. + /// destinationDirectoryName is null. + /// The specified path, file name, or both exceed the system-defined maximum length. + /// For example, on Windows-based platforms, paths must be less than 248 characters, and file names must be less than 260 characters. + /// The specified path is invalid, (for example, it is on an unmapped drive). 
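A usage sketch for the archive-level extension above. ZipFile.OpenAsync is added elsewhere in this change and is assumed here to take the same (path, mode, entryNameEncoding, cancellationToken) shape it is called with earlier in this diff; the helper name and paths are hypothetical.

using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;

static class ArchiveExtractHelper
{
    static async Task ExtractAllAsync(string archivePath, string destination, CancellationToken ct)
    {
        ZipArchive archive = await ZipFile.OpenAsync(archivePath, ZipArchiveMode.Read, entryNameEncoding: null, ct);
        await using (archive) // ZipArchive is asynchronously disposable in this change-set
        {
            // Extracts every entry relative to 'destination'; existing files are not overwritten.
            await archive.ExtractToDirectoryAsync(destination, ct);
        }
    }
}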
+ /// An archive entry?s name is zero-length, contains only whitespace, or contains one or more invalid + /// characters as defined by InvalidPathChars. -or- Extracting an archive entry would have resulted in a destination + /// file that is outside destinationDirectoryName (for example, if the entry name contains parent directory accessors). + /// -or- An archive entry has the same name as an already extracted entry from the same archive. + /// The caller does not have the required permission. + /// destinationDirectoryName is in an invalid format. + /// An archive entry was not found or was corrupt. + /// -or- An archive entry has been compressed using a compression method that is not supported. + /// The zip archive to extract files from. + /// The path to the directory on the file system. + /// The directory specified must not exist. The path is permitted to specify relative or absolute path information. + /// Relative path information is interpreted as relative to the current working directory. + /// True to indicate overwrite. + /// The cancellation token to monitor for cancellation requests. + public static async Task ExtractToDirectoryAsync(this ZipArchive source, string destinationDirectoryName, bool overwriteFiles, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + ArgumentNullException.ThrowIfNull(source); + ArgumentNullException.ThrowIfNull(destinationDirectoryName); + + foreach (ZipArchiveEntry entry in source.Entries) + { + await entry.ExtractRelativeToDirectoryAsync(destinationDirectoryName, overwriteFiles, cancellationToken).ConfigureAwait(false); + } + } +} diff --git a/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchive.Extract.cs b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchive.Extract.cs index addddb489fcfc6..ad9cfd4a6c2e63 100644 --- a/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchive.Extract.cs +++ b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchive.Extract.cs @@ -1,8 +1,6 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. -using System.ComponentModel; - namespace System.IO.Compression { public static partial class ZipFileExtensions diff --git a/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchiveEntry.Extract.Async.cs b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchiveEntry.Extract.Async.cs new file mode 100644 index 00000000000000..c311d8d84bc690 --- /dev/null +++ b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchiveEntry.Extract.Async.cs @@ -0,0 +1,99 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Threading; +using System.Threading.Tasks; + +namespace System.IO.Compression; + +public static partial class ZipFileExtensions +{ + /// + /// Asynchronously creates a file on the file system with the entry's contents and the specified name. The last write time of the file is set to the + /// entry's last write time. This method does not allow overwriting of an existing file with the same name. 
Attempting to extract explicit + /// directories (entries with names that end in directory separator characters) will not result in the creation of a directory. + /// + /// + /// The caller does not have the required permission. + /// destinationFileName is a zero-length string, contains only whitespace, or contains one or more + /// invalid characters as defined by InvalidPathChars. -or- destinationFileName specifies a directory. + /// destinationFileName is null. + /// The specified path, file name, or both exceed the system-defined maximum length. + /// For example, on Windows-based platforms, paths must be less than 248 characters, and file names must be less than 260 characters. + /// The path specified in destinationFileName is invalid (for example, it is on + /// an unmapped drive). + /// An I/O error has occurred. -or- The entry is currently open for writing. + /// -or- The entry has been deleted from the archive. + /// destinationFileName is in an invalid format + /// -or- The ZipArchive that this entry belongs to was opened in a write-only mode. + /// The entry is missing from the archive or is corrupt and cannot be read + /// -or- The entry has been compressed using a compression method that is not supported. + /// The ZipArchive that this entry belongs to has been disposed. + /// The zip archive entry to extract a file from. + /// The name of the file that will hold the contents of the entry. + /// The path is permitted to specify relative or absolute path information. + /// Relative path information is interpreted as relative to the current working directory. + /// /// The cancellation token to monitor for cancellation requests. + public static Task ExtractToFileAsync(this ZipArchiveEntry source, string destinationFileName, CancellationToken cancellationToken = default) => + ExtractToFileAsync(source, destinationFileName, false, cancellationToken); + + /// + /// Asynchronously creates a file on the file system with the entry's contents and the specified name. + /// The last write time of the file is set to the entry's last write time. + /// This method does allows overwriting of an existing file with the same name. + /// + /// The caller does not have the required permission. + /// destinationFileName is a zero-length string, contains only whitespace, + /// or contains one or more invalid characters as defined by InvalidPathChars. -or- destinationFileName specifies a directory. + /// destinationFileName is null. + /// The specified path, file name, or both exceed the system-defined maximum length. + /// For example, on Windows-based platforms, paths must be less than 248 characters, and file names must be less than 260 characters. + /// The path specified in destinationFileName is invalid + /// (for example, it is on an unmapped drive). + /// An I/O error has occurred. + /// -or- The entry is currently open for writing. + /// -or- The entry has been deleted from the archive. + /// destinationFileName is in an invalid format + /// -or- The ZipArchive that this entry belongs to was opened in a write-only mode. + /// The entry is missing from the archive or is corrupt and cannot be read + /// -or- The entry has been compressed using a compression method that is not supported. + /// The ZipArchive that this entry belongs to has been disposed. + /// An asynchronous operation is cancelled. + /// The zip archive entry to extract a file from. + /// The name of the file that will hold the contents of the entry. 
+ /// The path is permitted to specify relative or absolute path information. + /// Relative path information is interpreted as relative to the current working directory. + /// True to indicate overwrite. + /// The cancellation token to monitor for cancellation requests. + public static async Task ExtractToFileAsync(this ZipArchiveEntry source, string destinationFileName, bool overwrite, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + ExtractToFileInitialize(source, destinationFileName, overwrite, out FileStreamOptions fileStreamOptions); + + FileStream fs = new FileStream(destinationFileName, fileStreamOptions); + await using (fs) + { + Stream es = await source.OpenAsync(cancellationToken).ConfigureAwait(false); + await using (es) + { + await es.CopyToAsync(fs, cancellationToken).ConfigureAwait(false); + } + } + + ExtractToFileFinalize(source, destinationFileName); + } + + internal static async Task ExtractRelativeToDirectoryAsync(this ZipArchiveEntry source, string destinationDirectoryName, bool overwrite, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (ExtractRelativeToDirectoryCheckIfFile(source, destinationDirectoryName, out string fileDestinationPath)) + { + // If it is a file: + // Create containing directory: + Directory.CreateDirectory(Path.GetDirectoryName(fileDestinationPath)!); + await source.ExtractToFileAsync(fileDestinationPath, overwrite: overwrite, cancellationToken).ConfigureAwait(false); + } + } +} diff --git a/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchiveEntry.Extract.cs b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchiveEntry.Extract.cs index d77de24adbb03d..cbd0ebd901ba2d 100644 --- a/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchiveEntry.Extract.cs +++ b/src/libraries/System.IO.Compression.ZipFile/src/System/IO/Compression/ZipFileExtensions.ZipArchiveEntry.Extract.cs @@ -61,16 +61,29 @@ public static void ExtractToFile(this ZipArchiveEntry source, string destination /// Relative path information is interpreted as relative to the current working directory. /// True to indicate overwrite. public static void ExtractToFile(this ZipArchiveEntry source, string destinationFileName, bool overwrite) + { + ExtractToFileInitialize(source, destinationFileName, overwrite, out FileStreamOptions fileStreamOptions); + + using (FileStream fs = new FileStream(destinationFileName, fileStreamOptions)) + { + using (Stream es = source.Open()) + es.CopyTo(fs); + } + + ExtractToFileFinalize(source, destinationFileName); + } + + private static void ExtractToFileInitialize(ZipArchiveEntry source, string destinationFileName, bool overwrite, out FileStreamOptions fileStreamOptions) { ArgumentNullException.ThrowIfNull(source); ArgumentNullException.ThrowIfNull(destinationFileName); - FileStreamOptions fileStreamOptions = new() + fileStreamOptions = new() { Access = FileAccess.Write, Mode = overwrite ? 
FileMode.Create : FileMode.CreateNew, Share = FileShare.None, - BufferSize = 0x1000 + BufferSize = ZipFile.FileStreamBufferSize }; const UnixFileMode OwnershipPermissions = @@ -87,20 +100,12 @@ public static void ExtractToFile(this ZipArchiveEntry source, string destination { fileStreamOptions.UnixCreateMode = mode; } - - using (FileStream fs = new FileStream(destinationFileName, fileStreamOptions)) - { - using (Stream es = source.Open()) - es.CopyTo(fs); - } - - ArchivingUtils.AttemptSetLastWriteTime(destinationFileName, source.LastWriteTime); } - internal static void ExtractRelativeToDirectory(this ZipArchiveEntry source, string destinationDirectoryName) => - ExtractRelativeToDirectory(source, destinationDirectoryName, overwrite: false); + private static void ExtractToFileFinalize(ZipArchiveEntry source, string destinationFileName) => + ArchivingUtils.AttemptSetLastWriteTime(destinationFileName, source.LastWriteTime); - internal static void ExtractRelativeToDirectory(this ZipArchiveEntry source, string destinationDirectoryName, bool overwrite) + private static bool ExtractRelativeToDirectoryCheckIfFile(ZipArchiveEntry source, string destinationDirectoryName, out string fileDestinationPath) { ArgumentNullException.ThrowIfNull(source); ArgumentNullException.ThrowIfNull(destinationDirectoryName); @@ -114,21 +119,27 @@ internal static void ExtractRelativeToDirectory(this ZipArchiveEntry source, str destinationDirectoryFullPath = string.Concat(destinationDirectoryFullPath, new ReadOnlySpan(in sep)); } - string fileDestinationPath = Path.GetFullPath(Path.Combine(destinationDirectoryFullPath, ArchivingUtils.SanitizeEntryFilePath(source.FullName))); + fileDestinationPath = Path.GetFullPath(Path.Combine(destinationDirectoryFullPath, ArchivingUtils.SanitizeEntryFilePath(source.FullName))); if (!fileDestinationPath.StartsWith(destinationDirectoryFullPath, PathInternal.StringComparison)) throw new IOException(SR.IO_ExtractingResultsInOutside); if (Path.GetFileName(fileDestinationPath).Length == 0) { - // If it is a directory: - if (source.Length != 0) throw new IOException(SR.IO_DirectoryNameWithData); Directory.CreateDirectory(fileDestinationPath); + + return false; // It is a directory } - else + + return true; // It is a file + } + + internal static void ExtractRelativeToDirectory(this ZipArchiveEntry source, string destinationDirectoryName, bool overwrite) + { + if (ExtractRelativeToDirectoryCheckIfFile(source, destinationDirectoryName, out string fileDestinationPath)) { // If it is a file: // Create containing directory: diff --git a/src/libraries/System.IO.Compression.ZipFile/tests/System.IO.Compression.ZipFile.Tests.csproj b/src/libraries/System.IO.Compression.ZipFile/tests/System.IO.Compression.ZipFile.Tests.csproj index 97f0b31f599637..12005f1aae45aa 100644 --- a/src/libraries/System.IO.Compression.ZipFile/tests/System.IO.Compression.ZipFile.Tests.csproj +++ b/src/libraries/System.IO.Compression.ZipFile/tests/System.IO.Compression.ZipFile.Tests.csproj @@ -40,6 +40,8 @@ Link="Common\System\IO\Compression\StreamHelpers.cs" /> +
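The per-entry async extraction introduced above composes with the archive-level helpers; a hedged sketch that extracts only text entries follows, reusing the ZipFile.OpenAsync call shape seen earlier in this diff (filter, names, and destination are illustrative).

using System;
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;

static class SelectiveExtractSample
{
    static async Task ExtractTextEntriesAsync(string archivePath, string destination, CancellationToken ct)
    {
        Directory.CreateDirectory(destination);

        ZipArchive archive = await ZipFile.OpenAsync(archivePath, ZipArchiveMode.Read, entryNameEncoding: null, ct);
        await using (archive)
        {
            foreach (ZipArchiveEntry entry in archive.Entries)
            {
                // entry.Name is only the file-name component, so combining it with the
                // destination cannot escape the target directory; directory entries have
                // an empty Name and are skipped by the extension filter.
                if (entry.Name.EndsWith(".txt", StringComparison.OrdinalIgnoreCase))
                {
                    await entry.ExtractToFileAsync(Path.Combine(destination, entry.Name), overwrite: true, ct);
                }
            }
        }
    }
}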
diff --git a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Create.Stream.cs b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Create.Stream.cs index 852545a23e0e20..74bdffe0cc98af 100644 --- a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Create.Stream.cs +++ b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Create.Stream.cs @@ -11,152 +11,176 @@ namespace System.IO.Compression.Tests; public class ZipFile_Create_Stream : ZipFileTestBase { - [Fact] - public void CreateFromDirectory_NullSourceDirectory_Throws() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreateFromDirectory_NullSourceDirectory_Throws(bool async) { using MemoryStream ms = new MemoryStream(); - Assert.Throws(() => ZipFile.CreateFromDirectory(sourceDirectoryName: null, ms)); - Assert.Throws(() => ZipFile.CreateFromDirectory(sourceDirectoryName: null, ms, CompressionLevel.NoCompression, includeBaseDirectory: false)); - Assert.Throws(() => ZipFile.CreateFromDirectory(sourceDirectoryName: null, ms, CompressionLevel.NoCompression, includeBaseDirectory: false, Encoding.UTF8)); + await Assert.ThrowsAsync(() => CallZipFileCreateFromDirectory(async, sourceDirectoryName: null, ms)); + await Assert.ThrowsAsync(() => CallZipFileCreateFromDirectory(async, sourceDirectoryName: null, ms, CompressionLevel.NoCompression, includeBaseDirectory: false)); + await Assert.ThrowsAsync(() => CallZipFileCreateFromDirectory(async, sourceDirectoryName: null, ms, CompressionLevel.NoCompression, includeBaseDirectory: false, Encoding.UTF8)); + } + + public static IEnumerable Get_CreateFromDirectory_CompressionLevel_OutOfRange_Throws_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { (CompressionLevel)int.MinValue, async }; + yield return new object[] { (CompressionLevel)(-1), async }; + yield return new object[] { (CompressionLevel)4, async }; + yield return new object[] { (CompressionLevel)int.MaxValue, async }; + } } [Theory] - [InlineData((CompressionLevel)int.MinValue)] - [InlineData((CompressionLevel)(-1))] - [InlineData((CompressionLevel)4)] - [InlineData((CompressionLevel)int.MaxValue)] - public void CreateFromDirectory_CompressionLevel_OutOfRange_Throws(CompressionLevel invalidCompressionLevel) + [MemberData(nameof(Get_CreateFromDirectory_CompressionLevel_OutOfRange_Throws_Data))] + public async Task CreateFromDirectory_CompressionLevel_OutOfRange_Throws(CompressionLevel invalidCompressionLevel, bool async) { using MemoryStream ms = new MemoryStream(); - Assert.Throws(() => ZipFile.CreateFromDirectory("sourceDirectory", ms, invalidCompressionLevel, includeBaseDirectory: false)); - Assert.Throws(() => ZipFile.CreateFromDirectory("sourceDirectory", ms, invalidCompressionLevel, includeBaseDirectory: false, Encoding.UTF8)); + await Assert.ThrowsAsync(() => CallZipFileCreateFromDirectory(async, "sourceDirectory", ms, invalidCompressionLevel, includeBaseDirectory: false)); + await Assert.ThrowsAsync(() => CallZipFileCreateFromDirectory(async, "sourceDirectory", ms, invalidCompressionLevel, includeBaseDirectory: false, Encoding.UTF8)); } - - [Fact] - public void CreateFromDirectory_UnwritableStream_Throws() + + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreateFromDirectory_UnwritableStream_Throws(bool async) { using MemoryStream ms = new(); using WrappedStream destination = new(ms, canRead: true, canWrite: false, canSeek: true); - Assert.Throws("destination", () => ZipFile.CreateFromDirectory(GetTestFilePath(), destination)); + 
await Assert.ThrowsAsync("destination", () => CallZipFileCreateFromDirectory(async, GetTestFilePath(), destination)); } - [Fact] - public void CreateFromDirectoryNormal() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreateFromDirectoryNormal(bool async) { string folderName = zfolder("normal"); using MemoryStream destination = new(); - ZipFile.CreateFromDirectory(folderName, destination); + await CallZipFileCreateFromDirectory(async, folderName, destination); destination.Position = 0; - IsZipSameAsDir(destination, folderName, ZipArchiveMode.Read, requireExplicit: false, checkTimes: false); + await IsZipSameAsDir(destination, folderName, ZipArchiveMode.Read, requireExplicit: false, checkTimes: false, async); } - [Fact] - public void CreateFromDirectoryNormal_Unreadable_Unseekable() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreateFromDirectoryNormal_Unreadable_Unseekable(bool async) { string folderName = zfolder("normal"); using MemoryStream ms = new(); using WrappedStream destination = new(ms, canRead: false, canWrite: true, canSeek: false); - ZipFile.CreateFromDirectory(folderName, destination); + await CallZipFileCreateFromDirectory(async, folderName, destination); ms.Position = 0; - IsZipSameAsDir(ms, folderName, ZipArchiveMode.Read, requireExplicit: false, checkTimes: false); + await IsZipSameAsDir(ms, folderName, ZipArchiveMode.Read, requireExplicit: false, checkTimes: false, async); } - [Fact] - public void CreateFromDirectory_IncludeBaseDirectory() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreateFromDirectory_IncludeBaseDirectory(bool async) { string folderName = zfolder("normal"); using MemoryStream destination = new(); - ZipFile.CreateFromDirectory(folderName, destination, CompressionLevel.Optimal, true); + await CallZipFileCreateFromDirectory(async, folderName, destination, CompressionLevel.Optimal, true); IEnumerable expected = Directory.EnumerateFiles(zfolder("normal"), "*", SearchOption.AllDirectories); destination.Position = 0; - using ZipArchive archive = new(destination); + ZipArchive archive = await CreateZipArchive(async, destination, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null); + foreach (ZipArchiveEntry actualEntry in archive.Entries) { string expectedFile = expected.Single(i => Path.GetFileName(i).Equals(actualEntry.Name)); Assert.StartsWith("normal", actualEntry.FullName); Assert.Equal(new FileInfo(expectedFile).Length, actualEntry.Length); using Stream expectedStream = File.OpenRead(expectedFile); - using Stream actualStream = actualEntry.Open(); + + Stream actualStream = await OpenEntryStream(async, actualEntry); StreamsEqual(expectedStream, actualStream); + } + + await DisposeZipArchive(async, archive); } - [Fact] - public void CreateFromDirectoryUnicode() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreateFromDirectoryUnicode(bool async) { string folderName = zfolder("unicode"); using MemoryStream destination = new(); - ZipFile.CreateFromDirectory(folderName, destination); + await CallZipFileCreateFromDirectory(async, folderName, destination); - using ZipArchive archive = new(destination); + ZipArchive archive = await CreateZipArchive(async, destination, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null); IEnumerable actual = archive.Entries.Select(entry => entry.Name); IEnumerable expected = Directory.EnumerateFileSystemEntries(zfolder("unicode"), "*", SearchOption.AllDirectories).ToList(); 
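The tests above thread a bool async flag through helpers such as CallZipFileCreateFromDirectory and CreateZipArchive, whose bodies sit outside this hunk. Purely as a guess at the dispatch shape (not the actual test-base code), a comparable helper built on the extraction APIs from this diff might look like:

using System.IO.Compression;
using System.Threading.Tasks;

static class AsyncDispatchSketch
{
    // Hypothetical: mirrors the sync/async dispatch pattern of the test helpers,
    // not their actual implementation.
    public static Task CallZipFileExtractToDirectory(bool async, string sourceArchiveFileName, string destinationDirectoryName)
    {
        if (async)
        {
            return ZipFile.ExtractToDirectoryAsync(sourceArchiveFileName, destinationDirectoryName);
        }

        ZipFile.ExtractToDirectory(sourceArchiveFileName, destinationDirectoryName);
        return Task.CompletedTask;
    }
}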
Assert.True(Enumerable.SequenceEqual(expected.Select(i => Path.GetFileName(i)), actual.Select(i => i))); + await DisposeZipArchive(async, archive); } - [Fact] - public void CreatedEmptyDirectoriesRoundtrip() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreatedEmptyDirectoriesRoundtrip(bool async) { using TempDirectory tempFolder = new(GetTestFilePath()); - + DirectoryInfo rootDir = new(tempFolder.Path); - rootDir.CreateSubdirectory("empty1"); + string folderName = "empty1"; + rootDir.CreateSubdirectory(folderName); using MemoryStream destination = new(); - ZipFile.CreateFromDirectory( - rootDir.FullName, destination, + await CallZipFileCreateFromDirectory(async, rootDir.FullName, destination, CompressionLevel.Optimal, false, Encoding.UTF8); - using ZipArchive archive = new(destination); + ZipArchive archive = await CreateZipArchive(async, destination, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null); Assert.Equal(1, archive.Entries.Count); - Assert.StartsWith("empty1", archive.Entries[0].FullName); + Assert.StartsWith(folderName, archive.Entries[0].FullName); } - [Fact] - public void CreatedEmptyUtf32DirectoriesRoundtrip() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreatedEmptyUtf32DirectoriesRoundtrip(bool async) { using TempDirectory tempFolder = new(GetTestFilePath()); Encoding entryEncoding = Encoding.UTF32; DirectoryInfo rootDir = new(tempFolder.Path); - rootDir.CreateSubdirectory("empty1"); + string folderName = "empty1"; + rootDir.CreateSubdirectory(folderName); using MemoryStream destination = new(); - ZipFile.CreateFromDirectory( - rootDir.FullName, destination, + await CallZipFileCreateFromDirectory(async, rootDir.FullName, destination, CompressionLevel.Optimal, false, entryEncoding); - using ZipArchive archive = new(destination, ZipArchiveMode.Read, leaveOpen: false, entryEncoding); + ZipArchive archive = await CreateZipArchive(async, destination, ZipArchiveMode.Read, leaveOpen: false, entryEncoding); Assert.Equal(1, archive.Entries.Count); - Assert.StartsWith("empty1", archive.Entries[0].FullName); + Assert.StartsWith(folderName, archive.Entries[0].FullName); + await DisposeZipArchive(async, archive); } - [Fact] - public void CreatedEmptyRootDirectoryRoundtrips() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreatedEmptyRootDirectoryRoundtrips(bool async) { using TempDirectory tempFolder = new(GetTestFilePath()); DirectoryInfo emptyRoot = new(tempFolder.Path); using MemoryStream destination = new(); - ZipFile.CreateFromDirectory( - emptyRoot.FullName, destination, + await CallZipFileCreateFromDirectory(async, emptyRoot.FullName, destination, CompressionLevel.Optimal, true); - using ZipArchive archive = new(destination); + ZipArchive archive = await CreateZipArchive(async, destination, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null); Assert.Equal(1, archive.Entries.Count); + await DisposeZipArchive(async, archive); } - [Fact] - public void CreateSetsExternalAttributesCorrectly() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreateSetsExternalAttributesCorrectly(bool async) { string folderName = zfolder("normal"); using MemoryStream destination = new(); - ZipFile.CreateFromDirectory(folderName, destination); + await CallZipFileCreateFromDirectory(async, folderName, destination); destination.Position = 0; - using ZipArchive archive = new(destination); - + ZipArchive archive = await CreateZipArchive(async, destination, ZipArchiveMode.Read, 
leaveOpen: false, entryNameEncoding: null); foreach (ZipArchiveEntry entry in archive.Entries) { if (OperatingSystem.IsWindows()) @@ -168,5 +192,6 @@ public void CreateSetsExternalAttributesCorrectly() Assert.NotEqual(0, entry.ExternalAttributes); } } + await DisposeZipArchive(async, archive); } } diff --git a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Create.cs b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Create.cs index cec2dc9f7482ee..5188549dec67e1 100644 --- a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Create.cs +++ b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Create.cs @@ -11,161 +11,147 @@ namespace System.IO.Compression.Tests { public class ZipFile_Create : ZipFileTestBase { - [Fact] - public async Task CreateFromDirectoryNormal() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreateFromDirectoryNormal(bool async) { string folderName = zfolder("normal"); string noBaseDir = GetTestFilePath(); - ZipFile.CreateFromDirectory(folderName, noBaseDir); + await CallZipFileCreateFromDirectory(async, folderName, noBaseDir); - await IsZipSameAsDirAsync(noBaseDir, folderName, ZipArchiveMode.Read, requireExplicit: false, checkTimes: false); + await IsZipSameAsDir(noBaseDir, folderName, ZipArchiveMode.Read, requireExplicit: false, checkTimes: false, async); } - [Fact] - public void CreateFromDirectory_IncludeBaseDirectory() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreateFromDirectory_IncludeBaseDirectory(bool async) { string folderName = zfolder("normal"); string withBaseDir = GetTestFilePath(); - ZipFile.CreateFromDirectory(folderName, withBaseDir, CompressionLevel.Optimal, true); + await CallZipFileCreateFromDirectory(async, folderName, withBaseDir, CompressionLevel.Optimal, true); IEnumerable expected = Directory.EnumerateFiles(zfolder("normal"), "*", SearchOption.AllDirectories); - using (ZipArchive actual_withbasedir = ZipFile.Open(withBaseDir, ZipArchiveMode.Read)) - { - foreach (ZipArchiveEntry actualEntry in actual_withbasedir.Entries) - { - string expectedFile = expected.Single(i => Path.GetFileName(i).Equals(actualEntry.Name)); - Assert.StartsWith("normal", actualEntry.FullName); - Assert.Equal(new FileInfo(expectedFile).Length, actualEntry.Length); - using (Stream expectedStream = File.OpenRead(expectedFile)) - using (Stream actualStream = actualEntry.Open()) - { - StreamsEqual(expectedStream, actualStream); - } - } - } - } - [Fact] - public async Task CreateFromDirectory_IncludeBaseDirectoryAsync() - { - string folderName = zfolder("normal"); - string withBaseDir = GetTestFilePath(); - ZipFile.CreateFromDirectory(folderName, withBaseDir, CompressionLevel.Optimal, true); + ZipArchive actual_withbasedir = await CallZipFileOpen(async, withBaseDir, ZipArchiveMode.Read); - IEnumerable expected = Directory.EnumerateFiles(zfolder("normal"), "*", SearchOption.AllDirectories); - using (ZipArchive actual_withbasedir = ZipFile.Open(withBaseDir, ZipArchiveMode.Read)) + foreach (ZipArchiveEntry actualEntry in actual_withbasedir.Entries) { - foreach (ZipArchiveEntry actualEntry in actual_withbasedir.Entries) - { - string expectedFile = expected.Single(i => Path.GetFileName(i).Equals(actualEntry.Name)); - Assert.StartsWith("normal", actualEntry.FullName); - Assert.Equal(new FileInfo(expectedFile).Length, actualEntry.Length); - using (Stream expectedStream = File.OpenRead(expectedFile)) - using (Stream actualStream = actualEntry.Open()) - { - await StreamsEqualAsync(expectedStream, 
actualStream); - } - } + string expectedFile = expected.Single(i => Path.GetFileName(i).Equals(actualEntry.Name)); + Assert.StartsWith("normal", actualEntry.FullName); + Assert.Equal(new FileInfo(expectedFile).Length, actualEntry.Length); + using Stream expectedStream = File.OpenRead(expectedFile); + Stream actualStream = await OpenEntryStream(async, actualEntry); + StreamsEqual(expectedStream, actualStream); + await DisposeStream(async, actualStream); } + + await DisposeZipArchive(async, actual_withbasedir); } - [Fact] - public void CreateFromDirectoryUnicode() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreateFromDirectoryUnicode(bool async) { string folderName = zfolder("unicode"); string noBaseDir = GetTestFilePath(); - ZipFile.CreateFromDirectory(folderName, noBaseDir); + await CallZipFileCreateFromDirectory(async, folderName, noBaseDir); - using (ZipArchive archive = ZipFile.OpenRead(noBaseDir)) - { - IEnumerable actual = archive.Entries.Select(entry => entry.Name); - IEnumerable expected = Directory.EnumerateFileSystemEntries(zfolder("unicode"), "*", SearchOption.AllDirectories).ToList(); - Assert.True(Enumerable.SequenceEqual(expected.Select(i => Path.GetFileName(i)), actual.Select(i => i))); - } + ZipArchive archive = await CallZipFileOpenRead(async, noBaseDir); + + IEnumerable actual = archive.Entries.Select(entry => entry.Name); + IEnumerable expected = Directory.EnumerateFileSystemEntries(zfolder("unicode"), "*", SearchOption.AllDirectories).ToList(); + Assert.True(Enumerable.SequenceEqual(expected.Select(i => Path.GetFileName(i)), actual.Select(i => i))); + + await DisposeZipArchive(async, archive); } - [Fact] - public void CreatedEmptyDirectoriesRoundtrip() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreatedEmptyDirectoriesRoundtrip(bool async) { + string folderName = "empty1"; using (var tempFolder = new TempDirectory(GetTestFilePath())) { DirectoryInfo rootDir = new DirectoryInfo(tempFolder.Path); - rootDir.CreateSubdirectory("empty1"); + rootDir.CreateSubdirectory(folderName); string archivePath = GetTestFilePath(); - ZipFile.CreateFromDirectory( - rootDir.FullName, archivePath, - CompressionLevel.Optimal, false, Encoding.UTF8); + await CallZipFileCreateFromDirectory(async, rootDir.FullName, archivePath, + CompressionLevel.Optimal, includeBaseDirectory: false, Encoding.UTF8); - using (ZipArchive archive = ZipFile.OpenRead(archivePath)) - { - Assert.Equal(1, archive.Entries.Count); - Assert.StartsWith("empty1", archive.Entries[0].FullName); - } + ZipArchive archive = await CallZipFileOpenRead(async, archivePath); + + Assert.Equal(1, archive.Entries.Count); + Assert.StartsWith(folderName, archive.Entries[0].FullName); + + await DisposeZipArchive(async, archive); } } - [Fact] - public void CreatedEmptyUtf32DirectoriesRoundtrip() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreatedEmptyUtf32DirectoriesRoundtrip(bool async) { using (var tempFolder = new TempDirectory(GetTestFilePath())) { Encoding entryEncoding = Encoding.UTF32; DirectoryInfo rootDir = new DirectoryInfo(tempFolder.Path); - rootDir.CreateSubdirectory("empty1"); + string folderName = "empty1"; + rootDir.CreateSubdirectory(folderName); string archivePath = GetTestFilePath(); - ZipFile.CreateFromDirectory( - rootDir.FullName, archivePath, + await CallZipFileCreateFromDirectory(async, rootDir.FullName, archivePath, CompressionLevel.Optimal, false, entryEncoding); - using (ZipArchive archive = ZipFile.Open(archivePath, 
ZipArchiveMode.Read, entryEncoding)) - { - Assert.Equal(1, archive.Entries.Count); - Assert.StartsWith("empty1", archive.Entries[0].FullName); - } + ZipArchive archive = await CallZipFileOpen(async, archivePath, ZipArchiveMode.Read, entryEncoding); + + Assert.Equal(1, archive.Entries.Count); + Assert.StartsWith(folderName, archive.Entries[0].FullName); + + await DisposeZipArchive(async, archive); } } - [Fact] - public void CreatedEmptyRootDirectoryRoundtrips() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreatedEmptyRootDirectoryRoundtrips(bool async) { using (var tempFolder = new TempDirectory(GetTestFilePath())) { DirectoryInfo emptyRoot = new DirectoryInfo(tempFolder.Path); string archivePath = GetTestFilePath(); - ZipFile.CreateFromDirectory( - emptyRoot.FullName, archivePath, - CompressionLevel.Optimal, true); + await CallZipFileCreateFromDirectory(async, emptyRoot.FullName, + archivePath, CompressionLevel.Optimal, includeBaseDirectory: true); - using (ZipArchive archive = ZipFile.OpenRead(archivePath)) - { - Assert.Equal(1, archive.Entries.Count); - } + ZipArchive archive = await CallZipFileOpenRead(async, archivePath); + Assert.Equal(1, archive.Entries.Count); + await DisposeZipArchive(async, archive); } } - [Fact] - public void CreateSetsExternalAttributesCorrectly() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task CreateSetsExternalAttributesCorrectly(bool async) { string folderName = zfolder("normal"); string filepath = GetTestFilePath(); - ZipFile.CreateFromDirectory(folderName, filepath); + await CallZipFileCreateFromDirectory(async, folderName, filepath); + + ZipArchive archive = await CallZipFileOpen(async, filepath, ZipArchiveMode.Read); - using (ZipArchive archive = ZipFile.Open(filepath, ZipArchiveMode.Read)) + foreach (ZipArchiveEntry entry in archive.Entries) { - foreach (ZipArchiveEntry entry in archive.Entries) + if (OperatingSystem.IsWindows()) { - if (OperatingSystem.IsWindows()) - { - Assert.Equal(0, entry.ExternalAttributes); - } - else - { - Assert.NotEqual(0, entry.ExternalAttributes); - } + Assert.Equal(0, entry.ExternalAttributes); + } + else + { + Assert.NotEqual(0, entry.ExternalAttributes); } } + + await DisposeZipArchive(async, archive); } } } diff --git a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Extract.Stream.cs b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Extract.Stream.cs index 0905778c475801..d3be8e83e377b4 100644 --- a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Extract.Stream.cs +++ b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Extract.Stream.cs @@ -1,7 +1,9 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
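Every converted test above takes a bool async parameter from Get_Booleans_Data and routes the product API call through a CallZipFile* wrapper. Neither member is shown in this diff; they are presumably shared members of ZipFileTestBase. A rough sketch of the pattern, assuming the async counterpart of ZipFile.CreateFromDirectory is named CreateFromDirectoryAsync:

    // Sketch only; the real members live in the shared ZipFileTestBase and are not part of this diff.
    protected static readonly bool[] _bools = { false, true };

    public static IEnumerable<object[]> Get_Booleans_Data()
    {
        foreach (bool async in _bools)
        {
            yield return new object[] { async };
        }
    }

    // Dispatches to the sync or async ZipFile API depending on which code path the test run exercises.
    protected static Task CallZipFileCreateFromDirectory(bool async, string sourceDirectoryName, Stream destination)
    {
        if (async)
        {
            // Assumed name/shape of the new async API; overloads taking CompressionLevel,
            // includeBaseDirectory and Encoding would follow the same pattern.
            return ZipFile.CreateFromDirectoryAsync(sourceDirectoryName, destination);
        }

        ZipFile.CreateFromDirectory(sourceDirectoryName, destination);
        return Task.CompletedTask;
    }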
+using System.Collections.Generic; using System.Text; +using System.Threading.Tasks; using Xunit; namespace System.IO.Compression.Tests; @@ -22,50 +24,70 @@ public void ExtractToDirectory_UnreadableStream_Throws() Assert.Throws("source", () => ZipFile.ExtractToDirectory(source, GetTestFilePath())); } + public static IEnumerable Get_ExtractToDirectoryNormal_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { "normal.zip", "normal", async }; + yield return new object[] { "empty.zip", "empty", async }; + yield return new object[] { "explicitdir1.zip", "explicitdir", async }; + yield return new object[] { "explicitdir2.zip", "explicitdir", async }; + yield return new object[] { "appended.zip", "small", async }; + yield return new object[] { "prepended.zip", "small", async }; + yield return new object[] { "noexplicitdir.zip", "explicitdir", async }; + } + } + [Theory] - [InlineData("normal.zip", "normal")] - [InlineData("empty.zip", "empty")] - [InlineData("explicitdir1.zip", "explicitdir")] - [InlineData("explicitdir2.zip", "explicitdir")] - [InlineData("appended.zip", "small")] - [InlineData("prepended.zip", "small")] - [InlineData("noexplicitdir.zip", "explicitdir")] - public void ExtractToDirectoryNormal(string file, string folder) - { - using FileStream source = File.OpenRead(zfile(file)); + [MemberData(nameof(Get_ExtractToDirectoryNormal_Data))] + public async Task ExtractToDirectoryNormal(string file, string folder, bool async) + { + FileStream source = CreateFileStreamRead(async, zfile(file)); string folderName = zfolder(folder); using TempDirectory tempFolder = new(GetTestFilePath()); - ZipFile.ExtractToDirectory(source, tempFolder.Path); + await CallZipFileExtractToDirectory(async, source, tempFolder.Path); DirsEqual(tempFolder.Path, folderName); + await DisposeStream(async, source); + } + + public static IEnumerable Get_ExtractToDirectoryNormal_Unwritable_Unseekable_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { "normal.zip", "normal", async }; + yield return new object[] { "empty.zip", "empty", async }; + yield return new object[] { "explicitdir1.zip", "explicitdir", async }; + yield return new object[] { "explicitdir2.zip", "explicitdir", async }; + yield return new object[] { "appended.zip", "small", async }; + yield return new object[] { "prepended.zip", "small", async }; + yield return new object[] { "noexplicitdir.zip", "explicitdir", async }; + } } [Theory] - [InlineData("normal.zip", "normal")] - [InlineData("empty.zip", "empty")] - [InlineData("explicitdir1.zip", "explicitdir")] - [InlineData("explicitdir2.zip", "explicitdir")] - [InlineData("appended.zip", "small")] - [InlineData("prepended.zip", "small")] - [InlineData("noexplicitdir.zip", "explicitdir")] - public void ExtractToDirectoryNormal_Unwritable_Unseekable(string file, string folder) - { - using FileStream fs = File.OpenRead(zfile(file)); + [MemberData(nameof(Get_ExtractToDirectoryNormal_Unwritable_Unseekable_Data))] + public async Task ExtractToDirectoryNormal_Unwritable_Unseekable(string file, string folder, bool async) + { + FileStream fs = CreateFileStreamRead(async, zfile(file)); using WrappedStream source = new(fs, canRead: true, canWrite: false, canSeek: false); string folderName = zfolder(folder); using TempDirectory tempFolder = new(GetTestFilePath()); - ZipFile.ExtractToDirectory(source, tempFolder.Path); + await CallZipFileExtractToDirectory(async, source, tempFolder.Path); DirsEqual(tempFolder.Path, folderName); + await DisposeStream(async, fs); 
} - [Fact] + [Theory] + [MemberData(nameof(Get_Booleans_Data))] [ActiveIssue("https://github.com/dotnet/runtime/issues/72951", TestPlatforms.iOS | TestPlatforms.tvOS)] - public void ExtractToDirectoryUnicode() + public async Task ExtractToDirectoryUnicode(bool async) { - using Stream source = File.OpenRead(zfile("unicode.zip")); + FileStream source = CreateFileStreamRead(async, zfile("unicode.zip")); string folderName = zfolder("unicode"); using TempDirectory tempFolder = new TempDirectory(GetTestFilePath()); - ZipFile.ExtractToDirectory(source, tempFolder.Path); + await CallZipFileExtractToDirectory(async, source, tempFolder.Path); DirFileNamesEqual(tempFolder.Path, folderName); + await DisposeStream(async, source); } [Theory] @@ -84,161 +106,171 @@ public void ExtractOutOfRoot(string entryName) Assert.Throws(() => ZipFile.ExtractToDirectory(source, destination.FullName)); } + [Theory] + [InlineData("../Foo")] + [InlineData("../Barbell")] + public async Task ExtractOutOfRoot_Async(string entryName) + { + using FileStream source = new(GetTestFilePath(), FileMode.Create, FileAccess.ReadWrite); + await using (ZipArchive archive = await ZipArchive.CreateAsync(source, ZipArchiveMode.Create, leaveOpen: true, entryNameEncoding: null)) + { + ZipArchiveEntry entry = archive.CreateEntry(entryName); + } + + DirectoryInfo destination = Directory.CreateDirectory(Path.Combine(GetTestFilePath(), "Bar")); + source.Position = 0; + await Assert.ThrowsAsync(() => ZipFile.ExtractToDirectoryAsync(source, destination.FullName)); + } + /// /// This test ensures that a zipfile with path names that are invalid to this OS will throw errors /// when an attempt is made to extract them. /// [Theory] - [InlineData("NullCharFileName_FromWindows")] - [InlineData("NullCharFileName_FromUnix")] + [MemberData(nameof(Get_Unix_ZipWithInvalidFileNames_Data))] [PlatformSpecific(TestPlatforms.AnyUnix)] // Checks Unix-specific invalid file path - public void Unix_ZipWithInvalidFileNames(string zipName) + public async Task Unix_ZipWithInvalidFileNames(string zipName, bool async) { string testDirectory = GetTestFilePath(); - using Stream source = File.OpenRead(compat(zipName) + ".zip"); - ZipFile.ExtractToDirectory(source, testDirectory); - + FileStream source = CreateFileStreamRead(async, compat(zipName) + ".zip"); + await CallZipFileExtractToDirectory(async, source, testDirectory); Assert.True(File.Exists(Path.Combine(testDirectory, "a_6b6d"))); + await DisposeStream(async, source); } [Theory] - [InlineData("backslashes_FromUnix", "aa\\bb\\cc\\dd")] - [InlineData("backslashes_FromWindows", "aa\\bb\\cc\\dd")] - [InlineData("WindowsInvalid_FromUnix", "aad")] - [InlineData("WindowsInvalid_FromWindows", "aad")] + [MemberData(nameof(Get_Unix_ZipWithOSSpecificFileNames_Data))] [PlatformSpecific(TestPlatforms.AnyUnix)] // Checks Unix-specific invalid file path - public void Unix_ZipWithOSSpecificFileNames(string zipName, string fileName) + public async Task Unix_ZipWithOSSpecificFileNames(string zipName, string fileName, bool async) { string tempDir = GetTestFilePath(); - using Stream source = File.OpenRead(compat(zipName) + ".zip"); - ZipFile.ExtractToDirectory(source, tempDir); + Stream source = CreateFileStreamRead(async, compat(zipName) + ".zip"); + await CallZipFileExtractToDirectory(async, source, tempDir); string[] results = Directory.GetFiles(tempDir, "*", SearchOption.AllDirectories); Assert.Equal(1, results.Length); Assert.Equal(fileName, Path.GetFileName(results[0])); + await DisposeStream(async, source); } - /// - /// This 
test checks whether or not ZipFile.ExtractToDirectory() is capable of handling filenames - /// which contain invalid path characters in Windows. - /// Archive: InvalidWindowsFileNameChars.zip - /// Test/ - /// Test/normalText.txt - /// Test"<>|^A^B^C^D^E^F^G^H^I^J^K^L^M^N^O^P^Q^R^S^T^U^V^W^X^Y^Z^[^\^]^^^_/ - /// Test"<>|^A^B^C^D^E^F^G^H^I^J^K^L^M^N^O^P^Q^R^S^T^U^V^W^X^Y^Z^[^\^]^^^_/TestText1"<>|^A^B^C^D^E^F^G^H^I^J^K^L^M^N^O^P^Q^R^S^T^U^V^W^X^Y^Z^[^\^]^^^_.txt - /// TestEmpty/ - /// TestText"<>|^A^B^C^D^E^F^G^H^I^J^K^L^M^N^O^P^Q^R^S^T^U^V^W^X^Y^Z^[^\^]^^^_.txt - /// [Theory] + [MemberData(nameof(Get_Windows_ZipWithInvalidFileNames_Data))] [PlatformSpecific(TestPlatforms.Windows)] - [InlineData("InvalidWindowsFileNameChars.zip", new string[] { "TestText______________________________________.txt" , "Test______________________________________/TestText1______________________________________.txt" , "Test/normalText.txt" })] - [InlineData("NullCharFileName_FromWindows.zip", new string[] { "a_6b6d" })] - [InlineData("NullCharFileName_FromUnix.zip", new string[] { "a_6b6d" })] - [InlineData("WindowsInvalid_FromUnix.zip", new string[] { "aa_b_d" })] - [InlineData("WindowsInvalid_FromWindows.zip", new string[] { "aa_b_d" })] - public void Windows_ZipWithInvalidFileNames(string zipFileName, string[] expectedFiles) + public async Task Windows_ZipWithInvalidFileNames(string zipFileName, string[] expectedFiles, bool async) { string testDirectory = GetTestFilePath(); - using Stream source = File.OpenRead(compat(zipFileName)); - ZipFile.ExtractToDirectory(source, testDirectory); + FileStream source = CreateFileStreamRead(async, compat(zipFileName)); + await CallZipFileExtractToDirectory(async, source, testDirectory); foreach (string expectedFile in expectedFiles) { string path = Path.Combine(testDirectory, expectedFile); Assert.True(File.Exists(path)); File.Delete(path); } + await DisposeStream(async, source); } [Theory] - [InlineData("backslashes_FromUnix", "dd")] - [InlineData("backslashes_FromWindows", "dd")] + [MemberData(nameof(Get_Windows_ZipWithOSSpecificFileNames_Data))] [PlatformSpecific(TestPlatforms.Windows)] // Checks Windows-specific invalid file path - public void Windows_ZipWithOSSpecificFileNames(string zipName, string fileName) + public async Task Windows_ZipWithOSSpecificFileNames(string zipName, string fileName, bool async) { string tempDir = GetTestFilePath(); - using Stream source = File.OpenRead(compat(zipName) + ".zip"); - ZipFile.ExtractToDirectory(source, tempDir); + using Stream source = CreateFileStreamRead(async, compat(zipName) + ".zip"); + await CallZipFileExtractToDirectory(async, source, tempDir); string[] results = Directory.GetFiles(tempDir, "*", SearchOption.AllDirectories); Assert.Equal(1, results.Length); Assert.Equal(fileName, Path.GetFileName(results[0])); } - [Fact] - public void ExtractToDirectoryOverwrite() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task ExtractToDirectoryOverwrite(bool async) { string folderName = zfolder("normal"); using TempDirectory tempFolder = new(GetTestFilePath()); - using Stream source = File.OpenRead(zfile("normal.zip")); - ZipFile.ExtractToDirectory(source, tempFolder.Path, overwriteFiles: false); + using FileStream source = CreateFileStreamRead(async, zfile("normal.zip")); + await CallZipFileExtractToDirectory(async, source, tempFolder.Path, overwriteFiles: false); source.Position = 0; - Assert.Throws(() => ZipFile.ExtractToDirectory(source, tempFolder.Path /* default false */)); + await Assert.ThrowsAsync(() 
=> CallZipFileExtractToDirectory(async, source, tempFolder.Path /* default false */)); source.Position = 0; - Assert.Throws(() => ZipFile.ExtractToDirectory(source, tempFolder.Path, overwriteFiles: false)); + await Assert.ThrowsAsync(() => CallZipFileExtractToDirectory(async, source, tempFolder.Path, overwriteFiles: false)); source.Position = 0; - ZipFile.ExtractToDirectory(source, tempFolder.Path, overwriteFiles: true); + await CallZipFileExtractToDirectory(async, source, tempFolder.Path, overwriteFiles: true); DirsEqual(tempFolder.Path, folderName); + + await DisposeStream(async, source); } - [Fact] - public void ExtractToDirectoryOverwriteEncoding() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task ExtractToDirectoryOverwriteEncoding(bool async) { string folderName = zfolder("normal"); using TempDirectory tempFolder = new TempDirectory(GetTestFilePath()); - using Stream source = File.OpenRead(zfile("normal.zip")); - ZipFile.ExtractToDirectory(source, tempFolder.Path, Encoding.UTF8, overwriteFiles: false); + using FileStream source = CreateFileStreamRead(async, zfile("normal.zip")); + + await CallZipFileExtractToDirectory(async, source, tempFolder.Path, Encoding.UTF8, overwriteFiles: false); source.Position = 0; - Assert.Throws(() => ZipFile.ExtractToDirectory(source, tempFolder.Path, Encoding.UTF8 /* default false */)); + await Assert.ThrowsAsync(() => CallZipFileExtractToDirectory(async, source, tempFolder.Path, Encoding.UTF8 /* default false */)); source.Position = 0; - Assert.Throws(() => ZipFile.ExtractToDirectory(source, tempFolder.Path, Encoding.UTF8, overwriteFiles: false)); + await Assert.ThrowsAsync(() => CallZipFileExtractToDirectory(async, source, tempFolder.Path, Encoding.UTF8, overwriteFiles: false)); source.Position = 0; - ZipFile.ExtractToDirectory(source, tempFolder.Path, Encoding.UTF8, overwriteFiles: true); + await CallZipFileExtractToDirectory(async, source, tempFolder.Path, Encoding.UTF8, overwriteFiles: true); DirsEqual(tempFolder.Path, folderName); } - [Fact] - public void FilesOutsideDirectory() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task FilesOutsideDirectory(bool async) { using MemoryStream source = new(); - using (ZipArchive archive = new(source, ZipArchiveMode.Create, leaveOpen: true)) + ZipArchive archive = await CreateZipArchive(async, source, ZipArchiveMode.Create, leaveOpen: true, entryNameEncoding: null); + ZipArchiveEntry entry = archive.CreateEntry(Path.Combine("..", "entry1"), CompressionLevel.Optimal); + Stream entryStream = await OpenEntryStream(async, entry); + using (StreamWriter writer = new(entryStream)) { - using (StreamWriter writer = new(archive.CreateEntry(Path.Combine("..", "entry1"), CompressionLevel.Optimal).Open())) - { - writer.Write("This is a test."); - } + writer.Write("This is a test."); } + await DisposeStream(async, entryStream); + await DisposeZipArchive(async, archive); source.Position = 0; - Assert.Throws(() => ZipFile.ExtractToDirectory(source, GetTestFilePath())); + await Assert.ThrowsAsync(() => CallZipFileExtractToDirectory(async, source, GetTestFilePath())); } - [Fact] - public void DirectoryEntryWithData() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task DirectoryEntryWithData(bool async) { using MemoryStream source = new(); - using (ZipArchive archive = new(source, ZipArchiveMode.Create, leaveOpen: true)) + ZipArchive archive = new(source, ZipArchiveMode.Create, leaveOpen: true); + ZipArchiveEntry entry = archive.CreateEntry("testdir" + 
Path.DirectorySeparatorChar, CompressionLevel.Optimal); + Stream entryStream = await OpenEntryStream(async, entry); + using (StreamWriter writer = new(entryStream)) { - using (StreamWriter writer = new(archive.CreateEntry("testdir" + Path.DirectorySeparatorChar, CompressionLevel.Optimal).Open())) - { - writer.Write("This is a test."); - } + writer.Write("This is a test."); } + await DisposeStream(async, entryStream); + await DisposeZipArchive(async, archive); source.Position = 0; - Assert.Throws(() => ZipFile.ExtractToDirectory(source, GetTestFilePath())); + await Assert.ThrowsAsync(() => CallZipFileExtractToDirectory(async, source, GetTestFilePath())); } - [Fact] - public void ExtractToDirectoryRoundTrip() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task ExtractToDirectoryRoundTrip(bool async) { string folderName = zfolder("normal"); MemoryStream source = new(); using TempDirectory tempFolder = new(); - ZipFile.CreateFromDirectory(folderName, source); + await CallZipFileCreateFromDirectory(async, folderName, source); source.Position = 0; - ZipFile.ExtractToDirectory(source, tempFolder.Path, overwriteFiles: false); + await CallZipFileExtractToDirectory(async, source, tempFolder.Path, overwriteFiles: false); DirFileNamesEqual(tempFolder.Path, folderName); } diff --git a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Extract.cs b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Extract.cs index 950e301c93c5cd..f6c6300cf7bb42 100644 --- a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Extract.cs +++ b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Extract.cs @@ -1,47 +1,57 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
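The stream-based extract tests above funnel through CreateFileStreamRead and CallZipFileExtractToDirectory, both defined outside this diff. A plausible shape for them, assuming the async ZipFile.ExtractToDirectoryAsync overloads mirror the existing sync ones:

    // Hypothetical members of ZipFileTestBase; names and signatures are inferred from the call sites above.
    protected static FileStream CreateFileStreamRead(bool async, string path) =>
        // useAsync selects overlapped I/O so the async test runs read the source archive asynchronously.
        new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read, bufferSize: 0x1000, useAsync: async);

    protected static Task CallZipFileExtractToDirectory(bool async, Stream source, string destinationDirectoryName, bool overwriteFiles = false)
    {
        if (async)
        {
            // Assumed overload; variants taking an Encoding follow the same pattern.
            return ZipFile.ExtractToDirectoryAsync(source, destinationDirectoryName, overwriteFiles);
        }

        ZipFile.ExtractToDirectory(source, destinationDirectoryName, overwriteFiles);
        return Task.CompletedTask;
    }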
+using System.Collections.Generic; using System.Text; +using System.Threading.Tasks; using Xunit; namespace System.IO.Compression.Tests { public class ZipFile_Extract : ZipFileTestBase { + public static IEnumerable Get_ExtractToDirectoryNormal_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { "normal.zip", "normal", async }; + yield return new object[] { "empty.zip", "empty", async }; + yield return new object[] { "explicitdir1.zip", "explicitdir", async }; + yield return new object[] { "explicitdir2.zip", "explicitdir", async }; + yield return new object[] { "appended.zip", "small", async }; + yield return new object[] { "prepended.zip", "small", async }; + yield return new object[] { "noexplicitdir.zip", "explicitdir", async }; + } + } + [Theory] - [InlineData("normal.zip", "normal")] - [InlineData("empty.zip", "empty")] - [InlineData("explicitdir1.zip", "explicitdir")] - [InlineData("explicitdir2.zip", "explicitdir")] - [InlineData("appended.zip", "small")] - [InlineData("prepended.zip", "small")] - [InlineData("noexplicitdir.zip", "explicitdir")] - public void ExtractToDirectoryNormal(string file, string folder) + [MemberData(nameof(Get_ExtractToDirectoryNormal_Data))] + public async Task ExtractToDirectoryNormal(string file, string folder, bool async) { string zipFileName = zfile(file); string folderName = zfolder(folder); - using (var tempFolder = new TempDirectory(GetTestFilePath())) - { - ZipFile.ExtractToDirectory(zipFileName, tempFolder.Path); - DirsEqual(tempFolder.Path, folderName); - } + using TempDirectory tempFolder = new TempDirectory(GetTestFilePath()); + await CallZipFileExtractToDirectory(async, zipFileName, tempFolder.Path); + DirsEqual(tempFolder.Path, folderName); } - [Fact] - public void ExtractToDirectoryNull() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task ExtractToDirectoryNull(bool async) { - AssertExtensions.Throws("sourceArchiveFileName", () => ZipFile.ExtractToDirectory(sourceArchiveFileName: null, GetTestFilePath())); + await AssertExtensions.ThrowsAsync("sourceArchiveFileName", () => CallZipFileExtractToDirectory(async, sourceArchiveFileName: null, GetTestFilePath())); } - [Fact] + [Theory] + [MemberData(nameof(Get_Booleans_Data))] [ActiveIssue("https://github.com/dotnet/runtime/issues/72951", TestPlatforms.iOS | TestPlatforms.tvOS)] - public void ExtractToDirectoryUnicode() + public async Task ExtractToDirectoryUnicode(bool async) { string zipFileName = zfile("unicode.zip"); string folderName = zfolder("unicode"); using (var tempFolder = new TempDirectory(GetTestFilePath())) { - ZipFile.ExtractToDirectory(zipFileName, tempFolder.Path); + await CallZipFileExtractToDirectory(async, zipFileName, tempFolder.Path); DirFileNamesEqual(tempFolder.Path, folderName); } } @@ -62,149 +72,143 @@ public void ExtractOutOfRoot(string entryName) Assert.Throws(() => ZipFile.ExtractToDirectory(archivePath, destination.FullName)); } + [Theory] + [InlineData("../Foo")] + [InlineData("../Barbell")] + public async Task ExtractOutOfRoot_Async(string entryName) + { + string archivePath = GetTestFilePath(); + using (FileStream stream = new FileStream(archivePath, FileMode.Create)) + await using (ZipArchive archive = await ZipArchive.CreateAsync(stream, ZipArchiveMode.Create, leaveOpen: true, entryNameEncoding: null)) + { + ZipArchiveEntry entry = archive.CreateEntry(entryName); + } + + DirectoryInfo destination = Directory.CreateDirectory(Path.Combine(GetTestFilePath(), "Bar")); + await Assert.ThrowsAsync(() => 
ZipFile.ExtractToDirectoryAsync(archivePath, destination.FullName)); + } + /// /// This test ensures that a zipfile with path names that are invalid to this OS will throw errors /// when an attempt is made to extract them. /// [Theory] - [InlineData("NullCharFileName_FromWindows")] - [InlineData("NullCharFileName_FromUnix")] + [MemberData(nameof(Get_Unix_ZipWithInvalidFileNames_Data))] [PlatformSpecific(TestPlatforms.AnyUnix)] // Checks Unix-specific invalid file path - public void Unix_ZipWithInvalidFileNames(string zipName) + public async Task Unix_ZipWithInvalidFileNames(string zipName, bool async) { var testDirectory = GetTestFilePath(); - ZipFile.ExtractToDirectory(compat(zipName) + ".zip", testDirectory); - + await CallZipFileExtractToDirectory(async, compat(zipName) + ".zip", testDirectory); Assert.True(File.Exists(Path.Combine(testDirectory, "a_6b6d"))); } [Theory] - [InlineData("backslashes_FromUnix", "aa\\bb\\cc\\dd")] - [InlineData("backslashes_FromWindows", "aa\\bb\\cc\\dd")] - [InlineData("WindowsInvalid_FromUnix", "aad")] - [InlineData("WindowsInvalid_FromWindows", "aad")] + [MemberData(nameof(Get_Unix_ZipWithOSSpecificFileNames_Data))] [PlatformSpecific(TestPlatforms.AnyUnix)] // Checks Unix-specific invalid file path - public void Unix_ZipWithOSSpecificFileNames(string zipName, string fileName) + public async Task Unix_ZipWithOSSpecificFileNames(string zipName, string fileName, bool async) { string tempDir = GetTestFilePath(); - ZipFile.ExtractToDirectory(compat(zipName) + ".zip", tempDir); + await CallZipFileExtractToDirectory(async, compat(zipName) + ".zip", tempDir); string[] results = Directory.GetFiles(tempDir, "*", SearchOption.AllDirectories); Assert.Equal(1, results.Length); Assert.Equal(fileName, Path.GetFileName(results[0])); } - - /// - /// This test checks whether or not ZipFile.ExtractToDirectory() is capable of handling filenames - /// which contain invalid path characters in Windows. 
- /// Archive: InvalidWindowsFileNameChars.zip - /// Test/ - /// Test/normalText.txt - /// Test"<>|^A^B^C^D^E^F^G^H^I^J^K^L^M^N^O^P^Q^R^S^T^U^V^W^X^Y^Z^[^\^]^^^_/ - /// Test"<>|^A^B^C^D^E^F^G^H^I^J^K^L^M^N^O^P^Q^R^S^T^U^V^W^X^Y^Z^[^\^]^^^_/TestText1"<>|^A^B^C^D^E^F^G^H^I^J^K^L^M^N^O^P^Q^R^S^T^U^V^W^X^Y^Z^[^\^]^^^_.txt - /// TestEmpty/ - /// TestText"<>|^A^B^C^D^E^F^G^H^I^J^K^L^M^N^O^P^Q^R^S^T^U^V^W^X^Y^Z^[^\^]^^^_.txt - /// - [Fact] + [Theory] + [MemberData(nameof(Get_Windows_ZipWithInvalidFileNames_Data))] [PlatformSpecific(TestPlatforms.Windows)] - public void Windows_ZipWithInvalidFileNames() + public async Task Windows_ZipWithInvalidFileNames(string zipFileName, string[] expectedFiles, bool async) { - - var testDirectory = GetTestFilePath(); - ZipFile.ExtractToDirectory(compat("InvalidWindowsFileNameChars.zip"), testDirectory); - CheckExists(testDirectory, "TestText______________________________________.txt"); - CheckExists(testDirectory, "Test______________________________________/TestText1______________________________________.txt"); - CheckExists(testDirectory, "Test/normalText.txt"); + string testDirectory = GetTestFilePath(); - ZipFile.ExtractToDirectory(compat("NullCharFileName_FromWindows.zip"), testDirectory); - CheckExists(testDirectory, "a_6b6d"); - - ZipFile.ExtractToDirectory(compat("NullCharFileName_FromUnix.zip"), testDirectory); - CheckExists(testDirectory, "a_6b6d"); - - ZipFile.ExtractToDirectory(compat("WindowsInvalid_FromUnix.zip"), testDirectory); - CheckExists(testDirectory, "aa_b_d"); - - ZipFile.ExtractToDirectory(compat("WindowsInvalid_FromWindows.zip"), testDirectory); - CheckExists(testDirectory, "aa_b_d"); - - void CheckExists(string testDirectory, string file) + await CallZipFileExtractToDirectory(async, compat(zipFileName), testDirectory); + foreach (string expectedFile in expectedFiles) { - string path = Path.Combine(testDirectory, file); + string path = Path.Combine(testDirectory, expectedFile); Assert.True(File.Exists(path)); File.Delete(path); } } [Theory] - [InlineData("backslashes_FromUnix", "dd")] - [InlineData("backslashes_FromWindows", "dd")] + [MemberData(nameof(Get_Windows_ZipWithOSSpecificFileNames_Data))] [PlatformSpecific(TestPlatforms.Windows)] // Checks Windows-specific invalid file path - public void Windows_ZipWithOSSpecificFileNames(string zipName, string fileName) + public async Task Windows_ZipWithOSSpecificFileNames(string zipName, string fileName, bool async) { string tempDir = GetTestFilePath(); - ZipFile.ExtractToDirectory(compat(zipName) + ".zip", tempDir); + await CallZipFileExtractToDirectory(async, compat(zipName) + ".zip", tempDir); string[] results = Directory.GetFiles(tempDir, "*", SearchOption.AllDirectories); Assert.Equal(1, results.Length); Assert.Equal(fileName, Path.GetFileName(results[0])); } - [Fact] - public void ExtractToDirectoryOverwrite() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task ExtractToDirectoryOverwrite(bool async) { string zipFileName = zfile("normal.zip"); string folderName = zfolder("normal"); - using (var tempFolder = new TempDirectory(GetTestFilePath())) - { - ZipFile.ExtractToDirectory(zipFileName, tempFolder.Path, overwriteFiles: false); - Assert.Throws(() => ZipFile.ExtractToDirectory(zipFileName, tempFolder.Path /* default false */)); - Assert.Throws(() => ZipFile.ExtractToDirectory(zipFileName, tempFolder.Path, overwriteFiles: false)); - ZipFile.ExtractToDirectory(zipFileName, tempFolder.Path, overwriteFiles: true); + using TempDirectory tempFolder = new 
TempDirectory(GetTestFilePath()); - DirsEqual(tempFolder.Path, folderName); - } + await CallZipFileExtractToDirectory(async, zipFileName, tempFolder.Path, overwriteFiles: false); + await Assert.ThrowsAsync(() => CallZipFileExtractToDirectory(async, zipFileName, tempFolder.Path /* default false */)); + await Assert.ThrowsAsync(() => CallZipFileExtractToDirectory(async, zipFileName, tempFolder.Path, overwriteFiles: false)); + await CallZipFileExtractToDirectory(async, zipFileName, tempFolder.Path, overwriteFiles: true); + + DirsEqual(tempFolder.Path, folderName); } - [Fact] - public void ExtractToDirectoryOverwriteEncoding() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task ExtractToDirectoryOverwriteEncoding(bool async) { string zipFileName = zfile("normal.zip"); string folderName = zfolder("normal"); - using (var tempFolder = new TempDirectory(GetTestFilePath())) - { - ZipFile.ExtractToDirectory(zipFileName, tempFolder.Path, Encoding.UTF8, overwriteFiles: false); - Assert.Throws(() => ZipFile.ExtractToDirectory(zipFileName, tempFolder.Path, Encoding.UTF8 /* default false */)); - Assert.Throws(() => ZipFile.ExtractToDirectory(zipFileName, tempFolder.Path, Encoding.UTF8, overwriteFiles: false)); - ZipFile.ExtractToDirectory(zipFileName, tempFolder.Path, Encoding.UTF8, overwriteFiles: true); + using TempDirectory tempFolder = new(GetTestFilePath()); - DirsEqual(tempFolder.Path, folderName); - } + await CallZipFileExtractToDirectory(async, zipFileName, tempFolder.Path, Encoding.UTF8, overwriteFiles: false); + await Assert.ThrowsAsync(() => CallZipFileExtractToDirectory(async, zipFileName, tempFolder.Path, Encoding.UTF8 /* default false */)); + await Assert.ThrowsAsync(() => CallZipFileExtractToDirectory(async, zipFileName, tempFolder.Path, Encoding.UTF8, overwriteFiles: false)); + await CallZipFileExtractToDirectory(async, zipFileName, tempFolder.Path, Encoding.UTF8, overwriteFiles: true); + + DirsEqual(tempFolder.Path, folderName); } - [Fact] - public void FilesOutsideDirectory() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task FilesOutsideDirectory(bool async) { string archivePath = GetTestFilePath(); - using (ZipArchive archive = ZipFile.Open(archivePath, ZipArchiveMode.Create)) - using (StreamWriter writer = new StreamWriter(archive.CreateEntry(Path.Combine("..", "entry1"), CompressionLevel.Optimal).Open())) + ZipArchive archive = await CallZipFileOpen(async, archivePath, ZipArchiveMode.Create); + + ZipArchiveEntry entry = archive.CreateEntry(Path.Combine("..", "entry1"), CompressionLevel.Optimal); + Stream entryStream = await OpenEntryStream(async, entry); + using (StreamWriter writer = new StreamWriter(entryStream)) { writer.Write("This is a test."); } - Assert.Throws(() => ZipFile.ExtractToDirectory(archivePath, GetTestFilePath())); + await DisposeStream(async, entryStream); + await DisposeZipArchive(async, archive); + await Assert.ThrowsAsync(() => CallZipFileExtractToDirectory(async, archivePath, GetTestFilePath())); } - [Fact] - public void DirectoryEntryWithData() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task DirectoryEntryWithData(bool async) { string archivePath = GetTestFilePath(); - using (ZipArchive archive = ZipFile.Open(archivePath, ZipArchiveMode.Create)) - using (StreamWriter writer = new StreamWriter(archive.CreateEntry("testdir" + Path.DirectorySeparatorChar, CompressionLevel.Optimal).Open())) + ZipArchive archive = await CallZipFileOpen(async, archivePath, ZipArchiveMode.Create); + ZipArchiveEntry 
entry = archive.CreateEntry("testdir" + Path.DirectorySeparatorChar, CompressionLevel.Optimal); + Stream entryStream = await OpenEntryStream(async, entry); + using (StreamWriter writer = new StreamWriter(entryStream)) { writer.Write("This is a test."); } - Assert.Throws(() => ZipFile.ExtractToDirectory(archivePath, GetTestFilePath())); + await DisposeStream(async, entryStream); + await DisposeZipArchive(async, archive); + await Assert.ThrowsAsync(() => CallZipFileExtractToDirectory(async, archivePath, GetTestFilePath())); } } } diff --git a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Open.cs b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Open.cs index 6738a8351c477c..b7ea50579cb339 100644 --- a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Open.cs +++ b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Open.cs @@ -1,6 +1,8 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +using System.Collections; +using System.Collections.Generic; using System.Threading.Tasks; using Xunit; @@ -8,11 +10,12 @@ namespace System.IO.Compression.Tests; public class ZipFile_Open : ZipFileTestBase { - [Fact] - public void InvalidConstructors() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public Task InvalidConstructors(bool async) { //out of range enum values - Assert.Throws(() => ZipFile.Open("bad file", (ZipArchiveMode)(10))); + return Assert.ThrowsAsync(() => CallZipFileOpen(async, "bad file", (ZipArchiveMode)(10))); } [Fact] @@ -93,81 +96,171 @@ public void InvalidFiles() } [Fact] - public void InvalidInstanceMethods() + public async Task InvalidFilesAsync() { - using (TempFile testArchive = CreateTempCopyFile(zfile("normal.zip"), GetTestFilePath())) - using (ZipArchive archive = ZipFile.Open(testArchive.Path, ZipArchiveMode.Update)) + await Assert.ThrowsAsync(() => ZipFile.OpenReadAsync(bad("EOCDmissing.zip"), default)); + using (TempFile testArchive = CreateTempCopyFile(bad("EOCDmissing.zip"), GetTestFilePath())) + { + await Assert.ThrowsAsync(() => ZipFile.OpenAsync(testArchive.Path, ZipArchiveMode.Update, default)); + } + + await Assert.ThrowsAsync(() => ZipFile.OpenReadAsync(bad("CDoffsetOutOfBounds.zip"), default)); + using (TempFile testArchive = CreateTempCopyFile(bad("CDoffsetOutOfBounds.zip"), GetTestFilePath())) + { + await Assert.ThrowsAsync(() => ZipFile.OpenAsync(testArchive.Path, ZipArchiveMode.Update, default)); + } + + await using (ZipArchive archive = await ZipFile.OpenReadAsync(bad("CDoffsetInBoundsWrong.zip"), default)) + { + Assert.Throws(() => { var x = archive.Entries; }); + } + + using (TempFile testArchive = CreateTempCopyFile(bad("CDoffsetInBoundsWrong.zip"), GetTestFilePath())) + { + await Assert.ThrowsAsync(() => ZipFile.OpenAsync(testArchive.Path, ZipArchiveMode.Update, default)); + } + + await using (ZipArchive archive = await ZipFile.OpenReadAsync(bad("numberOfEntriesDifferent.zip"), default)) + { + Assert.Throws(() => { var x = archive.Entries; }); + } + using (TempFile testArchive = CreateTempCopyFile(bad("numberOfEntriesDifferent.zip"), GetTestFilePath())) { - //non-existent entry - Assert.True(null == archive.GetEntry("nonExistentEntry")); - //null/empty string - Assert.Throws(() => archive.GetEntry(null)); + await Assert.ThrowsAsync(() => ZipFile.OpenAsync(testArchive.Path, ZipArchiveMode.Update, default)); + } + + //read mode on empty file + await using (var memoryStream = new MemoryStream()) + { + await Assert.ThrowsAsync(() 
=> ZipArchive.CreateAsync(memoryStream, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null)); + } + + //offset out of bounds + await using (ZipArchive archive = await ZipFile.OpenReadAsync(bad("localFileOffsetOutOfBounds.zip"), default)) + { + ZipArchiveEntry e = archive.Entries[0]; + await Assert.ThrowsAsync(() => e.OpenAsync(default)); + } + + using (TempFile testArchive = CreateTempCopyFile(bad("localFileOffsetOutOfBounds.zip"), GetTestFilePath())) + { + await Assert.ThrowsAsync(() => ZipFile.OpenAsync(testArchive.Path, ZipArchiveMode.Update, default)); + } + + //compressed data offset + compressed size out of bounds + await using (ZipArchive archive = await ZipFile.OpenReadAsync(bad("compressedSizeOutOfBounds.zip"), default)) + { + ZipArchiveEntry e = archive.Entries[0]; + await Assert.ThrowsAsync(() => e.OpenAsync(default)); + } + + using (TempFile testArchive = CreateTempCopyFile(bad("compressedSizeOutOfBounds.zip"), GetTestFilePath())) + { + await Assert.ThrowsAsync(() => ZipFile.OpenAsync(testArchive.Path, ZipArchiveMode.Update, default)); + } + + //signature wrong + await using (ZipArchive archive = await ZipFile.OpenReadAsync(bad("localFileHeaderSignatureWrong.zip"), default)) + { + ZipArchiveEntry e = archive.Entries[0]; + await Assert.ThrowsAsync(() => e.OpenAsync(default)); + } + + using (TempFile testArchive = CreateTempCopyFile(bad("localFileHeaderSignatureWrong.zip"), GetTestFilePath())) + { + await Assert.ThrowsAsync(() => ZipFile.OpenAsync(testArchive.Path, ZipArchiveMode.Update, default)); + } + } + + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task InvalidInstanceMethods(bool async) + { + using TempFile testArchive = CreateTempCopyFile(zfile("normal.zip"), GetTestFilePath()); - ZipArchiveEntry entry = archive.GetEntry("first.txt"); + ZipArchive archive = await CallZipFileOpen(async, testArchive.Path, ZipArchiveMode.Update); - //null/empty string - AssertExtensions.Throws("entryName", () => archive.CreateEntry("")); - Assert.Throws(() => archive.CreateEntry(null)); + //non-existent entry + Assert.True(null == archive.GetEntry("nonExistentEntry")); + //null/empty string + Assert.Throws(() => archive.GetEntry(null)); + + ZipArchiveEntry entry = archive.GetEntry("first.txt"); + + //null/empty string + AssertExtensions.Throws("entryName", () => archive.CreateEntry("")); + Assert.Throws(() => archive.CreateEntry(null)); + + await DisposeZipArchive(async, archive); + } + + public static IEnumerable Get_UnsupportedCompressionRoutine_Data() + { + foreach (bool b in _bools) + { + yield return new object[] { "LZMA.zip", true, b}; + yield return new object[] { "invalidDeflate.zip", false, b}; } } [Theory] - [InlineData("LZMA.zip", true)] - [InlineData("invalidDeflate.zip", false)] - public void UnsupportedCompressionRoutine(string zipName, bool throwsOnOpen) + [MemberData(nameof(Get_UnsupportedCompressionRoutine_Data))] + public async Task UnsupportedCompressionRoutine(string zipName, bool throwsOnOpen, bool async) { string filename = bad(zipName); - using (ZipArchive archive = ZipFile.OpenRead(filename)) + ZipArchive archive = await CallZipFileOpenRead(async, filename); + + ZipArchiveEntry e = archive.Entries[0]; + if (throwsOnOpen) { - ZipArchiveEntry e = archive.Entries[0]; - if (throwsOnOpen) - { - Assert.Throws(() => e.Open()); - } - else - { - using (Stream s = e.Open()) - { - Assert.Throws(() => s.ReadByte()); - } - } + await Assert.ThrowsAsync(() => OpenEntryStream(async, e)); + } + else + { + Stream s = await OpenEntryStream(async, e); + 
Assert.Throws(() => s.ReadByte()); + await DisposeStream(async, s); } + await DisposeZipArchive(async, archive); + using (TempFile updatedCopy = CreateTempCopyFile(filename, GetTestFilePath())) { string name; long length, compressedLength; DateTimeOffset lastWriteTime; - using (ZipArchive archive = ZipFile.Open(updatedCopy.Path, ZipArchiveMode.Update)) - { - ZipArchiveEntry e = archive.Entries[0]; - name = e.FullName; - lastWriteTime = e.LastWriteTime; - length = e.Length; - compressedLength = e.CompressedLength; - Assert.Throws(() => e.Open()); - } + archive = await CallZipFileOpen(async, updatedCopy.Path, ZipArchiveMode.Update); + + e = archive.Entries[0]; + name = e.FullName; + lastWriteTime = e.LastWriteTime; + length = e.Length; + compressedLength = e.CompressedLength; + await Assert.ThrowsAsync(() => OpenEntryStream(async, e)); + + await DisposeZipArchive(async, archive); //make sure that update mode preserves that unreadable file - using (ZipArchive archive = ZipFile.Open(updatedCopy.Path, ZipArchiveMode.Update)) - { - ZipArchiveEntry e = archive.Entries[0]; - Assert.Equal(name, e.FullName); - Assert.Equal(lastWriteTime, e.LastWriteTime); - Assert.Equal(length, e.Length); - Assert.Equal(compressedLength, e.CompressedLength); - Assert.Throws(() => e.Open()); - } + archive = await CallZipFileOpen(async, updatedCopy.Path, ZipArchiveMode.Update); + + e = archive.Entries[0]; + Assert.Equal(name, e.FullName); + Assert.Equal(lastWriteTime, e.LastWriteTime); + Assert.Equal(length, e.Length); + Assert.Equal(compressedLength, e.CompressedLength); + await Assert.ThrowsAsync(() => OpenEntryStream(async, e)); + + await DisposeZipArchive(async, archive); } } - [Fact] - public void InvalidDates() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task InvalidDates(bool async) { - using (ZipArchive archive = ZipFile.OpenRead(bad("invaliddate.zip"))) - { - Assert.Equal(new DateTime(1980, 1, 1, 0, 0, 0), archive.Entries[0].LastWriteTime.DateTime); - } + ZipArchive archive = await CallZipFileOpenRead(async, bad("invaliddate.zip")); + Assert.Equal(new DateTime(1980, 1, 1, 0, 0, 0), archive.Entries[0].LastWriteTime.DateTime); + await DisposeZipArchive(async, archive); // Browser VFS does not support saving file attributes, so skip if (!PlatformDetection.IsBrowser) @@ -177,94 +270,97 @@ public void InvalidDates() fileWithBadDate.LastWriteTimeUtc = new DateTime(1970, 1, 1, 1, 1, 1); string archivePath = GetTestFilePath(); using (FileStream output = File.Open(archivePath, FileMode.Create)) - using (ZipArchive archive = new ZipArchive(output, ZipArchiveMode.Create)) { + archive = await CreateZipArchive(async, output, ZipArchiveMode.Create, leaveOpen: false, entryNameEncoding: null); archive.CreateEntryFromFile(fileWithBadDate.FullName, "SomeEntryName"); + await DisposeZipArchive(async, archive); } - using (ZipArchive archive = ZipFile.OpenRead(archivePath)) - { - Assert.Equal(new DateTime(1980, 1, 1, 0, 0, 0), archive.Entries[0].LastWriteTime.DateTime); - } + + archive = await CallZipFileOpenRead(async, archivePath); + Assert.Equal(new DateTime(1980, 1, 1, 0, 0, 0), archive.Entries[0].LastWriteTime.DateTime); + await DisposeZipArchive(async, archive); } } - [Fact] - public void ReadStreamOps() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task ReadStreamOps(bool async) { - using (ZipArchive archive = ZipFile.OpenRead(zfile("normal.zip"))) + using (ZipArchive archive = await CallZipFileOpenRead(async, zfile("normal.zip"))) { foreach (ZipArchiveEntry e in 
archive.Entries) { - using (Stream s = e.Open()) - { - Assert.True(s.CanRead, "Can read to read archive"); - Assert.False(s.CanWrite, "Can't write to read archive"); - Assert.False(s.CanSeek, "Can't seek on archive"); - Assert.Equal(LengthOfUnseekableStream(s), e.Length); - } + Stream s = await OpenEntryStream(async, e); + Assert.True(s.CanRead, "Can read to read archive"); + Assert.False(s.CanWrite, "Can't write to read archive"); + Assert.False(s.CanSeek, "Can't seek on archive"); + Assert.Equal(await LengthOfUnseekableStream(s), e.Length); + await DisposeStream(async, s); } } } - [Fact] - public void UpdateReadTwice() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task UpdateReadTwice(bool async) { - using (TempFile testArchive = CreateTempCopyFile(zfile("small.zip"), GetTestFilePath())) - using (ZipArchive archive = ZipFile.Open(testArchive.Path, ZipArchiveMode.Update)) + using TempFile testArchive = CreateTempCopyFile(zfile("small.zip"), GetTestFilePath()); + + ZipArchive archive = await CallZipFileOpen(async, testArchive.Path, ZipArchiveMode.Update); + + ZipArchiveEntry entry = archive.Entries[0]; + string contents1, contents2; + using (StreamReader s = new StreamReader(await OpenEntryStream(async, entry))) { - ZipArchiveEntry entry = archive.Entries[0]; - string contents1, contents2; - using (StreamReader s = new StreamReader(entry.Open())) - { - contents1 = s.ReadToEnd(); - } - using (StreamReader s = new StreamReader(entry.Open())) - { - contents2 = s.ReadToEnd(); - } - Assert.Equal(contents1, contents2); + contents1 = await s.ReadToEndAsync(); } + using (StreamReader s = new StreamReader(await OpenEntryStream(async, entry))) + { + contents2 = await s.ReadToEndAsync(); + } + Assert.Equal(contents1, contents2); + + await DisposeZipArchive(async, archive); } - [Fact] - public async Task UpdateAddFile() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task UpdateAddFile(bool async) { //add file using (TempFile testArchive = CreateTempCopyFile(zfile("normal.zip"), GetTestFilePath())) { - using (ZipArchive archive = ZipFile.Open(testArchive.Path, ZipArchiveMode.Update)) - { - await UpdateArchive(archive, zmodified(Path.Combine("addFile", "added.txt")), "added.txt"); - } - await IsZipSameAsDirAsync(testArchive.Path, zmodified("addFile"), ZipArchiveMode.Read); + ZipArchive archive = await CallZipFileOpen(async, testArchive.Path, ZipArchiveMode.Update); + await UpdateArchive(async, archive, zmodified(Path.Combine("addFile", "added.txt")), "added.txt"); + await DisposeZipArchive(async, archive); + + await IsZipSameAsDir(testArchive.Path, zmodified("addFile"), ZipArchiveMode.Read, async); } //add file and read entries before using (TempFile testArchive = CreateTempCopyFile(zfile("normal.zip"), GetTestFilePath())) { - using (ZipArchive archive = ZipFile.Open(testArchive.Path, ZipArchiveMode.Update)) - { - var x = archive.Entries; + ZipArchive archive = await CallZipFileOpen(async, testArchive.Path, ZipArchiveMode.Update); + var x = archive.Entries; + await UpdateArchive(async, archive, zmodified(Path.Combine("addFile", "added.txt")), "added.txt"); + await DisposeZipArchive(async, archive); - await UpdateArchive(archive, zmodified(Path.Combine("addFile", "added.txt")), "added.txt"); - } - await IsZipSameAsDirAsync(testArchive.Path, zmodified("addFile"), ZipArchiveMode.Read); + await IsZipSameAsDir(testArchive.Path, zmodified("addFile"), ZipArchiveMode.Read, async); } //add file and read entries after using (TempFile testArchive = 
CreateTempCopyFile(zfile("normal.zip"), GetTestFilePath())) { - using (ZipArchive archive = ZipFile.Open(testArchive.Path, ZipArchiveMode.Update)) - { - await UpdateArchive(archive, zmodified(Path.Combine("addFile", "added.txt")), "added.txt"); + ZipArchive archive = await CallZipFileOpen(async, testArchive.Path, ZipArchiveMode.Update); + await UpdateArchive(async, archive, zmodified(Path.Combine("addFile", "added.txt")), "added.txt"); + var x = archive.Entries; + await DisposeZipArchive(async, archive); - var x = archive.Entries; - } - await IsZipSameAsDirAsync(testArchive.Path, zmodified("addFile"), ZipArchiveMode.Read); + await IsZipSameAsDir(testArchive.Path, zmodified("addFile"), ZipArchiveMode.Read, async); } } - private static async Task UpdateArchive(ZipArchive archive, string installFile, string entryName) + private static async Task UpdateArchive(bool async, ZipArchive archive, string installFile, string entryName) { string fileName = installFile; ZipArchiveEntry e = archive.CreateEntry(entryName); @@ -274,11 +370,17 @@ private static async Task UpdateArchive(ZipArchive archive, string installFile, using (var stream = await StreamHelpers.CreateTempCopyStream(fileName)) { - using (Stream es = e.Open()) + Stream es = await OpenEntryStream(async, e); + es.SetLength(0); + if (async) + { + await stream.CopyToAsync(es); + } + else { - es.SetLength(0); stream.CopyTo(es); } + await DisposeStream(async, es); } } } diff --git a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Unix.cs b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Unix.cs index b2e59d9270d97d..e60fd22e336abd 100644 --- a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Unix.cs +++ b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFile.Unix.cs @@ -1,6 +1,8 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
+using System.Collections; +using System.Collections.Generic; using System.Linq; using System.Runtime.InteropServices; using System.Text; @@ -158,21 +160,29 @@ private static void EnsureFilePermissions(string filename, string permissions) Assert.Equal(Convert.ToInt32(permissions, 8), status.Mode & 0xFFF); } + public static IEnumerable Get_UnixExtractFilePermissionsCompat_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { "sharpziplib.zip", null, async }; // ExternalAttributes are not set in this .zip, use the system default + yield return new object[] { "Linux_RW_RW_R__.zip", "664", async }; + yield return new object[] { "Linux_RWXRW_R__.zip", "764", async }; + yield return new object[] { "OSX_RWXRW_R__.zip", "764", async }; + } + } + [Theory] - [InlineData("sharpziplib.zip", null)] // ExternalAttributes are not set in this .zip, use the system default - [InlineData("Linux_RW_RW_R__.zip", "664")] - [InlineData("Linux_RWXRW_R__.zip", "764")] - [InlineData("OSX_RWXRW_R__.zip", "764")] - public void UnixExtractFilePermissionsCompat(string zipName, string expectedPermissions) + [MemberData(nameof(Get_UnixExtractFilePermissionsCompat_Data))] + public async Task UnixExtractFilePermissionsCompat(string zipName, string expectedPermissions, bool async) { expectedPermissions = GetExpectedPermissions(expectedPermissions); string zipFileName = compat(zipName); using (var tempFolder = new TempDirectory(GetTestFilePath())) { - ZipFile.ExtractToDirectory(zipFileName, tempFolder.Path); + await CallZipFileExtractToDirectory(async, zipFileName, tempFolder.Path); - using ZipArchive archive = ZipFile.Open(zipFileName, ZipArchiveMode.Read); + ZipArchive archive = await CallZipFileOpen(async, zipFileName, ZipArchiveMode.Read); foreach (ZipArchiveEntry entry in archive.Entries) { string filename = Path.Combine(tempFolder.Path, entry.FullName); @@ -180,13 +190,15 @@ public void UnixExtractFilePermissionsCompat(string zipName, string expectedPerm EnsureFilePermissions(filename, expectedPermissions); } + await DisposeZipArchive(async, archive); } } - [Fact] + [Theory] + [MemberData(nameof(Get_Booleans_Data))] [PlatformSpecific(TestPlatforms.AnyUnix & ~TestPlatforms.Browser & ~TestPlatforms.tvOS & ~TestPlatforms.iOS)] // browser doesn't have libc mkfifo. tvOS/iOS return an error for mkfifo. 
[SkipOnPlatform(TestPlatforms.LinuxBionic, "Bionic is not normal Linux, has no normal file permissions")] - public void ZipNamedPipeIsNotSupported() + public async Task ZipNamedPipeIsNotSupported(bool async) { string destPath = Path.Combine(TestDirectory, "dest.zip"); @@ -195,7 +207,7 @@ public void ZipNamedPipeIsNotSupported() Directory.CreateDirectory(subFolderPath); // mandatory before calling mkfifo Assert.Equal(0, mkfifo(fifoPath, 438 /* 666 in octal */)); - Assert.Throws(() => ZipFile.CreateFromDirectory(subFolderPath, destPath)); + await Assert.ThrowsAsync(() => CallZipFileCreateFromDirectory(async, subFolderPath, destPath)); } private static string GetExpectedPermissions(string expectedPermissions) diff --git a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFileExtensions.ZipArchive.Create.cs b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFileExtensions.ZipArchive.Create.cs index dbd8b8defd86fd..2c5bd8d04737fb 100644 --- a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFileExtensions.ZipArchive.Create.cs +++ b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFileExtensions.ZipArchive.Create.cs @@ -1,7 +1,7 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. -using System; +using System.Collections.Generic; using System.Threading.Tasks; using Xunit; @@ -9,29 +9,41 @@ namespace System.IO.Compression.Tests { public class ZipFile_ZipArchive_Create : ZipFileTestBase { + public static IEnumerable<object[]> Get_CreateEntryFromFileExtension_Data() + { + foreach (bool withCompressionLevel in _bools) + { + foreach (bool async in _bools) + { + yield return new object[] { withCompressionLevel, async }; + } + } + } + [Theory] - [InlineData(true)] - [InlineData(false)] - public async Task CreateEntryFromFileExtension(bool withCompressionLevel) + [MemberData(nameof(Get_CreateEntryFromFileExtension_Data))] + public async Task CreateEntryFromFileExtension(bool withCompressionLevel, bool async) { //add file using (TempFile testArchive = CreateTempCopyFile(zfile("normal.zip"), GetTestFilePath())) { - using (ZipArchive archive = ZipFile.Open(testArchive.Path, ZipArchiveMode.Update)) - { + ZipArchive archive = await CallZipFileOpen(async, testArchive.Path, ZipArchiveMode.Update); + string entryName = "added.txt"; string sourceFilePath = zmodified(Path.Combine("addFile", entryName)); - Assert.Throws<ArgumentNullException>(() => ((ZipArchive)null).CreateEntryFromFile(sourceFilePath, entryName)); - Assert.Throws<ArgumentNullException>(() => archive.CreateEntryFromFile(null, entryName)); - Assert.Throws<ArgumentNullException>(() => archive.CreateEntryFromFile(sourceFilePath, null)); + await Assert.ThrowsAsync<ArgumentNullException>(() => CallZipFileExtensionsCreateEntryFromFile(async, (ZipArchive)null, sourceFilePath, entryName)); + await Assert.ThrowsAsync<ArgumentNullException>(() => CallZipFileExtensionsCreateEntryFromFile(async, archive, null, entryName)); + await Assert.ThrowsAsync<ArgumentNullException>(() => CallZipFileExtensionsCreateEntryFromFile(async, archive, sourceFilePath, null)); ZipArchiveEntry e = withCompressionLevel ?
- archive.CreateEntryFromFile(sourceFilePath, entryName) : - archive.CreateEntryFromFile(sourceFilePath, entryName, CompressionLevel.Fastest); + await CallZipFileExtensionsCreateEntryFromFile(async, archive, sourceFilePath, entryName) : + await CallZipFileExtensionsCreateEntryFromFile(async, archive, sourceFilePath, entryName, CompressionLevel.Fastest); Assert.NotNull(e); - } - await IsZipSameAsDirAsync(testArchive.Path, zmodified("addFile"), ZipArchiveMode.Read, requireExplicit: false, checkTimes: false); + + await DisposeZipArchive(async, archive); + + await IsZipSameAsDir(testArchive.Path, zmodified("addFile"), ZipArchiveMode.Read, requireExplicit: false, checkTimes: false, async); } } } diff --git a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFileExtensions.ZipArchive.Extract.cs b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFileExtensions.ZipArchive.Extract.cs index 167079b14edd19..b67071f1c15061 100644 --- a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFileExtensions.ZipArchive.Extract.cs +++ b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFileExtensions.ZipArchive.Extract.cs @@ -1,36 +1,42 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +using System.Threading.Tasks; using Xunit; namespace System.IO.Compression.Tests { public class ZipFile_ZipArchive_Extract : ZipFileTestBase { - [Fact] - public void ExtractToDirectoryExtension() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task ExtractToDirectoryExtension(bool async) { - using (ZipArchive archive = ZipFile.Open(zfile("normal.zip"), ZipArchiveMode.Read)) - { - string tempFolder = GetTestFilePath(); - Assert.Throws<ArgumentNullException>(() => ((ZipArchive)null).ExtractToDirectory(tempFolder)); - Assert.Throws<ArgumentNullException>(() => archive.ExtractToDirectory(null)); - archive.ExtractToDirectory(tempFolder); - - DirsEqual(tempFolder, zfolder("normal")); - } + ZipArchive archive = await CallZipFileOpen(async, zfile("normal.zip"), ZipArchiveMode.Read); + + string tempFolder = GetTestFilePath(); + await Assert.ThrowsAsync<ArgumentNullException>(() => CallZipFileExtensionsExtractToDirectory(async, (ZipArchive)null, tempFolder)); + await Assert.ThrowsAsync<ArgumentNullException>(() => CallZipFileExtensionsExtractToDirectory(async, archive, null)); + await CallZipFileExtensionsExtractToDirectory(async, archive, tempFolder); + + DirsEqual(tempFolder, zfolder("normal")); + + await DisposeZipArchive(async, archive); } - [Fact] [ActiveIssue("https://github.com/dotnet/runtime/issues/72951", TestPlatforms.iOS | TestPlatforms.tvOS)] - public void ExtractToDirectoryExtension_Unicode() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task ExtractToDirectoryExtension_Unicode(bool async) { - using (ZipArchive archive = ZipFile.OpenRead(zfile("unicode.zip"))) - { - string tempFolder = GetTestFilePath(); - archive.ExtractToDirectory(tempFolder); - DirFileNamesEqual(tempFolder, zfolder("unicode")); - } + ZipArchive archive = await CallZipFileOpenRead(async, zfile("unicode.zip")); + + string tempFolder = GetTestFilePath(); + await CallZipFileExtensionsExtractToDirectory(async, archive, tempFolder); + DirFileNamesEqual(tempFolder, zfolder("unicode")); + + await DisposeZipArchive(async, archive); + } } diff --git a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFileExtensions.ZipArchiveEntry.Extract.cs b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFileExtensions.ZipArchiveEntry.Extract.cs index cd246ecce00cec..e85cf7a86eab56 100644 ---
a/src/libraries/System.IO.Compression.ZipFile/tests/ZipFileExtensions.ZipArchiveEntry.Extract.cs +++ b/src/libraries/System.IO.Compression.ZipFile/tests/ZipFileExtensions.ZipArchiveEntry.Extract.cs @@ -1,45 +1,51 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +using System.Threading.Tasks; using Xunit; namespace System.IO.Compression.Tests { public class ZipFile_ZipArchiveEntry_Extract : ZipFileTestBase { - [Fact] - public void ExtractToFileExtension() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task ExtractToFileExtension(bool async) { - using (ZipArchive archive = ZipFile.Open(zfile("normal.zip"), ZipArchiveMode.Read)) - { - string file = GetTestFilePath(); - ZipArchiveEntry e = archive.GetEntry("first.txt"); + ZipArchive archive = await CallZipFileOpen(async, zfile("normal.zip"), ZipArchiveMode.Read); + + string file = GetTestFilePath(); + ZipArchiveEntry e = archive.GetEntry("first.txt"); - Assert.Throws<ArgumentNullException>(() => ((ZipArchiveEntry)null).ExtractToFile(file)); - Assert.Throws<ArgumentNullException>(() => e.ExtractToFile(null)); + await Assert.ThrowsAsync<ArgumentNullException>(() => CallExtractToFile(async, (ZipArchiveEntry)null, file)); + await Assert.ThrowsAsync<ArgumentNullException>(() => CallExtractToFile(async, e, null)); - //extract when there is nothing there - e.ExtractToFile(file); + //extract when there is nothing there + await CallExtractToFile(async, e, file); - using (Stream fs = File.Open(file, FileMode.Open), es = e.Open()) - { - StreamsEqual(fs, es); - } + using (Stream fs = File.Open(file, FileMode.Open)) + { + Stream es = await OpenEntryStream(async, e); + StreamsEqual(fs, es); + await DisposeStream(async, es); + } - Assert.Throws<IOException>(() => e.ExtractToFile(file, false)); + await Assert.ThrowsAsync<IOException>(() => CallExtractToFile(async, e, file, false)); - //truncate file - using (Stream fs = File.Open(file, FileMode.Truncate)) - { } + //truncate file + using (Stream fs = File.Open(file, FileMode.Truncate)) { } - //now use overwrite mode - e.ExtractToFile(file, true); + //now use overwrite mode + await CallExtractToFile(async, e, file, true); - using (Stream fs = File.Open(file, FileMode.Open), es = e.Open()) - { - StreamsEqual(fs, es); - } + using (Stream fs = File.Open(file, FileMode.Open)) + { + Stream es = await OpenEntryStream(async, e); + StreamsEqual(fs, es); + await DisposeStream(async, es); } + + await DisposeZipArchive(async, archive); } } } diff --git a/src/libraries/System.IO.Compression/ref/System.IO.Compression.cs b/src/libraries/System.IO.Compression/ref/System.IO.Compression.cs index b0986385eabf16..e0e0bd0eda52f2 100644 --- a/src/libraries/System.IO.Compression/ref/System.IO.Compression.cs +++ b/src/libraries/System.IO.Compression/ref/System.IO.Compression.cs @@ -90,7 +90,7 @@ public override void Write(System.ReadOnlySpan<byte> buffer) { } public override System.Threading.Tasks.ValueTask WriteAsync(System.ReadOnlyMemory<byte> buffer, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public override void WriteByte(byte value) { } } - public partial class ZipArchive : System.IDisposable + public partial class ZipArchive : System.IAsyncDisposable, System.IDisposable { public ZipArchive(System.IO.Stream stream) { } public ZipArchive(System.IO.Stream stream, System.IO.Compression.ZipArchiveMode mode) { } @@ -100,10 +100,13 @@ public ZipArchive(System.IO.Stream stream, System.IO.Compression.ZipArchiveMode public string Comment { get { throw null; } set { } } public
System.Collections.ObjectModel.ReadOnlyCollection<System.IO.Compression.ZipArchiveEntry> Entries { get { throw null; } } public System.IO.Compression.ZipArchiveMode Mode { get { throw null; } } + public static System.Threading.Tasks.Task<System.IO.Compression.ZipArchive> CreateAsync(System.IO.Stream stream, System.IO.Compression.ZipArchiveMode mode, bool leaveOpen, System.Text.Encoding? entryNameEncoding, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public System.IO.Compression.ZipArchiveEntry CreateEntry(string entryName) { throw null; } public System.IO.Compression.ZipArchiveEntry CreateEntry(string entryName, System.IO.Compression.CompressionLevel compressionLevel) { throw null; } public void Dispose() { } protected virtual void Dispose(bool disposing) { } + public System.Threading.Tasks.ValueTask DisposeAsync() { throw null; } + protected virtual System.Threading.Tasks.ValueTask DisposeAsyncCore() { throw null; } public System.IO.Compression.ZipArchiveEntry? GetEntry(string entryName) { throw null; } } public partial class ZipArchiveEntry @@ -123,6 +126,7 @@ internal ZipArchiveEntry() { } public string Name { get { throw null; } } public void Delete() { } public System.IO.Stream Open() { throw null; } + public System.Threading.Tasks.Task<System.IO.Stream> OpenAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public override string ToString() { throw null; } } public enum ZipArchiveMode diff --git a/src/libraries/System.IO.Compression/src/System.IO.Compression.csproj b/src/libraries/System.IO.Compression/src/System.IO.Compression.csproj index 1ad16f09e8af6e..835c3485162455 100644 --- a/src/libraries/System.IO.Compression/src/System.IO.Compression.csproj +++ b/src/libraries/System.IO.Compression/src/System.IO.Compression.csproj @@ -16,13 +16,18 @@ + + + + + diff --git a/src/libraries/System.IO.Compression/src/System/IO/Compression/PositionPreservingWriteOnlyStreamWrapper.cs b/src/libraries/System.IO.Compression/src/System/IO/Compression/PositionPreservingWriteOnlyStreamWrapper.cs index b9d02c6010c232..a0e1b5843ff346 100644 --- a/src/libraries/System.IO.Compression/src/System/IO/Compression/PositionPreservingWriteOnlyStreamWrapper.cs +++ b/src/libraries/System.IO.Compression/src/System/IO/Compression/PositionPreservingWriteOnlyStreamWrapper.cs @@ -93,6 +93,8 @@ protected override void Dispose(bool disposing) _stream.Dispose(); } + public override ValueTask DisposeAsync() => _stream.DisposeAsync(); + public override long Length { get { throw new NotSupportedException(SR.NotSupported); } @@ -108,9 +110,10 @@ public override void SetLength(long value) throw new NotSupportedException(SR.NotSupported); } - public override int Read(byte[] buffer, int offset, int count) - { - throw new NotSupportedException(SR.NotSupported); - } + public override int Read(byte[] buffer, int offset, int count) => throw new NotSupportedException(SR.NotSupported); + + public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) => throw new NotSupportedException(SR.NotSupported); + + public override ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default) => throw new NotSupportedException(SR.NotSupported); } } diff --git a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipArchive.Async.cs b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipArchive.Async.cs new file mode 100644 index 00000000000000..db0bdd7b1e63b0 --- /dev/null +++
b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipArchive.Async.cs @@ -0,0 +1,430 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Collections.Generic; +using System.Diagnostics; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace System.IO.Compression; + +public partial class ZipArchive : IDisposable, IAsyncDisposable +{ + /// + /// Asynchronously initializes and returns a new instance of on the given stream in the specified mode, specifying whether to leave the stream open, with an optional encoding and an optional cancellation token. + /// + /// The input or output stream. + /// See the description of the ZipArchiveMode enum. Read requires the stream to support reading, Create requires the stream to support writing, and Update requires the stream to support reading, writing, and seeking. + /// true to leave the stream open upon disposing the ZipArchive, otherwise false. + /// The encoding to use when reading or writing entry names and comments in this ZipArchive. + /// /// NOTE: Specifying this parameter to values other than null is discouraged. + /// However, this may be necessary for interoperability with ZIP archive tools and libraries that do not correctly support + /// UTF-8 encoding for entry names.
+ /// This value is used as follows:
+ /// Reading (opening) ZIP archive files: + /// If entryNameEncoding is not specified (== null): + /// + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is not set, + /// use the current system default code page (Encoding.Default) in order to decode the entry name and comment. + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is set, + /// use UTF-8 (Encoding.UTF8) in order to decode the entry name and comment. + /// + /// If entryNameEncoding is specified (!= null): + /// + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is not set, + /// use the specified entryNameEncoding in order to decode the entry name and comment. + /// For entries where the language encoding flag (EFS) in the general purpose bit flag of the local file header is set, + /// use UTF-8 (Encoding.UTF8) in order to decode the entry name and comment. + /// + /// Writing (saving) ZIP archive files: + /// If entryNameEncoding is not specified (== null): + /// + /// For entry names and comments that contain characters outside the ASCII range, + /// the language encoding flag (EFS) will be set in the general purpose bit flag of the local file header, + /// and UTF-8 (Encoding.UTF8) will be used in order to encode the entry name and comment into bytes. + /// For entry names and comments that do not contain characters outside the ASCII range, + /// the language encoding flag (EFS) will not be set in the general purpose bit flag of the local file header, + /// and the current system default code page (Encoding.Default) will be used to encode the entry names and comments into bytes. + /// + /// If entryNameEncoding is specified (!= null): + /// + /// The specified entryNameEncoding will always be used to encode the entry names and comments into bytes. + /// The language encoding flag (EFS) in the general purpose bit flag of the local file header will be set if and only + /// if the specified entryNameEncoding is a UTF-8 encoding. + /// + /// Note that Unicode encodings other than UTF-8 may not be currently used for the entryNameEncoding, + /// otherwise an is thrown. + /// + /// The optional cancellation token to monitor. + /// The stream is already closed. -or- mode is incompatible with the capabilities of the stream. + /// The stream is null. + /// mode specified an invalid value. + /// The contents of the stream could not be interpreted as a Zip file. -or- mode is Update and an entry is missing from the archive or is corrupt and cannot be read. -or- mode is Update and an entry is too large to fit into memory. + /// If a Unicode encoding other than UTF-8 is specified for the entryNameEncoding. + public static async Task CreateAsync(Stream stream, ZipArchiveMode mode, bool leaveOpen, Encoding? entryNameEncoding, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + ArgumentNullException.ThrowIfNull(stream); + + Stream? extraTempStream = null; + + try + { + Stream? 
backingStream = null; + + if (ValidateMode(mode, stream)) + { + backingStream = stream; + extraTempStream = stream = new MemoryStream(); + await backingStream.CopyToAsync(stream, cancellationToken).ConfigureAwait(false); + stream.Seek(0, SeekOrigin.Begin); + } + + ZipArchive zipArchive = new(mode, leaveOpen, entryNameEncoding, backingStream, DecideArchiveStream(mode, stream)); + + switch (mode) + { + case ZipArchiveMode.Create: + zipArchive._readEntries = true; + break; + case ZipArchiveMode.Read: + await zipArchive.ReadEndOfCentralDirectoryAsync(cancellationToken).ConfigureAwait(false); + break; + case ZipArchiveMode.Update: + default: + Debug.Assert(mode == ZipArchiveMode.Update); + if (zipArchive._archiveStream.Length == 0) + { + zipArchive._readEntries = true; + } + else + { + await zipArchive.ReadEndOfCentralDirectoryAsync(cancellationToken).ConfigureAwait(false); + await zipArchive.EnsureCentralDirectoryReadAsync(cancellationToken).ConfigureAwait(false); + + foreach (ZipArchiveEntry entry in zipArchive._entries) + { + await entry.ThrowIfNotOpenableAsync(needToUncompress: false, needToLoadIntoMemory: true, cancellationToken).ConfigureAwait(false); + } + } + break; + } + + return zipArchive; + } + catch (Exception) + { + if (extraTempStream != null) + { + await extraTempStream.DisposeAsync().ConfigureAwait(false); + } + + throw; + } + } + + public async ValueTask DisposeAsync() => await DisposeAsyncCore().ConfigureAwait(false); + + protected virtual async ValueTask DisposeAsyncCore() + { + if (!_isDisposed) + { + try + { + switch (_mode) + { + case ZipArchiveMode.Read: + break; + case ZipArchiveMode.Create: + case ZipArchiveMode.Update: + default: + Debug.Assert(_mode == ZipArchiveMode.Update || _mode == ZipArchiveMode.Create); + await WriteFileAsync().ConfigureAwait(false); + break; + } + } + finally + { + await CloseStreamsAsync().ConfigureAwait(false); + _isDisposed = true; + } + } + } + + private async Task CloseStreamsAsync() + { + if (!_leaveOpen) + { + await _archiveStream.DisposeAsync().ConfigureAwait(false); + if (_backingStream != null) + { + await _backingStream.DisposeAsync().ConfigureAwait(false); + } + } + else + { + // if _backingStream isn't null, that means we assigned the original stream they passed + // us to _backingStream (which they requested we leave open), and _archiveStream was + // the temporary copy that we needed + if (_backingStream != null) + { + await _archiveStream.DisposeAsync().ConfigureAwait(false); + } + } + } + + private async Task EnsureCentralDirectoryReadAsync(CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (!_readEntries) + { + await ReadCentralDirectoryAsync(cancellationToken).ConfigureAwait(false); + _readEntries = true; + } + } + + private async Task ReadCentralDirectoryAsync(CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + ReadCentralDirectoryInitialize(out byte[] fileBuffer, out long numberOfEntries, out bool saveExtraFieldsAndComments, out bool continueReadingCentralDirectory, out int bytesRead, out int currPosition, out int bytesConsumed); + + // read the central directory + while (continueReadingCentralDirectory) + { + // the buffer read must always be large enough to fit the constant section size of at least one header + int currBytesRead = await _archiveStream.ReadAtLeastAsync(fileBuffer, ZipCentralDirectoryFileHeader.BlockConstantSectionSize, throwOnEndOfStream: false, cancellationToken).ConfigureAwait(false); + + byte[] 
sizedFileBuffer = fileBuffer[0..currBytesRead]; + continueReadingCentralDirectory = currBytesRead >= ZipCentralDirectoryFileHeader.BlockConstantSectionSize; + + while (currPosition + ZipCentralDirectoryFileHeader.BlockConstantSectionSize <= currBytesRead) + { + (bool result, bytesConsumed, ZipCentralDirectoryFileHeader? currentHeader) = + await ZipCentralDirectoryFileHeader.TryReadBlockAsync(sizedFileBuffer.AsMemory(currPosition), _archiveStream, saveExtraFieldsAndComments, cancellationToken).ConfigureAwait(false); + + if (!ReadCentralDirectoryEndOfInnerLoopWork(result, currentHeader, bytesConsumed, ref continueReadingCentralDirectory, ref numberOfEntries, ref currPosition, ref bytesRead)) + { + break; + } + } + + ReadCentralDirectoryEndOfOuterLoopWork(ref currPosition, sizedFileBuffer); + } + + ReadCentralDirectoryPostOuterLoopWork(numberOfEntries); + } + catch (EndOfStreamException ex) + { + throw new InvalidDataException(SR.Format(SR.CentralDirectoryInvalid, ex)); + } + } + + // This function reads all the EOCD stuff it needs to find the offset to the start of the central directory + // This offset gets put in _centralDirectoryStart and the number of this disk gets put in _numberOfThisDisk + // Also does some verification that this isn't a split/spanned archive + // Also checks that offset to CD isn't out of bounds + private async Task ReadEndOfCentralDirectoryAsync(CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + // This seeks backwards almost to the beginning of the EOCD, one byte after where the signature would be + // located if the EOCD had the minimum possible size (no file zip comment) + _archiveStream.Seek(-ZipEndOfCentralDirectoryBlock.SizeOfBlockWithoutSignature, SeekOrigin.End); + + // If the EOCD has the minimum possible size (no zip file comment), then exactly the previous 4 bytes will contain the signature + // But if the EOCD has max possible size, the signature should be found somewhere in the previous 64K + 4 bytes + if (!await ZipHelper.SeekBackwardsToSignatureAsync(_archiveStream, + ZipEndOfCentralDirectoryBlock.SignatureConstantBytes, + ZipEndOfCentralDirectoryBlock.ZipFileCommentMaxLength + ZipEndOfCentralDirectoryBlock.FieldLengths.Signature, + cancellationToken).ConfigureAwait(false)) + throw new InvalidDataException(SR.EOCDNotFound); + + // read the EOCD + ZipEndOfCentralDirectoryBlock eocd = await ZipEndOfCentralDirectoryBlock.ReadBlockAsync(_archiveStream, cancellationToken).ConfigureAwait(false); + + ReadEndOfCentralDirectoryInnerWork(eocd, out long eocdStart); + + await TryReadZip64EndOfCentralDirectoryAsync(eocd, eocdStart, cancellationToken).ConfigureAwait(false); + + if (_centralDirectoryStart > _archiveStream.Length) + { + throw new InvalidDataException(SR.FieldTooBigOffsetToCD); + } + } + catch (EndOfStreamException ex) + { + throw new InvalidDataException(SR.CDCorrupt, ex); + } + catch (IOException ex) + { + throw new InvalidDataException(SR.CDCorrupt, ex); + } + } + + // Tries to find the Zip64 End of Central Directory Locator, then the Zip64 End of Central Directory, assuming the + // End of Central Directory block has already been found, as well as the location in the stream where the EOCD starts. 
+ private async ValueTask TryReadZip64EndOfCentralDirectoryAsync(ZipEndOfCentralDirectoryBlock eocd, long eocdStart, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + // Only bother looking for the Zip64-EOCD stuff if we suspect it is needed because some value is FFFFFFFFF + // because these are the only two values we need, we only worry about these + // if we don't find the Zip64-EOCD, we just give up and try to use the original values + if (eocd.NumberOfThisDisk == ZipHelper.Mask16Bit || + eocd.OffsetOfStartOfCentralDirectoryWithRespectToTheStartingDiskNumber == ZipHelper.Mask32Bit || + eocd.NumberOfEntriesInTheCentralDirectory == ZipHelper.Mask16Bit) + { + // Read Zip64 End of Central Directory Locator + + // This seeks forwards almost to the beginning of the Zip64-EOCDL, one byte after where the signature would be located + _archiveStream.Seek(eocdStart - Zip64EndOfCentralDirectoryLocator.SizeOfBlockWithoutSignature, SeekOrigin.Begin); + + // Exactly the previous 4 bytes should contain the Zip64-EOCDL signature + // if we don't find it, assume it doesn't exist and use data from normal EOCD + if (await ZipHelper.SeekBackwardsToSignatureAsync(_archiveStream, + Zip64EndOfCentralDirectoryLocator.SignatureConstantBytes, + Zip64EndOfCentralDirectoryLocator.FieldLengths.Signature, cancellationToken).ConfigureAwait(false)) + { + // use locator to get to Zip64-EOCD + Zip64EndOfCentralDirectoryLocator locator = await Zip64EndOfCentralDirectoryLocator.TryReadBlockAsync(_archiveStream, cancellationToken).ConfigureAwait(false); + TryReadZip64EndOfCentralDirectoryInnerInitialWork(locator); + + // Read Zip64 End of Central Directory Record + Zip64EndOfCentralDirectoryRecord record = await Zip64EndOfCentralDirectoryRecord.TryReadBlockAsync(_archiveStream, cancellationToken).ConfigureAwait(false); + + TryReadZip64EndOfCentralDirectoryInnerFinalWork(record); + } + } + } + + private async ValueTask WriteFileAsync(CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + // if we are in create mode, we always set readEntries to true in Init + // if we are in update mode, we call EnsureCentralDirectoryRead, which sets readEntries to true + Debug.Assert(_readEntries); + + // Entries starting after this offset have had a dynamically-sized change. Everything on or after this point must be rewritten. + long completeRewriteStartingOffset = 0; + List entriesToWrite = _entries; + + if (_mode == ZipArchiveMode.Update) + { + // Entries starting after this offset have some kind of change made to them. It might just be a fixed-length field though, in which case + // that single entry's metadata can be rewritten without impacting anything else. + long startingOffset = _firstDeletedEntryOffset; + long nextFileOffset = 0; + completeRewriteStartingOffset = startingOffset; + + entriesToWrite = new(_entries.Count); + foreach (ZipArchiveEntry entry in _entries) + { + if (!entry.OriginallyInArchive) + { + entriesToWrite.Add(entry); + } + else + { + + WriteFileCalculateOffsets(entry, ref startingOffset, ref nextFileOffset); + + // We want to re-write entries which are after the starting offset of the first entry which has pending data to write. + // NB: the existing ZipArchiveEntries are sorted in _entries by their position ascending. 
+ if (entry.OffsetOfLocalHeader >= startingOffset) + { + WriteFileCheckStartingOffset(entry, ref completeRewriteStartingOffset); + + await entry.LoadLocalHeaderExtraFieldIfNeededAsync(cancellationToken).ConfigureAwait(false); + if (entry.OffsetOfLocalHeader >= completeRewriteStartingOffset) + { + await entry.LoadCompressedBytesIfNeededAsync(cancellationToken).ConfigureAwait(false); + } + + entriesToWrite.Add(entry); + } + } + } + + WriteFileUpdateModeFinalWork(startingOffset, nextFileOffset); + } + + foreach (ZipArchiveEntry entry in entriesToWrite) + { + // We don't always need to write the local header entry, ZipArchiveEntry is usually able to work out when it doesn't need to. + // We want to force this header entry to be written (even for completely untouched entries) if the entry comes after one + // which had a pending dynamically-sized write. + bool forceWriteLocalEntry = !entry.OriginallyInArchive || (entry.OriginallyInArchive && entry.OffsetOfLocalHeader >= completeRewriteStartingOffset); + + await entry.WriteAndFinishLocalEntryAsync(forceWriteLocalEntry, cancellationToken).ConfigureAwait(false); + } + + long plannedCentralDirectoryPosition = _archiveStream.Position; + // If there are no entries in the archive, we still want to create the archive epilogue. + bool archiveEpilogueRequiresUpdate = _entries.Count == 0; + + foreach (ZipArchiveEntry entry in _entries) + { + // The central directory needs to be rewritten if its position has moved, if there's a new entry in the archive, or if the entry might be different. + bool centralDirectoryEntryRequiresUpdate = plannedCentralDirectoryPosition != _centralDirectoryStart + || !entry.OriginallyInArchive || entry.OffsetOfLocalHeader >= completeRewriteStartingOffset; + + await entry.WriteCentralDirectoryFileHeaderAsync(centralDirectoryEntryRequiresUpdate, cancellationToken).ConfigureAwait(false); + archiveEpilogueRequiresUpdate |= centralDirectoryEntryRequiresUpdate; + } + + long sizeOfCentralDirectory = _archiveStream.Position - plannedCentralDirectoryPosition; + + await WriteArchiveEpilogueAsync(plannedCentralDirectoryPosition, sizeOfCentralDirectory, archiveEpilogueRequiresUpdate, cancellationToken).ConfigureAwait(false); + + WriteFileFinalWork(); + } + + // writes eocd, and if needed, zip 64 eocd, zip64 eocd locator + // should only throw an exception in extremely exceptional cases because it is called from dispose + private async ValueTask WriteArchiveEpilogueAsync(long startOfCentralDirectory, long sizeOfCentralDirectory, bool centralDirectoryChanged, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + // determine if we need Zip 64 + if (startOfCentralDirectory >= uint.MaxValue + || sizeOfCentralDirectory >= uint.MaxValue + || _entries.Count >= ZipHelper.Mask16Bit +#if DEBUG_FORCE_ZIP64 + || _forceZip64 +#endif + ) + { + // if we need zip 64, write zip 64 eocd and locator + long zip64EOCDRecordStart = _archiveStream.Position; + + if (centralDirectoryChanged) + { + await Zip64EndOfCentralDirectoryRecord.WriteBlockAsync(_archiveStream, _entries.Count, startOfCentralDirectory, sizeOfCentralDirectory, cancellationToken).ConfigureAwait(false); + await Zip64EndOfCentralDirectoryLocator.WriteBlockAsync(_archiveStream, zip64EOCDRecordStart, cancellationToken).ConfigureAwait(false); + } + else + { + WriteArchiveEpilogueNoCDChangesWork(); + } + } + + // write normal eocd + if (centralDirectoryChanged || (Changed != ChangeState.Unchanged)) + { + await 
ZipEndOfCentralDirectoryBlock.WriteBlockAsync(_archiveStream, _entries.Count, startOfCentralDirectory, sizeOfCentralDirectory, _archiveComment, cancellationToken).ConfigureAwait(false); + } + else + { + _archiveStream.Seek(ZipEndOfCentralDirectoryBlock.TotalSize + _archiveComment.Length, SeekOrigin.Current); + } + } +} diff --git a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipArchive.cs b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipArchive.cs index 65890420d17107..900f40c8ccfcb2 100644 --- a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipArchive.cs +++ b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipArchive.cs @@ -6,14 +6,14 @@ using System.Collections.Generic; using System.Collections.ObjectModel; +using System.ComponentModel; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; -using System.Runtime.InteropServices; using System.Text; namespace System.IO.Compression { - public class ZipArchive : IDisposable + public partial class ZipArchive : IDisposable, IAsyncDisposable { private readonly Stream _archiveStream; private ZipArchiveEntry? _archiveStreamOwner; @@ -119,60 +119,25 @@ public ZipArchive(Stream stream, ZipArchiveMode mode, bool leaveOpen) : this(str /// /// If a Unicode encoding other than UTF-8 is specified for the entryNameEncoding. public ZipArchive(Stream stream, ZipArchiveMode mode, bool leaveOpen, Encoding? entryNameEncoding) + : this(mode, leaveOpen, entryNameEncoding, backingStream: null, archiveStream: DecideArchiveStream(mode, stream)) { ArgumentNullException.ThrowIfNull(stream); - EntryNameAndCommentEncoding = entryNameEncoding; Stream? extraTempStream = null; try { _backingStream = null; - // check stream against mode - switch (mode) + if (ValidateMode(mode, stream)) { - case ZipArchiveMode.Create: - if (!stream.CanWrite) - throw new ArgumentException(SR.CreateModeCapabilities); - break; - case ZipArchiveMode.Read: - if (!stream.CanRead) - throw new ArgumentException(SR.ReadModeCapabilities); - if (!stream.CanSeek) - { - _backingStream = stream; - extraTempStream = stream = new MemoryStream(); - _backingStream.CopyTo(stream); - stream.Seek(0, SeekOrigin.Begin); - } - break; - case ZipArchiveMode.Update: - if (!stream.CanRead || !stream.CanWrite || !stream.CanSeek) - throw new ArgumentException(SR.UpdateModeCapabilities); - break; - default: - // still have to throw this, because stream constructor doesn't do mode argument checks - throw new ArgumentOutOfRangeException(nameof(mode)); + _backingStream = stream; + extraTempStream = stream = new MemoryStream(); + _backingStream.CopyTo(stream); + stream.Seek(0, SeekOrigin.Begin); } - _mode = mode; - if (mode == ZipArchiveMode.Create && !stream.CanSeek) - _archiveStream = new PositionPreservingWriteOnlyStreamWrapper(stream); - else - _archiveStream = stream; - _archiveStreamOwner = null; - _entries = new List(); - _entriesCollection = new ReadOnlyCollection(_entries); - _entriesDictionary = new Dictionary(); - Changed = ChangeState.Unchanged; - _readEntries = false; - _leaveOpen = leaveOpen; - _centralDirectoryStart = 0; // invalid until ReadCentralDirectory - _isDisposed = false; - _numberOfThisDisk = 0; // invalid until ReadCentralDirectory - _archiveComment = Array.Empty(); - _firstDeletedEntryOffset = long.MaxValue; + _archiveStream = DecideArchiveStream(mode, stream); switch (mode) { @@ -193,6 +158,7 @@ public ZipArchive(Stream stream, ZipArchiveMode mode, bool leaveOpen, Encoding? 
{ ReadEndOfCentralDirectory(); EnsureCentralDirectoryRead(); + foreach (ZipArchiveEntry entry in _entries) { entry.ThrowIfNotOpenable(needToUncompress: false, needToLoadIntoMemory: true); @@ -201,7 +167,7 @@ public ZipArchive(Stream stream, ZipArchiveMode mode, bool leaveOpen, Encoding? break; } } - catch + catch (Exception) { extraTempStream?.Dispose(); @@ -209,6 +175,29 @@ public ZipArchive(Stream stream, ZipArchiveMode mode, bool leaveOpen, Encoding? } } + /// Helper constructor that initializes some of the essential ZipArchive + /// information that other constructors initialize the same way. + /// Validations, checks and entry collection need to be done outside this constructor. + private ZipArchive(ZipArchiveMode mode, bool leaveOpen, Encoding? entryNameEncoding, Stream? backingStream, Stream archiveStream) + { + _backingStream = backingStream; + _archiveStream = archiveStream; + _mode = mode; + EntryNameAndCommentEncoding = entryNameEncoding; + _archiveStreamOwner = null; + _entries = new List(); + _entriesCollection = new ReadOnlyCollection(_entries); + _entriesDictionary = new Dictionary(); + Changed = ChangeState.Unchanged; + _readEntries = false; + _leaveOpen = leaveOpen; + _centralDirectoryStart = 0; // invalid until ReadCentralDirectory + _isDisposed = false; + _numberOfThisDisk = 0; // invalid until ReadCentralDirectory + _archiveComment = Array.Empty(); + _firstDeletedEntryOffset = long.MaxValue; + } + /// /// Gets or sets the optional archive comment. /// @@ -326,11 +315,7 @@ protected virtual void Dispose(bool disposing) /// /// Finishes writing the archive and releases all resources used by the ZipArchive object, unless the object was constructed with leaveOpen as true. Any streams from opened entries in the ZipArchive still open will throw exceptions on subsequent writes, as the underlying streams will have been closed. /// - public void Dispose() - { - Dispose(true); - GC.SuppressFinalize(this); - } + public void Dispose() => Dispose(true); /// /// Retrieves a wrapper for the file entry in the archive with the specified name. Names are compared using ordinal comparison. If there are multiple entries in the archive with the specified name, the first one found will be returned. @@ -490,91 +475,134 @@ private void EnsureCentralDirectoryRead() } } - private void ReadCentralDirectory() + private void ReadCentralDirectoryInitialize(out byte[] fileBuffer, out long numberOfEntries, out bool saveExtraFieldsAndComments, out bool continueReadingCentralDirectory, out int bytesRead, out int currPosition, out int bytesConsumed) { - const int ReadBufferSize = 4096; + const int ReadCentralDirectoryReadBufferSize = 4096; - byte[] fileBuffer = System.Buffers.ArrayPool.Shared.Rent(ReadBufferSize); - Span fileBufferSpan = fileBuffer.AsSpan(0, ReadBufferSize); + fileBuffer = new byte[ReadCentralDirectoryReadBufferSize]; - try + // assume ReadEndOfCentralDirectory has been called and has populated _centralDirectoryStart + + _archiveStream.Seek(_centralDirectoryStart, SeekOrigin.Begin); + + numberOfEntries = 0; + saveExtraFieldsAndComments = Mode == ZipArchiveMode.Update; + + continueReadingCentralDirectory = true; + // total bytes read from central directory + bytesRead = 0; + // current position in the current buffer + currPosition = 0; + // total bytes read from all file headers starting in the current buffer + bytesConsumed = 0; + + _entries.Clear(); + _entriesDictionary.Clear(); + } + + private bool ReadCentralDirectoryEndOfInnerLoopWork(bool result, ZipCentralDirectoryFileHeader? 
currentHeader, int bytesConsumed, ref bool continueReadingCentralDirectory, ref long numberOfEntries, ref int currPosition, ref int bytesRead) + { + if (!result) + { + continueReadingCentralDirectory = false; + return false; + } + + Debug.Assert(currentHeader != null, "currentHeader should not be null here"); + AddEntry(new ZipArchiveEntry(this, currentHeader)); + numberOfEntries++; + if (numberOfEntries > _expectedNumberOfEntries) + { + throw new InvalidDataException(SR.NumEntriesWrong); + } + + currPosition += bytesConsumed; + bytesRead += bytesConsumed; + + return true; + } + + private void ReadCentralDirectoryEndOfOuterLoopWork(ref int currPosition, ReadOnlySpan sizedFileBuffer) + { + // We've run out of possible space in the entry - seek backwards by the number of bytes remaining in + // this buffer (so that the next buffer overlaps with this one) and retry. + if (currPosition < sizedFileBuffer.Length) { - // assume ReadEndOfCentralDirectory has been called and has populated _centralDirectoryStart + _archiveStream.Seek(-(sizedFileBuffer.Length - currPosition), SeekOrigin.Current); + } + currPosition = 0; + } - _archiveStream.Seek(_centralDirectoryStart, SeekOrigin.Begin); + private void ReadCentralDirectoryPostOuterLoopWork(long numberOfEntries) + { + if (numberOfEntries != _expectedNumberOfEntries) + { + throw new InvalidDataException(SR.NumEntriesWrong); + } - long numberOfEntries = 0; - bool saveExtraFieldsAndComments = Mode == ZipArchiveMode.Update; + // Sort _entries by each archive entry's position. This supports the algorithm in WriteFile, so is only + // necessary when the ZipArchive has been opened in Update mode. + if (Mode == ZipArchiveMode.Update) + { + _entries.Sort(ZipArchiveEntry.LocalHeaderOffsetComparer.Instance); + } + } - bool continueReadingCentralDirectory = true; - // total bytes read from central directory - int bytesRead = 0; - // current position in the current buffer - int currPosition = 0; - // total bytes read from all file headers starting in the current buffer - int bytesConsumed = 0; + private void ReadCentralDirectory() + { + try + { + ReadCentralDirectoryInitialize(out byte[] fileBuffer, out long numberOfEntries, out bool saveExtraFieldsAndComments, out bool continueReadingCentralDirectory, out int bytesRead, out int currPosition, out int bytesConsumed); - _entries.Clear(); - _entriesDictionary.Clear(); + Span fileBufferSpan = fileBuffer.AsSpan(); // read the central directory while (continueReadingCentralDirectory) { // the buffer read must always be large enough to fit the constant section size of at least one header int currBytesRead = _archiveStream.ReadAtLeast(fileBufferSpan, ZipCentralDirectoryFileHeader.BlockConstantSectionSize, throwOnEndOfStream: false); - ReadOnlySpan sizedFileBuffer = fileBufferSpan.Slice(0, currBytesRead); - continueReadingCentralDirectory = sizedFileBuffer.Length >= ZipCentralDirectoryFileHeader.BlockConstantSectionSize; + ReadOnlySpan sizedFileBuffer = fileBufferSpan.Slice(0, currBytesRead); + continueReadingCentralDirectory = currBytesRead >= ZipCentralDirectoryFileHeader.BlockConstantSectionSize; - while (currPosition + ZipCentralDirectoryFileHeader.BlockConstantSectionSize <= sizedFileBuffer.Length) + while (currPosition + ZipCentralDirectoryFileHeader.BlockConstantSectionSize <= currBytesRead) { - if (!ZipCentralDirectoryFileHeader.TryReadBlock(sizedFileBuffer.Slice(currPosition), _archiveStream, - saveExtraFieldsAndComments, out bytesConsumed, out ZipCentralDirectoryFileHeader? 
currentHeader)) - { - continueReadingCentralDirectory = false; - break; - } + bool result = ZipCentralDirectoryFileHeader.TryReadBlock(sizedFileBuffer.Slice(currPosition), _archiveStream, + saveExtraFieldsAndComments, out bytesConsumed, out ZipCentralDirectoryFileHeader? currentHeader); - AddEntry(new ZipArchiveEntry(this, currentHeader)); - numberOfEntries++; - if (numberOfEntries > _expectedNumberOfEntries) + if (!ReadCentralDirectoryEndOfInnerLoopWork(result, currentHeader, bytesConsumed, ref continueReadingCentralDirectory, ref numberOfEntries, ref currPosition, ref bytesRead)) { - throw new InvalidDataException(SR.NumEntriesWrong); + break; } - - currPosition += bytesConsumed; - bytesRead += bytesConsumed; } - // We've run out of possible space in the entry - seek backwards by the number of bytes remaining in - // this buffer (so that the next buffer overlaps with this one) and retry. - if (currPosition < sizedFileBuffer.Length) - { - _archiveStream.Seek(-(sizedFileBuffer.Length - currPosition), SeekOrigin.Current); - } - currPosition = 0; + ReadCentralDirectoryEndOfOuterLoopWork(ref currPosition, sizedFileBuffer); } - if (numberOfEntries != _expectedNumberOfEntries) - { - throw new InvalidDataException(SR.NumEntriesWrong); - } - - // Sort _entries by each archive entry's position. This supports the algorithm in WriteFile, so is only - // necessary when the ZipArchive has been opened in Update mode. - if (Mode == ZipArchiveMode.Update) - { - _entries.Sort(ZipArchiveEntry.LocalHeaderOffsetComparer.Instance); - } + ReadCentralDirectoryPostOuterLoopWork(numberOfEntries); } catch (EndOfStreamException ex) { throw new InvalidDataException(SR.Format(SR.CentralDirectoryInvalid, ex)); } - finally - { - System.Buffers.ArrayPool.Shared.Return(fileBuffer); - } + } + + private void ReadEndOfCentralDirectoryInnerWork(ZipEndOfCentralDirectoryBlock eocd, out long eocdStart) + { + eocdStart = _archiveStream.Position; + + if (eocd.NumberOfThisDisk != eocd.NumberOfTheDiskWithTheStartOfTheCentralDirectory) + throw new InvalidDataException(SR.SplitSpanned); + + _numberOfThisDisk = eocd.NumberOfThisDisk; + _centralDirectoryStart = eocd.OffsetOfStartOfCentralDirectoryWithRespectToTheStartingDiskNumber; + + if (eocd.NumberOfEntriesInTheCentralDirectory != eocd.NumberOfEntriesInTheCentralDirectoryOnThisDisk) + throw new InvalidDataException(SR.SplitSpanned); + + _expectedNumberOfEntries = eocd.NumberOfEntriesInTheCentralDirectory; + + _archiveComment = eocd.ArchiveComment; } // This function reads all the EOCD stuff it needs to find the offset to the start of the central directory @@ -596,25 +624,10 @@ private void ReadEndOfCentralDirectory() ZipEndOfCentralDirectoryBlock.ZipFileCommentMaxLength + ZipEndOfCentralDirectoryBlock.FieldLengths.Signature)) throw new InvalidDataException(SR.EOCDNotFound); - long eocdStart = _archiveStream.Position; - // read the EOCD - ZipEndOfCentralDirectoryBlock eocd; - bool eocdProper = ZipEndOfCentralDirectoryBlock.TryReadBlock(_archiveStream, out eocd); - Debug.Assert(eocdProper); // we just found this using the signature finder, so it should be okay - - if (eocd.NumberOfThisDisk != eocd.NumberOfTheDiskWithTheStartOfTheCentralDirectory) - throw new InvalidDataException(SR.SplitSpanned); - - _numberOfThisDisk = eocd.NumberOfThisDisk; - _centralDirectoryStart = eocd.OffsetOfStartOfCentralDirectoryWithRespectToTheStartingDiskNumber; + ZipEndOfCentralDirectoryBlock eocd = ZipEndOfCentralDirectoryBlock.ReadBlock(_archiveStream); - if (eocd.NumberOfEntriesInTheCentralDirectory != 
eocd.NumberOfEntriesInTheCentralDirectoryOnThisDisk) - throw new InvalidDataException(SR.SplitSpanned); - - _expectedNumberOfEntries = eocd.NumberOfEntriesInTheCentralDirectory; - - _archiveComment = eocd.ArchiveComment; + ReadEndOfCentralDirectoryInnerWork(eocd, out long eocdStart); TryReadZip64EndOfCentralDirectory(eocd, eocdStart); @@ -633,6 +646,33 @@ private void ReadEndOfCentralDirectory() } } + private void TryReadZip64EndOfCentralDirectoryInnerInitialWork(Zip64EndOfCentralDirectoryLocator? locator) + { + if (locator == null || locator.OffsetOfZip64EOCD > long.MaxValue) + throw new InvalidDataException(SR.FieldTooBigOffsetToZip64EOCD); + + long zip64EOCDOffset = (long)locator.OffsetOfZip64EOCD; + + _archiveStream.Seek(zip64EOCDOffset, SeekOrigin.Begin); + } + + private void TryReadZip64EndOfCentralDirectoryInnerFinalWork(Zip64EndOfCentralDirectoryRecord record) + { + _numberOfThisDisk = record.NumberOfThisDisk; + + if (record.NumberOfEntriesTotal > long.MaxValue) + throw new InvalidDataException(SR.FieldTooBigNumEntries); + + if (record.OffsetOfCentralDirectory > long.MaxValue) + throw new InvalidDataException(SR.FieldTooBigOffsetToCD); + + if (record.NumberOfEntriesTotal != record.NumberOfEntriesOnThisDisk) + throw new InvalidDataException(SR.SplitSpanned); + + _expectedNumberOfEntries = (long)record.NumberOfEntriesTotal; + _centralDirectoryStart = (long)record.OffsetOfCentralDirectory; + } + // Tries to find the Zip64 End of Central Directory Locator, then the Zip64 End of Central Directory, assuming the // End of Central Directory block has already been found, as well as the location in the stream where the EOCD starts. private void TryReadZip64EndOfCentralDirectory(ZipEndOfCentralDirectoryBlock eocd, long eocdStart) @@ -656,36 +696,62 @@ private void TryReadZip64EndOfCentralDirectory(ZipEndOfCentralDirectoryBlock eoc Zip64EndOfCentralDirectoryLocator.FieldLengths.Signature)) { // use locator to get to Zip64-EOCD - bool zip64eocdLocatorProper = Zip64EndOfCentralDirectoryLocator.TryReadBlock(_archiveStream, out Zip64EndOfCentralDirectoryLocator locator); - Debug.Assert(zip64eocdLocatorProper); // we just found this using the signature finder, so it should be okay - - if (locator.OffsetOfZip64EOCD > long.MaxValue) - throw new InvalidDataException(SR.FieldTooBigOffsetToZip64EOCD); - - long zip64EOCDOffset = (long)locator.OffsetOfZip64EOCD; - - _archiveStream.Seek(zip64EOCDOffset, SeekOrigin.Begin); + Zip64EndOfCentralDirectoryLocator locator = Zip64EndOfCentralDirectoryLocator.TryReadBlock(_archiveStream); + TryReadZip64EndOfCentralDirectoryInnerInitialWork(locator); // Read Zip64 End of Central Directory Record + Zip64EndOfCentralDirectoryRecord record = Zip64EndOfCentralDirectoryRecord.TryReadBlock(_archiveStream); - Zip64EndOfCentralDirectoryRecord record; - if (!Zip64EndOfCentralDirectoryRecord.TryReadBlock(_archiveStream, out record)) - throw new InvalidDataException(SR.Zip64EOCDNotWhereExpected); + TryReadZip64EndOfCentralDirectoryInnerFinalWork(record); + } + } + } - _numberOfThisDisk = record.NumberOfThisDisk; + private static void WriteFileCalculateOffsets(ZipArchiveEntry entry, ref long startingOffset, ref long nextFileOffset) + { + if (entry.Changes == ChangeState.Unchanged) + { + // Keep track of the expected position of the file entry after the final untouched file entry so that when the loop completes, + // we'll know which position to start writing new entries from. 
+ nextFileOffset = Math.Max(nextFileOffset, entry.GetOffsetOfCompressedData() + entry.CompressedLength); + } + // When calculating the starting offset to load the files from, only look at changed entries which are already in the archive. + else + { + startingOffset = Math.Min(startingOffset, entry.OffsetOfLocalHeader); + } + } - if (record.NumberOfEntriesTotal > long.MaxValue) - throw new InvalidDataException(SR.FieldTooBigNumEntries); + private static void WriteFileCheckStartingOffset(ZipArchiveEntry entry, ref long completeRewriteStartingOffset) + { + // If the pending data to write is fixed-length metadata in the header, there's no need to load the compressed file bits. + // We always need to load the local file header's metadata though - at this point, this entry will be written out and we + // want to make sure that we preserve that metadata. + if ((entry.Changes & (ChangeState.DynamicLengthMetadata | ChangeState.StoredData)) != 0) + { + completeRewriteStartingOffset = Math.Min(completeRewriteStartingOffset, entry.OffsetOfLocalHeader); + } + } - if (record.OffsetOfCentralDirectory > long.MaxValue) - throw new InvalidDataException(SR.FieldTooBigOffsetToCD); + private void WriteFileUpdateModeFinalWork(long startingOffset, long nextFileOffset) + { + // If the offset of entries to write from is still at long.MaxValue, then we know that nothing has been deleted, + // nothing has been modified - so we just want to move to the end of all remaining files in the archive. + if (startingOffset == long.MaxValue) + { + startingOffset = nextFileOffset; + } - if (record.NumberOfEntriesTotal != record.NumberOfEntriesOnThisDisk) - throw new InvalidDataException(SR.SplitSpanned); + _archiveStream.Seek(startingOffset, SeekOrigin.Begin); + } - _expectedNumberOfEntries = (long)record.NumberOfEntriesTotal; - _centralDirectoryStart = (long)record.OffsetOfCentralDirectory; - } + private void WriteFileFinalWork() + { + // If entries have been removed and new (smaller) ones added, there could be empty space at the end of the file. + // Shrink the file to reclaim this space. + if (_mode == ZipArchiveMode.Update && _archiveStream.Position != _archiveStream.Length) + { + _archiveStream.SetLength(_archiveStream.Position); } } @@ -706,8 +772,8 @@ private void WriteFile() long startingOffset = _firstDeletedEntryOffset; long nextFileOffset = 0; completeRewriteStartingOffset = startingOffset; - entriesToWrite = new(_entries.Count); + foreach (ZipArchiveEntry entry in _entries) { if (!entry.OriginallyInArchive) @@ -716,29 +782,13 @@ private void WriteFile() } else { - if (entry.Changes == ChangeState.Unchanged) - { - // Keep track of the expected position of the file entry after the final untouched file entry so that when the loop completes, - // we'll know which position to start writing new entries from. - nextFileOffset = Math.Max(nextFileOffset, entry.OffsetOfCompressedData + entry.CompressedLength); - } - // When calculating the starting offset to load the files from, only look at changed entries which are already in the archive. - else - { - startingOffset = Math.Min(startingOffset, entry.OffsetOfLocalHeader); - } + WriteFileCalculateOffsets(entry, ref startingOffset, ref nextFileOffset); // We want to re-write entries which are after the starting offset of the first entry which has pending data to write. // NB: the existing ZipArchiveEntries are sorted in _entries by their position ascending. 
if (entry.OffsetOfLocalHeader >= startingOffset) { - // If the pending data to write is fixed-length metadata in the header, there's no need to load the compressed file bits. - // We always need to load the local file header's metadata though - at this point, this entry will be written out and we - // want to make sure that we preserve that metadata. - if ((entry.Changes & (ChangeState.DynamicLengthMetadata | ChangeState.StoredData)) != 0) - { - completeRewriteStartingOffset = Math.Min(completeRewriteStartingOffset, entry.OffsetOfLocalHeader); - } + WriteFileCheckStartingOffset(entry, ref completeRewriteStartingOffset); entry.LoadLocalHeaderExtraFieldIfNeeded(); if (entry.OffsetOfLocalHeader >= completeRewriteStartingOffset) @@ -751,14 +801,7 @@ private void WriteFile() } } - // If the offset of entries to write from is still at long.MaxValue, then we know that nothing has been deleted, - // nothing has been modified - so we just want to move to the end of all remaining files in the archive. - if (startingOffset == long.MaxValue) - { - startingOffset = nextFileOffset; - } - - _archiveStream.Seek(startingOffset, SeekOrigin.Begin); + WriteFileUpdateModeFinalWork(startingOffset, nextFileOffset); } foreach (ZipArchiveEntry entry in entriesToWrite) @@ -789,12 +832,13 @@ private void WriteFile() WriteArchiveEpilogue(plannedCentralDirectoryPosition, sizeOfCentralDirectory, archiveEpilogueRequiresUpdate); - // If entries have been removed and new (smaller) ones added, there could be empty space at the end of the file. - // Shrink the file to reclaim this space. - if (_mode == ZipArchiveMode.Update && _archiveStream.Position != _archiveStream.Length) - { - _archiveStream.SetLength(_archiveStream.Position); - } + WriteFileFinalWork(); + } + + private void WriteArchiveEpilogueNoCDChangesWork() + { + _archiveStream.Seek(Zip64EndOfCentralDirectoryRecord.TotalSize, SeekOrigin.Current); + _archiveStream.Seek(Zip64EndOfCentralDirectoryLocator.TotalSize, SeekOrigin.Current); } // writes eocd, and if needed, zip 64 eocd, zip64 eocd locator @@ -820,8 +864,7 @@ private void WriteArchiveEpilogue(long startOfCentralDirectory, long sizeOfCentr } else { - _archiveStream.Seek(Zip64EndOfCentralDirectoryRecord.TotalSize, SeekOrigin.Current); - _archiveStream.Seek(Zip64EndOfCentralDirectoryLocator.TotalSize, SeekOrigin.Current); + WriteArchiveEpilogueNoCDChangesWork(); } } @@ -836,6 +879,52 @@ private void WriteArchiveEpilogue(long startOfCentralDirectory, long sizeOfCentr } } + // Confirms that the specified stream is compatible with the specified mode. + // Returns a boolean that indicates that further work needs to be done for when + // the mode is Read and the stream is unseekable. 
+ private static bool ValidateMode(ZipArchiveMode mode, Stream stream) + { + // check stream against mode + bool isReadModeAndUnseekable = false; + + switch (mode) + { + case ZipArchiveMode.Create: + if (!stream.CanWrite) + throw new ArgumentException(SR.CreateModeCapabilities); + break; + case ZipArchiveMode.Read: + if (!stream.CanRead) + throw new ArgumentException(SR.ReadModeCapabilities); + if (!stream.CanSeek) + { + isReadModeAndUnseekable = true; + } + break; + case ZipArchiveMode.Update: + if (!stream.CanRead || !stream.CanWrite || !stream.CanSeek) + throw new ArgumentException(SR.UpdateModeCapabilities); + break; + default: + // still have to throw this, because stream constructor doesn't do mode argument checks + throw new ArgumentOutOfRangeException(nameof(mode)); + } + + return isReadModeAndUnseekable; + } + + // Depending on mode and stream seekability, we will decide if the archive + // stream needs to be wrapped or not by another stream to help with writing. + private static Stream DecideArchiveStream(ZipArchiveMode mode, Stream stream) + { + ArgumentNullException.ThrowIfNull(stream); + + return mode == ZipArchiveMode.Create && !stream.CanSeek ? + new PositionPreservingWriteOnlyStreamWrapper(stream) : + stream; + } + + [Flags] internal enum ChangeState { diff --git a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipArchiveEntry.Async.cs b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipArchiveEntry.Async.cs new file mode 100644 index 00000000000000..b50b9fdcc91709 --- /dev/null +++ b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipArchiveEntry.Async.cs @@ -0,0 +1,422 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Buffers.Binary; +using System.Diagnostics; +using System.Threading; +using System.Threading.Tasks; + +namespace System.IO.Compression; + +// The disposable fields that this class owns get disposed when the ZipArchive it belongs to gets disposed +public partial class ZipArchiveEntry +{ + /// + /// Asynchronously opens the entry. If the archive that the entry belongs to was opened in Read mode, the returned stream will be readable, and it may or may not be seekable. If Create mode, the returned stream will be writable and not seekable. If Update mode, the returned stream will be readable, writable, seekable, and support SetLength. + /// + /// The token to monitor for cancellation requests. + /// A Stream that represents the contents of the entry. + /// The entry is already currently open for writing. -or- The entry has been deleted from the archive. -or- The archive that this entry belongs to was opened in ZipArchiveMode.Create, and this entry has already been written to once. + /// The entry is missing from the archive or is corrupt and cannot be read. -or- The entry has been compressed using a compression method that is not supported. + /// The ZipArchive that this entry belongs to has been disposed. 
+ public async Task OpenAsync(CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + ThrowIfInvalidArchive(); + + switch (_archive.Mode) + { + case ZipArchiveMode.Read: + return await OpenInReadModeAsync(checkOpenable: true, cancellationToken).ConfigureAwait(false); + case ZipArchiveMode.Create: + return OpenInWriteMode(); + case ZipArchiveMode.Update: + default: + Debug.Assert(_archive.Mode == ZipArchiveMode.Update); + return await OpenInUpdateModeAsync(cancellationToken).ConfigureAwait(false); + } + } + + internal async Task GetOffsetOfCompressedDataAsync(CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + if (_storedOffsetOfCompressedData == null) + { + _archive.ArchiveStream.Seek(_offsetOfLocalHeader, SeekOrigin.Begin); + // by calling this, we are using local header _storedEntryNameBytes.Length and extraFieldLength + // to find start of data, but still using central directory size information + if (!await ZipLocalFileHeader.TrySkipBlockAsync(_archive.ArchiveStream, cancellationToken).ConfigureAwait(false)) + throw new InvalidDataException(SR.LocalFileHeaderCorrupt); + _storedOffsetOfCompressedData = _archive.ArchiveStream.Position; + } + return _storedOffsetOfCompressedData.Value; + } + + private async Task GetUncompressedDataAsync(CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + if (_storedUncompressedData == null) + { + // this means we have never opened it before + + // if _uncompressedSize > int.MaxValue, it's still okay, because MemoryStream will just + // grow as data is copied into it + _storedUncompressedData = new MemoryStream((int)_uncompressedSize); + + if (_originallyInArchive) + { + Stream decompressor = await OpenInReadModeAsync(false, cancellationToken).ConfigureAwait(false); + await using (decompressor) + { + try + { + await decompressor.CopyToAsync(_storedUncompressedData, cancellationToken).ConfigureAwait(false); + } + catch (InvalidDataException) + { + // this is the case where the archive say the entry is deflate, but deflateStream + // throws an InvalidDataException. 
This property should only be getting accessed in + // Update mode, so we want to make sure _storedUncompressedData stays null so + // that later when we dispose the archive, this entry loads the compressedBytes, and + // copies them straight over + await _storedUncompressedData.DisposeAsync().ConfigureAwait(false); + _storedUncompressedData = null; + _currentlyOpenForWrite = false; + _everOpenedForWrite = false; + throw; + } + } + } + + // if they start modifying it and the compression method is not "store", we should make sure it will get deflated + if (CompressionMethod != CompressionMethodValues.Stored) + { + CompressionMethod = CompressionMethodValues.Deflate; + } + } + + return _storedUncompressedData; + } + + // does almost everything you need to do to forget about this entry + // writes the local header/data, gets rid of all the data, + // closes all of the streams except for the very outermost one that + // the user holds on to and is responsible for closing + // + // after calling this, and only after calling this can we be guaranteed + // that we are reading to write the central directory + // + // should only throw an exception in extremely exceptional cases because it is called from dispose + internal async Task WriteAndFinishLocalEntryAsync(bool forceWrite, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + await CloseStreamsAsync().ConfigureAwait(false); + await WriteLocalFileHeaderAndDataIfNeededAsync(forceWrite, cancellationToken).ConfigureAwait(false); + await UnloadStreamsAsync().ConfigureAwait(false); + } + + // should only throw an exception in extremely exceptional cases because it is called from dispose + internal async Task WriteCentralDirectoryFileHeaderAsync(bool forceWrite, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (WriteCentralDirectoryFileHeaderInitialize(forceWrite, out Zip64ExtraField? zip64ExtraField, out uint compressedSizeTruncated, out uint uncompressedSizeTruncated, out ushort extraFieldLength, out uint offsetOfLocalHeaderTruncated)) + { + byte[] cdStaticHeader = new byte[ZipCentralDirectoryFileHeader.BlockConstantSectionSize]; + WriteCentralDirectoryFileHeaderPrepare(cdStaticHeader, compressedSizeTruncated, uncompressedSizeTruncated, extraFieldLength, offsetOfLocalHeaderTruncated); + + await _archive.ArchiveStream.WriteAsync(cdStaticHeader, cancellationToken).ConfigureAwait(false); + await _archive.ArchiveStream.WriteAsync(_storedEntryNameBytes, cancellationToken).ConfigureAwait(false); + + // only write zip64ExtraField if we decided we need it (it's not null) + if (zip64ExtraField != null) + { + await zip64ExtraField.WriteBlockAsync(_archive.ArchiveStream, cancellationToken).ConfigureAwait(false); + } + + // write extra fields (and any malformed trailing data). + await ZipGenericExtraField.WriteAllBlocksAsync(_cdUnknownExtraFields, _cdTrailingExtraFieldData ?? Array.Empty(), _archive.ArchiveStream, cancellationToken).ConfigureAwait(false); + + if (_fileComment.Length > 0) + { + await _archive.ArchiveStream.WriteAsync(_fileComment, cancellationToken).ConfigureAwait(false); + } + } + } + + internal async Task LoadLocalHeaderExtraFieldIfNeededAsync(CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + // we should have made this exact call in _archive.Init through ThrowIfOpenable + Debug.Assert(await GetIsOpenableAsync(false, true, cancellationToken).ConfigureAwait(false)); + + // load local header's extra fields. 
it will be null if we couldn't read for some reason
+        if (_originallyInArchive)
+        {
+            _archive.ArchiveStream.Seek(_offsetOfLocalHeader, SeekOrigin.Begin);
+            (_lhUnknownExtraFields, _lhTrailingExtraFieldData) = await ZipLocalFileHeader.GetExtraFieldsAsync(_archive.ArchiveStream, cancellationToken).ConfigureAwait(false);
+        }
+    }
+
+    internal async Task LoadCompressedBytesIfNeededAsync(CancellationToken cancellationToken)
+    {
+        cancellationToken.ThrowIfCancellationRequested();
+        // we should have made this exact call in _archive.Init through ThrowIfOpenable
+        Debug.Assert(await GetIsOpenableAsync(false, true, cancellationToken).ConfigureAwait(false));
+
+        if (!_everOpenedForWrite && _originallyInArchive)
+        {
+            _compressedBytes = LoadCompressedBytesIfNeededInitialize(out int maxSingleBufferSize);
+
+            _archive.ArchiveStream.Seek(await GetOffsetOfCompressedDataAsync(cancellationToken).ConfigureAwait(false), SeekOrigin.Begin);
+
+            for (int i = 0; i < _compressedBytes.Length - 1; i++)
+            {
+                await ZipHelper.ReadBytesAsync(_archive.ArchiveStream, _compressedBytes[i], maxSingleBufferSize, cancellationToken).ConfigureAwait(false);
+            }
+            await ZipHelper.ReadBytesAsync(_archive.ArchiveStream, _compressedBytes[_compressedBytes.Length - 1], (int)(_compressedSize % maxSingleBufferSize), cancellationToken).ConfigureAwait(false);
+        }
+    }
+
+    private async Task<bool> GetIsOpenableAsync(bool needToUncompress, bool needToLoadIntoMemory, CancellationToken cancellationToken)
+    {
+        cancellationToken.ThrowIfCancellationRequested();
+        (bool result, _) = await IsOpenableAsync(needToUncompress, needToLoadIntoMemory, cancellationToken).ConfigureAwait(false);
+        return result;
+    }
+
+    internal async Task ThrowIfNotOpenableAsync(bool needToUncompress, bool needToLoadIntoMemory, CancellationToken cancellationToken)
+    {
+        cancellationToken.ThrowIfCancellationRequested();
+        (bool openable, string?
message) = await IsOpenableAsync(needToUncompress, needToLoadIntoMemory, cancellationToken).ConfigureAwait(false); + if (!openable) + throw new InvalidDataException(message); + } + + private async Task OpenInReadModeAsync(bool checkOpenable, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + if (checkOpenable) + await ThrowIfNotOpenableAsync(needToUncompress: true, needToLoadIntoMemory: false, cancellationToken).ConfigureAwait(false); + + return OpenInReadModeGetDataCompressor( + await GetOffsetOfCompressedDataAsync(cancellationToken).ConfigureAwait(false)); + } + + private async Task OpenInUpdateModeAsync(CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + if (_currentlyOpenForWrite) + throw new IOException(SR.UpdateModeOneStream); + + await ThrowIfNotOpenableAsync(needToUncompress: true, needToLoadIntoMemory: true, cancellationToken).ConfigureAwait(false); + + _everOpenedForWrite = true; + Changes |= ZipArchive.ChangeState.StoredData; + _currentlyOpenForWrite = true; + // always put it at the beginning for them + Stream uncompressedData = await GetUncompressedDataAsync(cancellationToken).ConfigureAwait(false); + uncompressedData.Seek(0, SeekOrigin.Begin); + return new WrappedStream(uncompressedData, this, thisRef => + { + // once they close, we know uncompressed length, but still not compressed length + // so we don't fill in any size information + // those fields get figured out when we call GetCompressor as we write it to + // the actual archive + thisRef!._currentlyOpenForWrite = false; + }); + } + + private async Task<(bool, string?)> IsOpenableAsync(bool needToUncompress, bool needToLoadIntoMemory, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + string? message = null; + + if (!_originallyInArchive) + { + return (true, message); + } + + if (!IsOpenableInitialVerifications(needToUncompress, out message)) + { + return (false, message); + } + if (!await ZipLocalFileHeader.TrySkipBlockAsync(_archive.ArchiveStream, cancellationToken).ConfigureAwait(false)) + { + message = SR.LocalFileHeaderCorrupt; + return (false, message); + } + + // when this property gets called, some duplicated work + long offsetOfCompressedData = await GetOffsetOfCompressedDataAsync(cancellationToken).ConfigureAwait(false); + if (!IsOpenableFinalVerifications(needToLoadIntoMemory, offsetOfCompressedData, out message)) + { + return (false, message); + } + + return (true, message); + } + + // return value is true if we allocated an extra field for 64 bit headers, un/compressed size + private async Task WriteLocalFileHeaderAsync(bool isEmptyFile, bool forceWrite, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (WriteLocalFileHeaderInitialize(isEmptyFile, forceWrite, out Zip64ExtraField? 
zip64ExtraField, out uint compressedSizeTruncated, out uint uncompressedSizeTruncated, out ushort extraFieldLength)) + { + byte[] lfStaticHeader = new byte[ZipLocalFileHeader.SizeOfLocalHeader]; + WriteLocalFileHeaderPrepare(lfStaticHeader, compressedSizeTruncated, uncompressedSizeTruncated, extraFieldLength); + + // write header + await _archive.ArchiveStream.WriteAsync(lfStaticHeader, cancellationToken).ConfigureAwait(false); + await _archive.ArchiveStream.WriteAsync(_storedEntryNameBytes, cancellationToken).ConfigureAwait(false); + + // Only when handling zip64 + if (zip64ExtraField != null) + { + await zip64ExtraField.WriteBlockAsync(_archive.ArchiveStream, cancellationToken).ConfigureAwait(false); + } + + await ZipGenericExtraField.WriteAllBlocksAsync(_lhUnknownExtraFields, _lhTrailingExtraFieldData ?? Array.Empty(), _archive.ArchiveStream, cancellationToken).ConfigureAwait(false); + } + + return zip64ExtraField != null; + } + + private async Task WriteLocalFileHeaderAndDataIfNeededAsync(bool forceWrite, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + // _storedUncompressedData gets frozen here, and is what gets written to the file + if (_storedUncompressedData != null || _compressedBytes != null) + { + if (_storedUncompressedData != null) + { + _uncompressedSize = _storedUncompressedData.Length; + + //The compressor fills in CRC and sizes + //The DirectToArchiveWriterStream writes headers and such + DirectToArchiveWriterStream entryWriter = new(GetDataCompressor(_archive.ArchiveStream, true, null), this); + await using (entryWriter) + { + _storedUncompressedData.Seek(0, SeekOrigin.Begin); + await _storedUncompressedData.CopyToAsync(entryWriter, cancellationToken).ConfigureAwait(false); + await _storedUncompressedData.DisposeAsync().ConfigureAwait(false); + _storedUncompressedData = null; + } + } + else + { + if (_uncompressedSize == 0) + { + // reset size to ensure proper central directory size header + _compressedSize = 0; + } + + await WriteLocalFileHeaderAsync(isEmptyFile: _uncompressedSize == 0, forceWrite: true, cancellationToken).ConfigureAwait(false); + + // according to ZIP specs, zero-byte files MUST NOT include file data + if (_uncompressedSize != 0) + { + Debug.Assert(_compressedBytes != null); + foreach (byte[] compressedBytes in _compressedBytes) + { + await _archive.ArchiveStream.WriteAsync(compressedBytes, cancellationToken).ConfigureAwait(false); + } + } + } + } + else // there is no data in the file (or the data in the file has not been loaded), but if we are in update mode, we may still need to write a header + { + if (_archive.Mode == ZipArchiveMode.Update || !_everOpenedForWrite) + { + _everOpenedForWrite = true; + await WriteLocalFileHeaderAsync(isEmptyFile: _uncompressedSize == 0, forceWrite: forceWrite, cancellationToken).ConfigureAwait(false); + + // If we know that we need to update the file header (but don't need to load and update the data itself) + // then advance the position past it. 
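From the caller's side, the async write and read plumbing in this file is reached through ZipArchiveEntry.OpenAsync. A rough usage sketch; only OpenAsync itself (returning a Task<Stream>, per its doc comment earlier in this file) comes from this diff, the rest is existing public API, and the wrapper class is illustrative:

    using System.IO;
    using System.IO.Compression;
    using System.Text;
    using System.Threading.Tasks;

    class AsyncEntryUsageSketch
    {
        static async Task Main()
        {
            using var backing = new MemoryStream();

            // Write an entry; the data is buffered and flushed together with its local header when
            // the archive is disposed (this file adds the async variant of that flush path).
            using (var archive = new ZipArchive(backing, ZipArchiveMode.Update, leaveOpen: true))
            {
                ZipArchiveEntry entry = archive.CreateEntry("notes/readme.txt");
                await using Stream writer = await entry.OpenAsync();
                await writer.WriteAsync(Encoding.UTF8.GetBytes("hello"));
            }

            // Read the entry back through the same async entry point (Read mode goes through OpenInReadModeAsync).
            backing.Position = 0;
            using (var archive = new ZipArchive(backing, ZipArchiveMode.Read, leaveOpen: true))
            {
                await using Stream reader = await archive.GetEntry("notes/readme.txt")!.OpenAsync();
                using var copy = new MemoryStream();
                await reader.CopyToAsync(copy);
            }
        }
    }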
+ if (_compressedSize != 0) + { + _archive.ArchiveStream.Seek(_compressedSize, SeekOrigin.Current); + } + } + } + } + + // Using _offsetOfLocalHeader, seeks back to where CRC and sizes should be in the header, + // writes them, then seeks back to where you started + // Assumes that the stream is currently at the end of the data + private async Task WriteCrcAndSizesInLocalHeaderAsync(bool zip64HeaderUsed, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + // Buffer has been sized to the largest data payload required: the 64-bit data descriptor. + byte[] writeBuffer = new byte[Zip64DataDescriptorCrcAndSizesBufferLength]; + + WriteCrcAndSizesInLocalHeaderInitialize(zip64HeaderUsed, out long finalPosition, out bool pretendStreaming, out uint compressedSizeTruncated, out uint uncompressedSizeTruncated); + + // first step is, if we need zip64, but didn't allocate it, pretend we did a stream write, because + // we can't go back and give ourselves the space that the extra field needs. + // we do this by setting the correct property in the bit flag to indicate we have a data descriptor + // and setting the version to Zip64 to indicate that descriptor contains 64-bit values + if (pretendStreaming) + { + WriteCrcAndSizesInLocalHeaderPrepareForZip64PretendStreaming(writeBuffer); + await _archive.ArchiveStream.WriteAsync(writeBuffer.AsMemory(0, MetadataBufferLength), cancellationToken).ConfigureAwait(false); + } + + // next step is fill out the 32-bit size values in the normal header. we can't assume that + // they are correct. we also write the CRC + WriteCrcAndSizesInLocalHeaderPrepareFor32bitValuesWriting(pretendStreaming, writeBuffer, compressedSizeTruncated, uncompressedSizeTruncated); + await _archive.ArchiveStream.WriteAsync(writeBuffer.AsMemory(0, CrcAndSizesBufferLength), cancellationToken).ConfigureAwait(false); + + // next step: if we wrote the 64 bit header initially, a different implementation might + // try to read it, even if the 32-bit size values aren't masked. thus, we should always put the + // correct size information in there. note that order of uncomp/comp is switched, and these are + // 64-bit values + // also, note that in order for this to be correct, we have to ensure that the zip64 extra field + // is always the first extra field that is written + if (zip64HeaderUsed) + { + WriteCrcAndSizesInLocalHeaderPrepareForWritingWhenZip64HeaderUsed(writeBuffer); + await _archive.ArchiveStream.WriteAsync(writeBuffer.AsMemory(0, Zip64SizesBufferLength), cancellationToken).ConfigureAwait(false); + } + + // now go to the where we were. assume that this is the end of the data + _archive.ArchiveStream.Seek(finalPosition, SeekOrigin.Begin); + + // if we are pretending we did a stream write, we want to write the data descriptor out + // the data descriptor can have 32-bit sizes or 64-bit sizes. 
In this case, we always use + // 64-bit sizes + if (pretendStreaming) + { + WriteCrcAndSizesInLocalHeaderPrepareForWritingDataDescriptor(writeBuffer); + await _archive.ArchiveStream.WriteAsync(writeBuffer.AsMemory(0, Zip64DataDescriptorCrcAndSizesBufferLength), cancellationToken).ConfigureAwait(false); + } + } + + private ValueTask WriteDataDescriptorAsync(CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + byte[] dataDescriptor = new byte[MaxSizeOfDataDescriptor]; + int bytesToWrite = PrepareToWriteDataDescriptor(dataDescriptor); + return _archive.ArchiveStream.WriteAsync(dataDescriptor.AsMemory(0, bytesToWrite), cancellationToken); + } + + private async Task UnloadStreamsAsync() + { + if (_storedUncompressedData != null) + { + await _storedUncompressedData.DisposeAsync().ConfigureAwait(false); + } + _compressedBytes = null; + _outstandingWriteStream = null; + } + + private async Task CloseStreamsAsync() + { + // if the user left the stream open, close the underlying stream for them + if (_outstandingWriteStream != null) + { + await _outstandingWriteStream.DisposeAsync().ConfigureAwait(false); + } + } +} diff --git a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipArchiveEntry.cs b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipArchiveEntry.cs index ddd12b7b33215e..a7bdf84c4e23a7 100644 --- a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipArchiveEntry.cs +++ b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipArchiveEntry.cs @@ -5,7 +5,6 @@ using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; -using System.Runtime.InteropServices; using System.Text; using System.Threading; using System.Threading.Tasks; @@ -391,68 +390,62 @@ private string DecodeEntryString(byte[] entryStringBytes) internal bool EverOpenedForWrite => _everOpenedForWrite; - internal long OffsetOfCompressedData + internal long GetOffsetOfCompressedData() { - get + if (_storedOffsetOfCompressedData == null) { - if (_storedOffsetOfCompressedData == null) - { - _archive.ArchiveStream.Seek(_offsetOfLocalHeader, SeekOrigin.Begin); - // by calling this, we are using local header _storedEntryNameBytes.Length and extraFieldLength - // to find start of data, but still using central directory size information - if (!ZipLocalFileHeader.TrySkipBlock(_archive.ArchiveStream)) - throw new InvalidDataException(SR.LocalFileHeaderCorrupt); - _storedOffsetOfCompressedData = _archive.ArchiveStream.Position; - } - return _storedOffsetOfCompressedData.Value; + _archive.ArchiveStream.Seek(_offsetOfLocalHeader, SeekOrigin.Begin); + // by calling this, we are using local header _storedEntryNameBytes.Length and extraFieldLength + // to find start of data, but still using central directory size information + if (!ZipLocalFileHeader.TrySkipBlock(_archive.ArchiveStream)) + throw new InvalidDataException(SR.LocalFileHeaderCorrupt); + _storedOffsetOfCompressedData = _archive.ArchiveStream.Position; } + return _storedOffsetOfCompressedData.Value; } - private MemoryStream UncompressedData + private MemoryStream GetUncompressedData() { - get + if (_storedUncompressedData == null) { - if (_storedUncompressedData == null) - { - // this means we have never opened it before + // this means we have never opened it before - // if _uncompressedSize > int.MaxValue, it's still okay, because MemoryStream will just - // grow as data is copied into it - _storedUncompressedData = new 
MemoryStream((int)_uncompressedSize); + // if _uncompressedSize > int.MaxValue, it's still okay, because MemoryStream will just + // grow as data is copied into it + _storedUncompressedData = new MemoryStream((int)_uncompressedSize); - if (_originallyInArchive) + if (_originallyInArchive) + { + using (Stream decompressor = OpenInReadMode(false)) { - using (Stream decompressor = OpenInReadMode(false)) + try { - try - { - decompressor.CopyTo(_storedUncompressedData); - } - catch (InvalidDataException) - { - // this is the case where the archive say the entry is deflate, but deflateStream - // throws an InvalidDataException. This property should only be getting accessed in - // Update mode, so we want to make sure _storedUncompressedData stays null so - // that later when we dispose the archive, this entry loads the compressedBytes, and - // copies them straight over - _storedUncompressedData.Dispose(); - _storedUncompressedData = null; - _currentlyOpenForWrite = false; - _everOpenedForWrite = false; - throw; - } + decompressor.CopyTo(_storedUncompressedData); + } + catch (InvalidDataException) + { + // this is the case where the archive say the entry is deflate, but deflateStream + // throws an InvalidDataException. This property should only be getting accessed in + // Update mode, so we want to make sure _storedUncompressedData stays null so + // that later when we dispose the archive, this entry loads the compressedBytes, and + // copies them straight over + _storedUncompressedData.Dispose(); + _storedUncompressedData = null; + _currentlyOpenForWrite = false; + _everOpenedForWrite = false; + throw; } - } - - // if they start modifying it and the compression method is not "store", we should make sure it will get deflated - if (CompressionMethod != CompressionMethodValues.Stored) - { - CompressionMethod = CompressionMethodValues.Deflate; } } - return _storedUncompressedData; + // if they start modifying it and the compression method is not "store", we should make sure it will get deflated + if (CompressionMethod != CompressionMethodValues.Stored) + { + CompressionMethod = CompressionMethodValues.Deflate; + } } + + return _storedUncompressedData; } private CompressionMethodValues CompressionMethod @@ -467,7 +460,6 @@ private CompressionMethodValues CompressionMethod _storedCompressionMethod = value; } } - // does almost everything you need to do to forget about this entry // writes the local header/data, gets rid of all the data, // closes all of the streams except for the very outermost one that @@ -484,8 +476,7 @@ internal void WriteAndFinishLocalEntry(bool forceWrite) UnloadStreams(); } - // should only throw an exception in extremely exceptional cases because it is called from dispose - internal void WriteCentralDirectoryFileHeader(bool forceWrite) + private bool WriteCentralDirectoryFileHeaderInitialize(bool forceWrite, out Zip64ExtraField? zip64ExtraField, out uint compressedSizeTruncated, out uint uncompressedSizeTruncated, out ushort extraFieldLength, out uint offsetOfLocalHeaderTruncated) { // This part is simple, because we should definitely know the sizes by this time @@ -497,8 +488,7 @@ internal void WriteCentralDirectoryFileHeader(bool forceWrite) Debug.Assert(_fileComment.Length <= ushort.MaxValue); // decide if we need the Zip64 extra field: - Zip64ExtraField? 
zip64ExtraField = null; - uint compressedSizeTruncated, uncompressedSizeTruncated, offsetOfLocalHeaderTruncated; + zip64ExtraField = null; if (AreSizesTooLarge #if DEBUG_FORCE_ZIP64 @@ -552,7 +542,7 @@ internal void WriteCentralDirectoryFileHeader(bool forceWrite) int currExtraFieldDataLength = ZipGenericExtraField.TotalSize(_cdUnknownExtraFields, _cdTrailingExtraFieldData?.Length ?? 0); int bigExtraFieldLength = (zip64ExtraField != null ? zip64ExtraField.TotalSize : 0) + currExtraFieldDataLength; - ushort extraFieldLength; + if (bigExtraFieldLength > ushort.MaxValue) { extraFieldLength = (ushort)(zip64ExtraField != null ? zip64ExtraField.TotalSize : 0); @@ -572,46 +562,60 @@ internal void WriteCentralDirectoryFileHeader(bool forceWrite) + _fileComment.Length; _archive.ArchiveStream.Seek(centralDirectoryHeaderLength, SeekOrigin.Current); + + return false; } - else + + return true; + } + + private void WriteCentralDirectoryFileHeaderPrepare(Span cdStaticHeader, uint compressedSizeTruncated, uint uncompressedSizeTruncated, ushort extraFieldLength, uint offsetOfLocalHeaderTruncated) + { + // The central directory file header begins with the below constant-length structure: + // Central directory file header signature (4 bytes) + // Version made by Specification (version) (1 byte) + // Version made by Compatibility (type) (1 byte) + // Minimum version needed to extract (2 bytes) + // General Purpose bit flag (2 bytes) + // The Compression method (2 bytes) + // File last modification time and date (4 bytes) + // CRC-32 (4 bytes) + // Compressed Size (4 bytes) + // Uncompressed Size (4 bytes) + // File Name Length (2 bytes) + // Extra Field Length (2 bytes) + // File Comment Length (2 bytes) + // Start Disk Number (2 bytes) + // Internal File Attributes (2 bytes) + // External File Attributes (4 bytes) + // Offset Of Local Header (4 bytes) + + ZipCentralDirectoryFileHeader.SignatureConstantBytes.CopyTo(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.Signature..]); + cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.VersionMadeBySpecification] = (byte)_versionMadeBySpecification; + cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.VersionMadeByCompatibility] = (byte)CurrentZipPlatform; + BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.VersionNeededToExtract..], (ushort)_versionToExtract); + BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.GeneralPurposeBitFlags..], (ushort)_generalPurposeBitFlag); + BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.CompressionMethod..], (ushort)CompressionMethod); + BinaryPrimitives.WriteUInt32LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.LastModified..], ZipHelper.DateTimeToDosTime(_lastModified.DateTime)); + BinaryPrimitives.WriteUInt32LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.Crc32..], _crc32); + BinaryPrimitives.WriteUInt32LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.CompressedSize..], compressedSizeTruncated); + BinaryPrimitives.WriteUInt32LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.UncompressedSize..], uncompressedSizeTruncated); + BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.FilenameLength..], (ushort)_storedEntryNameBytes.Length); + 
BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.ExtraFieldLength..], extraFieldLength); + BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.FileCommentLength..], (ushort)_fileComment.Length); + BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.DiskNumberStart..], 0); + BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.InternalFileAttributes..], 0); + BinaryPrimitives.WriteUInt32LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.ExternalFileAttributes..], _externalFileAttr); + BinaryPrimitives.WriteUInt32LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.RelativeOffsetOfLocalHeader..], offsetOfLocalHeaderTruncated); + } + + // should only throw an exception in extremely exceptional cases because it is called from dispose + internal void WriteCentralDirectoryFileHeader(bool forceWrite) + { + if (WriteCentralDirectoryFileHeaderInitialize(forceWrite, out Zip64ExtraField? zip64ExtraField, out uint compressedSizeTruncated, out uint uncompressedSizeTruncated, out ushort extraFieldLength, out uint offsetOfLocalHeaderTruncated)) { - // The central directory file header begins with the below constant-length structure: - // Central directory file header signature (4 bytes) - // Version made by Specification (version) (1 byte) - // Version made by Compatibility (type) (1 byte) - // Minimum version needed to extract (2 bytes) - // General Purpose bit flag (2 bytes) - // The Compression method (2 bytes) - // File last modification time and date (4 bytes) - // CRC-32 (4 bytes) - // Compressed Size (4 bytes) - // Uncompressed Size (4 bytes) - // File Name Length (2 bytes) - // Extra Field Length (2 bytes) - // File Comment Length (2 bytes) - // Start Disk Number (2 bytes) - // Internal File Attributes (2 bytes) - // External File Attributes (4 bytes) - // Offset Of Local Header (4 bytes) Span cdStaticHeader = stackalloc byte[ZipCentralDirectoryFileHeader.BlockConstantSectionSize]; - - ZipCentralDirectoryFileHeader.SignatureConstantBytes.CopyTo(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.Signature..]); - cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.VersionMadeBySpecification] = (byte)_versionMadeBySpecification; - cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.VersionMadeByCompatibility] = (byte)CurrentZipPlatform; - BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.VersionNeededToExtract..], (ushort)_versionToExtract); - BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.GeneralPurposeBitFlags..], (ushort)_generalPurposeBitFlag); - BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.CompressionMethod..], (ushort)CompressionMethod); - BinaryPrimitives.WriteUInt32LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.LastModified..], ZipHelper.DateTimeToDosTime(_lastModified.DateTime)); - BinaryPrimitives.WriteUInt32LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.Crc32..], _crc32); - BinaryPrimitives.WriteUInt32LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.CompressedSize..], compressedSizeTruncated); - 
BinaryPrimitives.WriteUInt32LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.UncompressedSize..], uncompressedSizeTruncated); - BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.FilenameLength..], (ushort)_storedEntryNameBytes.Length); - BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.ExtraFieldLength..], extraFieldLength); - BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.FileCommentLength..], (ushort)_fileComment.Length); - BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.DiskNumberStart..], 0); - BinaryPrimitives.WriteUInt16LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.InternalFileAttributes..], 0); - BinaryPrimitives.WriteUInt32LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.ExternalFileAttributes..], _externalFileAttr); - BinaryPrimitives.WriteUInt32LittleEndian(cdStaticHeader[ZipCentralDirectoryFileHeader.FieldLocations.RelativeOffsetOfLocalHeader..], offsetOfLocalHeaderTruncated); + WriteCentralDirectoryFileHeaderPrepare(cdStaticHeader, compressedSizeTruncated, uncompressedSizeTruncated, extraFieldLength, offsetOfLocalHeaderTruncated); _archive.ArchiveStream.Write(cdStaticHeader); _archive.ArchiveStream.Write(_storedEntryNameBytes); @@ -644,6 +648,21 @@ internal void LoadLocalHeaderExtraFieldIfNeeded() } } + private byte[][] LoadCompressedBytesIfNeededInitialize(out int maxSingleBufferSize) + { + // we know that it is openable at this point + maxSingleBufferSize = Array.MaxLength; + + byte[][] compressedBytes = new byte[(_compressedSize / maxSingleBufferSize) + 1][]; + for (int i = 0; i < compressedBytes.Length - 1; i++) + { + compressedBytes[i] = new byte[maxSingleBufferSize]; + } + compressedBytes[compressedBytes.Length - 1] = new byte[_compressedSize % maxSingleBufferSize]; + + return compressedBytes; + } + // throws exception if fails, will get called on every relevant entry before closing in update mode // can throw InvalidDataException internal void LoadCompressedBytesIfNeeded() @@ -653,23 +672,15 @@ internal void LoadCompressedBytesIfNeeded() if (!_everOpenedForWrite && _originallyInArchive) { - // we know that it is openable at this point - int MaxSingleBufferSize = Array.MaxLength; - - _compressedBytes = new byte[(_compressedSize / MaxSingleBufferSize) + 1][]; - for (int i = 0; i < _compressedBytes.Length - 1; i++) - { - _compressedBytes[i] = new byte[MaxSingleBufferSize]; - } - _compressedBytes[_compressedBytes.Length - 1] = new byte[_compressedSize % MaxSingleBufferSize]; + _compressedBytes = LoadCompressedBytesIfNeededInitialize(out int maxSingleBufferSize); - _archive.ArchiveStream.Seek(OffsetOfCompressedData, SeekOrigin.Begin); + _archive.ArchiveStream.Seek(GetOffsetOfCompressedData(), SeekOrigin.Begin); for (int i = 0; i < _compressedBytes.Length - 1; i++) { - ZipHelper.ReadBytes(_archive.ArchiveStream, _compressedBytes[i], MaxSingleBufferSize); + ZipHelper.ReadBytes(_archive.ArchiveStream, _compressedBytes[i], maxSingleBufferSize); } - ZipHelper.ReadBytes(_archive.ArchiveStream, _compressedBytes[_compressedBytes.Length - 1], (int)(_compressedSize % MaxSingleBufferSize)); + ZipHelper.ReadBytes(_archive.ArchiveStream, _compressedBytes[_compressedBytes.Length - 1], (int)(_compressedSize % maxSingleBufferSize)); } } @@ -758,8 +769,12 @@ private Stream OpenInReadMode(bool checkOpenable) { if 
(checkOpenable) ThrowIfNotOpenable(needToUncompress: true, needToLoadIntoMemory: false); + return OpenInReadModeGetDataCompressor(GetOffsetOfCompressedData()); + } - Stream compressedStream = new SubReadStream(_archive.ArchiveStream, OffsetOfCompressedData, _compressedSize); + private Stream OpenInReadModeGetDataCompressor(long offsetOfCompressedData) + { + Stream compressedStream = new SubReadStream(_archive.ArchiveStream, offsetOfCompressedData, _compressedSize); return GetDataDecompressor(compressedStream); } @@ -796,8 +811,9 @@ private WrappedStream OpenInUpdateMode() Changes |= ZipArchive.ChangeState.StoredData; _currentlyOpenForWrite = true; // always put it at the beginning for them - UncompressedData.Seek(0, SeekOrigin.Begin); - return new WrappedStream(UncompressedData, this, thisRef => + Stream uncompressedData = GetUncompressedData(); + uncompressedData.Seek(0, SeekOrigin.Begin); + return new WrappedStream(uncompressedData, this, thisRef => { // once they close, we know uncompressed length, but still not compressed length // so we don't fill in any size information @@ -813,60 +829,81 @@ private bool IsOpenable(bool needToUncompress, bool needToLoadIntoMemory, out st if (_originallyInArchive) { - if (needToUncompress) - { - if (CompressionMethod != CompressionMethodValues.Stored && - CompressionMethod != CompressionMethodValues.Deflate && - CompressionMethod != CompressionMethodValues.Deflate64) - { - switch (CompressionMethod) - { - case CompressionMethodValues.BZip2: - case CompressionMethodValues.LZMA: - message = SR.Format(SR.UnsupportedCompressionMethod, CompressionMethod.ToString()); - break; - default: - message = SR.UnsupportedCompression; - break; - } - return false; - } - } - if (_diskNumberStart != _archive.NumberOfThisDisk) + if (!IsOpenableInitialVerifications(needToUncompress, out message)) { - message = SR.SplitSpanned; return false; } - if (_offsetOfLocalHeader > _archive.ArchiveStream.Length) + if (!ZipLocalFileHeader.TrySkipBlock(_archive.ArchiveStream)) { message = SR.LocalFileHeaderCorrupt; return false; } - _archive.ArchiveStream.Seek(_offsetOfLocalHeader, SeekOrigin.Begin); - if (!ZipLocalFileHeader.TrySkipBlock(_archive.ArchiveStream)) + + // when this property gets called, some duplicated work + long offsetOfCompressedData = GetOffsetOfCompressedData(); + if (!IsOpenableFinalVerifications(needToLoadIntoMemory, offsetOfCompressedData, out message)) { - message = SR.LocalFileHeaderCorrupt; return false; } - // when this property gets called, some duplicated work - if (OffsetOfCompressedData + _compressedSize > _archive.ArchiveStream.Length) + + return true; + } + + return true; + } + + private bool IsOpenableInitialVerifications(bool needToUncompress, out string? message) + { + message = null; + if (needToUncompress) + { + if (CompressionMethod != CompressionMethodValues.Stored && + CompressionMethod != CompressionMethodValues.Deflate && + CompressionMethod != CompressionMethodValues.Deflate64) { - message = SR.LocalFileHeaderCorrupt; + message = CompressionMethod switch + { + CompressionMethodValues.BZip2 or CompressionMethodValues.LZMA => SR.Format(SR.UnsupportedCompressionMethod, CompressionMethod.ToString()), + _ => SR.UnsupportedCompression, + }; return false; } - // This limitation originally existed because a) it is unreasonable to load > 4GB into memory - // but also because the stream reading functions make it hard. 
This has been updated to handle - // this scenario in a 64-bit process using multiple buffers, delivered first as an OOB for - // compatibility. - if (needToLoadIntoMemory) + } + if (_diskNumberStart != _archive.NumberOfThisDisk) + { + message = SR.SplitSpanned; + return false; + } + if (_offsetOfLocalHeader > _archive.ArchiveStream.Length) + { + message = SR.LocalFileHeaderCorrupt; + return false; + } + + _archive.ArchiveStream.Seek(_offsetOfLocalHeader, SeekOrigin.Begin); + return true; + } + + private bool IsOpenableFinalVerifications(bool needToLoadIntoMemory, long offsetOfCompressedData, out string? message) + { + message = null; + if (offsetOfCompressedData + _compressedSize > _archive.ArchiveStream.Length) + { + message = SR.LocalFileHeaderCorrupt; + return false; + } + // This limitation originally existed because a) it is unreasonable to load > 4GB into memory + // but also because the stream reading functions make it hard. This has been updated to handle + // this scenario in a 64-bit process using multiple buffers, delivered first as an OOB for + // compatibility. + if (needToLoadIntoMemory) + { + if (_compressedSize > int.MaxValue) { - if (_compressedSize > int.MaxValue) + if (!s_allowLargeZipArchiveEntriesInUpdateMode) { - if (!s_allowLargeZipArchiveEntriesInUpdateMode) - { - message = SR.EntryTooLarge; - return false; - } + message = SR.EntryTooLarge; + return false; } } } @@ -920,18 +957,14 @@ private static BitFlagValues MapDeflateCompressionOption(BitFlagValues generalPu private bool ShouldUseZIP64 => AreSizesTooLarge || IsOffsetTooLarge; - // return value is true if we allocated an extra field for 64 bit headers, un/compressed size - private bool WriteLocalFileHeader(bool isEmptyFile, bool forceWrite) + private bool WriteLocalFileHeaderInitialize(bool isEmptyFile, bool forceWrite, out Zip64ExtraField? zip64ExtraField, out uint compressedSizeTruncated, out uint uncompressedSizeTruncated, out ushort extraFieldLength) { - Span lfStaticHeader = stackalloc byte[ZipLocalFileHeader.SizeOfLocalHeader]; - // _entryname only gets set when we read in or call moveTo. MoveTo does a check, and // reading in should not be able to produce an entryname longer than ushort.MaxValue Debug.Assert(_storedEntryNameBytes.Length <= ushort.MaxValue); // decide if we need the Zip64 extra field: - Zip64ExtraField? zip64ExtraField = null; - uint compressedSizeTruncated, uncompressedSizeTruncated; + zip64ExtraField = null; // save offset _offsetOfLocalHeader = _archive.ArchiveStream.Position; @@ -995,7 +1028,7 @@ private bool WriteLocalFileHeader(bool isEmptyFile, bool forceWrite) int currExtraFieldDataLength = ZipGenericExtraField.TotalSize(_lhUnknownExtraFields, _lhTrailingExtraFieldData?.Length ?? 0); int bigExtraFieldLength = (zip64ExtraField != null ? zip64ExtraField.TotalSize : 0) + currExtraFieldDataLength; - ushort extraFieldLength; + if (bigExtraFieldLength > ushort.MaxValue) { extraFieldLength = (ushort)(zip64ExtraField != null ? 
zip64ExtraField.TotalSize : 0); @@ -1018,23 +1051,37 @@ private bool WriteLocalFileHeader(bool isEmptyFile, bool forceWrite) } _archive.ArchiveStream.Seek(currExtraFieldDataLength, SeekOrigin.Current); + + return false; } - else + + return true; + } + + private void WriteLocalFileHeaderPrepare(Span lfStaticHeader, uint compressedSizeTruncated, uint uncompressedSizeTruncated, ushort extraFieldLength) + { + ZipLocalFileHeader.SignatureConstantBytes.CopyTo(lfStaticHeader[ZipLocalFileHeader.FieldLocations.Signature..]); + BinaryPrimitives.WriteUInt16LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.VersionNeededToExtract..], (ushort)_versionToExtract); + BinaryPrimitives.WriteUInt16LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.GeneralPurposeBitFlags..], (ushort)_generalPurposeBitFlag); + BinaryPrimitives.WriteUInt16LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.CompressionMethod..], (ushort)CompressionMethod); + BinaryPrimitives.WriteUInt32LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.LastModified..], ZipHelper.DateTimeToDosTime(_lastModified.DateTime)); + BinaryPrimitives.WriteUInt32LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.Crc32..], _crc32); + BinaryPrimitives.WriteUInt32LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.CompressedSize..], compressedSizeTruncated); + BinaryPrimitives.WriteUInt32LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.UncompressedSize..], uncompressedSizeTruncated); + BinaryPrimitives.WriteUInt16LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.FilenameLength..], (ushort)_storedEntryNameBytes.Length); + BinaryPrimitives.WriteUInt16LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.ExtraFieldLength..], extraFieldLength); + } + + // return value is true if we allocated an extra field for 64 bit headers, un/compressed size + private bool WriteLocalFileHeader(bool isEmptyFile, bool forceWrite) + { + if (WriteLocalFileHeaderInitialize(isEmptyFile, forceWrite, out Zip64ExtraField? 
zip64ExtraField, out uint compressedSizeTruncated, out uint uncompressedSizeTruncated, out ushort extraFieldLength)) { - ZipLocalFileHeader.SignatureConstantBytes.CopyTo(lfStaticHeader[ZipLocalFileHeader.FieldLocations.Signature..]); - BinaryPrimitives.WriteUInt16LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.VersionNeededToExtract..], (ushort)_versionToExtract); - BinaryPrimitives.WriteUInt16LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.GeneralPurposeBitFlags..], (ushort)_generalPurposeBitFlag); - BinaryPrimitives.WriteUInt16LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.CompressionMethod..], (ushort)CompressionMethod); - BinaryPrimitives.WriteUInt32LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.LastModified..], ZipHelper.DateTimeToDosTime(_lastModified.DateTime)); - BinaryPrimitives.WriteUInt32LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.Crc32..], _crc32); - BinaryPrimitives.WriteUInt32LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.CompressedSize..], compressedSizeTruncated); - BinaryPrimitives.WriteUInt32LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.UncompressedSize..], uncompressedSizeTruncated); - BinaryPrimitives.WriteUInt16LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.FilenameLength..], (ushort)_storedEntryNameBytes.Length); - BinaryPrimitives.WriteUInt16LittleEndian(lfStaticHeader[ZipLocalFileHeader.FieldLocations.ExtraFieldLength..], extraFieldLength); + Span lfStaticHeader = stackalloc byte[ZipLocalFileHeader.SizeOfLocalHeader]; + WriteLocalFileHeaderPrepare(lfStaticHeader, compressedSizeTruncated, uncompressedSizeTruncated, extraFieldLength); // write header _archive.ArchiveStream.Write(lfStaticHeader); - _archive.ArchiveStream.Write(_storedEntryNameBytes); // Only when handling zip64 @@ -1057,7 +1104,7 @@ private void WriteLocalFileHeaderAndDataIfNeeded(bool forceWrite) //The compressor fills in CRC and sizes //The DirectToArchiveWriterStream writes headers and such - using (Stream entryWriter = new DirectToArchiveWriterStream( + using (DirectToArchiveWriterStream entryWriter = new( GetDataCompressor(_archive.ArchiveStream, true, null), this)) { @@ -1105,31 +1152,21 @@ private void WriteLocalFileHeaderAndDataIfNeeded(bool forceWrite) } } + private const int MetadataBufferLength = ZipLocalFileHeader.FieldLengths.VersionNeededToExtract + ZipLocalFileHeader.FieldLengths.GeneralPurposeBitFlags; + private const int CrcAndSizesBufferLength = ZipLocalFileHeader.FieldLengths.Crc32 + ZipLocalFileHeader.FieldLengths.CompressedSize + ZipLocalFileHeader.FieldLengths.UncompressedSize; + private const int Zip64SizesBufferLength = Zip64ExtraField.FieldLengths.UncompressedSize + Zip64ExtraField.FieldLengths.CompressedSize; + private const int Zip64DataDescriptorCrcAndSizesBufferLength = ZipLocalFileHeader.Zip64DataDescriptor.FieldLengths.Crc32 + + ZipLocalFileHeader.Zip64DataDescriptor.FieldLengths.CompressedSize + ZipLocalFileHeader.Zip64DataDescriptor.FieldLengths.UncompressedSize; + // Using _offsetOfLocalHeader, seeks back to where CRC and sizes should be in the header, // writes them, then seeks back to where you started // Assumes that the stream is currently at the end of the data private void WriteCrcAndSizesInLocalHeader(bool zip64HeaderUsed) { - const int MetadataBufferLength = ZipLocalFileHeader.FieldLengths.VersionNeededToExtract + ZipLocalFileHeader.FieldLengths.GeneralPurposeBitFlags; - const int CrcAndSizesBufferLength = 
ZipLocalFileHeader.FieldLengths.Crc32 + ZipLocalFileHeader.FieldLengths.CompressedSize + ZipLocalFileHeader.FieldLengths.UncompressedSize; - const int Zip64SizesBufferLength = Zip64ExtraField.FieldLengths.UncompressedSize + Zip64ExtraField.FieldLengths.CompressedSize; - const int Zip64DataDescriptorCrcAndSizesBufferLength = ZipLocalFileHeader.Zip64DataDescriptor.FieldLengths.Crc32 - + ZipLocalFileHeader.Zip64DataDescriptor.FieldLengths.CompressedSize + ZipLocalFileHeader.Zip64DataDescriptor.FieldLengths.UncompressedSize; - - long finalPosition = _archive.ArchiveStream.Position; // Buffer has been sized to the largest data payload required: the 64-bit data descriptor. Span writeBuffer = stackalloc byte[Zip64DataDescriptorCrcAndSizesBufferLength]; - bool zip64Needed = ShouldUseZIP64 -#if DEBUG_FORCE_ZIP64 - || _archive._forceZip64 -#endif - ; - - bool pretendStreaming = zip64Needed && !zip64HeaderUsed; - - uint compressedSizeTruncated = zip64Needed ? ZipHelper.Mask32Bit : (uint)_compressedSize; - uint uncompressedSizeTruncated = zip64Needed ? ZipHelper.Mask32Bit : (uint)_uncompressedSize; + WriteCrcAndSizesInLocalHeaderInitialize(zip64HeaderUsed, out long finalPosition, out bool pretendStreaming, out uint compressedSizeTruncated, out uint uncompressedSizeTruncated); // first step is, if we need zip64, but didn't allocate it, pretend we did a stream write, because // we can't go back and give ourselves the space that the extra field needs. @@ -1137,38 +1174,13 @@ private void WriteCrcAndSizesInLocalHeader(bool zip64HeaderUsed) // and setting the version to Zip64 to indicate that descriptor contains 64-bit values if (pretendStreaming) { - int relativeVersionToExtractLocation = ZipLocalFileHeader.FieldLocations.VersionNeededToExtract - ZipLocalFileHeader.FieldLocations.VersionNeededToExtract; - int relativeGeneralPurposeBitFlagsLocation = ZipLocalFileHeader.FieldLocations.GeneralPurposeBitFlags - ZipLocalFileHeader.FieldLocations.VersionNeededToExtract; - - VersionToExtractAtLeast(ZipVersionNeededValues.Zip64); - _generalPurposeBitFlag |= BitFlagValues.DataDescriptor; - - _archive.ArchiveStream.Seek(_offsetOfLocalHeader + ZipLocalFileHeader.FieldLocations.VersionNeededToExtract, - SeekOrigin.Begin); - BinaryPrimitives.WriteUInt16LittleEndian(writeBuffer[relativeVersionToExtractLocation..], (ushort)_versionToExtract); - BinaryPrimitives.WriteUInt16LittleEndian(writeBuffer[relativeGeneralPurposeBitFlagsLocation..], (ushort)_generalPurposeBitFlag); - + WriteCrcAndSizesInLocalHeaderPrepareForZip64PretendStreaming(writeBuffer); _archive.ArchiveStream.Write(writeBuffer[..MetadataBufferLength]); } // next step is fill out the 32-bit size values in the normal header. we can't assume that // they are correct. 
we also write the CRC - _archive.ArchiveStream.Seek(_offsetOfLocalHeader + ZipLocalFileHeader.FieldLocations.Crc32, - SeekOrigin.Begin); - if (!pretendStreaming) - { - int relativeCrc32Location = ZipLocalFileHeader.FieldLocations.Crc32 - ZipLocalFileHeader.FieldLocations.Crc32; - int relativeCompressedSizeLocation = ZipLocalFileHeader.FieldLocations.CompressedSize - ZipLocalFileHeader.FieldLocations.Crc32; - int relativeUncompressedSizeLocation = ZipLocalFileHeader.FieldLocations.UncompressedSize - ZipLocalFileHeader.FieldLocations.Crc32; - - BinaryPrimitives.WriteUInt32LittleEndian(writeBuffer[relativeCrc32Location..], _crc32); - BinaryPrimitives.WriteUInt32LittleEndian(writeBuffer[relativeCompressedSizeLocation..], compressedSizeTruncated); - BinaryPrimitives.WriteUInt32LittleEndian(writeBuffer[relativeUncompressedSizeLocation..], uncompressedSizeTruncated); - } - else // but if we are pretending to stream, we want to fill in with zeroes - { - writeBuffer[..CrcAndSizesBufferLength].Clear(); - } + WriteCrcAndSizesInLocalHeaderPrepareFor32bitValuesWriting(pretendStreaming, writeBuffer, compressedSizeTruncated, uncompressedSizeTruncated); _archive.ArchiveStream.Write(writeBuffer[..CrcAndSizesBufferLength]); // next step: if we wrote the 64 bit header initially, a different implementation might @@ -1179,15 +1191,7 @@ private void WriteCrcAndSizesInLocalHeader(bool zip64HeaderUsed) // is always the first extra field that is written if (zip64HeaderUsed) { - int relativeUncompressedSizeLocation = Zip64ExtraField.FieldLocations.UncompressedSize - Zip64ExtraField.FieldLocations.UncompressedSize; - int relativeCompressedSizeLocation = Zip64ExtraField.FieldLocations.CompressedSize - Zip64ExtraField.FieldLocations.UncompressedSize; - - _archive.ArchiveStream.Seek(_offsetOfLocalHeader + ZipLocalFileHeader.SizeOfLocalHeader - + _storedEntryNameBytes.Length + Zip64ExtraField.OffsetToFirstField, - SeekOrigin.Begin); - BinaryPrimitives.WriteInt64LittleEndian(writeBuffer[relativeUncompressedSizeLocation..], _uncompressedSize); - BinaryPrimitives.WriteInt64LittleEndian(writeBuffer[relativeCompressedSizeLocation..], _compressedSize); - + WriteCrcAndSizesInLocalHeaderPrepareForWritingWhenZip64HeaderUsed(writeBuffer); _archive.ArchiveStream.Write(writeBuffer[..Zip64SizesBufferLength]); } @@ -1199,28 +1203,100 @@ private void WriteCrcAndSizesInLocalHeader(bool zip64HeaderUsed) // 64-bit sizes if (pretendStreaming) { - int relativeCrc32Location = ZipLocalFileHeader.Zip64DataDescriptor.FieldLocations.Crc32 - ZipLocalFileHeader.Zip64DataDescriptor.FieldLocations.Crc32; - int relativeCompressedSizeLocation = ZipLocalFileHeader.Zip64DataDescriptor.FieldLocations.CompressedSize - ZipLocalFileHeader.Zip64DataDescriptor.FieldLocations.Crc32; - int relativeUncompressedSizeLocation = ZipLocalFileHeader.Zip64DataDescriptor.FieldLocations.UncompressedSize - ZipLocalFileHeader.Zip64DataDescriptor.FieldLocations.Crc32; + WriteCrcAndSizesInLocalHeaderPrepareForWritingDataDescriptor(writeBuffer); + _archive.ArchiveStream.Write(writeBuffer[..Zip64DataDescriptorCrcAndSizesBufferLength]); + } + } - BinaryPrimitives.WriteUInt32LittleEndian(writeBuffer[relativeCrc32Location..], _crc32); - BinaryPrimitives.WriteInt64LittleEndian(writeBuffer[relativeCompressedSizeLocation..], _compressedSize); - BinaryPrimitives.WriteInt64LittleEndian(writeBuffer[relativeUncompressedSizeLocation..], _uncompressedSize); + private void WriteCrcAndSizesInLocalHeaderInitialize(bool zip64HeaderUsed, out long finalPosition, out bool pretendStreaming, out 
uint compressedSizeTruncated, out uint uncompressedSizeTruncated) + { + finalPosition = _archive.ArchiveStream.Position; - _archive.ArchiveStream.Write(writeBuffer[..Zip64DataDescriptorCrcAndSizesBufferLength]); + bool zip64Needed = ShouldUseZIP64 +#if DEBUG_FORCE_ZIP64 + || _archive._forceZip64 +#endif + ; + + pretendStreaming = zip64Needed && !zip64HeaderUsed; + compressedSizeTruncated = zip64Needed ? ZipHelper.Mask32Bit : (uint)_compressedSize; + uncompressedSizeTruncated = zip64Needed ? ZipHelper.Mask32Bit : (uint)_uncompressedSize; + } + + private void WriteCrcAndSizesInLocalHeaderPrepareForZip64PretendStreaming(Span writeBuffer) + { + int relativeVersionToExtractLocation = ZipLocalFileHeader.FieldLocations.VersionNeededToExtract - ZipLocalFileHeader.FieldLocations.VersionNeededToExtract; + int relativeGeneralPurposeBitFlagsLocation = ZipLocalFileHeader.FieldLocations.GeneralPurposeBitFlags - ZipLocalFileHeader.FieldLocations.VersionNeededToExtract; + + VersionToExtractAtLeast(ZipVersionNeededValues.Zip64); + _generalPurposeBitFlag |= BitFlagValues.DataDescriptor; + + _archive.ArchiveStream.Seek(_offsetOfLocalHeader + ZipLocalFileHeader.FieldLocations.VersionNeededToExtract, + SeekOrigin.Begin); + BinaryPrimitives.WriteUInt16LittleEndian(writeBuffer[relativeVersionToExtractLocation..], (ushort)_versionToExtract); + BinaryPrimitives.WriteUInt16LittleEndian(writeBuffer[relativeGeneralPurposeBitFlagsLocation..], (ushort)_generalPurposeBitFlag); + } + + private void WriteCrcAndSizesInLocalHeaderPrepareFor32bitValuesWriting(bool pretendStreaming, Span writeBuffer, uint compressedSizeTruncated, uint uncompressedSizeTruncated) + { + _archive.ArchiveStream.Seek(_offsetOfLocalHeader + ZipLocalFileHeader.FieldLocations.Crc32, + SeekOrigin.Begin); + if (!pretendStreaming) + { + int relativeCrc32Location = ZipLocalFileHeader.FieldLocations.Crc32 - ZipLocalFileHeader.FieldLocations.Crc32; + int relativeCompressedSizeLocation = ZipLocalFileHeader.FieldLocations.CompressedSize - ZipLocalFileHeader.FieldLocations.Crc32; + int relativeUncompressedSizeLocation = ZipLocalFileHeader.FieldLocations.UncompressedSize - ZipLocalFileHeader.FieldLocations.Crc32; + + BinaryPrimitives.WriteUInt32LittleEndian(writeBuffer[relativeCrc32Location..], _crc32); + BinaryPrimitives.WriteUInt32LittleEndian(writeBuffer[relativeCompressedSizeLocation..], compressedSizeTruncated); + BinaryPrimitives.WriteUInt32LittleEndian(writeBuffer[relativeUncompressedSizeLocation..], uncompressedSizeTruncated); + } + else // but if we are pretending to stream, we want to fill in with zeroes + { + writeBuffer[..CrcAndSizesBufferLength].Clear(); } } + private void WriteCrcAndSizesInLocalHeaderPrepareForWritingWhenZip64HeaderUsed(Span writeBuffer) + { + int relativeUncompressedSizeLocation = Zip64ExtraField.FieldLocations.UncompressedSize - Zip64ExtraField.FieldLocations.UncompressedSize; + int relativeCompressedSizeLocation = Zip64ExtraField.FieldLocations.CompressedSize - Zip64ExtraField.FieldLocations.UncompressedSize; + + _archive.ArchiveStream.Seek(_offsetOfLocalHeader + ZipLocalFileHeader.SizeOfLocalHeader + + _storedEntryNameBytes.Length + Zip64ExtraField.OffsetToFirstField, + SeekOrigin.Begin); + BinaryPrimitives.WriteInt64LittleEndian(writeBuffer[relativeUncompressedSizeLocation..], _uncompressedSize); + BinaryPrimitives.WriteInt64LittleEndian(writeBuffer[relativeCompressedSizeLocation..], _compressedSize); + } + + private void WriteCrcAndSizesInLocalHeaderPrepareForWritingDataDescriptor(Span writeBuffer) + { + int 
relativeCrc32Location = ZipLocalFileHeader.Zip64DataDescriptor.FieldLocations.Crc32 - ZipLocalFileHeader.Zip64DataDescriptor.FieldLocations.Crc32; + int relativeCompressedSizeLocation = ZipLocalFileHeader.Zip64DataDescriptor.FieldLocations.CompressedSize - ZipLocalFileHeader.Zip64DataDescriptor.FieldLocations.Crc32; + int relativeUncompressedSizeLocation = ZipLocalFileHeader.Zip64DataDescriptor.FieldLocations.UncompressedSize - ZipLocalFileHeader.Zip64DataDescriptor.FieldLocations.Crc32; + + BinaryPrimitives.WriteUInt32LittleEndian(writeBuffer.Slice(relativeCrc32Location), _crc32); + BinaryPrimitives.WriteInt64LittleEndian(writeBuffer.Slice(relativeCompressedSizeLocation), _compressedSize); + BinaryPrimitives.WriteInt64LittleEndian(writeBuffer.Slice(relativeUncompressedSizeLocation), _uncompressedSize); + + } + + // data descriptor can be 32-bit or 64-bit sizes. 32-bit is more compatible, so use that if possible + // signature is optional but recommended by the spec + private const int MaxSizeOfDataDescriptor = 24; + private void WriteDataDescriptor() + { + Span dataDescriptor = stackalloc byte[MaxSizeOfDataDescriptor]; + int bytesToWrite = PrepareToWriteDataDescriptor(dataDescriptor); + _archive.ArchiveStream.Write(dataDescriptor[..bytesToWrite]); + } + + private int PrepareToWriteDataDescriptor(Span dataDescriptor) { // We enter here because we cannot seek, so the data descriptor bit should be on Debug.Assert((_generalPurposeBitFlag & BitFlagValues.DataDescriptor) != 0); - // data descriptor can be 32-bit or 64-bit sizes. 32-bit is more compatible, so use that if possible - // signature is optional but recommended by the spec - const int MaxSizeOfDataDescriptor = 24; - - Span dataDescriptor = stackalloc byte[MaxSizeOfDataDescriptor]; int bytesToWrite; ZipLocalFileHeader.DataDescriptorSignatureConstantBytes.CopyTo(dataDescriptor[ZipLocalFileHeader.ZipDataDescriptor.FieldLocations.Signature..]); @@ -1241,7 +1317,7 @@ private void WriteDataDescriptor() bytesToWrite = ZipLocalFileHeader.ZipDataDescriptor.FieldLocations.UncompressedSize + ZipLocalFileHeader.ZipDataDescriptor.FieldLengths.UncompressedSize; } - _archive.ArchiveStream.Write(dataDescriptor[..bytesToWrite]); + return bytesToWrite; } private void UnloadStreams() @@ -1361,6 +1437,18 @@ public override int Read(byte[] buffer, int offset, int count) throw new NotSupportedException(SR.ReadingNotSupported); } + public override Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) + { + ThrowIfDisposed(); + throw new NotSupportedException(SR.ReadingNotSupported); + } + + public override ValueTask ReadAsync(Memory buffer, CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + throw new NotSupportedException(SR.ReadingNotSupported); + } + public override long Seek(long offset, SeekOrigin origin) { ThrowIfDisposed(); @@ -1441,7 +1529,7 @@ async ValueTask Core(ReadOnlyMemory buffer, CancellationToken cancellation { _everWritten = true; // write local header, we are good to go - _usedZip64inLH = _entry.WriteLocalFileHeader(isEmptyFile: false, forceWrite: true); + _usedZip64inLH = await _entry.WriteLocalFileHeaderAsync(isEmptyFile: false, forceWrite: true, cancellationToken).ConfigureAwait(false); } await _crcSizeStream.WriteAsync(buffer, cancellationToken).ConfigureAwait(false); @@ -1492,6 +1580,34 @@ protected override void Dispose(bool disposing) base.Dispose(disposing); } + + public override async ValueTask DisposeAsync() + { + if (!_isDisposed) + { + await 
_crcSizeStream.DisposeAsync().ConfigureAwait(false); // now we have size/crc info + + if (!_everWritten) + { + // write local header, no data, so we use stored + await _entry.WriteLocalFileHeaderAsync(isEmptyFile: true, forceWrite: true, cancellationToken: default).ConfigureAwait(false); + } + else + { + // go back and finish writing + if (_entry._archive.ArchiveStream.CanSeek) + // finish writing local header if we have seek capabilities + await _entry.WriteCrcAndSizesInLocalHeaderAsync(_usedZip64inLH, cancellationToken: default).ConfigureAwait(false); + else + // write out data descriptor if we don't have seek capabilities + await _entry.WriteDataDescriptorAsync(cancellationToken: default).ConfigureAwait(false); + } + _canWrite = false; + _isDisposed = true; + } + + await base.DisposeAsync().ConfigureAwait(false); + } } [Flags] diff --git a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.Async.cs b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.Async.cs new file mode 100644 index 00000000000000..8305b5e42bfdce --- /dev/null +++ b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.Async.cs @@ -0,0 +1,261 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Buffers; +using System.Collections.Generic; +using System.Diagnostics; +using System.Threading; +using System.Threading.Tasks; + +namespace System.IO.Compression; + +internal sealed partial class ZipGenericExtraField +{ + public async Task WriteBlockAsync(Stream stream, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + byte[] extraFieldHeader = new byte[SizeOfHeader]; + WriteBlockCore(extraFieldHeader); + await stream.WriteAsync(extraFieldHeader, cancellationToken).ConfigureAwait(false); + await stream.WriteAsync(Data, cancellationToken).ConfigureAwait(false); + } + + public static async Task WriteAllBlocksAsync(List? fields, ReadOnlyMemory trailingExtraFieldData, Stream stream, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (fields != null) + { + foreach (ZipGenericExtraField field in fields) + { + await field.WriteBlockAsync(stream, cancellationToken).ConfigureAwait(false); + } + } + + if (!trailingExtraFieldData.IsEmpty) + { + await stream.WriteAsync(trailingExtraFieldData, cancellationToken).ConfigureAwait(false); + } + } +} + +internal sealed partial class Zip64ExtraField +{ + public ValueTask WriteBlockAsync(Stream stream, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + byte[] extraFieldData = new byte[TotalSize]; + WriteBlockCore(extraFieldData); + return stream.WriteAsync(extraFieldData, cancellationToken); + } + +} + +internal sealed partial class Zip64EndOfCentralDirectoryLocator +{ + public static async Task TryReadBlockAsync(Stream stream, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + byte[] blockContents = new byte[TotalSize]; + int bytesRead = await stream.ReadAtLeastAsync(blockContents, blockContents.Length, throwOnEndOfStream: false, cancellationToken).ConfigureAwait(false); + bool zip64eocdLocatorProper = TryReadBlockCore(blockContents, bytesRead, out Zip64EndOfCentralDirectoryLocator? 
zip64EOCDLocator); + + Debug.Assert(zip64eocdLocatorProper && zip64EOCDLocator != null); // we just found this using the signature finder, so it should be okay + + return zip64EOCDLocator; + } + + public static ValueTask WriteBlockAsync(Stream stream, long zip64EOCDRecordStart, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + byte[] blockContents = new byte[TotalSize]; + WriteBlockCore(blockContents, zip64EOCDRecordStart); + return stream.WriteAsync(blockContents, cancellationToken); + } +} + +internal sealed partial class Zip64EndOfCentralDirectoryRecord +{ + public static async Task TryReadBlockAsync(Stream stream, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + byte[] blockContents = new byte[BlockConstantSectionSize]; + int bytesRead = await stream.ReadAtLeastAsync(blockContents, blockContents.Length, throwOnEndOfStream: false, cancellationToken).ConfigureAwait(false); + + if (!TryReadBlockCore(blockContents, bytesRead, out Zip64EndOfCentralDirectoryRecord? zip64EOCDRecord)) + { + throw new InvalidDataException(SR.Zip64EOCDNotWhereExpected); + } + + return zip64EOCDRecord; + } + + public static ValueTask WriteBlockAsync(Stream stream, long numberOfEntries, long startOfCentralDirectory, long sizeOfCentralDirectory, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + byte[] blockContents = new byte[BlockConstantSectionSize]; + WriteBlockCore(blockContents, numberOfEntries, startOfCentralDirectory, sizeOfCentralDirectory); + // write Zip 64 EOCD record + return stream.WriteAsync(blockContents, cancellationToken); + } +} + +internal readonly partial struct ZipLocalFileHeader +{ + public static async Task<(List, byte[] trailingData)> GetExtraFieldsAsync(Stream stream, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + // assumes that TrySkipBlock has already been called, so we don't have to validate twice + + byte[] fixedHeaderBuffer = new byte[FieldLengths.FilenameLength + FieldLengths.ExtraFieldLength]; + GetExtraFieldsInitialize(stream, out int relativeFilenameLengthLocation, out int relativeExtraFieldLengthLocation); + await stream.ReadExactlyAsync(fixedHeaderBuffer, cancellationToken).ConfigureAwait(false); + + GetExtraFieldsCore(fixedHeaderBuffer, relativeFilenameLengthLocation, relativeExtraFieldLengthLocation, out ushort filenameLength, out ushort extraFieldLength); + + byte[] arrayPoolBuffer = ArrayPool.Shared.Rent(extraFieldLength); + Memory extraFieldBuffer = arrayPoolBuffer.AsMemory(0, extraFieldLength); + + try + { + stream.Seek(filenameLength, SeekOrigin.Current); + await stream.ReadExactlyAsync(extraFieldBuffer, cancellationToken).ConfigureAwait(false); + + List list = GetExtraFieldPostReadWork(extraFieldBuffer.Span, out byte[] trailingData); + + return (list, trailingData); + } + finally + { + if (arrayPoolBuffer != null) + { + ArrayPool.Shared.Return(arrayPoolBuffer); + } + } + } + + // will not throw end of stream exception + public static async Task TrySkipBlockAsync(Stream stream, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + byte[] blockBytes = new byte[FieldLengths.Signature]; + long currPosition = stream.Position; + int bytesRead = await stream.ReadAtLeastAsync(blockBytes, blockBytes.Length, throwOnEndOfStream: false, cancellationToken).ConfigureAwait(false); + if (!TrySkipBlockCore(stream, blockBytes, bytesRead, currPosition)) + { + 
return false; + } + bytesRead = await stream.ReadAtLeastAsync(blockBytes, blockBytes.Length, throwOnEndOfStream: false, cancellationToken).ConfigureAwait(false); + return TrySkipBlockFinalize(stream, blockBytes, bytesRead); + } +} + +internal sealed partial class ZipCentralDirectoryFileHeader +{ + // if saveExtraFieldsAndComments is false, FileComment and ExtraFields will be null + // in either case, the zip64 extra field info will be incorporated into other fields + public static async Task<(bool, int, ZipCentralDirectoryFileHeader?)> TryReadBlockAsync(ReadOnlyMemory buffer, Stream furtherReads, bool saveExtraFieldsAndComments, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + ZipCentralDirectoryFileHeader? header; + if (!TryReadBlockInitialize(buffer.Span, out header, out int bytesRead, out uint compressedSizeSmall, out uint uncompressedSizeSmall, out ushort diskNumberStartSmall, out uint relativeOffsetOfLocalHeaderSmall)) + { + return (false, 0, null); + } + + byte[]? arrayPoolBuffer = null; + try + { + // Assemble the dynamic header in a separate buffer. We can't guarantee that it's all in the input buffer, + // some additional data might need to come from the stream. + int dynamicHeaderSize = header.FilenameLength + header.ExtraFieldLength + header.FileCommentLength; + int remainingBufferLength = buffer.Length - FieldLocations.DynamicData; + int bytesToRead = dynamicHeaderSize - remainingBufferLength; + scoped ReadOnlySpan dynamicHeader; + + // No need to read extra data from the stream, no need to allocate a new buffer. + if (bytesToRead <= 0) + { + dynamicHeader = buffer.Span[FieldLocations.DynamicData..]; + } + // Data needs to come from two sources, and we must thus copy data into a single address space. + else + { + if (dynamicHeaderSize > StackAllocationThreshold) + { + arrayPoolBuffer = ArrayPool.Shared.Rent(dynamicHeaderSize); + } + + byte[] collatedHeader = dynamicHeaderSize <= StackAllocationThreshold ? 
new byte[dynamicHeaderSize] : arrayPoolBuffer.AsSpan(0, dynamicHeaderSize).ToArray(); + + buffer[FieldLocations.DynamicData..].CopyTo(collatedHeader); + + Debug.Assert(bytesToRead == collatedHeader[remainingBufferLength..].Length); + int realBytesRead = await furtherReads.ReadAtLeastAsync(collatedHeader.AsMemory(remainingBufferLength..), bytesToRead, throwOnEndOfStream: false, cancellationToken).ConfigureAwait(false); + + if (realBytesRead != bytesToRead) + { + return (false, bytesRead, null); + } + dynamicHeader = collatedHeader; + } + + TryReadBlockFinalize(header, dynamicHeader, dynamicHeaderSize, uncompressedSizeSmall, compressedSizeSmall, diskNumberStartSmall, relativeOffsetOfLocalHeaderSmall, saveExtraFieldsAndComments, ref bytesRead, out Zip64ExtraField zip64); + } + finally + { + if (arrayPoolBuffer != null) + { + ArrayPool.Shared.Return(arrayPoolBuffer); + } + } + + return (true, bytesRead, header); + } +} + +internal sealed partial class ZipEndOfCentralDirectoryBlock +{ + public static async Task WriteBlockAsync(Stream stream, long numberOfEntries, long startOfCentralDirectory, long sizeOfCentralDirectory, byte[] archiveComment, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + byte[] blockContents = new byte[TotalSize]; + + WriteBlockInitialize(blockContents, numberOfEntries, startOfCentralDirectory, sizeOfCentralDirectory, archiveComment); + + await stream.WriteAsync(blockContents, cancellationToken).ConfigureAwait(false); + if (archiveComment.Length > 0) + { + await stream.WriteAsync(archiveComment, cancellationToken).ConfigureAwait(false); + } + } + + public static async Task ReadBlockAsync(Stream stream, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + byte[] blockContents = new byte[TotalSize]; + int bytesRead = await stream.ReadAtLeastAsync(blockContents, blockContents.Length, throwOnEndOfStream: false, cancellationToken).ConfigureAwait(false); + + if (!TryReadBlockInitialize(stream, blockContents, bytesRead, out ZipEndOfCentralDirectoryBlock? eocdBlock, out bool readComment)) + { + // We shouldn't get here because we found the eocd block using the signature finder + throw new InvalidDataException(SR.EOCDNotFound); + } + else if (readComment) + { + stream.ReadExactly(eocdBlock._archiveComment); + } + return eocdBlock; + } +} diff --git a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.FieldLengths.cs b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.FieldLengths.cs index 62b31842d67cce..20e51de6060273 100644 --- a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.FieldLengths.cs +++ b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.FieldLengths.cs @@ -27,7 +27,9 @@ internal sealed partial class Zip64EndOfCentralDirectoryLocator { internal static class FieldLengths { - public static readonly int Signature = SignatureConstantBytes.Length; + // Must match the signature constant bytes length, but should stay a const int or sometimes + // static initialization of FieldLengths and NullReferenceException occurs. 
+ public const int Signature = 4; public const int NumberOfDiskWithZip64EOCD = sizeof(uint); public const int OffsetOfZip64EOCD = sizeof(ulong); public const int TotalNumberOfDisks = sizeof(uint); @@ -38,7 +40,9 @@ internal sealed partial class Zip64EndOfCentralDirectoryRecord { private static class FieldLengths { - public static readonly int Signature = SignatureConstantBytes.Length; + // Must match the signature constant bytes length, but should stay a const int or sometimes + // static initialization of FieldLengths and NullReferenceException occurs. + public const int Signature = 4; public const int SizeOfThisRecord = sizeof(ulong); public const int VersionMadeBy = sizeof(ushort); public const int VersionNeededToExtract = sizeof(ushort); @@ -55,7 +59,9 @@ internal readonly partial struct ZipLocalFileHeader { internal static class FieldLengths { - public static readonly int Signature = SignatureConstantBytes.Length; + // Must match the signature constant bytes length, but should stay a const int or sometimes + // static initialization of FieldLengths and NullReferenceException occurs. + public const int Signature = 4; public const int VersionNeededToExtract = sizeof(ushort); public const int GeneralPurposeBitFlags = sizeof(ushort); public const int CompressionMethod = sizeof(ushort); @@ -71,7 +77,9 @@ internal sealed partial class ZipDataDescriptor { internal static class FieldLengths { - public static readonly int Signature = DataDescriptorSignatureConstantBytes.Length; + // Must match the data descriptor signature constant bytes length, but should stay a const int or sometimes + // static initialization of FieldLengths and NullReferenceException occurs. + public const int Signature = 4; public const int Crc32 = sizeof(uint); public const int CompressedSize = sizeof(uint); public const int UncompressedSize = sizeof(uint); @@ -82,7 +90,9 @@ internal sealed partial class Zip64DataDescriptor { internal static class FieldLengths { - public static readonly int Signature = DataDescriptorSignatureConstantBytes.Length; + // Must match the data descriptor signature constant bytes length, but should stay a const int or sometimes + // static initialization of FieldLengths and NullReferenceException occurs. + public const int Signature = 4; public const int Crc32 = sizeof(uint); public const int CompressedSize = sizeof(long); public const int UncompressedSize = sizeof(long); @@ -94,7 +104,9 @@ internal sealed partial class ZipCentralDirectoryFileHeader { internal static class FieldLengths { - public static readonly int Signature = SignatureConstantBytes.Length; + // Must match the signature constant bytes length, but should stay a const int or sometimes + // static initialization of FieldLengths and NullReferenceException occurs. + public const int Signature = 4; public const int VersionMadeBySpecification = sizeof(byte); public const int VersionMadeByCompatibility = sizeof(byte); public const int VersionNeededToExtract = sizeof(ushort); @@ -118,7 +130,9 @@ internal sealed partial class ZipEndOfCentralDirectoryBlock { internal static class FieldLengths { - public static readonly int Signature = SignatureConstantBytes.Length; + // Must match the signature constant bytes length, but should stay a const int or sometimes + // static initialization of FieldLengths and NullReferenceException occurs. 
+ public const int Signature = 4; public const int NumberOfThisDisk = sizeof(ushort); public const int NumberOfTheDiskWithTheStartOfTheCentralDirectory = sizeof(ushort); public const int NumberOfEntriesInTheCentralDirectoryOnThisDisk = sizeof(ushort); diff --git a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.FieldLocations.cs b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.FieldLocations.cs index 5d7a5127ed5722..d95d2d1e4979e1 100644 --- a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.FieldLocations.cs +++ b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.FieldLocations.cs @@ -31,9 +31,9 @@ internal sealed partial class Zip64EndOfCentralDirectoryLocator private static class FieldLocations { public const int Signature = 0; - public static readonly int NumberOfDiskWithZip64EOCD = Signature + FieldLengths.Signature; - public static readonly int OffsetOfZip64EOCD = NumberOfDiskWithZip64EOCD + FieldLengths.NumberOfDiskWithZip64EOCD; - public static readonly int TotalNumberOfDisks = OffsetOfZip64EOCD + FieldLengths.OffsetOfZip64EOCD; + public const int NumberOfDiskWithZip64EOCD = Signature + FieldLengths.Signature; + public const int OffsetOfZip64EOCD = NumberOfDiskWithZip64EOCD + FieldLengths.NumberOfDiskWithZip64EOCD; + public const int TotalNumberOfDisks = OffsetOfZip64EOCD + FieldLengths.OffsetOfZip64EOCD; } } @@ -42,15 +42,15 @@ internal sealed partial class Zip64EndOfCentralDirectoryRecord private static class FieldLocations { public const int Signature = 0; - public static readonly int SizeOfThisRecord = Signature + FieldLengths.Signature; - public static readonly int VersionMadeBy = SizeOfThisRecord + FieldLengths.SizeOfThisRecord; - public static readonly int VersionNeededToExtract = VersionMadeBy + FieldLengths.VersionMadeBy; - public static readonly int NumberOfThisDisk = VersionNeededToExtract + FieldLengths.VersionNeededToExtract; - public static readonly int NumberOfDiskWithStartOfCD = NumberOfThisDisk + FieldLengths.NumberOfThisDisk; - public static readonly int NumberOfEntriesOnThisDisk = NumberOfDiskWithStartOfCD + FieldLengths.NumberOfDiskWithStartOfCD; - public static readonly int NumberOfEntriesTotal = NumberOfEntriesOnThisDisk + FieldLengths.NumberOfEntriesOnThisDisk; - public static readonly int SizeOfCentralDirectory = NumberOfEntriesTotal + FieldLengths.NumberOfEntriesTotal; - public static readonly int OffsetOfCentralDirectory = SizeOfCentralDirectory + FieldLengths.SizeOfCentralDirectory; + public const int SizeOfThisRecord = Signature + FieldLengths.Signature; + public const int VersionMadeBy = SizeOfThisRecord + FieldLengths.SizeOfThisRecord; + public const int VersionNeededToExtract = VersionMadeBy + FieldLengths.VersionMadeBy; + public const int NumberOfThisDisk = VersionNeededToExtract + FieldLengths.VersionNeededToExtract; + public const int NumberOfDiskWithStartOfCD = NumberOfThisDisk + FieldLengths.NumberOfThisDisk; + public const int NumberOfEntriesOnThisDisk = NumberOfDiskWithStartOfCD + FieldLengths.NumberOfDiskWithStartOfCD; + public const int NumberOfEntriesTotal = NumberOfEntriesOnThisDisk + FieldLengths.NumberOfEntriesOnThisDisk; + public const int SizeOfCentralDirectory = NumberOfEntriesTotal + FieldLengths.NumberOfEntriesTotal; + public const int OffsetOfCentralDirectory = SizeOfCentralDirectory + FieldLengths.SizeOfCentralDirectory; } } @@ -59,16 +59,16 @@ internal readonly partial struct ZipLocalFileHeader internal static class FieldLocations { 
public const int Signature = 0; - public static readonly int VersionNeededToExtract = Signature + FieldLengths.Signature; - public static readonly int GeneralPurposeBitFlags = VersionNeededToExtract + FieldLengths.VersionNeededToExtract; - public static readonly int CompressionMethod = GeneralPurposeBitFlags + FieldLengths.GeneralPurposeBitFlags; - public static readonly int LastModified = CompressionMethod + FieldLengths.CompressionMethod; - public static readonly int Crc32 = LastModified + FieldLengths.LastModified; - public static readonly int CompressedSize = Crc32 + FieldLengths.Crc32; - public static readonly int UncompressedSize = CompressedSize + FieldLengths.CompressedSize; - public static readonly int FilenameLength = UncompressedSize + FieldLengths.UncompressedSize; - public static readonly int ExtraFieldLength = FilenameLength + FieldLengths.FilenameLength; - public static readonly int DynamicData = ExtraFieldLength + FieldLengths.ExtraFieldLength; + public const int VersionNeededToExtract = Signature + FieldLengths.Signature; + public const int GeneralPurposeBitFlags = VersionNeededToExtract + FieldLengths.VersionNeededToExtract; + public const int CompressionMethod = GeneralPurposeBitFlags + FieldLengths.GeneralPurposeBitFlags; + public const int LastModified = CompressionMethod + FieldLengths.CompressionMethod; + public const int Crc32 = LastModified + FieldLengths.LastModified; + public const int CompressedSize = Crc32 + FieldLengths.Crc32; + public const int UncompressedSize = CompressedSize + FieldLengths.CompressedSize; + public const int FilenameLength = UncompressedSize + FieldLengths.UncompressedSize; + public const int ExtraFieldLength = FilenameLength + FieldLengths.FilenameLength; + public const int DynamicData = ExtraFieldLength + FieldLengths.ExtraFieldLength; } internal sealed partial class ZipDataDescriptor @@ -76,9 +76,9 @@ internal sealed partial class ZipDataDescriptor internal static class FieldLocations { public const int Signature = 0; - public static readonly int Crc32 = Signature + FieldLengths.Signature; - public static readonly int CompressedSize = Crc32 + FieldLengths.Crc32; - public static readonly int UncompressedSize = CompressedSize + FieldLengths.CompressedSize; + public const int Crc32 = Signature + FieldLengths.Signature; + public const int CompressedSize = Crc32 + FieldLengths.Crc32; + public const int UncompressedSize = CompressedSize + FieldLengths.CompressedSize; } } @@ -87,9 +87,9 @@ internal sealed partial class Zip64DataDescriptor internal static class FieldLocations { public const int Signature = 0; - public static readonly int Crc32 = Signature + FieldLengths.Signature; - public static readonly int CompressedSize = Crc32 + FieldLengths.Crc32; - public static readonly int UncompressedSize = CompressedSize + FieldLengths.CompressedSize; + public const int Crc32 = Signature + FieldLengths.Signature; + public const int CompressedSize = Crc32 + FieldLengths.Crc32; + public const int UncompressedSize = CompressedSize + FieldLengths.CompressedSize; } } } @@ -99,23 +99,23 @@ internal sealed partial class ZipCentralDirectoryFileHeader internal static class FieldLocations { public const int Signature = 0; - public static readonly int VersionMadeBySpecification = Signature + FieldLengths.Signature; - public static readonly int VersionMadeByCompatibility = VersionMadeBySpecification + FieldLengths.VersionMadeBySpecification; - public static readonly int VersionNeededToExtract = VersionMadeByCompatibility + FieldLengths.VersionMadeByCompatibility; 
- public static readonly int GeneralPurposeBitFlags = VersionNeededToExtract + FieldLengths.VersionNeededToExtract; - public static readonly int CompressionMethod = GeneralPurposeBitFlags + FieldLengths.GeneralPurposeBitFlags; - public static readonly int LastModified = CompressionMethod + FieldLengths.CompressionMethod; - public static readonly int Crc32 = LastModified + FieldLengths.LastModified; - public static readonly int CompressedSize = Crc32 + FieldLengths.Crc32; - public static readonly int UncompressedSize = CompressedSize + FieldLengths.CompressedSize; - public static readonly int FilenameLength = UncompressedSize + FieldLengths.UncompressedSize; - public static readonly int ExtraFieldLength = FilenameLength + FieldLengths.FilenameLength; - public static readonly int FileCommentLength = ExtraFieldLength + FieldLengths.ExtraFieldLength; - public static readonly int DiskNumberStart = FileCommentLength + FieldLengths.FileCommentLength; - public static readonly int InternalFileAttributes = DiskNumberStart + FieldLengths.DiskNumberStart; - public static readonly int ExternalFileAttributes = InternalFileAttributes + FieldLengths.InternalFileAttributes; - public static readonly int RelativeOffsetOfLocalHeader = ExternalFileAttributes + FieldLengths.ExternalFileAttributes; - public static readonly int DynamicData = RelativeOffsetOfLocalHeader + FieldLengths.RelativeOffsetOfLocalHeader; + public const int VersionMadeBySpecification = Signature + FieldLengths.Signature; + public const int VersionMadeByCompatibility = VersionMadeBySpecification + FieldLengths.VersionMadeBySpecification; + public const int VersionNeededToExtract = VersionMadeByCompatibility + FieldLengths.VersionMadeByCompatibility; + public const int GeneralPurposeBitFlags = VersionNeededToExtract + FieldLengths.VersionNeededToExtract; + public const int CompressionMethod = GeneralPurposeBitFlags + FieldLengths.GeneralPurposeBitFlags; + public const int LastModified = CompressionMethod + FieldLengths.CompressionMethod; + public const int Crc32 = LastModified + FieldLengths.LastModified; + public const int CompressedSize = Crc32 + FieldLengths.Crc32; + public const int UncompressedSize = CompressedSize + FieldLengths.CompressedSize; + public const int FilenameLength = UncompressedSize + FieldLengths.UncompressedSize; + public const int ExtraFieldLength = FilenameLength + FieldLengths.FilenameLength; + public const int FileCommentLength = ExtraFieldLength + FieldLengths.ExtraFieldLength; + public const int DiskNumberStart = FileCommentLength + FieldLengths.FileCommentLength; + public const int InternalFileAttributes = DiskNumberStart + FieldLengths.DiskNumberStart; + public const int ExternalFileAttributes = InternalFileAttributes + FieldLengths.InternalFileAttributes; + public const int RelativeOffsetOfLocalHeader = ExternalFileAttributes + FieldLengths.ExternalFileAttributes; + public const int DynamicData = RelativeOffsetOfLocalHeader + FieldLengths.RelativeOffsetOfLocalHeader; } } @@ -124,14 +124,14 @@ internal sealed partial class ZipEndOfCentralDirectoryBlock private static class FieldLocations { public const int Signature = 0; - public static readonly int NumberOfThisDisk = Signature + FieldLengths.Signature; - public static readonly int NumberOfTheDiskWithTheStartOfTheCentralDirectory = NumberOfThisDisk + FieldLengths.NumberOfThisDisk; - public static readonly int NumberOfEntriesInTheCentralDirectoryOnThisDisk = NumberOfTheDiskWithTheStartOfTheCentralDirectory + 
FieldLengths.NumberOfTheDiskWithTheStartOfTheCentralDirectory; - public static readonly int NumberOfEntriesInTheCentralDirectory = NumberOfEntriesInTheCentralDirectoryOnThisDisk + FieldLengths.NumberOfEntriesInTheCentralDirectoryOnThisDisk; - public static readonly int SizeOfCentralDirectory = NumberOfEntriesInTheCentralDirectory + FieldLengths.NumberOfEntriesInTheCentralDirectory; - public static readonly int OffsetOfStartOfCentralDirectoryWithRespectToTheStartingDiskNumber = SizeOfCentralDirectory + FieldLengths.SizeOfCentralDirectory; - public static readonly int ArchiveCommentLength = OffsetOfStartOfCentralDirectoryWithRespectToTheStartingDiskNumber + FieldLengths.OffsetOfStartOfCentralDirectoryWithRespectToTheStartingDiskNumber; - public static readonly int DynamicData = ArchiveCommentLength + FieldLengths.ArchiveCommentLength; + public const int NumberOfThisDisk = Signature + FieldLengths.Signature; + public const int NumberOfTheDiskWithTheStartOfTheCentralDirectory = NumberOfThisDisk + FieldLengths.NumberOfThisDisk; + public const int NumberOfEntriesInTheCentralDirectoryOnThisDisk = NumberOfTheDiskWithTheStartOfTheCentralDirectory + FieldLengths.NumberOfTheDiskWithTheStartOfTheCentralDirectory; + public const int NumberOfEntriesInTheCentralDirectory = NumberOfEntriesInTheCentralDirectoryOnThisDisk + FieldLengths.NumberOfEntriesInTheCentralDirectoryOnThisDisk; + public const int SizeOfCentralDirectory = NumberOfEntriesInTheCentralDirectory + FieldLengths.NumberOfEntriesInTheCentralDirectory; + public const int OffsetOfStartOfCentralDirectoryWithRespectToTheStartingDiskNumber = SizeOfCentralDirectory + FieldLengths.SizeOfCentralDirectory; + public const int ArchiveCommentLength = OffsetOfStartOfCentralDirectoryWithRespectToTheStartingDiskNumber + FieldLengths.OffsetOfStartOfCentralDirectoryWithRespectToTheStartingDiskNumber; + public const int DynamicData = ArchiveCommentLength + FieldLengths.ArchiveCommentLength; } } } diff --git a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.cs b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.cs index a7b08d940dd857..5e70cf29fc5eaa 100644 --- a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.cs +++ b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.cs @@ -1,10 +1,14 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
+using System.Buffers; using System.Buffers.Binary; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; +using System.Runtime.InteropServices; +using System.Threading; +using System.Threading.Tasks; namespace System.IO.Compression { @@ -26,12 +30,15 @@ internal sealed partial class ZipGenericExtraField public void WriteBlock(Stream stream) { Span extraFieldHeader = stackalloc byte[SizeOfHeader]; + WriteBlockCore(extraFieldHeader); + stream.Write(extraFieldHeader); + stream.Write(Data); + } + private void WriteBlockCore(Span extraFieldHeader) + { BinaryPrimitives.WriteUInt16LittleEndian(extraFieldHeader[FieldLocations.Tag..], _tag); BinaryPrimitives.WriteUInt16LittleEndian(extraFieldHeader[FieldLocations.Size..], _size); - - stream.Write(extraFieldHeader); - stream.Write(Data); } // assumes that bytes starts at the beginning of an extra field subfield @@ -357,9 +364,8 @@ public static void RemoveZip64Blocks(List extraFields) extraFields.RemoveAll(field => field.Tag == TagConstant); } - public void WriteBlock(Stream stream) + public void WriteBlockCore(Span extraFieldData) { - Span extraFieldData = stackalloc byte[TotalSize]; int startOffset = ZipGenericExtraField.FieldLocations.DynamicData; BinaryPrimitives.WriteUInt16LittleEndian(extraFieldData[FieldLocations.Tag..], TagConstant); @@ -386,9 +392,13 @@ public void WriteBlock(Stream stream) if (_startDiskNumber != null) { BinaryPrimitives.WriteUInt32LittleEndian(extraFieldData[startOffset..], _startDiskNumber.Value); - startOffset += FieldLengths.StartDiskNumber; } + } + public void WriteBlock(Stream stream) + { + Span extraFieldData = stackalloc byte[TotalSize]; + WriteBlockCore(extraFieldData); stream.Write(extraFieldData); } } @@ -397,44 +407,46 @@ internal sealed partial class Zip64EndOfCentralDirectoryLocator { // The Zip File Format Specification references 0x07064B50, this is a big endian representation. // ZIP files store values in little endian, so this is reversed. - public static ReadOnlySpan SignatureConstantBytes => [0x50, 0x4B, 0x06, 0x07]; + public static readonly byte[] SignatureConstantBytes = [0x50, 0x4B, 0x06, 0x07]; - public static readonly int TotalSize = FieldLocations.TotalNumberOfDisks + FieldLengths.TotalNumberOfDisks; - public static readonly int SizeOfBlockWithoutSignature = TotalSize - FieldLengths.Signature; + public const int TotalSize = FieldLocations.TotalNumberOfDisks + FieldLengths.TotalNumberOfDisks; + public const int SizeOfBlockWithoutSignature = TotalSize - FieldLengths.Signature; public uint NumberOfDiskWithZip64EOCD; public ulong OffsetOfZip64EOCD; public uint TotalNumberOfDisks; - public static bool TryReadBlock(Stream stream, out Zip64EndOfCentralDirectoryLocator zip64EOCDLocator) + private static bool TryReadBlockCore(Span blockContents, int bytesRead, [NotNullWhen(returnValue: true)] out Zip64EndOfCentralDirectoryLocator? 
zip64EOCDLocator) { - Span blockContents = stackalloc byte[TotalSize]; - int bytesRead; - - zip64EOCDLocator = new(); - bytesRead = stream.ReadAtLeast(blockContents, blockContents.Length, throwOnEndOfStream: false); - - if (bytesRead < TotalSize) + zip64EOCDLocator = null; + if (bytesRead < TotalSize || !blockContents.StartsWith(SignatureConstantBytes)) { return false; } - if (!blockContents.StartsWith(SignatureConstantBytes)) + zip64EOCDLocator = new() { - return false; - } - - zip64EOCDLocator.NumberOfDiskWithZip64EOCD = BinaryPrimitives.ReadUInt32LittleEndian(blockContents[FieldLocations.NumberOfDiskWithZip64EOCD..]); - zip64EOCDLocator.OffsetOfZip64EOCD = BinaryPrimitives.ReadUInt64LittleEndian(blockContents[FieldLocations.OffsetOfZip64EOCD..]); - zip64EOCDLocator.TotalNumberOfDisks = BinaryPrimitives.ReadUInt32LittleEndian(blockContents[FieldLocations.TotalNumberOfDisks..]); + NumberOfDiskWithZip64EOCD = BinaryPrimitives.ReadUInt32LittleEndian(blockContents[FieldLocations.NumberOfDiskWithZip64EOCD..]), + OffsetOfZip64EOCD = BinaryPrimitives.ReadUInt64LittleEndian(blockContents[FieldLocations.OffsetOfZip64EOCD..]), + TotalNumberOfDisks = BinaryPrimitives.ReadUInt32LittleEndian(blockContents[FieldLocations.TotalNumberOfDisks..]) + }; return true; } - public static void WriteBlock(Stream stream, long zip64EOCDRecordStart) + public static Zip64EndOfCentralDirectoryLocator TryReadBlock(Stream stream) { Span blockContents = stackalloc byte[TotalSize]; + int bytesRead = stream.ReadAtLeast(blockContents, blockContents.Length, throwOnEndOfStream: false); + bool zip64eocdLocatorProper = TryReadBlockCore(blockContents, bytesRead, out Zip64EndOfCentralDirectoryLocator? zip64EOCDLocator); + Debug.Assert(zip64eocdLocatorProper && zip64EOCDLocator != null); // we just found this using the signature finder, so it should be okay + + return zip64EOCDLocator; + } + + private static void WriteBlockCore(Span blockContents, long zip64EOCDRecordStart) + { SignatureConstantBytes.CopyTo(blockContents[FieldLocations.Signature..]); // number of disk with start of zip64 eocd BinaryPrimitives.WriteUInt32LittleEndian(blockContents[FieldLocations.NumberOfDiskWithZip64EOCD..], 0); @@ -442,6 +454,12 @@ public static void WriteBlock(Stream stream, long zip64EOCDRecordStart) // total number of disks BinaryPrimitives.WriteUInt32LittleEndian(blockContents[FieldLocations.TotalNumberOfDisks..], 1); + } + + public static void WriteBlock(Stream stream, long zip64EOCDRecordStart) + { + Span blockContents = stackalloc byte[TotalSize]; + WriteBlockCore(blockContents, zip64EOCDRecordStart); stream.Write(blockContents); } } @@ -466,14 +484,9 @@ internal sealed partial class Zip64EndOfCentralDirectoryRecord public ulong SizeOfCentralDirectory; public ulong OffsetOfCentralDirectory; - public static bool TryReadBlock(Stream stream, out Zip64EndOfCentralDirectoryRecord zip64EOCDRecord) + private static bool TryReadBlockCore(Span blockContents, int bytesRead, [NotNullWhen(returnValue: true)] out Zip64EndOfCentralDirectoryRecord? 
zip64EOCDRecord) { - Span blockContents = stackalloc byte[BlockConstantSectionSize]; - int bytesRead; - - zip64EOCDRecord = new(); - bytesRead = stream.ReadAtLeast(blockContents, blockContents.Length, throwOnEndOfStream: false); - + zip64EOCDRecord = null; if (bytesRead < BlockConstantSectionSize) { return false; @@ -484,23 +497,36 @@ public static bool TryReadBlock(Stream stream, out Zip64EndOfCentralDirectoryRec return false; } - zip64EOCDRecord.SizeOfThisRecord = BinaryPrimitives.ReadUInt64LittleEndian(blockContents[FieldLocations.SizeOfThisRecord..]); - zip64EOCDRecord.VersionMadeBy = BinaryPrimitives.ReadUInt16LittleEndian(blockContents[FieldLocations.VersionMadeBy..]); - zip64EOCDRecord.VersionNeededToExtract = BinaryPrimitives.ReadUInt16LittleEndian(blockContents[FieldLocations.VersionNeededToExtract..]); - zip64EOCDRecord.NumberOfThisDisk = BinaryPrimitives.ReadUInt32LittleEndian(blockContents[FieldLocations.NumberOfThisDisk..]); - zip64EOCDRecord.NumberOfDiskWithStartOfCD = BinaryPrimitives.ReadUInt32LittleEndian(blockContents[FieldLocations.NumberOfDiskWithStartOfCD..]); - zip64EOCDRecord.NumberOfEntriesOnThisDisk = BinaryPrimitives.ReadUInt64LittleEndian(blockContents[FieldLocations.NumberOfEntriesOnThisDisk..]); - zip64EOCDRecord.NumberOfEntriesTotal = BinaryPrimitives.ReadUInt64LittleEndian(blockContents[FieldLocations.NumberOfEntriesTotal..]); - zip64EOCDRecord.SizeOfCentralDirectory = BinaryPrimitives.ReadUInt64LittleEndian(blockContents[FieldLocations.SizeOfCentralDirectory..]); - zip64EOCDRecord.OffsetOfCentralDirectory = BinaryPrimitives.ReadUInt64LittleEndian(blockContents[FieldLocations.OffsetOfCentralDirectory..]); + zip64EOCDRecord = new Zip64EndOfCentralDirectoryRecord() + { + SizeOfThisRecord = BinaryPrimitives.ReadUInt64LittleEndian(blockContents[FieldLocations.SizeOfThisRecord..]), + VersionMadeBy = BinaryPrimitives.ReadUInt16LittleEndian(blockContents[FieldLocations.VersionMadeBy..]), + VersionNeededToExtract = BinaryPrimitives.ReadUInt16LittleEndian(blockContents[FieldLocations.VersionNeededToExtract..]), + NumberOfThisDisk = BinaryPrimitives.ReadUInt32LittleEndian(blockContents[FieldLocations.NumberOfThisDisk..]), + NumberOfDiskWithStartOfCD = BinaryPrimitives.ReadUInt32LittleEndian(blockContents[FieldLocations.NumberOfDiskWithStartOfCD..]), + NumberOfEntriesOnThisDisk = BinaryPrimitives.ReadUInt64LittleEndian(blockContents[FieldLocations.NumberOfEntriesOnThisDisk..]), + NumberOfEntriesTotal = BinaryPrimitives.ReadUInt64LittleEndian(blockContents[FieldLocations.NumberOfEntriesTotal..]), + SizeOfCentralDirectory = BinaryPrimitives.ReadUInt64LittleEndian(blockContents[FieldLocations.SizeOfCentralDirectory..]), + OffsetOfCentralDirectory = BinaryPrimitives.ReadUInt64LittleEndian(blockContents[FieldLocations.OffsetOfCentralDirectory..]) + }; return true; } - public static void WriteBlock(Stream stream, long numberOfEntries, long startOfCentralDirectory, long sizeOfCentralDirectory) + public static Zip64EndOfCentralDirectoryRecord TryReadBlock(Stream stream) { Span blockContents = stackalloc byte[BlockConstantSectionSize]; + int bytesRead = stream.ReadAtLeast(blockContents, blockContents.Length, throwOnEndOfStream: false); + if (!TryReadBlockCore(blockContents, bytesRead, out Zip64EndOfCentralDirectoryRecord? 
zip64EOCDRecord)) + { + throw new InvalidDataException(SR.Zip64EOCDNotWhereExpected); + } + return zip64EOCDRecord; + } + + private static void WriteBlockCore(Span blockContents, long numberOfEntries, long startOfCentralDirectory, long sizeOfCentralDirectory) + { SignatureConstantBytes.CopyTo(blockContents[FieldLocations.Signature..]); BinaryPrimitives.WriteUInt64LittleEndian(blockContents[FieldLocations.SizeOfThisRecord..], NormalSize); // version made by: high byte is 0 for MS DOS, low byte is version needed @@ -517,7 +543,12 @@ public static void WriteBlock(Stream stream, long numberOfEntries, long startOfC BinaryPrimitives.WriteInt64LittleEndian(blockContents[FieldLocations.NumberOfEntriesTotal..], numberOfEntries); BinaryPrimitives.WriteInt64LittleEndian(blockContents[FieldLocations.SizeOfCentralDirectory..], sizeOfCentralDirectory); BinaryPrimitives.WriteInt64LittleEndian(blockContents[FieldLocations.OffsetOfCentralDirectory..], startOfCentralDirectory); + } + public static void WriteBlock(Stream stream, long numberOfEntries, long startOfCentralDirectory, long sizeOfCentralDirectory) + { + Span blockContents = stackalloc byte[BlockConstantSectionSize]; + WriteBlockCore(blockContents, numberOfEntries, startOfCentralDirectory, sizeOfCentralDirectory); // write Zip 64 EOCD record stream.Write(blockContents); } @@ -531,53 +562,59 @@ internal readonly partial struct ZipLocalFileHeader public static ReadOnlySpan SignatureConstantBytes => [0x50, 0x4B, 0x03, 0x04]; public const int SizeOfLocalHeader = 30; + private static void GetExtraFieldsInitialize(Stream stream, out int relativeFilenameLengthLocation, out int relativeExtraFieldLengthLocation) + { + relativeFilenameLengthLocation = FieldLocations.FilenameLength - FieldLocations.FilenameLength; + relativeExtraFieldLengthLocation = FieldLocations.ExtraFieldLength - FieldLocations.FilenameLength; + stream.Seek(FieldLocations.FilenameLength, SeekOrigin.Current); + } + + private static void GetExtraFieldsCore(Span fixedHeaderBuffer, int relativeFilenameLengthLocation, int relativeExtraFieldLengthLocation, out ushort filenameLength, out ushort extraFieldLength) + { + filenameLength = BinaryPrimitives.ReadUInt16LittleEndian(fixedHeaderBuffer[relativeFilenameLengthLocation..]); + extraFieldLength = BinaryPrimitives.ReadUInt16LittleEndian(fixedHeaderBuffer[relativeExtraFieldLengthLocation..]); + } + + private static List GetExtraFieldPostReadWork(Span extraFieldBuffer, out byte[] trailingData) + { + List list = ZipGenericExtraField.ParseExtraField(extraFieldBuffer, out ReadOnlySpan trailingDataSpan); + Zip64ExtraField.RemoveZip64Blocks(list); + trailingData = trailingDataSpan.ToArray(); + return list; + } + public static List GetExtraFields(Stream stream, out byte[] trailingData) { // assumes that TrySkipBlock has already been called, so we don't have to validate twice - const int StackAllocationThreshold = 512; - - List result; - int relativeFilenameLengthLocation = FieldLocations.FilenameLength - FieldLocations.FilenameLength; - int relativeExtraFieldLengthLocation = FieldLocations.ExtraFieldLength - FieldLocations.FilenameLength; Span fixedHeaderBuffer = stackalloc byte[FieldLengths.FilenameLength + FieldLengths.ExtraFieldLength]; - - stream.Seek(FieldLocations.FilenameLength, SeekOrigin.Current); + GetExtraFieldsInitialize(stream, out int relativeFilenameLengthLocation, out int relativeExtraFieldLengthLocation); stream.ReadExactly(fixedHeaderBuffer); - ushort filenameLength = 
BinaryPrimitives.ReadUInt16LittleEndian(fixedHeaderBuffer[relativeFilenameLengthLocation..]); - ushort extraFieldLength = BinaryPrimitives.ReadUInt16LittleEndian(fixedHeaderBuffer[relativeExtraFieldLengthLocation..]); - byte[]? arrayPoolBuffer = extraFieldLength > StackAllocationThreshold ? System.Buffers.ArrayPool.Shared.Rent(extraFieldLength) : null; - Span extraFieldBuffer = extraFieldLength <= StackAllocationThreshold ? stackalloc byte[StackAllocationThreshold].Slice(0, extraFieldLength) : arrayPoolBuffer.AsSpan(0, extraFieldLength); + GetExtraFieldsCore(fixedHeaderBuffer, relativeFilenameLengthLocation, relativeExtraFieldLengthLocation, out ushort filenameLength, out ushort extraFieldLength); + const int StackAllocationThreshold = 512; + + byte[]? arrayPoolBuffer = extraFieldLength > StackAllocationThreshold ? ArrayPool.Shared.Rent(extraFieldLength) : null; + Span extraFieldBuffer = extraFieldLength <= StackAllocationThreshold ? stackalloc byte[StackAllocationThreshold].Slice(0, extraFieldLength) : arrayPoolBuffer.AsSpan(0, extraFieldLength); try { stream.Seek(filenameLength, SeekOrigin.Current); stream.ReadExactly(extraFieldBuffer); - result = ZipGenericExtraField.ParseExtraField(extraFieldBuffer, out ReadOnlySpan trailingDataSpan); - Zip64ExtraField.RemoveZip64Blocks(result); - trailingData = trailingDataSpan.ToArray(); - - return result; + return GetExtraFieldPostReadWork(extraFieldBuffer, out trailingData); } finally { if (arrayPoolBuffer != null) { - System.Buffers.ArrayPool.Shared.Return(arrayPoolBuffer); + ArrayPool.Shared.Return(arrayPoolBuffer); } } } - // will not throw end of stream exception - public static bool TrySkipBlock(Stream stream) + private static bool TrySkipBlockCore(Stream stream, Span blockBytes, int bytesRead, long currPosition) { - Span blockBytes = stackalloc byte[FieldLengths.Signature]; - long currPosition = stream.Position; - int bytesRead; - - bytesRead = stream.ReadAtLeast(blockBytes, blockBytes.Length, throwOnEndOfStream: false); if (bytesRead != FieldLengths.Signature || !blockBytes.SequenceEqual(SignatureConstantBytes)) { return false; @@ -594,7 +631,12 @@ public static bool TrySkipBlock(Stream stream) // Reuse blockBytes to read the filename length and the extra field length - these two consecutive // fields fit inside blockBytes. Debug.Assert(blockBytes.Length == FieldLengths.FilenameLength + FieldLengths.ExtraFieldLength); - bytesRead = stream.ReadAtLeast(blockBytes, blockBytes.Length, throwOnEndOfStream: false); + + return true; + } + + private static bool TrySkipBlockFinalize(Stream stream, Span blockBytes, int bytesRead) + { if (bytesRead != FieldLengths.FilenameLength + FieldLengths.ExtraFieldLength) { return false; @@ -614,6 +656,20 @@ public static bool TrySkipBlock(Stream stream) return true; } + + // will not throw end of stream exception + public static bool TrySkipBlock(Stream stream) + { + Span blockBytes = stackalloc byte[FieldLengths.Signature]; + long currPosition = stream.Position; + int bytesRead = stream.ReadAtLeast(blockBytes, blockBytes.Length, throwOnEndOfStream: false); + if (!TrySkipBlockCore(stream, blockBytes, bytesRead, currPosition)) + { + return false; + } + bytesRead = stream.ReadAtLeast(blockBytes, blockBytes.Length, throwOnEndOfStream: false); + return TrySkipBlockFinalize(stream, blockBytes, bytesRead); + } } internal sealed partial class ZipCentralDirectoryFileHeader @@ -622,6 +678,8 @@ internal sealed partial class ZipCentralDirectoryFileHeader // ZIP files store values in little endian, so this is reversed. 
public static ReadOnlySpan SignatureConstantBytes => [0x50, 0x4B, 0x01, 0x02]; + private const int StackAllocationThreshold = 512; + // These are the minimum possible size, assuming the zip file comments variable section is empty public const int BlockConstantSectionSize = 46; @@ -647,19 +705,17 @@ internal sealed partial class ZipCentralDirectoryFileHeader public List? ExtraFields; public byte[]? TrailingExtraFieldData; - // if saveExtraFieldsAndComments is false, FileComment and ExtraFields will be null - // in either case, the zip64 extra field info will be incorporated into other fields - public static bool TryReadBlock(ReadOnlySpan buffer, Stream furtherReads, bool saveExtraFieldsAndComments, out int bytesRead, [NotNullWhen(returnValue: true)] out ZipCentralDirectoryFileHeader? header) + private static bool TryReadBlockInitialize(ReadOnlySpan buffer, [NotNullWhen(returnValue: true)] out ZipCentralDirectoryFileHeader? header, out int bytesRead, out uint compressedSizeSmall, out uint uncompressedSizeSmall, out ushort diskNumberStartSmall, out uint relativeOffsetOfLocalHeaderSmall) { - header = null; - - const int StackAllocationThreshold = 512; - - bytesRead = 0; - // the buffer will always be large enough for at least the constant section to be verified Debug.Assert(buffer.Length >= BlockConstantSectionSize); + header = null; + bytesRead = 0; + compressedSizeSmall = 0; + uncompressedSizeSmall = 0; + diskNumberStartSmall = 0; + relativeOffsetOfLocalHeaderSmall = 0; if (!buffer.StartsWith(SignatureConstantBytes)) { return false; @@ -681,23 +737,71 @@ public static bool TryReadBlock(ReadOnlySpan buffer, Stream furtherReads, ExternalFileAttributes = BinaryPrimitives.ReadUInt32LittleEndian(buffer[FieldLocations.ExternalFileAttributes..]) }; - uint compressedSizeSmall = BinaryPrimitives.ReadUInt32LittleEndian(buffer[FieldLocations.CompressedSize..]); - uint uncompressedSizeSmall = BinaryPrimitives.ReadUInt32LittleEndian(buffer[FieldLocations.UncompressedSize..]); - ushort diskNumberStartSmall = BinaryPrimitives.ReadUInt16LittleEndian(buffer[FieldLocations.DiskNumberStart..]); - uint relativeOffsetOfLocalHeaderSmall = BinaryPrimitives.ReadUInt32LittleEndian(buffer[FieldLocations.RelativeOffsetOfLocalHeader..]); - - // Assemble the dynamic header in a separate buffer. We can't guarantee that it's all in the input buffer, - // some additional data might need to come from the stream. - int dynamicHeaderSize = header.FilenameLength + header.ExtraFieldLength + header.FileCommentLength; - int remainingBufferLength = buffer.Length - FieldLocations.DynamicData; - int bytesToRead = dynamicHeaderSize - remainingBufferLength; - scoped ReadOnlySpan dynamicHeader; - byte[]? 
arrayPoolBuffer = null; + compressedSizeSmall = BinaryPrimitives.ReadUInt32LittleEndian(buffer[FieldLocations.CompressedSize..]); + uncompressedSizeSmall = BinaryPrimitives.ReadUInt32LittleEndian(buffer[FieldLocations.UncompressedSize..]); + diskNumberStartSmall = BinaryPrimitives.ReadUInt16LittleEndian(buffer[FieldLocations.DiskNumberStart..]); + relativeOffsetOfLocalHeaderSmall = BinaryPrimitives.ReadUInt32LittleEndian(buffer[FieldLocations.RelativeOffsetOfLocalHeader..]); + + return true; + } + + private static void TryReadBlockFinalize(ZipCentralDirectoryFileHeader header, ReadOnlySpan dynamicHeader, int dynamicHeaderSize, uint uncompressedSizeSmall, uint compressedSizeSmall, ushort diskNumberStartSmall, uint relativeOffsetOfLocalHeaderSmall, bool saveExtraFieldsAndComments, ref int bytesRead, out Zip64ExtraField zip64) + { + header.Filename = dynamicHeader[..header.FilenameLength].ToArray(); + + bool uncompressedSizeInZip64 = uncompressedSizeSmall == ZipHelper.Mask32Bit; + bool compressedSizeInZip64 = compressedSizeSmall == ZipHelper.Mask32Bit; + bool relativeOffsetInZip64 = relativeOffsetOfLocalHeaderSmall == ZipHelper.Mask32Bit; + bool diskNumberStartInZip64 = diskNumberStartSmall == ZipHelper.Mask16Bit; + + ReadOnlySpan zipExtraFields = dynamicHeader.Slice(header.FilenameLength, header.ExtraFieldLength); - Zip64ExtraField zip64; + if (saveExtraFieldsAndComments) + { + header.ExtraFields = ZipGenericExtraField.ParseExtraField(zipExtraFields, out ReadOnlySpan trailingDataSpan); + zip64 = Zip64ExtraField.GetAndRemoveZip64Block(header.ExtraFields, + uncompressedSizeInZip64, compressedSizeInZip64, + relativeOffsetInZip64, diskNumberStartInZip64); + header.TrailingExtraFieldData = trailingDataSpan.ToArray(); + } + else + { + header.ExtraFields = null; + header.TrailingExtraFieldData = null; + zip64 = Zip64ExtraField.GetJustZip64Block(zipExtraFields, + uncompressedSizeInZip64, compressedSizeInZip64, + relativeOffsetInZip64, diskNumberStartInZip64); + } + + header.FileComment = dynamicHeader.Slice(header.FilenameLength + header.ExtraFieldLength, header.FileCommentLength).ToArray(); + + bytesRead = FieldLocations.DynamicData + dynamicHeaderSize; + + header.UncompressedSize = zip64.UncompressedSize ?? uncompressedSizeSmall; + header.CompressedSize = zip64.CompressedSize ?? compressedSizeSmall; + header.RelativeOffsetOfLocalHeader = zip64.LocalHeaderOffset ?? relativeOffsetOfLocalHeaderSmall; + header.DiskNumberStart = zip64.StartDiskNumber ?? diskNumberStartSmall; + } + + // if saveExtraFieldsAndComments is false, FileComment and ExtraFields will be null + // in either case, the zip64 extra field info will be incorporated into other fields + public static bool TryReadBlock(ReadOnlySpan buffer, Stream furtherReads, bool saveExtraFieldsAndComments, out int bytesRead, [NotNullWhen(returnValue: true)] out ZipCentralDirectoryFileHeader? header) + { + if (!TryReadBlockInitialize(buffer, out header, out bytesRead, out uint compressedSizeSmall, out uint uncompressedSizeSmall, out ushort diskNumberStartSmall, out uint relativeOffsetOfLocalHeaderSmall)) + { + return false; + } + byte[]? arrayPoolBuffer = null; try { + // Assemble the dynamic header in a separate buffer. We can't guarantee that it's all in the input buffer, + // some additional data might need to come from the stream. 
+ int dynamicHeaderSize = header.FilenameLength + header.ExtraFieldLength + header.FileCommentLength; + int remainingBufferLength = buffer.Length - FieldLocations.DynamicData; + int bytesToRead = dynamicHeaderSize - remainingBufferLength; + scoped ReadOnlySpan dynamicHeader; + // No need to read extra data from the stream, no need to allocate a new buffer. if (bytesToRead <= 0) { @@ -708,7 +812,7 @@ public static bool TryReadBlock(ReadOnlySpan buffer, Stream furtherReads, { if (dynamicHeaderSize > StackAllocationThreshold) { - arrayPoolBuffer = System.Buffers.ArrayPool.Shared.Rent(dynamicHeaderSize); + arrayPoolBuffer = ArrayPool.Shared.Rent(dynamicHeaderSize); } Span collatedHeader = dynamicHeaderSize <= StackAllocationThreshold ? stackalloc byte[StackAllocationThreshold].Slice(0, dynamicHeaderSize) : arrayPoolBuffer.AsSpan(0, dynamicHeaderSize); @@ -725,50 +829,16 @@ public static bool TryReadBlock(ReadOnlySpan buffer, Stream furtherReads, dynamicHeader = collatedHeader; } - header.Filename = dynamicHeader[..header.FilenameLength].ToArray(); - - bool uncompressedSizeInZip64 = uncompressedSizeSmall == ZipHelper.Mask32Bit; - bool compressedSizeInZip64 = compressedSizeSmall == ZipHelper.Mask32Bit; - bool relativeOffsetInZip64 = relativeOffsetOfLocalHeaderSmall == ZipHelper.Mask32Bit; - bool diskNumberStartInZip64 = diskNumberStartSmall == ZipHelper.Mask16Bit; - - ReadOnlySpan zipExtraFields = dynamicHeader.Slice(header.FilenameLength, header.ExtraFieldLength); - - zip64 = new(); - if (saveExtraFieldsAndComments) - { - header.ExtraFields = ZipGenericExtraField.ParseExtraField(zipExtraFields, out ReadOnlySpan trailingDataSpan); - zip64 = Zip64ExtraField.GetAndRemoveZip64Block(header.ExtraFields, - uncompressedSizeInZip64, compressedSizeInZip64, - relativeOffsetInZip64, diskNumberStartInZip64); - header.TrailingExtraFieldData = trailingDataSpan.ToArray(); - } - else - { - header.ExtraFields = null; - header.TrailingExtraFieldData = null; - zip64 = Zip64ExtraField.GetJustZip64Block(zipExtraFields, - uncompressedSizeInZip64, compressedSizeInZip64, - relativeOffsetInZip64, diskNumberStartInZip64); - } - - header.FileComment = dynamicHeader.Slice(header.FilenameLength + header.ExtraFieldLength, header.FileCommentLength).ToArray(); + TryReadBlockFinalize(header, dynamicHeader, dynamicHeaderSize, uncompressedSizeSmall, compressedSizeSmall, diskNumberStartSmall, relativeOffsetOfLocalHeaderSmall, saveExtraFieldsAndComments, ref bytesRead, out Zip64ExtraField zip64); } finally { if (arrayPoolBuffer != null) { - System.Buffers.ArrayPool.Shared.Return(arrayPoolBuffer); + ArrayPool.Shared.Return(arrayPoolBuffer); } } - bytesRead = FieldLocations.DynamicData + dynamicHeaderSize; - - header.UncompressedSize = zip64.UncompressedSize ?? uncompressedSizeSmall; - header.CompressedSize = zip64.CompressedSize ?? compressedSizeSmall; - header.RelativeOffsetOfLocalHeader = zip64.LocalHeaderOffset ?? relativeOffsetOfLocalHeaderSmall; - header.DiskNumberStart = zip64.StartDiskNumber ?? diskNumberStartSmall; - return true; } } @@ -777,12 +847,12 @@ internal sealed partial class ZipEndOfCentralDirectoryBlock { // The Zip File Format Specification references 0x06054B50, this is a big endian representation. // ZIP files store values in little endian, so this is reversed. - public static ReadOnlySpan SignatureConstantBytes => [0x50, 0x4B, 0x05, 0x06]; + public static readonly byte[] SignatureConstantBytes = [0x50, 0x4B, 0x05, 0x06]; // This also assumes a zero-length comment. 
- public static readonly int TotalSize = FieldLocations.ArchiveCommentLength + FieldLengths.ArchiveCommentLength; + public const int TotalSize = FieldLocations.ArchiveCommentLength + FieldLengths.ArchiveCommentLength; // These are the minimum possible size, assuming the zip file comments variable section is empty - public static readonly int SizeOfBlockWithoutSignature = TotalSize - FieldLengths.Signature; + public const int SizeOfBlockWithoutSignature = TotalSize - FieldLengths.Signature; // The end of central directory can have a variable size zip file comment at the end, but its max length can be 64K // The Zip File Format Specification does not explicitly mention a max size for this field, but we are assuming this @@ -799,10 +869,8 @@ internal sealed partial class ZipEndOfCentralDirectoryBlock private byte[]? _archiveComment; public byte[] ArchiveComment => _archiveComment ??= []; - public static void WriteBlock(Stream stream, long numberOfEntries, long startOfCentralDirectory, long sizeOfCentralDirectory, byte[] archiveComment) + private static void WriteBlockInitialize(Span blockContents, long numberOfEntries, long startOfCentralDirectory, long sizeOfCentralDirectory, byte[] archiveComment) { - Span blockContents = stackalloc byte[TotalSize]; - ushort numberOfEntriesTruncated = numberOfEntries > ushort.MaxValue ? ZipHelper.Mask16Bit : (ushort)numberOfEntries; uint startOfCentralDirectoryTruncated = startOfCentralDirectory > uint.MaxValue ? @@ -827,6 +895,13 @@ public static void WriteBlock(Stream stream, long numberOfEntries, long startOfC // zip file comment length BinaryPrimitives.WriteUInt16LittleEndian(blockContents[FieldLocations.ArchiveCommentLength..], (ushort)archiveComment.Length); + } + + public static void WriteBlock(Stream stream, long numberOfEntries, long startOfCentralDirectory, long sizeOfCentralDirectory, byte[] archiveComment) + { + Span blockContents = stackalloc byte[TotalSize]; + + WriteBlockInitialize(blockContents, numberOfEntries, startOfCentralDirectory, sizeOfCentralDirectory, archiveComment); stream.Write(blockContents); if (archiveComment.Length > 0) @@ -835,14 +910,11 @@ public static void WriteBlock(Stream stream, long numberOfEntries, long startOfC } } - public static bool TryReadBlock(Stream stream, out ZipEndOfCentralDirectoryBlock eocdBlock) + private static bool TryReadBlockInitialize(Stream stream, Span blockContents, int bytesRead, [NotNullWhen(returnValue: true)] out ZipEndOfCentralDirectoryBlock? 
eocdBlock, out bool readComment) { - Span blockContents = stackalloc byte[TotalSize]; - int bytesRead; - - eocdBlock = new(); - bytesRead = stream.ReadAtLeast(blockContents, blockContents.Length, throwOnEndOfStream: false); + readComment = false; + eocdBlock = null; if (bytesRead < TotalSize) { return false; @@ -852,15 +924,17 @@ public static bool TryReadBlock(Stream stream, out ZipEndOfCentralDirectoryBlock { return false; } - - eocdBlock.Signature = BinaryPrimitives.ReadUInt32LittleEndian(blockContents[FieldLocations.Signature..]); - eocdBlock.NumberOfThisDisk = BinaryPrimitives.ReadUInt16LittleEndian(blockContents[FieldLocations.NumberOfThisDisk..]); - eocdBlock.NumberOfTheDiskWithTheStartOfTheCentralDirectory = BinaryPrimitives.ReadUInt16LittleEndian(blockContents[FieldLocations.NumberOfTheDiskWithTheStartOfTheCentralDirectory..]); - eocdBlock.NumberOfEntriesInTheCentralDirectoryOnThisDisk = BinaryPrimitives.ReadUInt16LittleEndian(blockContents[FieldLocations.NumberOfEntriesInTheCentralDirectoryOnThisDisk..]); - eocdBlock.NumberOfEntriesInTheCentralDirectory = BinaryPrimitives.ReadUInt16LittleEndian(blockContents[FieldLocations.NumberOfEntriesInTheCentralDirectory..]); - eocdBlock.SizeOfCentralDirectory = BinaryPrimitives.ReadUInt32LittleEndian(blockContents[FieldLocations.SizeOfCentralDirectory..]); - eocdBlock.OffsetOfStartOfCentralDirectoryWithRespectToTheStartingDiskNumber = - BinaryPrimitives.ReadUInt32LittleEndian(blockContents[FieldLocations.OffsetOfStartOfCentralDirectoryWithRespectToTheStartingDiskNumber..]); + eocdBlock = new() + { + Signature = BinaryPrimitives.ReadUInt32LittleEndian(blockContents[FieldLocations.Signature..]), + NumberOfThisDisk = BinaryPrimitives.ReadUInt16LittleEndian(blockContents[FieldLocations.NumberOfThisDisk..]), + NumberOfTheDiskWithTheStartOfTheCentralDirectory = BinaryPrimitives.ReadUInt16LittleEndian(blockContents[FieldLocations.NumberOfTheDiskWithTheStartOfTheCentralDirectory..]), + NumberOfEntriesInTheCentralDirectoryOnThisDisk = BinaryPrimitives.ReadUInt16LittleEndian(blockContents[FieldLocations.NumberOfEntriesInTheCentralDirectoryOnThisDisk..]), + NumberOfEntriesInTheCentralDirectory = BinaryPrimitives.ReadUInt16LittleEndian(blockContents[FieldLocations.NumberOfEntriesInTheCentralDirectory..]), + SizeOfCentralDirectory = BinaryPrimitives.ReadUInt32LittleEndian(blockContents[FieldLocations.SizeOfCentralDirectory..]), + OffsetOfStartOfCentralDirectoryWithRespectToTheStartingDiskNumber = + BinaryPrimitives.ReadUInt32LittleEndian(blockContents[FieldLocations.OffsetOfStartOfCentralDirectoryWithRespectToTheStartingDiskNumber..]) + }; ushort commentLength = BinaryPrimitives.ReadUInt16LittleEndian(blockContents[FieldLocations.ArchiveCommentLength..]); @@ -876,10 +950,28 @@ public static bool TryReadBlock(Stream stream, out ZipEndOfCentralDirectoryBlock else { eocdBlock._archiveComment = new byte[commentLength]; - stream.ReadExactly(eocdBlock._archiveComment); + readComment = true; } return true; } + + public static ZipEndOfCentralDirectoryBlock ReadBlock(Stream stream) + { + Span blockContents = stackalloc byte[TotalSize]; + int bytesRead = stream.ReadAtLeast(blockContents, blockContents.Length, throwOnEndOfStream: false); + + if (!TryReadBlockInitialize(stream, blockContents, bytesRead, out ZipEndOfCentralDirectoryBlock? 
eocdBlock, out bool readComment)) + { + // We shouldn't get here because we found the eocd block using the signature finder + throw new InvalidDataException(SR.EOCDNotFound); + } + else if (readComment) + { + stream.ReadExactly(eocdBlock._archiveComment); + } + + return eocdBlock; + } } } diff --git a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipCustomStreams.cs b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipCustomStreams.cs index b3987fc68e6e6b..b69f0b84c0827a 100644 --- a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipCustomStreams.cs +++ b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipCustomStreams.cs @@ -486,6 +486,18 @@ public override int Read(byte[] buffer, int offset, int count) throw new NotSupportedException(SR.ReadingNotSupported); } + public override Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) + { + ThrowIfDisposed(); + throw new NotSupportedException(SR.ReadingNotSupported); + } + + public override ValueTask ReadAsync(Memory buffer, CancellationToken cancellationToken = default) + { + ThrowIfDisposed(); + throw new NotSupportedException(SR.ReadingNotSupported); + } + public override long Seek(long offset, SeekOrigin origin) { ThrowIfDisposed(); @@ -604,5 +616,20 @@ protected override void Dispose(bool disposing) } base.Dispose(disposing); } + + public override async ValueTask DisposeAsync() + { + if (!_isDisposed) + { + // if we never wrote through here, save the position + if (!_everWritten) + _initialPosition = _baseBaseStream.Position; + if (!_leaveOpenOnClose) + await _baseStream.DisposeAsync().ConfigureAwait(false); // Close my super-stream (flushes the last data) + _saveCrcAndSizes?.Invoke(_initialPosition, Position, _checksum, _baseBaseStream, _zipArchiveEntry, _onClose); + _isDisposed = true; + } + await base.DisposeAsync().ConfigureAwait(false); + } } } diff --git a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipHelper.Async.cs b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipHelper.Async.cs new file mode 100644 index 00000000000000..04b65b6fda6186 --- /dev/null +++ b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipHelper.Async.cs @@ -0,0 +1,125 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Buffers; +using System.Diagnostics; +using System.Threading; +using System.Threading.Tasks; + +namespace System.IO.Compression; + +internal static partial class ZipHelper +{ + /// + /// Asynchronously reads exactly bytesToRead out of stream, unless it is out of bytes.
+ /// + internal static async Task ReadBytesAsync(Stream stream, Memory buffer, int bytesToRead, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + int bytesRead = await stream.ReadAtLeastAsync(buffer, bytesToRead, throwOnEndOfStream: false, cancellationToken).ConfigureAwait(false); + + if (bytesRead < bytesToRead) + { + throw new IOException(SR.UnexpectedEndOfStream); + } + return bytesRead; + } + + // Asynchronously assumes all bytes of signatureToFind are non zero, looks backwards from current position in stream, + // assumes maxBytesToRead is positive, ensures to not read beyond the provided max number of bytes, + // if the signature is found then returns true and positions stream at first byte of signature + // if the signature is not found, returns false + internal static async Task SeekBackwardsToSignatureAsync(Stream stream, ReadOnlyMemory signatureToFind, int maxBytesToRead, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + Debug.Assert(signatureToFind.Length != 0); + Debug.Assert(maxBytesToRead > 0); + + // This method reads blocks of BackwardsSeekingBufferSize bytes, searching each block for signatureToFind. + // A simple LastIndexOf(signatureToFind) doesn't account for cases where signatureToFind is split, starting in + // one block and ending in another. + // To account for this, we read blocks of BackwardsSeekingBufferSize bytes, but seek backwards by + // [BackwardsSeekingBufferSize - signatureToFind.Length] bytes. This guarantees that signatureToFind will not be + // split between two consecutive blocks, at the cost of reading [signatureToFind.Length] duplicate bytes in each iteration. + int bufferPointer = 0; + byte[] buffer = ArrayPool.Shared.Rent(BackwardsSeekingBufferSize); + Memory bufferMemory = buffer.AsMemory(0, BackwardsSeekingBufferSize); + + try + { + bool outOfBytes = false; + bool signatureFound = false; + + int totalBytesRead = 0; + int duplicateBytesRead = 0; + + while (!signatureFound && !outOfBytes && totalBytesRead <= maxBytesToRead) + { + int bytesRead = await SeekBackwardsAndReadAsync(stream, bufferMemory, signatureToFind.Length, cancellationToken).ConfigureAwait(false); + + outOfBytes = bytesRead < bufferMemory.Length; + if (bytesRead < bufferMemory.Length) + { + bufferMemory = bufferMemory.Slice(0, bytesRead); + } + + bufferPointer = bufferMemory.Span.LastIndexOf(signatureToFind.Span); + Debug.Assert(bufferPointer < bufferMemory.Length); + + totalBytesRead += (bufferMemory.Length - duplicateBytesRead); + + if (bufferPointer != -1) + { + signatureFound = true; + break; + } + + duplicateBytesRead = signatureToFind.Length; + } + + if (!signatureFound) + { + return false; + } + else + { + stream.Seek(bufferPointer, SeekOrigin.Current); + return true; + } + } + finally + { + ArrayPool.Shared.Return(buffer); + } + } + + // Asynchronously returns the number of bytes actually read. + // Allows successive buffers to overlap by a number of bytes. This handles cases where + // the value being searched for straddles buffers (i.e. where the first buffer ends with the + // first X bytes being searched for, and the second buffer begins with the remaining bytes.) 
+ private static async Task SeekBackwardsAndReadAsync(Stream stream, Memory buffer, int overlap, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + int bytesRead; + + if (stream.Position >= buffer.Length) + { + Debug.Assert(overlap <= buffer.Length); + stream.Seek(-(buffer.Length - overlap), SeekOrigin.Current); + bytesRead = await ReadBytesAsync(stream, buffer, buffer.Length, cancellationToken).ConfigureAwait(false); + stream.Seek(-buffer.Length, SeekOrigin.Current); + } + else + { + int bytesToRead = (int)stream.Position; + stream.Seek(0, SeekOrigin.Begin); + bytesRead = await ReadBytesAsync(stream, buffer, bytesToRead, cancellationToken).ConfigureAwait(false); + stream.Seek(0, SeekOrigin.Begin); + } + + return bytesRead; + } +} diff --git a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipHelper.cs b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipHelper.cs index 4cc4b4feef21d8..09ff508270bf4d 100644 --- a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipHelper.cs +++ b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipHelper.cs @@ -5,236 +5,236 @@ using System.Diagnostics; using System.Text; -namespace System.IO.Compression +namespace System.IO.Compression; + +internal static partial class ZipHelper { - internal static class ZipHelper - { - internal const uint Mask32Bit = 0xFFFFFFFF; - internal const ushort Mask16Bit = 0xFFFF; + internal const uint Mask32Bit = 0xFFFFFFFF; + internal const ushort Mask16Bit = 0xFFFF; - private const int BackwardsSeekingBufferSize = 4096; + private const int BackwardsSeekingBufferSize = 4096; - internal const int ValidZipDate_YearMin = 1980; - internal const int ValidZipDate_YearMax = 2107; + internal const int ValidZipDate_YearMin = 1980; + internal const int ValidZipDate_YearMax = 2107; - private static readonly DateTime s_invalidDateIndicator = new DateTime(ValidZipDate_YearMin, 1, 1, 0, 0, 0); + private static readonly DateTime s_invalidDateIndicator = new DateTime(ValidZipDate_YearMin, 1, 1, 0, 0, 0); - internal static Encoding GetEncoding(string text) + internal static Encoding GetEncoding(string text) + { + if (text.AsSpan().ContainsAnyExceptInRange((char)32, (char)126)) { - if (text.AsSpan().ContainsAnyExceptInRange((char)32, (char)126)) - { - // The Zip Format uses code page 437 when the Unicode bit is not set. This format - // is the same as ASCII for characters 32-126 but differs otherwise. If we can fit - // the string into CP437 then we treat ASCII as acceptable. - return Encoding.UTF8; - } - - return Encoding.ASCII; + // The Zip Format uses code page 437 when the Unicode bit is not set. This format + // is the same as ASCII for characters 32-126 but differs otherwise. If we can fit + // the string into CP437 then we treat ASCII as acceptable. 
+ return Encoding.UTF8; } - /// - /// Reads exactly bytesToRead out of stream, unless it is out of bytes - /// - internal static int ReadBytes(Stream stream, Span buffer, int bytesToRead) + return Encoding.ASCII; + } + + /// + /// Reads exactly bytesToRead out of stream, unless it is out of bytes + /// + internal static int ReadBytes(Stream stream, Span buffer, int bytesToRead) + { + int bytesRead = stream.ReadAtLeast(buffer, bytesToRead, throwOnEndOfStream: false); + if (bytesRead < bytesToRead) { - int bytesRead = stream.ReadAtLeast(buffer, bytesToRead, throwOnEndOfStream: false); - if (bytesRead < bytesToRead) - { - throw new IOException(SR.UnexpectedEndOfStream); - } - return bytesRead; + throw new IOException(SR.UnexpectedEndOfStream); } + return bytesRead; + } - // will silently return InvalidDateIndicator if the uint is not a valid Dos DateTime - internal static DateTime DosTimeToDateTime(uint dateTime) + // will silently return InvalidDateIndicator if the uint is not a valid Dos DateTime + internal static DateTime DosTimeToDateTime(uint dateTime) + { + if (dateTime == 0) { - if (dateTime == 0) - { - return s_invalidDateIndicator; - } - - // DosTime format 32 bits - // Year: 7 bits, 0 is ValidZipDate_YearMin, unsigned (ValidZipDate_YearMin = 1980) - // Month: 4 bits - // Day: 5 bits - // Hour: 5 - // Minute: 6 bits - // Second: 5 bits - - // do the bit shift as unsigned because the fields are unsigned, but - // we can safely convert to int, because they won't be too big - int year = (int)(ValidZipDate_YearMin + (dateTime >> 25)); - int month = (int)((dateTime >> 21) & 0xF); - int day = (int)((dateTime >> 16) & 0x1F); - int hour = (int)((dateTime >> 11) & 0x1F); - int minute = (int)((dateTime >> 5) & 0x3F); - int second = (int)((dateTime & 0x001F) * 2); // only 5 bits for second, so we only have a granularity of 2 sec. - - try - { - return new DateTime(year, month, day, hour, minute, second, 0); - } - catch (ArgumentOutOfRangeException) - { - return s_invalidDateIndicator; - } - catch (ArgumentException) - { - return s_invalidDateIndicator; - } + return s_invalidDateIndicator; } - // assume date time has passed IsConvertibleToDosTime - internal static uint DateTimeToDosTime(DateTime dateTime) + // DosTime format 32 bits + // Year: 7 bits, 0 is ValidZipDate_YearMin, unsigned (ValidZipDate_YearMin = 1980) + // Month: 4 bits + // Day: 5 bits + // Hour: 5 + // Minute: 6 bits + // Second: 5 bits + + // do the bit shift as unsigned because the fields are unsigned, but + // we can safely convert to int, because they won't be too big + int year = (int)(ValidZipDate_YearMin + (dateTime >> 25)); + int month = (int)((dateTime >> 21) & 0xF); + int day = (int)((dateTime >> 16) & 0x1F); + int hour = (int)((dateTime >> 11) & 0x1F); + int minute = (int)((dateTime >> 5) & 0x3F); + int second = (int)((dateTime & 0x001F) * 2); // only 5 bits for second, so we only have a granularity of 2 sec. + + try { - // DateTime must be Convertible to DosTime: - Debug.Assert(ValidZipDate_YearMin <= dateTime.Year && dateTime.Year <= ValidZipDate_YearMax); - - int ret = ((dateTime.Year - ValidZipDate_YearMin) & 0x7F); - ret = (ret << 4) + dateTime.Month; - ret = (ret << 5) + dateTime.Day; - ret = (ret << 5) + dateTime.Hour; - ret = (ret << 6) + dateTime.Minute; - ret = (ret << 5) + (dateTime.Second / 2); // only 5 bits for second, so we only have a granularity of 2 sec. 
- return (uint)ret; + return new DateTime(year, month, day, hour, minute, second, 0); } - - // Assumes all bytes of signatureToFind are non zero, looks backwards from current position in stream, - // assumes maxBytesToRead is positive, ensures to not read beyond the provided max number of bytes, - // if the signature is found then returns true and positions stream at first byte of signature - // if the signature is not found, returns false - internal static bool SeekBackwardsToSignature(Stream stream, ReadOnlySpan signatureToFind, int maxBytesToRead) + catch (ArgumentOutOfRangeException) { - Debug.Assert(signatureToFind.Length != 0); - Debug.Assert(maxBytesToRead > 0); - - // This method reads blocks of BackwardsSeekingBufferSize bytes, searching each block for signatureToFind. - // A simple LastIndexOf(signatureToFind) doesn't account for cases where signatureToFind is split, starting in - // one block and ending in another. - // To account for this, we read blocks of BackwardsSeekingBufferSize bytes, but seek backwards by - // [BackwardsSeekingBufferSize - signatureToFind.Length] bytes. This guarantees that signatureToFind will not be - // split between two consecutive blocks, at the cost of reading [signatureToFind.Length] duplicate bytes in each iteration. - int bufferPointer = 0; - byte[] buffer = ArrayPool.Shared.Rent(BackwardsSeekingBufferSize); - Span bufferSpan = buffer.AsSpan(0, BackwardsSeekingBufferSize); - - try - { - bool outOfBytes = false; - bool signatureFound = false; + return s_invalidDateIndicator; + } + catch (ArgumentException) + { + return s_invalidDateIndicator; + } + } - int totalBytesRead = 0; - int duplicateBytesRead = 0; + // assume date time has passed IsConvertibleToDosTime + internal static uint DateTimeToDosTime(DateTime dateTime) + { + // DateTime must be Convertible to DosTime: + Debug.Assert(ValidZipDate_YearMin <= dateTime.Year && dateTime.Year <= ValidZipDate_YearMax); + + int ret = ((dateTime.Year - ValidZipDate_YearMin) & 0x7F); + ret = (ret << 4) + dateTime.Month; + ret = (ret << 5) + dateTime.Day; + ret = (ret << 5) + dateTime.Hour; + ret = (ret << 6) + dateTime.Minute; + ret = (ret << 5) + (dateTime.Second / 2); // only 5 bits for second, so we only have a granularity of 2 sec. + return (uint)ret; + } - while (!signatureFound && !outOfBytes && totalBytesRead <= maxBytesToRead) - { - int bytesRead = SeekBackwardsAndRead(stream, bufferSpan, signatureToFind.Length); + // Assumes all bytes of signatureToFind are non zero, looks backwards from current position in stream, + // assumes maxBytesToRead is positive, ensures to not read beyond the provided max number of bytes, + // if the signature is found then returns true and positions stream at first byte of signature + // if the signature is not found, returns false + internal static bool SeekBackwardsToSignature(Stream stream, ReadOnlySpan signatureToFind, int maxBytesToRead) + { + Debug.Assert(signatureToFind.Length != 0); + Debug.Assert(maxBytesToRead > 0); + + // This method reads blocks of BackwardsSeekingBufferSize bytes, searching each block for signatureToFind. + // A simple LastIndexOf(signatureToFind) doesn't account for cases where signatureToFind is split, starting in + // one block and ending in another. + // To account for this, we read blocks of BackwardsSeekingBufferSize bytes, but seek backwards by + // [BackwardsSeekingBufferSize - signatureToFind.Length] bytes. 
This guarantees that signatureToFind will not be + // split between two consecutive blocks, at the cost of reading [signatureToFind.Length] duplicate bytes in each iteration. + int bufferPointer = 0; + byte[] buffer = ArrayPool.Shared.Rent(BackwardsSeekingBufferSize); + Span bufferSpan = buffer.AsSpan(0, BackwardsSeekingBufferSize); + + try + { + bool outOfBytes = false; + bool signatureFound = false; - outOfBytes = bytesRead < bufferSpan.Length; - if (bytesRead < bufferSpan.Length) - { - bufferSpan = bufferSpan.Slice(0, bytesRead); - } + int totalBytesRead = 0; + int duplicateBytesRead = 0; - bufferPointer = bufferSpan.LastIndexOf(signatureToFind); - Debug.Assert(bufferPointer < bufferSpan.Length); + while (!signatureFound && !outOfBytes && totalBytesRead <= maxBytesToRead) + { + int bytesRead = SeekBackwardsAndRead(stream, bufferSpan, signatureToFind.Length); - totalBytesRead += (bufferSpan.Length - duplicateBytesRead); + outOfBytes = bytesRead < bufferSpan.Length; + if (bytesRead < bufferSpan.Length) + { + bufferSpan = bufferSpan.Slice(0, bytesRead); + } - if (bufferPointer != -1) - { - signatureFound = true; - break; - } + bufferPointer = bufferSpan.LastIndexOf(signatureToFind); + Debug.Assert(bufferPointer < bufferSpan.Length); - duplicateBytesRead = signatureToFind.Length; - } + totalBytesRead += (bufferSpan.Length - duplicateBytesRead); - if (!signatureFound) - { - return false; - } - else + if (bufferPointer != -1) { - stream.Seek(bufferPointer, SeekOrigin.Current); - return true; + signatureFound = true; + break; } - } - finally - { - ArrayPool.Shared.Return(buffer); - } - } - // Returns the number of bytes actually read. - // Allows successive buffers to overlap by a number of bytes. This handles cases where - // the value being searched for straddles buffers (i.e. where the first buffer ends with the - // first X bytes being searched for, and the second buffer begins with the remaining bytes.) - private static int SeekBackwardsAndRead(Stream stream, Span buffer, int overlap) - { - int bytesRead; + duplicateBytesRead = signatureToFind.Length; + } - if (stream.Position >= buffer.Length) + if (!signatureFound) { - Debug.Assert(overlap <= buffer.Length); - stream.Seek(-(buffer.Length - overlap), SeekOrigin.Current); - bytesRead = ReadBytes(stream, buffer, buffer.Length); - stream.Seek(-buffer.Length, SeekOrigin.Current); + return false; } else { - int bytesToRead = (int)stream.Position; - stream.Seek(0, SeekOrigin.Begin); - bytesRead = ReadBytes(stream, buffer, bytesToRead); - stream.Seek(0, SeekOrigin.Begin); + stream.Seek(bufferPointer, SeekOrigin.Current); + return true; } + } + finally + { + ArrayPool.Shared.Return(buffer); + } + } - return bytesRead; + // Returns the number of bytes actually read. + // Allows successive buffers to overlap by a number of bytes. This handles cases where + // the value being searched for straddles buffers (i.e. where the first buffer ends with the + // first X bytes being searched for, and the second buffer begins with the remaining bytes.) 
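The block comments above (in both the synchronous and the new asynchronous versions) describe the overlap trick in words. Concretely, window k of the backwards scan covers [start - k*(windowSize - signatureLength), start - k*(windowSize - signatureLength) + windowSize), so consecutive windows share exactly signatureLength bytes and the signature can never straddle a window boundary. A tiny illustration with made-up sizes (the production window is BackwardsSeekingBufferSize, 4096 bytes):

using System;

internal static class BackwardsScanSketch
{
    // Prints the absolute byte ranges covered by the first few backwards windows.
    // Illustrative sizes only; they are not the ones used by ZipHelper.
    public static void PrintWindows(long startPosition = 100, int windowSize = 16, int signatureLength = 4)
    {
        for (int k = 1; k <= 3; k++)
        {
            long windowStart = startPosition - k * (windowSize - signatureLength);
            Console.WriteLine($"window {k}: [{windowStart}, {windowStart + windowSize})");
        }
        // Output: [88, 104), [76, 92), [64, 80) -- each pair of consecutive windows
        // overlaps by 4 bytes, the length of the signature being searched for.
    }
}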
+ private static int SeekBackwardsAndRead(Stream stream, Span buffer, int overlap) + { + int bytesRead; + + if (stream.Position >= buffer.Length) + { + Debug.Assert(overlap <= buffer.Length); + stream.Seek(-(buffer.Length - overlap), SeekOrigin.Current); + bytesRead = ReadBytes(stream, buffer, buffer.Length); + stream.Seek(-buffer.Length, SeekOrigin.Current); + } + else + { + int bytesToRead = (int)stream.Position; + stream.Seek(0, SeekOrigin.Begin); + bytesRead = ReadBytes(stream, buffer, bytesToRead); + stream.Seek(0, SeekOrigin.Begin); } - // Converts the specified string into bytes using the optional specified encoding. - // If the encoding null, then the encoding is calculated from the string itself. - // If maxBytes is greater than zero, the returned string will be truncated to a total - // number of characters whose bytes do not add up to more than that number. - internal static byte[] GetEncodedTruncatedBytesFromString(string? text, Encoding? encoding, int maxBytes, out bool isUTF8) + return bytesRead; + } + // Converts the specified string into bytes using the optional specified encoding. + // If the encoding null, then the encoding is calculated from the string itself. + // If maxBytes is greater than zero, the returned string will be truncated to a total + // number of characters whose bytes do not add up to more than that number. + internal static byte[] GetEncodedTruncatedBytesFromString(string? text, Encoding? encoding, int maxBytes, out bool isUTF8) + { + if (string.IsNullOrEmpty(text)) { - if (string.IsNullOrEmpty(text)) - { - isUTF8 = false; - return Array.Empty(); - } + isUTF8 = false; + return Array.Empty(); + } - encoding ??= GetEncoding(text); - isUTF8 = encoding.CodePage == 65001; + encoding ??= GetEncoding(text); + isUTF8 = encoding.CodePage == 65001; - if (maxBytes == 0) // No truncation - { - return encoding.GetBytes(text); - } + if (maxBytes == 0) // No truncation + { + return encoding.GetBytes(text); + } - byte[] bytes; - if (isUTF8 && encoding.GetMaxByteCount(text.Length) > maxBytes) + byte[] bytes; + if (isUTF8 && encoding.GetMaxByteCount(text.Length) > maxBytes) + { + int totalCodePoints = 0; + foreach (Rune rune in text.EnumerateRunes()) { - int totalCodePoints = 0; - foreach (Rune rune in text.EnumerateRunes()) + if (totalCodePoints + rune.Utf8SequenceLength > maxBytes) { - if (totalCodePoints + rune.Utf8SequenceLength > maxBytes) - { - break; - } - totalCodePoints += rune.Utf8SequenceLength; + break; } - - bytes = encoding.GetBytes(text); - - Debug.Assert(totalCodePoints > 0); - Debug.Assert(totalCodePoints <= bytes.Length); - - return bytes[0..totalCodePoints]; + totalCodePoints += rune.Utf8SequenceLength; } bytes = encoding.GetBytes(text); - return maxBytes < bytes.Length ? bytes[0..maxBytes] : bytes; + + Debug.Assert(totalCodePoints > 0); + Debug.Assert(totalCodePoints <= bytes.Length); + + return bytes[0..totalCodePoints]; } + + bytes = encoding.GetBytes(text); + return maxBytes < bytes.Length ? 
bytes[0..maxBytes] : bytes; } + + } diff --git a/src/libraries/System.IO.Compression/tests/CompressionStreamUnitTests.Deflate.cs b/src/libraries/System.IO.Compression/tests/CompressionStreamUnitTests.Deflate.cs index d91a243b907cc1..30ab788bab2d42 100644 --- a/src/libraries/System.IO.Compression/tests/CompressionStreamUnitTests.Deflate.cs +++ b/src/libraries/System.IO.Compression/tests/CompressionStreamUnitTests.Deflate.cs @@ -38,7 +38,7 @@ public static IEnumerable DecompressFailsWithWrapperStream_MemberData( public async Task DecompressFailsWithWrapperStream(string uncompressedPath, string newDirectory, string newSuffix) { string fileName = Path.Combine(newDirectory, Path.GetFileName(uncompressedPath) + newSuffix); - using (LocalMemoryStream baseStream = await LocalMemoryStream.readAppFileAsync(fileName)) + using (LocalMemoryStream baseStream = await LocalMemoryStream.ReadAppFileAsync(fileName)) using (Stream cs = CreateStream(baseStream, CompressionMode.Decompress)) { int _bufferSize = 2048; diff --git a/src/libraries/System.IO.Compression/tests/System.IO.Compression.Tests.csproj b/src/libraries/System.IO.Compression/tests/System.IO.Compression.Tests.csproj index a0c493573cde51..fbc00e92e4cfb5 100644 --- a/src/libraries/System.IO.Compression/tests/System.IO.Compression.Tests.csproj +++ b/src/libraries/System.IO.Compression/tests/System.IO.Compression.Tests.csproj @@ -37,13 +37,14 @@ - + + + diff --git a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_CreateTests.Comments.cs b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_CreateTests.Comments.cs index ca5b651e3f2c81..a86c5590eefff7 100644 --- a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_CreateTests.Comments.cs +++ b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_CreateTests.Comments.cs @@ -3,6 +3,7 @@ using Xunit; using System.Text; +using System.Threading.Tasks; namespace System.IO.Compression.Tests { @@ -10,88 +11,86 @@ public partial class zip_CreateTests : ZipFileTestBase { [Theory] [MemberData(nameof(Utf8Comment_Data))] - public static void Create_Comment_AsciiEntryName_NullEncoding(string originalComment, string expectedComment) => - Create_Comment_EntryName_Encoding_Internal(AsciiFileName, originalComment, expectedComment, null); + public static Task Create_Comment_AsciiEntryName_NullEncoding(string originalComment, string expectedComment, bool async) => + Create_Comment_EntryName_Encoding_Internal(AsciiFileName, originalComment, expectedComment, null, async); [Theory] [MemberData(nameof(Utf8Comment_Data))] - public static void Create_Comment_AsciiEntryName_Utf8Encoding(string originalComment, string expectedComment) => - Create_Comment_EntryName_Encoding_Internal(AsciiFileName, originalComment, expectedComment, Encoding.UTF8); + public static Task Create_Comment_AsciiEntryName_Utf8Encoding(string originalComment, string expectedComment, bool async) => + Create_Comment_EntryName_Encoding_Internal(AsciiFileName, originalComment, expectedComment, Encoding.UTF8, async); [Theory] [MemberData(nameof(Latin1Comment_Data))] - public static void Create_Comment_AsciiEntryName_Latin1Encoding(string originalComment, string expectedComment) => - Create_Comment_EntryName_Encoding_Internal(AsciiFileName, originalComment, expectedComment, Encoding.Latin1); + public static Task Create_Comment_AsciiEntryName_Latin1Encoding(string originalComment, string expectedComment, bool async) => + Create_Comment_EntryName_Encoding_Internal(AsciiFileName, originalComment, expectedComment, Encoding.Latin1, async); 
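One detail worth calling out from the GetEncodedTruncatedBytesFromString code above: when a maxBytes limit applies and the text is UTF-8, the method walks the string rune by rune so that a multi-byte sequence is never cut in half. A standalone sketch of the same idea (the method name here is invented and is not the library API):

using System.Text;

internal static class TruncationSketch
{
    // Truncates text to at most maxBytes of UTF-8 without splitting a code point.
    public static byte[] Utf8TruncateAtRuneBoundary(string text, int maxBytes)
    {
        int byteCount = 0;
        foreach (Rune rune in text.EnumerateRunes())
        {
            if (byteCount + rune.Utf8SequenceLength > maxBytes)
            {
                break; // taking this rune would exceed the budget, so stop before it
            }
            byteCount += rune.Utf8SequenceLength;
        }

        // byteCount is the longest prefix (in bytes) that ends exactly on a rune boundary.
        return Encoding.UTF8.GetBytes(text)[..byteCount];
    }
}

For example, with maxBytes = 4 a string like "aé€" (1 + 2 + 3 UTF-8 bytes) truncates to the three bytes of "aé" rather than leaving a dangling lead byte of the euro sign.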
[Theory] [MemberData(nameof(Utf8Comment_Data))] - public static void Create_Comment_Utf8EntryName_NullEncoding(string originalComment, string expectedComment) => - Create_Comment_EntryName_Encoding_Internal(Utf8FileName, originalComment, expectedComment, null); + public static Task Create_Comment_Utf8EntryName_NullEncoding(string originalComment, string expectedComment, bool async) => + Create_Comment_EntryName_Encoding_Internal(Utf8FileName, originalComment, expectedComment, null, async); [Theory] [MemberData(nameof(Utf8Comment_Data))] - public static void Create_Comment_Utf8EntryName_Utf8Encoding(string originalComment, string expectedComment) => - Create_Comment_EntryName_Encoding_Internal(Utf8FileName, originalComment, expectedComment, Encoding.UTF8); + public static Task Create_Comment_Utf8EntryName_Utf8Encoding(string originalComment, string expectedComment, bool async) => + Create_Comment_EntryName_Encoding_Internal(Utf8FileName, originalComment, expectedComment, Encoding.UTF8, async); [Theory] [MemberData(nameof(Utf8Comment_Data))] - public static void Create_Comment_Utf8EntryName_Utf8Encoding_Default(string originalComment, string expectedComment) => - Create_Comment_EntryName_Encoding_Internal(Utf8FileName, originalComment, expectedComment, expectedComment, Encoding.UTF8, null); + public static Task Create_Comment_Utf8EntryName_Utf8Encoding_Default(string originalComment, string expectedComment, bool async) => + Create_Comment_EntryName_Encoding_Internal(Utf8FileName, originalComment, expectedComment, expectedComment, Encoding.UTF8, null, async); [Theory] [MemberData(nameof(Latin1Comment_Data))] - public static void Create_Comment_Utf8EntryName_Latin1Encoding(string originalComment, string expectedComment) => + public static Task Create_Comment_Utf8EntryName_Latin1Encoding(string originalComment, string expectedComment, bool async) => // Emoji not supported by latin1 - Create_Comment_EntryName_Encoding_Internal(Utf8AndLatin1FileName, originalComment, expectedComment, Encoding.Latin1); + Create_Comment_EntryName_Encoding_Internal(Utf8AndLatin1FileName, originalComment, expectedComment, Encoding.Latin1, async); [Theory] [MemberData(nameof(Utf8Comment_Data))] - public static void Create_Comment_Utf8EntryName_Utf8Encoding_Prioritised(string originalComment, string expectedComment) + public static Task Create_Comment_Utf8EntryName_Utf8Encoding_Prioritised(string originalComment, string expectedComment, bool async) // UTF8 encoding bit is set in the general-purpose bit flags. The verification encoding of Latin1 should be ignored - => Create_Comment_EntryName_Encoding_Internal(Utf8FileName, originalComment, expectedComment, expectedComment, Encoding.UTF8, Encoding.Latin1); + => Create_Comment_EntryName_Encoding_Internal(Utf8FileName, originalComment, expectedComment, expectedComment, Encoding.UTF8, Encoding.Latin1, async); [Theory] [MemberData(nameof(MismatchingEncodingComment_Data))] - public static void Create_Comment_AsciiEntryName_Utf8Decoding_Invalid(string originalComment, string expectedPreWriteComment, string expectedPostWriteComment) + public static Task Create_Comment_AsciiEntryName_Utf8Decoding_Invalid(string originalComment, string expectedPreWriteComment, string expectedPostWriteComment, bool async) // The UTF8 encoding bit in the general-purpose bit flags should not be set, filenames should be encoded with Latin1, and thus // decoding with UTF8 should result in incorrect filenames. 
This is because the filenames and comments contain code points in the // range 0xC0..0xFF (which Latin1 encodes in one byte, and UTF8 encodes in two bytes.) - => Create_Comment_EntryName_Encoding_Internal(AsciiFileName, originalComment, expectedPreWriteComment, expectedPostWriteComment, Encoding.Latin1, Encoding.UTF8); + => Create_Comment_EntryName_Encoding_Internal(AsciiFileName, originalComment, expectedPreWriteComment, expectedPostWriteComment, Encoding.Latin1, Encoding.UTF8, async); [Theory] [MemberData(nameof(MismatchingEncodingComment_Data))] - public static void Create_Comment_AsciiEntryName_DefaultDecoding_Utf8(string originalComment, string expectedPreWriteComment, string expectedPostWriteComment) + public static Task Create_Comment_AsciiEntryName_DefaultDecoding_Utf8(string originalComment, string expectedPreWriteComment, string expectedPostWriteComment, bool async) // Filenames should be encoded with Latin1, resulting in the UTF8 encoding bit in the general-purpose bit flags not being set. // However, failing to specify an encoding (or specifying a null encoding) for the read should result in UTF8 being used anyway. // This should result in incorrect filenames, since the filenames and comments contain code points in the range 0xC0..0xFF (which // Latin1 encodes in one byte, and UTF8 encodes in two bytes.) - => Create_Comment_EntryName_Encoding_Internal(AsciiFileName, originalComment, expectedPreWriteComment, expectedPostWriteComment, Encoding.Latin1, null); + => Create_Comment_EntryName_Encoding_Internal(AsciiFileName, originalComment, expectedPreWriteComment, expectedPostWriteComment, Encoding.Latin1, null, async); - private static void Create_Comment_EntryName_Encoding_Internal(string entryName, string originalComment, string expectedComment, Encoding encoding) - => Create_Comment_EntryName_Encoding_Internal(entryName, originalComment, expectedComment, expectedComment, encoding, encoding); + private static Task Create_Comment_EntryName_Encoding_Internal(string entryName, string originalComment, string expectedComment, Encoding encoding, bool async) + => Create_Comment_EntryName_Encoding_Internal(entryName, originalComment, expectedComment, expectedComment, encoding, encoding, async); - private static void Create_Comment_EntryName_Encoding_Internal(string entryName, string originalComment, + private static async Task Create_Comment_EntryName_Encoding_Internal(string entryName, string originalComment, string expectedPreWriteComment, string expectedPostWriteComment, - Encoding creationEncoding, Encoding verificationEncoding) + Encoding creationEncoding, Encoding verificationEncoding, bool async) { using var ms = new MemoryStream(); - using (var zip = new ZipArchive(ms, ZipArchiveMode.Create, leaveOpen: true, creationEncoding)) - { - ZipArchiveEntry entry = zip.CreateEntry(entryName, CompressionLevel.NoCompression); - entry.Comment = originalComment; - // The expected pre-write and post-write comment can be different when testing encodings which vary between operations. - Assert.Equal(expectedPreWriteComment, entry.Comment); - } + var zip = await CreateZipArchive(async, ms, ZipArchiveMode.Create, leaveOpen: true, creationEncoding); + ZipArchiveEntry entry1 = zip.CreateEntry(entryName, CompressionLevel.NoCompression); + entry1.Comment = originalComment; + // The expected pre-write and post-write comment can be different when testing encodings which vary between operations. 
+ Assert.Equal(expectedPreWriteComment, entry1.Comment); + await DisposeZipArchive(async, zip); - using (var zip = new ZipArchive(ms, ZipArchiveMode.Read, leaveOpen: false, verificationEncoding)) + zip = await CreateZipArchive(async, ms, ZipArchiveMode.Read, leaveOpen: false, verificationEncoding); + foreach (ZipArchiveEntry entry2 in zip.Entries) { - foreach (ZipArchiveEntry entry in zip.Entries) - { - Assert.Equal(entryName, entry.Name); - Assert.Equal(expectedPostWriteComment, entry.Comment); - } + Assert.Equal(entryName, entry2.Name); + Assert.Equal(expectedPostWriteComment, entry2.Comment); } + await DisposeZipArchive(async, zip); } } } diff --git a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_CreateTests.Unix.cs b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_CreateTests.Unix.cs index 757ef558dcde2b..f0cdc238f92fab 100644 --- a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_CreateTests.Unix.cs +++ b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_CreateTests.Unix.cs @@ -1,6 +1,7 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +using System.Collections.Generic; using System.Threading.Tasks; using Xunit; @@ -8,16 +9,25 @@ namespace System.IO.Compression.Tests { public partial class zip_CreateTests : ZipFileTestBase { + public static IEnumerable Get_Verify_Default_Permissions_Are_Applied_For_Entries_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { "folder/", "40755", async }; + yield return new object[] { "folder/file", "100644", async }; + yield return new object[] { "folder\\file", "100644", async }; + } + } + [Theory] - [InlineData("folder/", "40755")] - [InlineData("folder/file", "100644")] - [InlineData("folder\\file", "100644")] - public static void Verify_Default_Permissions_Are_Applied_For_Entries(string path, string mode) + [MemberData(nameof(Get_Verify_Default_Permissions_Are_Applied_For_Entries_Data))] + public static async Task Verify_Default_Permissions_Are_Applied_For_Entries(string path, string mode, bool async) { - using var archive = new ZipArchive(new MemoryStream(), ZipArchiveMode.Create, false); + var archive = await CreateZipArchive(async, new MemoryStream(), ZipArchiveMode.Create, false); var newEntry = archive.CreateEntry(path); Assert.Equal(0, newEntry.ExternalAttributes & 0xffff); Assert.Equal(mode, Convert.ToString((uint)newEntry.ExternalAttributes >> 16, 8)); + await DisposeZipArchive(async, archive); } } } diff --git a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_CreateTests.cs b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_CreateTests.cs index dae65db374f40d..8e52f7d194932a 100644 --- a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_CreateTests.cs +++ b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_CreateTests.cs @@ -1,6 +1,12 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
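The test rewrites that follow all lean on a small set of dual-mode helpers (CreateZipArchive, DisposeZipArchive, OpenEntryStream, DisposeStream, plus the _bools / Get_Booleans_Data member data), which belong to the shared test base rather than to the hunks shown here. As a rough, hypothetical sketch of the shape such helpers presumably take, assuming the ZipArchive.CreateAsync factory exercised later in this diff and an async dispose path added by this change:

using System.IO;
using System.IO.Compression;
using System.Text;
using System.Threading.Tasks;

// Hypothetical helpers for illustration only; the real ones are defined in the shared test utilities.
internal static class DualModeHelperSketch
{
    public static async Task<ZipArchive> CreateZipArchive(bool async, Stream stream, ZipArchiveMode mode,
        bool leaveOpen = false, Encoding entryNameEncoding = null)
    {
        if (async)
        {
            // Parameter shape taken from the InvalidConstructorsAsync test later in this diff.
            return await ZipArchive.CreateAsync(stream, mode, leaveOpen, entryNameEncoding);
        }

        return new ZipArchive(stream, mode, leaveOpen, entryNameEncoding);
    }

    public static async Task DisposeZipArchive(bool async, ZipArchive archive)
    {
        if (async)
        {
            await archive.DisposeAsync(); // assumes ZipArchive gains DisposeAsync as part of this change
        }
        else
        {
            archive.Dispose();
        }
    }
}

OpenEntryStream and DisposeStream would follow the same pattern for the streams returned by ZipArchiveEntry.Open, which is why each converted [Theory] simply threads a bool async parameter through these calls.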
+using System.Buffers.Binary; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel; +using System.IO.Pipelines; +using System.Linq; using System.Threading.Tasks; using Xunit; @@ -8,18 +14,21 @@ namespace System.IO.Compression.Tests { public partial class zip_CreateTests : ZipFileTestBase { - [Fact] - public static void CreateModeInvalidOperations() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task CreateModeInvalidOperations(bool async) { MemoryStream ms = new MemoryStream(); - ZipArchive z = new ZipArchive(ms, ZipArchiveMode.Create); + + ZipArchive z = await CreateZipArchive(async, ms, ZipArchiveMode.Create); + Assert.Throws(() => { var x = z.Entries; }); //"Entries not applicable on Create" Assert.Throws(() => z.GetEntry("dirka")); //"GetEntry not applicable on Create" ZipArchiveEntry e = z.CreateEntry("hey"); Assert.Throws(() => e.Delete()); //"Can't delete new entry" - Stream s = e.Open(); + Stream s = await OpenEntryStream(async, e); Assert.Throws(() => s.ReadByte()); //"Can't read on new entry" Assert.Throws(() => s.Seek(0, SeekOrigin.Begin)); //"Can't seek on new entry" Assert.Throws(() => s.Position = 0); //"Can't set position on new entry" @@ -30,11 +39,13 @@ public static void CreateModeInvalidOperations() Assert.Throws(() => { var x = e.CompressedLength; }); //"can't get CompressedLength on new entry" Assert.Throws(() => z.CreateEntry("bad")); - s.Dispose(); + + await DisposeStream(async, s); Assert.Throws(() => s.WriteByte(25)); //"Can't write to disposed entry" - Assert.Throws(() => e.Open()); + await Assert.ThrowsAsync(() => OpenEntryStream(async, e)); + Assert.Throws(() => e.LastWriteTime = new DateTimeOffset()); Assert.Throws(() => { var x = e.Length; }); Assert.Throws(() => { var x = e.CompressedLength; }); @@ -42,103 +53,193 @@ public static void CreateModeInvalidOperations() ZipArchiveEntry e1 = z.CreateEntry("e1"); ZipArchiveEntry e2 = z.CreateEntry("e2"); - Assert.Throws(() => e1.Open()); //"Can't open previous entry after new entry created" + // Can't open previous entry after new entry created + await Assert.ThrowsAsync(() => OpenEntryStream(async, e1)); - z.Dispose(); + await DisposeZipArchive(async, z); Assert.Throws(() => z.CreateEntry("dirka")); //"Can't create after dispose" } + private static readonly string[] _folderNames = [ "small", "normal", "empty", "emptydir" ]; + + public static IEnumerable GetCreateNormal_Seekable_Data() + { + foreach (string folder in _folderNames) + { + yield return new object[] { folder, false, false, }; + } + + yield return new object[] { "small", false, true }; + yield return new object[] { "small", true, false }; + yield return new object[] { "normal", false, true }; + yield return new object[] { "normal", true, false }; + } + + public static IEnumerable GetCreateNormal_Seekable_Async_Data() + { + foreach (bool async in _bools) + { + foreach (object[] data in GetCreateNormal_Seekable_Data()) + { + string folder = (string)data[0]; + bool useSpansForWriting = (bool)data[1]; + bool writeInChunks = (bool)data[2]; + yield return new object[] { folder, useSpansForWriting, writeInChunks, async }; + } + } + } + [Theory] - [InlineData("small", false, false)] - [InlineData("normal", false, false)] - [InlineData("empty", false, false)] - [InlineData("emptydir", false, false)] - [InlineData("small", true, false)] - [InlineData("normal", true, false)] - [InlineData("small", false, true)] - [InlineData("normal", false, true)] - public static async Task 
CreateNormal_Seekable(string folder, bool useSpansForWriting, bool writeInChunks) + [MemberData(nameof(GetCreateNormal_Seekable_Async_Data))] + public static async Task CreateNormal_Seekable(string folder, bool useSpansForWriting, bool writeInChunks, bool async) { using (var s = new MemoryStream()) { var testStream = new WrappedStream(s, false, true, true, null); - await CreateFromDir(zfolder(folder), testStream, ZipArchiveMode.Create, useSpansForWriting, writeInChunks); + await CreateFromDir(zfolder(folder), testStream, async, ZipArchiveMode.Create, useSpansForWriting: useSpansForWriting, writeInChunks: writeInChunks); + await IsZipSameAsDir(s, zfolder(folder), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true, async: async); + } + } + + [Theory] + [MemberData(nameof(GetCreateNormal_Seekable_Data))] + public static async Task CreateNormal_Seekable_CompareSyncAndAsync(string folder, bool useSpansForWriting, bool writeInChunks) + { + using var s_sync = new MemoryStream(); + using var s_async = new MemoryStream(); - IsZipSameAsDir(s, zfolder(folder), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true); + var testStream_sync = new WrappedStream(s_sync, false, true, true, null); + await CreateFromDir(zfolder(folder), testStream_sync, async: false, ZipArchiveMode.Create, useSpansForWriting: useSpansForWriting, writeInChunks: writeInChunks); + + var testStream_async = new WrappedStream(s_async, false, true, true, null); + await CreateFromDir(zfolder(folder), testStream_async, async: true, ZipArchiveMode.Create, useSpansForWriting: useSpansForWriting, writeInChunks: writeInChunks); + + s_sync.Position = 0; + s_async.Position = 0; + + Assert.Equal(s_sync.ToArray(), s_async.ToArray()); + } + + public static IEnumerable Get_FolderNames_Data() + { + foreach (string folder in _folderNames) + { + yield return new object[] { folder }; + } + } + + public static IEnumerable Get_CreateNormal_Unseekable_Data() + { + foreach (string folder in _folderNames) + { + yield return new object[] { folder, false }; + yield return new object[] { folder, true }; } } [Theory] - [InlineData("small")] - [InlineData("normal")] - [InlineData("empty")] - [InlineData("emptydir")] - public static async Task CreateNormal_Unseekable(string folder) + [MemberData(nameof(Get_CreateNormal_Unseekable_Data))] + public static async Task CreateNormal_Unseekable(string folder, bool async) { using (var s = new MemoryStream()) { var testStream = new WrappedStream(s, false, true, false, null); - await CreateFromDir(zfolder(folder), testStream, ZipArchiveMode.Create); - - IsZipSameAsDir(s, zfolder(folder), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true); + await CreateFromDir(zfolder(folder), testStream, async, ZipArchiveMode.Create); + await IsZipSameAsDir(s, zfolder(folder), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true, async); } } - [Fact] - public static async Task CreateNormal_Unicode_Seekable() + [Theory] + [MemberData(nameof(Get_FolderNames_Data))] + public static async Task CreateNormal_Unseekable_CompareSyncAndAsync(string folder) + { + using var s_sync = new MemoryStream(); + using var s_async = new MemoryStream(); + + var testStream_sync = new WrappedStream(s_sync, false, true, canSeek: false, null); + await CreateFromDir(zfolder(folder), testStream_sync, async: false, ZipArchiveMode.Create); + + var testStream_async = new WrappedStream(s_async, false, true, canSeek: false, null); + await CreateFromDir(zfolder(folder), testStream_async, async: true, ZipArchiveMode.Create); + + 
s_sync.Position = 0; + s_async.Position = 0; + + Assert.Equal(s_sync.ToArray(), s_async.ToArray()); + } + + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task CreateNormal_Unicode_Seekable(bool async) { using (var s = new MemoryStream()) { var testStream = new WrappedStream(s, false, true, true, null); - await CreateFromDir(zfolder("unicode"), testStream, ZipArchiveMode.Create); - - IsZipSameAsDir(s, zfolder("unicode"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true); + await CreateFromDir(zfolder("unicode"), testStream, async, ZipArchiveMode.Create); + await IsZipSameAsDir(s, zfolder("unicode"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true, async); } } - [Fact] - public static async Task CreateNormal_Unicode_Unseekable() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task CreateNormal_Unicode_Unseekable(bool async) { using (var s = new MemoryStream()) { var testStream = new WrappedStream(s, false, true, false, null); - await CreateFromDir(zfolder("unicode"), testStream, ZipArchiveMode.Create); - - IsZipSameAsDir(s, zfolder("unicode"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true); + await CreateFromDir(zfolder("unicode"), testStream, async, ZipArchiveMode.Create); + await IsZipSameAsDir(s, zfolder("unicode"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true, async); } } - [Fact] - public static void CreateUncompressedArchive() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task CreateUncompressedArchive(bool async) { using (var testStream = new MemoryStream()) { var testfilename = "testfile"; var testFileContent = "Lorem ipsum dolor sit amet, consectetur adipiscing elit."; - using (var zip = new ZipArchive(testStream, ZipArchiveMode.Create)) + + ZipArchive zip = await CreateZipArchive(async, testStream, ZipArchiveMode.Create); + + var utf8WithoutBom = new Text.UTF8Encoding(encoderShouldEmitUTF8Identifier: false); + ZipArchiveEntry newEntry = zip.CreateEntry(testfilename, CompressionLevel.NoCompression); + + Stream entryStream = await OpenEntryStream(async, newEntry); + using (var writer = new StreamWriter(entryStream, utf8WithoutBom)) { - var utf8WithoutBom = new Text.UTF8Encoding(encoderShouldEmitUTF8Identifier: false); - ZipArchiveEntry newEntry = zip.CreateEntry(testfilename, CompressionLevel.NoCompression); - using (var writer = new StreamWriter(newEntry.Open(), utf8WithoutBom)) - { - writer.Write(testFileContent); - writer.Flush(); - } - byte[] fileContent = testStream.ToArray(); - // zip file header stores values as little-endian - byte compressionMethod = fileContent[8]; - Assert.Equal(0, compressionMethod); // stored => 0, deflate => 8 - uint compressedSize = BitConverter.ToUInt32(fileContent, 18); - uint uncompressedSize = BitConverter.ToUInt32(fileContent, 22); - Assert.Equal(uncompressedSize, compressedSize); - byte filenamelength = fileContent[26]; - Assert.Equal(testfilename.Length, filenamelength); - string readFileName = ReadStringFromSpan(fileContent.AsSpan(30, filenamelength)); - Assert.Equal(testfilename, readFileName); - string readFileContent = ReadStringFromSpan(fileContent.AsSpan(30 + filenamelength, testFileContent.Length)); - Assert.Equal(testFileContent, readFileContent); + writer.Write(testFileContent); + writer.Flush(); } + + byte[] fileContent = testStream.ToArray(); + // zip file header stores values as little-endian + byte compressionMethod = fileContent[8]; + Assert.Equal(0, compressionMethod); // stored => 0, deflate 
=> 8 + uint compressedSize = BitConverter.ToUInt32(fileContent, 18); + uint uncompressedSize = BitConverter.ToUInt32(fileContent, 22); + Assert.Equal(uncompressedSize, compressedSize); + byte filenamelength = fileContent[26]; + Assert.Equal(testfilename.Length, filenamelength); + string readFileName = ReadStringFromSpan(fileContent.AsSpan(30, filenamelength)); + Assert.Equal(testfilename, readFileName); + string readFileContent = ReadStringFromSpan(fileContent.AsSpan(30 + filenamelength, testFileContent.Length)); + Assert.Equal(testFileContent, readFileContent); + + await DisposeZipArchive(async, zip); + } + } + + public static IEnumerable Get_CreateArchiveEntriesWithBitFlags_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { CompressionLevel.NoCompression, 0, async }; + yield return new object[] { CompressionLevel.Optimal, 0, async }; + yield return new object[] { CompressionLevel.SmallestSize, 2, async }; + yield return new object[] { CompressionLevel.Fastest, 6, async }; } } @@ -147,11 +248,8 @@ public static void CreateUncompressedArchive() // reopening the generated file to confirm that the compression levels match. [Theory] // Special-case NoCompression: in this case, the CompressionMethod becomes Stored and the bits are unset. - [InlineData(CompressionLevel.NoCompression, 0)] - [InlineData(CompressionLevel.Optimal, 0)] - [InlineData(CompressionLevel.SmallestSize, 2)] - [InlineData(CompressionLevel.Fastest, 6)] - public static void CreateArchiveEntriesWithBitFlags(CompressionLevel compressionLevel, ushort expectedGeneralBitFlags) + [MemberData(nameof(Get_CreateArchiveEntriesWithBitFlags_Data))] + public static async Task CreateArchiveEntriesWithBitFlags(CompressionLevel compressionLevel, ushort expectedGeneralBitFlags, bool async) { var testfilename = "testfile"; var testFileContent = "Lorem ipsum dolor sit amet, consectetur adipiscing elit."; @@ -161,123 +259,138 @@ public static void CreateArchiveEntriesWithBitFlags(CompressionLevel compression using (var testStream = new MemoryStream()) { + ZipArchive zip = await CreateZipArchive(async, testStream, ZipArchiveMode.Create); - using (var zip = new ZipArchive(testStream, ZipArchiveMode.Create)) + ZipArchiveEntry newEntry = zip.CreateEntry(testfilename, compressionLevel); + + Stream entryStream = await OpenEntryStream(async, newEntry); + using (var writer = new StreamWriter(entryStream, utf8WithoutBom)) { - ZipArchiveEntry newEntry = zip.CreateEntry(testfilename, compressionLevel); - using (var writer = new StreamWriter(newEntry.Open(), utf8WithoutBom)) - { - writer.Write(testFileContent); - writer.Flush(); - } - - ZipArchiveEntry secondNewEntry = zip.CreateEntry(testFileContent + "_post", CompressionLevel.NoCompression); + writer.Write(testFileContent); + writer.Flush(); } + ZipArchiveEntry secondNewEntry = zip.CreateEntry(testFileContent + "_post", CompressionLevel.NoCompression); + + await DisposeZipArchive(async, zip); + zipFileContent = testStream.ToArray(); } // expected bit flags are at position 6 in the file header - var generalBitFlags = System.Buffers.Binary.BinaryPrimitives.ReadUInt16LittleEndian(zipFileContent.AsSpan(6)); + var generalBitFlags = BinaryPrimitives.ReadUInt16LittleEndian(zipFileContent.AsSpan(6)); Assert.Equal(expectedGeneralBitFlags, generalBitFlags); using (var reReadStream = new MemoryStream(zipFileContent)) { - using (var reReadZip = new ZipArchive(reReadStream, ZipArchiveMode.Read)) - { - var firstArchive = reReadZip.Entries[0]; - var secondArchive = reReadZip.Entries[1]; - var 
compressionLevelFieldInfo = typeof(ZipArchiveEntry).GetField("_compressionLevel", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance); - var generalBitFlagsFieldInfo = typeof(ZipArchiveEntry).GetField("_generalPurposeBitFlag", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance); - - var reReadCompressionLevel = (CompressionLevel)compressionLevelFieldInfo.GetValue(firstArchive); - var reReadGeneralBitFlags = (ushort)generalBitFlagsFieldInfo.GetValue(firstArchive); + ZipArchive reReadZip = await CreateZipArchive(async, reReadStream, ZipArchiveMode.Read); - Assert.Equal(compressionLevel, reReadCompressionLevel); - Assert.Equal(expectedGeneralBitFlags, reReadGeneralBitFlags); + var firstArchive = reReadZip.Entries[0]; + var secondArchive = reReadZip.Entries[1]; + var compressionLevelFieldInfo = typeof(ZipArchiveEntry).GetField("_compressionLevel", Reflection.BindingFlags.NonPublic | Reflection.BindingFlags.Instance); + var generalBitFlagsFieldInfo = typeof(ZipArchiveEntry).GetField("_generalPurposeBitFlag", Reflection.BindingFlags.NonPublic | Reflection.BindingFlags.Instance); - reReadCompressionLevel = (CompressionLevel)compressionLevelFieldInfo.GetValue(secondArchive); - Assert.Equal(CompressionLevel.NoCompression, reReadCompressionLevel); + var reReadCompressionLevel = (CompressionLevel)compressionLevelFieldInfo.GetValue(firstArchive); + var reReadGeneralBitFlags = (ushort)generalBitFlagsFieldInfo.GetValue(firstArchive); - using (var strm = firstArchive.Open()) - { - var readBuffer = new byte[firstArchive.Length]; + Assert.Equal(compressionLevel, reReadCompressionLevel); + Assert.Equal(expectedGeneralBitFlags, reReadGeneralBitFlags); - strm.Read(readBuffer); + reReadCompressionLevel = (CompressionLevel)compressionLevelFieldInfo.GetValue(secondArchive); + Assert.Equal(CompressionLevel.NoCompression, reReadCompressionLevel); - var readText = Text.Encoding.UTF8.GetString(readBuffer); + Stream entryStream = await OpenEntryStream(async, firstArchive); + var readBuffer = new byte[firstArchive.Length]; + entryStream.Read(readBuffer); + var readText = Text.Encoding.UTF8.GetString(readBuffer); + Assert.Equal(readText, testFileContent); + await DisposeStream(async, entryStream); - Assert.Equal(readText, testFileContent); - } - } + await DisposeZipArchive(async, reReadZip); } } - [Fact] - public static void CreateNormal_VerifyDataDescriptor() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task CreateNormal_VerifyDataDescriptor(bool async) { using var memoryStream = new MemoryStream(); // We need an non-seekable stream so the data descriptor bit is turned on when saving var wrappedStream = new WrappedStream(memoryStream, true, true, false, null); // Creation will go through the path that sets the data descriptor bit when the stream is unseekable - using (var archive = new ZipArchive(wrappedStream, ZipArchiveMode.Create)) - { - CreateEntry(archive, "A", "xxx"); - CreateEntry(archive, "B", "yyy"); - } + ZipArchive archive = await CreateZipArchive(async, wrappedStream, ZipArchiveMode.Create); + + CreateEntry(archive, "A", "xxx"); + CreateEntry(archive, "B", "yyy"); + + await DisposeZipArchive(async, archive); AssertDataDescriptor(memoryStream, true); // Update should flip the data descriptor bit to zero on save - using (var archive = new ZipArchive(memoryStream, ZipArchiveMode.Update)) - { - ZipArchiveEntry entry = archive.Entries[0]; - using Stream entryStream = entry.Open(); - StreamReader reader = new 
StreamReader(entryStream); - string content = reader.ReadToEnd(); - - // Append a string to this entry - entryStream.Seek(0, SeekOrigin.End); - StreamWriter writer = new StreamWriter(entryStream); - writer.Write("zzz"); - writer.Flush(); - } + archive = await CreateZipArchive(async, memoryStream, ZipArchiveMode.Update); + + ZipArchiveEntry entry = archive.Entries[0]; + Stream entryStream = await OpenEntryStream(async, entry); + StreamReader reader = new StreamReader(entryStream); + string content = reader.ReadToEnd(); + + // Append a string to this entry + entryStream.Seek(0, SeekOrigin.End); + StreamWriter writer = new StreamWriter(entryStream); + writer.Write("zzz"); + writer.Flush(); + + await DisposeStream(async, entryStream); + + await DisposeZipArchive(async, archive); AssertDataDescriptor(memoryStream, false); } + public static IEnumerable Get_CreateNormal_VerifyUnicodeFileNameAndComment_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { UnicodeFileName, UnicodeFileName, true, async }; + yield return new object[] { UnicodeFileName, AsciiFileName, true, async }; + yield return new object[] { AsciiFileName, UnicodeFileName, true, async }; + yield return new object[] { AsciiFileName, AsciiFileName, false, async }; + } + } + [Theory] - [InlineData(UnicodeFileName, UnicodeFileName, true)] - [InlineData(UnicodeFileName, AsciiFileName, true)] - [InlineData(AsciiFileName, UnicodeFileName, true)] - [InlineData(AsciiFileName, AsciiFileName, false)] - public static void CreateNormal_VerifyUnicodeFileNameAndComment(string fileName, string entryComment, bool isUnicodeFlagExpected) + [MemberData(nameof(Get_CreateNormal_VerifyUnicodeFileNameAndComment_Data))] + public static async Task CreateNormal_VerifyUnicodeFileNameAndComment(string fileName, string entryComment, bool isUnicodeFlagExpected, bool async) { using var ms = new MemoryStream(); - using var archive = new ZipArchive(ms, ZipArchiveMode.Create); - + ZipArchive archive = await CreateZipArchive(async, ms, ZipArchiveMode.Create); CreateEntry(archive, fileName, fileContents: "xxx", entryComment); - AssertUnicodeFileNameAndComment(ms, isUnicodeFlagExpected); + await DisposeZipArchive(async, archive); } - [Fact] - public void Create_VerifyDuplicateEntriesAreAllowed() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task Create_VerifyDuplicateEntriesAreAllowed(bool async) { using var ms = new MemoryStream(); - using (var archive = new ZipArchive(ms, ZipArchiveMode.Create, leaveOpen: true)) - { - string entryName = "foo"; - AddEntry(archive, entryName, contents: "xxx", DateTimeOffset.Now); - AddEntry(archive, entryName, contents: "yyy", DateTimeOffset.Now); - } + ZipArchive archive = await CreateZipArchive(async, ms, ZipArchiveMode.Create, leaveOpen: true); + + string entryName = "foo"; + await AddEntry(archive, entryName, contents: "xxx", DateTimeOffset.Now, async); + await AddEntry(archive, entryName, contents: "yyy", DateTimeOffset.Now, async); + + await DisposeZipArchive(async, archive); + + archive = await CreateZipArchive(async, ms, ZipArchiveMode.Update); + + Assert.Equal(2, archive.Entries.Count); + + await DisposeZipArchive(async, archive); - using (var archive = new ZipArchive(ms, ZipArchiveMode.Update)) - { - Assert.Equal(2, archive.Entries.Count); - } } private static string ReadStringFromSpan(Span input) diff --git a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_InvalidParametersAndStrangeFiles.cs 
b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_InvalidParametersAndStrangeFiles.cs index 76bc365bda32b0..cd158b931dfd45 100644 --- a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_InvalidParametersAndStrangeFiles.cs +++ b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_InvalidParametersAndStrangeFiles.cs @@ -4,6 +4,7 @@ using System.Buffers.Binary; using System.Collections.Generic; using System.Linq; +using System.Runtime.CompilerServices; using System.Text; using System.Threading.Tasks; using Xunit; @@ -32,23 +33,25 @@ private static void ConstructorThrows(Func constructor, } } - [Fact] - public static async Task InvalidInstanceMethods() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task InvalidInstanceMethods(bool async) { Stream zipFile = await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")); - using (ZipArchive archive = new ZipArchive(zipFile, ZipArchiveMode.Update)) - { - //non-existent entry - Assert.True(null == archive.GetEntry("nonExistentEntry")); //"Should return null on non-existent entry name" - //null/empty string - Assert.Throws(() => archive.GetEntry(null)); //"Should throw on null entry name" + ZipArchive archive = await CreateZipArchive(async, zipFile, ZipArchiveMode.Update); - ZipArchiveEntry entry = archive.GetEntry("first.txt"); + //non-existent entry + Assert.True(null == archive.GetEntry("nonExistentEntry")); //"Should return null on non-existent entry name" + //null/empty string + Assert.Throws(() => archive.GetEntry(null)); //"Should throw on null entry name" - //null/empty string - AssertExtensions.Throws("entryName", () => archive.CreateEntry("")); //"Should throw on empty entry name" - Assert.Throws(() => archive.CreateEntry(null)); //"should throw on null entry name" - } + ZipArchiveEntry entry = archive.GetEntry("first.txt"); + + //null/empty string + AssertExtensions.Throws("entryName", () => archive.CreateEntry("")); //"Should throw on empty entry name" + Assert.Throws(() => archive.CreateEntry(null)); //"should throw on null entry name" + + await DisposeZipArchive(async, archive); } [Fact] @@ -99,304 +102,387 @@ public static void InvalidConstructors() } } + [Fact] + public static async Task InvalidConstructorsAsync() + { + //out of range enum values + await Assert.ThrowsAsync(() => + ZipArchive.CreateAsync(new MemoryStream(), (ZipArchiveMode)(-1), leaveOpen: false, entryNameEncoding: null)); + await Assert.ThrowsAsync(() => + ZipArchive.CreateAsync(new MemoryStream(), (ZipArchiveMode)(4), leaveOpen: false, entryNameEncoding: null)); + await Assert.ThrowsAsync(() => + ZipArchive.CreateAsync(new MemoryStream(), (ZipArchiveMode)(10), leaveOpen: false, entryNameEncoding: null)); + + //null/closed stream + await Assert.ThrowsAsync(() => + ZipArchive.CreateAsync((Stream)null, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null)); + await Assert.ThrowsAsync(() => + ZipArchive.CreateAsync((Stream)null, ZipArchiveMode.Create, leaveOpen: false, entryNameEncoding: null)); + await Assert.ThrowsAsync(() => + ZipArchive.CreateAsync((Stream)null, ZipArchiveMode.Update, leaveOpen: false, entryNameEncoding: null)); + + MemoryStream ms = new MemoryStream(); + ms.Dispose(); + + await Assert.ThrowsAsync(() => + ZipArchive.CreateAsync(ms, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null)); + await Assert.ThrowsAsync(() => + ZipArchive.CreateAsync(ms, ZipArchiveMode.Create, leaveOpen: false, entryNameEncoding: null)); + await Assert.ThrowsAsync(() => + ZipArchive.CreateAsync(ms, 
ZipArchiveMode.Update, leaveOpen: false, entryNameEncoding: null)); + + //non-seekable to update + using (LocalMemoryStream nonReadable = new LocalMemoryStream(), + nonWriteable = new LocalMemoryStream(), + nonSeekable = new LocalMemoryStream()) + { + nonReadable.SetCanRead(false); + nonWriteable.SetCanWrite(false); + nonSeekable.SetCanSeek(false); + + await Assert.ThrowsAsync(() => ZipArchive.CreateAsync(nonReadable, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null)); + await Assert.ThrowsAsync(() => ZipArchive.CreateAsync(nonWriteable, ZipArchiveMode.Create, leaveOpen: false, entryNameEncoding: null)); + await Assert.ThrowsAsync(() => ZipArchive.CreateAsync(nonReadable, ZipArchiveMode.Update, leaveOpen: false, entryNameEncoding: null)); + await Assert.ThrowsAsync(() => ZipArchive.CreateAsync(nonWriteable, ZipArchiveMode.Update, leaveOpen: false, entryNameEncoding: null)); + await Assert.ThrowsAsync(() => ZipArchive.CreateAsync(nonSeekable, ZipArchiveMode.Update, leaveOpen: false, entryNameEncoding: null)); + } + } + [Theory] - [InlineData("LZMA.zip")] - [InlineData("invalidDeflate.zip")] - public static async Task ZipArchiveEntry_InvalidUpdate(string zipname) + [InlineData("LZMA.zip", false)] + [InlineData("LZMA.zip", true)] + [InlineData("invalidDeflate.zip", false)] + [InlineData("invalidDeflate.zip", true)] + public static async Task ZipArchiveEntry_InvalidUpdate(string zipname, bool async) { string filename = bad(zipname); Stream updatedCopy = await StreamHelpers.CreateTempCopyStream(filename); string name; long length, compressedLength; DateTimeOffset lastWriteTime; - using (ZipArchive archive = new ZipArchive(updatedCopy, ZipArchiveMode.Update, true)) - { - ZipArchiveEntry e = archive.Entries[0]; - name = e.FullName; - lastWriteTime = e.LastWriteTime; - length = e.Length; - compressedLength = e.CompressedLength; - Assert.Throws(() => e.Open()); //"Should throw on open" - } + ZipArchive archive = await CreateZipArchive(async, updatedCopy, ZipArchiveMode.Update, true); + ZipArchiveEntry e = archive.Entries[0]; + name = e.FullName; + lastWriteTime = e.LastWriteTime; + length = e.Length; + compressedLength = e.CompressedLength; + await Assert.ThrowsAsync(() => OpenEntryStream(async, e)); //"Should throw on open" + await DisposeZipArchive(async, archive); //make sure that update mode preserves that unreadable file - using (ZipArchive archive = new ZipArchive(updatedCopy, ZipArchiveMode.Update)) - { - ZipArchiveEntry e = archive.Entries[0]; - Assert.Equal(name, e.FullName); //"Name isn't the same" - Assert.Equal(lastWriteTime, e.LastWriteTime); //"LastWriteTime not the same" - Assert.Equal(length, e.Length); //"Length isn't the same" - Assert.Equal(compressedLength, e.CompressedLength); //"CompressedLength isn't the same" - Assert.Throws(() => e.Open()); //"Should throw on open" - } + archive = await CreateZipArchive(async, updatedCopy, ZipArchiveMode.Update); + e = archive.Entries[0]; + Assert.Equal(name, e.FullName); //"Name isn't the same" + Assert.Equal(lastWriteTime, e.LastWriteTime); //"LastWriteTime not the same" + Assert.Equal(length, e.Length); //"Length isn't the same" + Assert.Equal(compressedLength, e.CompressedLength); //"CompressedLength isn't the same" + await Assert.ThrowsAsync(() => OpenEntryStream(async, e)); //"Should throw on open" + await DisposeZipArchive(async, archive); } - [Fact] - public static async Task LargeArchive_DataDescriptor_Read_NonZip64_FileLengthGreaterThanIntMax() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static 
async Task LargeArchive_DataDescriptor_Read_NonZip64_FileLengthGreaterThanIntMax(bool async) { - MemoryStream stream = await LocalMemoryStream.readAppFileAsync(strange("fileLengthGreaterIntLessUInt.zip")); + MemoryStream stream = await LocalMemoryStream.ReadAppFileAsync(strange("fileLengthGreaterIntLessUInt.zip")); - using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read)) - { - ZipArchiveEntry e = archive.GetEntry("large.bin"); + ZipArchive archive = await CreateZipArchive(async, stream, ZipArchiveMode.Read); + ZipArchiveEntry e = archive.GetEntry("large.bin"); - Assert.Equal(3_600_000_000, e.Length); - Assert.Equal(3_499_028, e.CompressedLength); + Assert.Equal(3_600_000_000, e.Length); + Assert.Equal(3_499_028, e.CompressedLength); - using (Stream source = e.Open()) + Stream source = await OpenEntryStream(async, e); + byte[] buffer = new byte[s_bufferSize]; + int read = await source.ReadAsync(buffer, 0, buffer.Length); // We don't want to inflate this large archive entirely + // just making sure it read successfully + Assert.Equal(s_bufferSize, read); + foreach (byte b in buffer) + { + if (b != '0') { - byte[] buffer = new byte[s_bufferSize]; - int read = source.Read(buffer, 0, buffer.Length); // We don't want to inflate this large archive entirely - // just making sure it read successfully - Assert.Equal(s_bufferSize, read); - foreach (byte b in buffer) - { - if (b != '0') - { - Assert.Fail($"The file should be all '0's, but found '{(char)b}'"); - } - } + Assert.Fail($"The file should be all '0's, but found '{(char)b}'"); } } + await DisposeStream(async, source); + + await DisposeZipArchive(async, archive); } - [Fact] - public static async Task ZipArchiveEntry_CorruptedStream_ReadMode_CopyTo_UpToUncompressedSize() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task ZipArchiveEntry_CorruptedStream_ReadMode_CopyTo_UpToUncompressedSize(bool async) { - MemoryStream stream = await LocalMemoryStream.readAppFileAsync(zfile("normal.zip")); + MemoryStream stream = await LocalMemoryStream.ReadAppFileAsync(zfile("normal.zip")); int nameOffset = PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 8); // patch uncompressed size in file header PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 22, nameOffset + s_tamperedFileName.Length); // patch in central directory too - using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read)) + ZipArchive archive = await CreateZipArchive(async, stream, ZipArchiveMode.Read); + + ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); + + using (MemoryStream ms = new MemoryStream()) { - ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); - using (MemoryStream ms = new MemoryStream()) - using (Stream source = e.Open()) - { - source.CopyTo(ms); - Assert.Equal(e.Length, ms.Length); // Only allow to decompress up to uncompressed size - byte[] buffer = new byte[s_bufferSize]; - Assert.Equal(0, source.Read(buffer, 0, buffer.Length)); // shouldn't be able read more - ms.Seek(0, SeekOrigin.Begin); - int read; - while ((read = ms.Read(buffer, 0, buffer.Length)) != 0) - { // No need to do anything, just making sure all bytes readable - } - Assert.Equal(ms.Position, ms.Length); // all bytes must be read + Stream source = await OpenEntryStream(async, e); + + await source.CopyToAsync(ms); + Assert.Equal(e.Length, ms.Length); // Only allow to decompress up to uncompressed size + byte[] buffer = new byte[s_bufferSize]; + Assert.Equal(0, await 
source.ReadAsync(buffer, 0, buffer.Length)); // shouldn't be able read more + ms.Seek(0, SeekOrigin.Begin); + int read; + while ((read = await ms.ReadAsync(buffer, 0, buffer.Length)) != 0) + { // No need to do anything, just making sure all bytes readable } + Assert.Equal(ms.Position, ms.Length); // all bytes must be read + + await DisposeStream(async, source); } + + await DisposeZipArchive(async, archive); } - [Fact] - public static async Task ZipArchiveEntry_CorruptedStream_ReadMode_Read_UpToUncompressedSize() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task ZipArchiveEntry_CorruptedStream_ReadMode_Read_UpToUncompressedSize(bool async) { - MemoryStream stream = await LocalMemoryStream.readAppFileAsync(zfile("normal.zip")); + MemoryStream stream = await LocalMemoryStream.ReadAppFileAsync(zfile("normal.zip")); int nameOffset = PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 8); // patch uncompressed size in file header PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 22, nameOffset + s_tamperedFileName.Length); // patch in central directory too - using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read)) + ZipArchive archive = await CreateZipArchive(async, stream, ZipArchiveMode.Read); + + ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); + using (MemoryStream ms = new MemoryStream()) { - ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); - using (MemoryStream ms = new MemoryStream()) - using (Stream source = e.Open()) + Stream source = await OpenEntryStream(async, e); + + byte[] buffer = new byte[s_bufferSize]; + int read; + while ((read = await source.ReadAsync(buffer, 0, buffer.Length)) != 0) { - byte[] buffer = new byte[s_bufferSize]; - int read; - while ((read = source.Read(buffer, 0, buffer.Length)) != 0) - { - ms.Write(buffer, 0, read); - } - Assert.Equal(e.Length, ms.Length); // Only allow to decompress up to uncompressed size - Assert.Equal(0, source.Read(buffer, 0, s_bufferSize)); // shouldn't be able read more - ms.Seek(0, SeekOrigin.Begin); - while ((read = ms.Read(buffer, 0, buffer.Length)) != 0) - { // No need to do anything, just making sure all bytes readable from output stream - } - Assert.Equal(ms.Position, ms.Length); // all bytes must be read + await ms.WriteAsync(buffer, 0, read); + } + Assert.Equal(e.Length, ms.Length); // Only allow to decompress up to uncompressed size + Assert.Equal(0, await source.ReadAsync(buffer, 0, s_bufferSize)); // shouldn't be able read more + ms.Seek(0, SeekOrigin.Begin); + while ((read = await ms.ReadAsync(buffer, 0, buffer.Length)) != 0) + { // No need to do anything, just making sure all bytes readable from output stream } + Assert.Equal(ms.Position, ms.Length); // all bytes must be read + + await DisposeStream(async, source); } + + await DisposeZipArchive(async, archive); } - [Fact] - public static void ZipArchiveEntry_CorruptedStream_EnsureNoExtraBytesReadOrOverWritten() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task ZipArchiveEntry_CorruptedStream_EnsureNoExtraBytesReadOrOverWritten(bool async) { - MemoryStream stream = populateStream().Result; + MemoryStream stream = PopulateStream().Result; int nameOffset = PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 8); // patch uncompressed size in file header PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 22, nameOffset + s_tamperedFileName.Length); // patch in central 
directory too - using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read)) - { - ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); - using (Stream source = e.Open()) - { - byte[] buffer = new byte[e.Length + 20]; - Array.Fill(buffer, 0xDE); - int read; - int offset = 0; - int length = buffer.Length; + ZipArchive archive = await CreateZipArchive(async, stream, ZipArchiveMode.Read); - while ((read = source.Read(buffer, offset, length)) != 0) - { - offset += read; - length -= read; - } - for (int i = offset; i < buffer.Length; i++) - { - Assert.Equal(0xDE, buffer[i]); - } - } + ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); + Stream source = await OpenEntryStream(async, e); + + byte[] buffer = new byte[e.Length + 20]; + Array.Fill(buffer, 0xDE); + int read; + int offset = 0; + int length = buffer.Length; + + while ((read = await source.ReadAsync(buffer, offset, length)) != 0) + { + offset += read; + length -= read; + } + for (int i = offset; i < buffer.Length; i++) + { + Assert.Equal(0xDE, buffer[i]); } + + await DisposeStream(async, source); + + await DisposeZipArchive(async, archive); } - private static async Task populateStream() + private static async Task PopulateStream() { - return await LocalMemoryStream.readAppFileAsync(zfile("normal.zip")); + return await LocalMemoryStream.ReadAppFileAsync(zfile("normal.zip")); } - [Fact] - public static async Task Zip64ArchiveEntry_CorruptedStream_CopyTo_UpToUncompressedSize() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task Zip64ArchiveEntry_CorruptedStream_CopyTo_UpToUncompressedSize(bool async) { - MemoryStream stream = await LocalMemoryStream.readAppFileAsync(compat("deflate64.zip")); + MemoryStream stream = await LocalMemoryStream.ReadAppFileAsync(compat("deflate64.zip")); int nameOffset = PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 8); // patch uncompressed size in file header PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 22, nameOffset + s_tamperedFileName.Length); // patch in central directory too - using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read)) + ZipArchive archive = await CreateZipArchive(async, stream, ZipArchiveMode.Read); + + ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); + using (var ms = new MemoryStream()) { - ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); - using (var ms = new MemoryStream()) - using (Stream source = e.Open()) - { - source.CopyTo(ms); - Assert.Equal(e.Length, ms.Length); // Only allow to decompress up to uncompressed size - ms.Seek(0, SeekOrigin.Begin); - int read; - byte[] buffer = new byte[s_bufferSize]; - while ((read = ms.Read(buffer, 0, buffer.Length)) != 0) - { // No need to do anything, just making sure all bytes readable - } - Assert.Equal(ms.Position, ms.Length); // all bytes must be read + Stream source = await OpenEntryStream(async, e); + + await source.CopyToAsync(ms); + Assert.Equal(e.Length, ms.Length); // Only allow to decompress up to uncompressed size + ms.Seek(0, SeekOrigin.Begin); + int read; + byte[] buffer = new byte[s_bufferSize]; + while ((read = await ms.ReadAsync(buffer, 0, buffer.Length)) != 0) + { // No need to do anything, just making sure all bytes readable } + Assert.Equal(ms.Position, ms.Length); // all bytes must be read + + await DisposeStream(async, source); } + + await DisposeZipArchive(async, archive); } - [Fact] - public static async Task 
ZipArchiveEntry_CorruptedStream_UnCompressedSizeBiggerThanExpected_NothingShouldBreak() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task ZipArchiveEntry_CorruptedStream_UnCompressedSizeBiggerThanExpected_NothingShouldBreak(bool async) { - MemoryStream stream = await LocalMemoryStream.readAppFileAsync(zfile("normal.zip")); + MemoryStream stream = await LocalMemoryStream.ReadAppFileAsync(zfile("normal.zip")); int nameOffset = PatchDataRelativeToFileNameFillBytes(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 8); // patch uncompressed size in file header PatchDataRelativeToFileNameFillBytes(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 22, nameOffset + s_tamperedFileName.Length); // patch in central directory too - using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read)) + ZipArchive archive = await CreateZipArchive(async, stream, ZipArchiveMode.Read); + + ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); + using (MemoryStream ms = new MemoryStream()) { - ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); - using (MemoryStream ms = new MemoryStream()) - using (Stream source = e.Open()) - { - source.CopyTo(ms); - Assert.True(e.Length > ms.Length); // Even uncompressed size is bigger than decompressed size there should be no error - Assert.True(e.CompressedLength < ms.Length); - } + Stream source = await OpenEntryStream(async, e); + + await source.CopyToAsync(ms); + Assert.True(e.Length > ms.Length); // Even uncompressed size is bigger than decompressed size there should be no error + Assert.True(e.CompressedLength < ms.Length); + + await DisposeStream(async, source); } + + await DisposeZipArchive(async, archive); } - [Fact] - public static async Task Zip64ArchiveEntry_CorruptedFile_Read_UpToUncompressedSize() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task Zip64ArchiveEntry_CorruptedFile_Read_UpToUncompressedSize(bool async) { - MemoryStream stream = await LocalMemoryStream.readAppFileAsync(compat("deflate64.zip")); + MemoryStream stream = await LocalMemoryStream.ReadAppFileAsync(compat("deflate64.zip")); int nameOffset = PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 8); // patch uncompressed size in file header PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 22, nameOffset + s_tamperedFileName.Length); // patch in central directory too - using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read)) + ZipArchive archive = await CreateZipArchive(async, stream, ZipArchiveMode.Read); + + ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); + using (var ms = new MemoryStream()) { - ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); - using (var ms = new MemoryStream()) - using (Stream source = e.Open()) + Stream source = await OpenEntryStream(async, e); + + byte[] buffer = new byte[s_bufferSize]; + int read; + while ((read = await source.ReadAsync(buffer, 0, buffer.Length)) != 0) { - byte[] buffer = new byte[s_bufferSize]; - int read; - while ((read = source.Read(buffer, 0, buffer.Length)) != 0) - { - ms.Write(buffer, 0, read); - } - Assert.Equal(e.Length, ms.Length); // Only allow to decompress up to uncompressed size - Assert.Equal(0, source.Read(buffer, 0, buffer.Length)); // Shouldn't be readable more + await ms.WriteAsync(buffer, 0, read); } + Assert.Equal(e.Length, ms.Length); // Only allow to decompress up to uncompressed size + Assert.Equal(0, await source.ReadAsync(buffer, 0, buffer.Length)); 
// Shouldn't be readable more + + await DisposeStream(async, source); } - } + await DisposeZipArchive(async, archive); + } - [Fact] - public static async Task UnseekableVeryLargeArchive_DataDescriptor_Read_Zip64() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task UnseekableVeryLargeArchive_DataDescriptor_Read_Zip64(bool async) { - MemoryStream stream = await LocalMemoryStream.readAppFileAsync(strange("veryLarge.zip")); + MemoryStream stream = await LocalMemoryStream.ReadAppFileAsync(strange("veryLarge.zip")); - using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read)) - { - ZipArchiveEntry e = archive.GetEntry("bigFile.bin"); + ZipArchive archive = await CreateZipArchive(async, stream, ZipArchiveMode.Read); - Assert.Equal(6_442_450_944, e.Length); - Assert.Equal(6_261_752, e.CompressedLength); + ZipArchiveEntry e = archive.GetEntry("bigFile.bin"); - using (Stream source = e.Open()) - { - byte[] buffer = new byte[s_bufferSize]; - int read = source.Read(buffer, 0, buffer.Length); // We don't want to inflate this large archive entirely - // just making sure it read successfully - Assert.Equal(s_bufferSize, read); - } - } + Assert.Equal(6_442_450_944, e.Length); + Assert.Equal(6_261_752, e.CompressedLength); + + Stream source = await OpenEntryStream(async, e); + + byte[] buffer = new byte[s_bufferSize]; + int read = source.Read(buffer, 0, buffer.Length); // We don't want to inflate this large archive entirely + // just making sure it read successfully + Assert.Equal(s_bufferSize, read); + + await DisposeStream(async, source); + + await DisposeZipArchive(async, archive); } - [Fact] - public static async Task UpdateZipArchive_AppendTo_CorruptedFileEntry() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task UpdateZipArchive_AppendTo_CorruptedFileEntry(bool async) { MemoryStream stream = await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")); int updatedUncompressedLength = 1310976; string append = "\r\n\r\nThe answer my friend, is blowin' in the wind."; byte[] data = Encoding.ASCII.GetBytes(append); - long oldCompressedSize = 0; int nameOffset = PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 8); // patch uncompressed size in file header PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 22, nameOffset + s_tamperedFileName.Length); // patch in central directory too - using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Update, true)) - { - ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); - oldCompressedSize = e.CompressedLength; - using (Stream s = e.Open()) - { - Assert.Equal(updatedUncompressedLength, s.Length); - s.Seek(0, SeekOrigin.End); - s.Write(data, 0, data.Length); - Assert.Equal(updatedUncompressedLength + data.Length, s.Length); - } - } + ZipArchive archive = await CreateZipArchive(async, stream, ZipArchiveMode.Update, true); + + ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); + long oldCompressedSize = e.CompressedLength; + Stream source = await OpenEntryStream(async, e); + + Assert.Equal(updatedUncompressedLength, source.Length); + source.Seek(0, SeekOrigin.End); + source.Write(data, 0, data.Length); + Assert.Equal(updatedUncompressedLength + data.Length, source.Length); + + await DisposeStream(async, source); + + await DisposeZipArchive(async, archive); + + ZipArchive modifiedArchive = await CreateZipArchive(async, stream, ZipArchiveMode.Read); + + e = modifiedArchive.GetEntry(s_tamperedFileName); - using 
(ZipArchive modifiedArchive = new ZipArchive(stream, ZipArchiveMode.Read)) + source = await OpenEntryStream(async, e); + using (var ms = new MemoryStream()) { - ZipArchiveEntry e = modifiedArchive.GetEntry(s_tamperedFileName); - using (Stream s = e.Open()) - using (var ms = new MemoryStream()) - { - await s.CopyToAsync(ms, s_bufferSize); - Assert.Equal(updatedUncompressedLength + data.Length, ms.Length); - ms.Seek(updatedUncompressedLength, SeekOrigin.Begin); - byte[] read = new byte[data.Length]; - ms.Read(read, 0, data.Length); - Assert.Equal(append, Encoding.ASCII.GetString(read)); - } - Assert.True(oldCompressedSize > e.CompressedLength); // old compressed size must be reduced by Uncomressed size limit + await source.CopyToAsync(ms, s_bufferSize); + Assert.Equal(updatedUncompressedLength + data.Length, ms.Length); + ms.Seek(updatedUncompressedLength, SeekOrigin.Begin); + byte[] read = new byte[data.Length]; + await ms.ReadAsync(read, 0, data.Length); + Assert.Equal(append, Encoding.ASCII.GetString(read)); } + await DisposeStream(async, source); + Assert.True(oldCompressedSize > e.CompressedLength); // old compressed size must be reduced by Uncomressed size limit + + await DisposeZipArchive(async, archive); } - [Fact] - public static async Task UpdateZipArchive_OverwriteCorruptedEntry() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task UpdateZipArchive_OverwriteCorruptedEntry(bool async) { MemoryStream stream = await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")); int updatedUncompressedLength = 1310976; @@ -406,37 +492,44 @@ public static async Task UpdateZipArchive_OverwriteCorruptedEntry() int nameOffset = PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 8); // patch uncompressed size in file header PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 22, nameOffset + s_tamperedFileName.Length); // patch in central directory too - using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Update, true)) + ZipArchive archive = await CreateZipArchive(async, stream, ZipArchiveMode.Update, true); + + ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); + string fileName = zmodified(Path.Combine("overwrite", "first.txt")); + var file = FileData.GetFile(fileName); + + using (var ms = new MemoryStream(data)) { - ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName); - string fileName = zmodified(Path.Combine("overwrite", "first.txt")); - var file = FileData.GetFile(fileName); + Stream es = await OpenEntryStream(async, e); - using (var s = new MemoryStream(data)) - using (Stream es = e.Open()) - { - Assert.Equal(updatedUncompressedLength, es.Length); - es.SetLength(0); - await s.CopyToAsync(es, s_bufferSize); - Assert.Equal(data.Length, es.Length); - } + Assert.Equal(updatedUncompressedLength, es.Length); + es.SetLength(0); + await ms.CopyToAsync(es, s_bufferSize); + Assert.Equal(data.Length, es.Length); + + await DisposeStream(async, es); } - using (ZipArchive modifiedArchive = new ZipArchive(stream, ZipArchiveMode.Read)) + await DisposeZipArchive(async, archive); + + ZipArchive modifiedArchive = await CreateZipArchive(async, stream, ZipArchiveMode.Read); + + e = modifiedArchive.GetEntry(s_tamperedFileName); + Stream s = await OpenEntryStream(async, e); + using (var ms = new MemoryStream()) { - ZipArchiveEntry e = modifiedArchive.GetEntry(s_tamperedFileName); - using (Stream s = e.Open()) - using (var ms = new MemoryStream()) - { - await s.CopyToAsync(ms, s_bufferSize); - 
Assert.Equal(data.Length, ms.Length); - Assert.Equal(overwrite, Encoding.ASCII.GetString(ms.GetBuffer(), 0, data.Length)); - } + await s.CopyToAsync(ms, s_bufferSize); + Assert.Equal(data.Length, ms.Length); + Assert.Equal(overwrite, Encoding.ASCII.GetString(ms.GetBuffer(), 0, data.Length)); } + await DisposeStream(async, s); + + await DisposeZipArchive(async, archive); } - [Fact] - public static async Task UpdateZipArchive_AddFileTo_ZipWithCorruptedFile() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task UpdateZipArchive_AddFileTo_ZipWithCorruptedFile(bool async) { string addingFile = "added.txt"; MemoryStream stream = await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")); @@ -445,43 +538,46 @@ public static async Task UpdateZipArchive_AddFileTo_ZipWithCorruptedFile() int nameOffset = PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 8); // patch uncompressed size in file header PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 22, nameOffset + s_tamperedFileName.Length); // patch in central directory too - using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Update, true)) - { - ZipArchiveEntry e = archive.CreateEntry(addingFile); - using (Stream es = e.Open()) - { - file.CopyTo(es); - } - } + ZipArchive archive = await CreateZipArchive(async, stream, ZipArchiveMode.Update, true); + + ZipArchiveEntry e = archive.CreateEntry(addingFile); + + Stream es = await OpenEntryStream(async, e); + await file.CopyToAsync(es); + await DisposeStream(async, es); + + await DisposeZipArchive(async, archive); - using (ZipArchive modifiedArchive = new ZipArchive(stream, ZipArchiveMode.Read)) + ZipArchive modifiedArchive = await CreateZipArchive(async, stream, ZipArchiveMode.Read); + + e = modifiedArchive.GetEntry(s_tamperedFileName); + Stream s = await OpenEntryStream(async, e); + using (var ms = new MemoryStream()) { - ZipArchiveEntry e = modifiedArchive.GetEntry(s_tamperedFileName); - using (Stream s = e.Open()) - using (var ms = new MemoryStream()) - { - await s.CopyToAsync(ms, s_bufferSize); - Assert.Equal(e.Length, ms.Length); // tampered file should read up to uncompressed size - } + await s.CopyToAsync(ms, s_bufferSize); + Assert.Equal(e.Length, ms.Length); // tampered file should read up to uncompressed size + } + await DisposeStream(async, s); - ZipArchiveEntry addedEntry = modifiedArchive.GetEntry(addingFile); - Assert.NotNull(addedEntry); - Assert.Equal(addedEntry.Length, file.Length); + ZipArchiveEntry addedEntry = modifiedArchive.GetEntry(addingFile); + Assert.NotNull(addedEntry); + Assert.Equal(addedEntry.Length, file.Length); - using (Stream s = addedEntry.Open()) - { // Make sure file content added correctly - int read = 0; - byte[] buffer1 = new byte[1024]; - byte[] buffer2 = new byte[1024]; - file.Seek(0, SeekOrigin.Begin); + s = await OpenEntryStream(async, addedEntry); + // Make sure file content added correctly + byte[] buffer1 = new byte[1024]; + byte[] buffer2 = new byte[1024]; + file.Seek(0, SeekOrigin.Begin); - while ((read = s.Read(buffer1, 0, buffer1.Length)) != 0 ) - { - file.Read(buffer2, 0, buffer2.Length); - Assert.Equal(buffer1, buffer2); - } - } + while (await s.ReadAsync(buffer1, 0, buffer1.Length) != 0 ) + { + await file.ReadAsync(buffer2, 0, buffer2.Length); + Assert.Equal(buffer1, buffer2); } + + await DisposeStream(async, s); + + await DisposeZipArchive(async, archive); } private static int PatchDataRelativeToFileName(byte[] fileNameInBytes, MemoryStream 
packageStream, int distance, int start = 0)
@@ -532,119 +628,151 @@ private static int FindSequenceIndex(byte[] searchItem, byte[] whereToSearch, in
         }
 
         [Theory]
-        [InlineData("CDoffsetOutOfBounds.zip")]
-        [InlineData("EOCDmissing.zip")]
-        public static async Task ZipArchive_InvalidStream(string zipname)
+        [InlineData("CDoffsetOutOfBounds.zip", false)]
+        [InlineData("CDoffsetOutOfBounds.zip", true)]
+        [InlineData("EOCDmissing.zip", false)]
+        [InlineData("EOCDmissing.zip", true)]
+        public static async Task ZipArchive_InvalidStream(string zipname, bool async)
         {
             string filename = bad(zipname);
             using (var stream = await StreamHelpers.CreateTempCopyStream(filename))
-                Assert.Throws<InvalidDataException>(() => new ZipArchive(stream, ZipArchiveMode.Read));
+            {
+                await Assert.ThrowsAsync<InvalidDataException>(() => CreateZipArchive(async, stream, ZipArchiveMode.Read));
+            }
         }
 
         [Theory]
-        [InlineData("CDoffsetInBoundsWrong.zip")]
-        [InlineData("numberOfEntriesDifferent.zip")]
-        public static async Task ZipArchive_InvalidEntryTable(string zipname)
+        [InlineData("CDoffsetInBoundsWrong.zip", false)]
+        [InlineData("CDoffsetInBoundsWrong.zip", true)]
+        [InlineData("numberOfEntriesDifferent.zip", false)]
+        [InlineData("numberOfEntriesDifferent.zip", true)]
+        public static async Task ZipArchive_InvalidEntryTable(string zipname, bool async)
         {
             string filename = bad(zipname);
-            using (ZipArchive archive = new ZipArchive(await StreamHelpers.CreateTempCopyStream(filename), ZipArchiveMode.Read))
+            await using (ZipArchive archive = await CreateZipArchive(async, await StreamHelpers.CreateTempCopyStream(filename), ZipArchiveMode.Read))
+            {
                 Assert.Throws<InvalidDataException>(() => archive.Entries[0]);
+            }
+        }
+
+        public static IEnumerable<object[]> Get_ZipArchive_InvalidEntry_Data()
+        {
+            foreach (bool async in _bools)
+            {
+                yield return new object[] { "compressedSizeOutOfBounds.zip", true, async };
+                yield return new object[] { "localFileHeaderSignatureWrong.zip", true, async };
+                yield return new object[] { "localFileOffsetOutOfBounds.zip", true, async };
+                yield return new object[] { "LZMA.zip", true, async };
+                yield return new object[] { "invalidDeflate.zip", false, async };
+            }
         }
 
         [Theory]
-        [InlineData("compressedSizeOutOfBounds.zip", true)]
-        [InlineData("localFileHeaderSignatureWrong.zip", true)]
-        [InlineData("localFileOffsetOutOfBounds.zip", true)]
-        [InlineData("LZMA.zip", true)]
-        [InlineData("invalidDeflate.zip", false)]
-        public static async Task ZipArchive_InvalidEntry(string zipname, bool throwsOnOpen)
+        [MemberData(nameof(Get_ZipArchive_InvalidEntry_Data))]
+        public static async Task ZipArchive_InvalidEntry(string zipname, bool throwsOnOpen, bool async)
        {
             string filename = bad(zipname);
-            using (ZipArchive archive = new ZipArchive(await StreamHelpers.CreateTempCopyStream(filename), ZipArchiveMode.Read))
+            ZipArchive archive = await CreateZipArchive(async, await StreamHelpers.CreateTempCopyStream(filename), ZipArchiveMode.Read);
+
+            ZipArchiveEntry e = archive.Entries[0];
+            if (throwsOnOpen)
             {
-                ZipArchiveEntry e = archive.Entries[0];
-                if (throwsOnOpen)
-                {
-                    Assert.Throws<InvalidDataException>(() => e.Open()); //"should throw on open"
-                }
-                else
-                {
-                    using (Stream s = e.Open())
-                    {
-                        Assert.Throws<InvalidDataException>(() => s.ReadByte()); //"Unreadable stream"
-                    }
-                }
+                await Assert.ThrowsAsync<InvalidDataException>(() => OpenEntryStream(async, e)); //"should throw on open"
             }
+            else
+            {
+                Stream s = await OpenEntryStream(async, e);
+                Assert.Throws<InvalidDataException>(() => s.ReadByte()); //"Unreadable stream"
+                await DisposeStream(async, s);
+            }
+
+            await DisposeZipArchive(async, archive);
         }
 
-        [Fact]
-        public static async Task ZipArchiveEntry_InvalidLastWriteTime_Read()
+        [Theory]
+        [MemberData(nameof(Get_Booleans_Data))]
+        public static async Task ZipArchiveEntry_InvalidLastWriteTime_Read(bool async)
+        {
+            ZipArchive archive = await CreateZipArchive(async, await StreamHelpers.CreateTempCopyStream(
+                bad("invaliddate.zip")), ZipArchiveMode.Read);
+            Assert.Equal(new DateTime(1980, 1, 1, 0, 0, 0), archive.Entries[0].LastWriteTime.DateTime); //"Date isn't correct on invalid date"
+            await DisposeZipArchive(async, archive);
+        }
+
+        [Theory]
+        [MemberData(nameof(Get_Booleans_Data))]
+        public static async Task ZipArchiveEntry_InvalidLastWriteTime_Write(bool async)
         {
-            using (ZipArchive archive = new ZipArchive(await StreamHelpers.CreateTempCopyStream(
-                bad("invaliddate.zip")), ZipArchiveMode.Read))
+            ZipArchive archive = await CreateZipArchive(async, new MemoryStream(), ZipArchiveMode.Create);
+
+            ZipArchiveEntry entry = archive.CreateEntry("test");
+            Assert.Throws<ArgumentOutOfRangeException>(() =>
             {
-                Assert.Equal(new DateTime(1980, 1, 1, 0, 0, 0), archive.Entries[0].LastWriteTime.DateTime); //"Date isn't correct on invalid date"
-            }
+                //"should throw on bad date"
+                entry.LastWriteTime = new DateTimeOffset(1979, 12, 3, 5, 6, 2, new TimeSpan());
+            });
+            Assert.Throws<ArgumentOutOfRangeException>(() =>
+            {
+                //"Should throw on bad date"
+                entry.LastWriteTime = new DateTimeOffset(2980, 12, 3, 5, 6, 2, new TimeSpan());
+            });
+
+            await DisposeZipArchive(async, archive);
         }
 
-        [Fact]
-        public static void ZipArchiveEntry_InvalidLastWriteTime_Write()
+        public static IEnumerable<object[]> Get_StrangeFiles_Data()
         {
-            using (ZipArchive archive = new ZipArchive(new MemoryStream(), ZipArchiveMode.Create))
+            foreach (bool async in _bools)
             {
-                ZipArchiveEntry entry = archive.CreateEntry("test");
-                Assert.Throws<ArgumentOutOfRangeException>(() =>
-                {
-                    //"should throw on bad date"
-                    entry.LastWriteTime = new DateTimeOffset(1979, 12, 3, 5, 6, 2, new TimeSpan());
-                });
-                Assert.Throws<ArgumentOutOfRangeException>(() =>
-                {
-                    //"Should throw on bad date"
-                    entry.LastWriteTime = new DateTimeOffset(2980, 12, 3, 5, 6, 2, new TimeSpan());
+                yield return new object[] { "extradata/extraDataLHandCDentryAndArchiveComments.zip", "verysmall", true, async };
+                yield return new object[] { "extradata/extraDataThenZip64.zip", "verysmall", true, async };
+                yield return new object[] { "extradata/zip64ThenExtraData.zip", "verysmall", true, async };
+                yield return new object[] { "dataDescriptor.zip", "normalWithoutBinary", false, async };
+                yield return new object[] { "filenameTimeAndSizesDifferentInLH.zip", "verysmall", false, async };
             }
         }
 
         [Theory]
-        [InlineData("extradata/extraDataLHandCDentryAndArchiveComments.zip", "verysmall", true)]
-        [InlineData("extradata/extraDataThenZip64.zip", "verysmall", true)]
-        [InlineData("extradata/zip64ThenExtraData.zip", "verysmall", true)]
-        [InlineData("dataDescriptor.zip", "normalWithoutBinary", false)]
-        [InlineData("filenameTimeAndSizesDifferentInLH.zip", "verysmall", false)]
-        public static async Task StrangeFiles(string zipFile, string zipFolder, bool requireExplicit)
+        [MemberData(nameof(Get_StrangeFiles_Data))]
+        public static async Task StrangeFiles(string zipFile, string zipFolder, bool requireExplicit, bool async)
         {
-            IsZipSameAsDir(await StreamHelpers.CreateTempCopyStream(strange(zipFile)), zfolder(zipFolder), ZipArchiveMode.Update, requireExplicit, checkTimes: true);
+            MemoryStream stream = await StreamHelpers.CreateTempCopyStream(strange(zipFile));
+            await IsZipSameAsDir(stream, zfolder(zipFolder), ZipArchiveMode.Update, requireExplicit, checkTimes: true, async);
         }
 
         ///
         /// This test tiptoes the buffer boundaries to ensure
that the size of a read buffer doesn't /// cause any bytes to be left in ZLib's buffer. /// - [Fact] - public static void ZipWithLargeSparseFile() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task ZipWithLargeSparseFile(bool async) { string zipname = strange("largetrailingwhitespacedeflation.zip"); string entryname = "A/B/C/D"; using (FileStream stream = File.Open(zipname, FileMode.Open, FileAccess.Read)) - using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read)) { + ZipArchive archive = await CreateZipArchive(async, stream, ZipArchiveMode.Read); + ZipArchiveEntry entry = archive.GetEntry(entryname); long size = entry.Length; for (int bufferSize = 1; bufferSize <= size; bufferSize++) { - using (Stream entryStream = entry.Open()) + Stream entryStream = await OpenEntryStream(async, entry); + + byte[] b = new byte[bufferSize]; + int read = 0, count = 0; + while ((read = await entryStream.ReadAsync(b, 0, bufferSize)) > 0) { - byte[] b = new byte[bufferSize]; - int read = 0, count = 0; - while ((read = entryStream.Read(b, 0, bufferSize)) > 0) - { - count += read; - } - Assert.Equal(size, count); + count += read; } + Assert.Equal(size, count); + + await DisposeStream(async, entryStream); } + + await DisposeZipArchive(async, archive); } } @@ -673,11 +801,15 @@ public static void ZipWithLargeSparseFile() 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x44, 0x00, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x00, 0x00 }; - public static IEnumerable EmptyFiles = new List() + + public static IEnumerable EmptyFiles() { - new object[] { s_emptyFileCompressedWithEtx }, - new object[] { s_emptyFileCompressedWrongSize } - }; + foreach (bool async in _bools) + { + yield return new object[] { s_emptyFileCompressedWithEtx, async }; + yield return new object[] { s_emptyFileCompressedWrongSize, async }; + } + } /// /// This test checks behavior of ZipArchive with unexpected zip files: @@ -691,7 +823,7 @@ public static void ZipWithLargeSparseFile() /// [Theory] [MemberData(nameof(EmptyFiles))] - public void ReadArchive_WithEmptyDeflatedFile(byte[] fileBytes) + public async Task ReadArchive_WithEmptyDeflatedFile(byte[] fileBytes, bool async) { using (var testStream = new MemoryStream(fileBytes)) { @@ -700,10 +832,9 @@ public void ReadArchive_WithEmptyDeflatedFile(byte[] fileBytes) byte firstEntryCompressionMethod = fileBytes[8]; // first attempt: open archive with zero-length file that is compressed (Deflate = 0x8) - using (var zip = new ZipArchive(testStream, ZipArchiveMode.Update, leaveOpen: true)) - { - // dispose without making any changes will make no changes to the input stream - } + ZipArchive zip = await CreateZipArchive(async, testStream, ZipArchiveMode.Update, leaveOpen: true); + // dispose without making any changes will make no changes to the input stream + await DisposeZipArchive(async, zip); byte[] fileContent = testStream.ToArray(); @@ -712,11 +843,13 @@ public void ReadArchive_WithEmptyDeflatedFile(byte[] fileBytes) testStream.Seek(0, SeekOrigin.Begin); // second attempt: open archive with zero-length file that is compressed (Deflate = 0x8) - using (var zip = new ZipArchive(testStream, ZipArchiveMode.Update, leaveOpen: true)) - using (var zipEntryStream = zip.Entries[0].Open()) - { - // dispose after opening an entry will rewrite the archive - } + zip = await CreateZipArchive(async, testStream, ZipArchiveMode.Update, leaveOpen: true); + + var zipEntryStream = await OpenEntryStream(async, zip.Entries[0]); + // dispose after opening an entry will rewrite the 
archive + await DisposeStream(async, zipEntryStream); + + await DisposeZipArchive(async, zip); fileContent = testStream.ToArray(); @@ -724,16 +857,16 @@ public void ReadArchive_WithEmptyDeflatedFile(byte[] fileBytes) Assert.Equal(0, fileContent[8]); // extract and check the file. should stay empty. - using (var zip = new ZipArchive(testStream, ZipArchiveMode.Update)) - { - ZipArchiveEntry entry = zip.GetEntry(ExpectedFileName); - Assert.Equal(0, entry.Length); - Assert.Equal(0, entry.CompressedLength); - using (Stream entryStream = entry.Open()) - { - Assert.Equal(0, entryStream.Length); - } - } + zip = await CreateZipArchive(async, testStream, ZipArchiveMode.Update); + + ZipArchiveEntry entry = zip.GetEntry(ExpectedFileName); + Assert.Equal(0, entry.Length); + Assert.Equal(0, entry.CompressedLength); + Stream entryStream = await OpenEntryStream(async, entry); + Assert.Equal(0, entryStream.Length); + await DisposeStream(async, entryStream); + + await DisposeZipArchive(async, zip); } } @@ -743,84 +876,88 @@ public void ReadArchive_WithEmptyDeflatedFile(byte[] fileBytes) /// Appends 64KB of garbage at the end of the file. Verifies we throw. /// Prepends 64KB of garbage at the beginning of the file. Verifies we throw. /// - [Fact] - public static void ReadArchive_WithEOCDComment_TrailingPrecedingGarbage() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task ReadArchive_WithEOCDComment_TrailingPrecedingGarbage(bool async) { - void InsertEntry(ZipArchive archive, string name, string contents) + async Task InsertEntry(ZipArchive archive, string name, string contents, bool async) { ZipArchiveEntry entry = archive.CreateEntry(name); - using (StreamWriter writer = new StreamWriter(entry.Open())) + Stream s = await OpenEntryStream(async, entry); + using (StreamWriter writer = new StreamWriter(s)) { writer.WriteLine(contents); } + await DisposeStream(async, s); } - int GetEntryContentsLength(ZipArchiveEntry entry) + async Task GetEntryContentsLength(ZipArchiveEntry entry, bool async) { int length = 0; - using (Stream stream = entry.Open()) + Stream stream = await OpenEntryStream(async, entry); + using (var reader = new StreamReader(stream)) { - using (var reader = new StreamReader(stream)) - { - length = reader.ReadToEnd().Length; - } + length = reader.ReadToEnd().Length; } + await DisposeStream(async, stream); return length; } - void VerifyValidEntry(ZipArchiveEntry entry, string expectedName, int expectedMinLength) + async Task VerifyValidEntry(ZipArchiveEntry entry, string expectedName, int expectedMinLength, bool async) { Assert.NotNull(entry); Assert.Equal(expectedName, entry.Name); // The file has a few more bytes, but should be at least as large as its contents - Assert.True(GetEntryContentsLength(entry) >= expectedMinLength); + Assert.True(await GetEntryContentsLength(entry, async) >= expectedMinLength); } string name0 = "huge0.txt"; string name1 = "huge1.txt"; string str64KB = new string('x', ushort.MaxValue); - byte[] byte64KB = Text.Encoding.ASCII.GetBytes(str64KB); + byte[] byte64KB = Encoding.ASCII.GetBytes(str64KB); // Open empty file with 64KB EOCD comment string path = strange("extradata/emptyWith64KBComment.zip"); - using (MemoryStream archiveStream = StreamHelpers.CreateTempCopyStream(path).Result) + using (MemoryStream archiveStream = await StreamHelpers.CreateTempCopyStream(path)) { // Insert 2 64KB txt entries - using (ZipArchive archive = new ZipArchive(archiveStream, ZipArchiveMode.Update, leaveOpen: true)) - { - InsertEntry(archive, name0, str64KB); 
- InsertEntry(archive, name1, str64KB); - } + ZipArchive archive = await CreateZipArchive(async, archiveStream, ZipArchiveMode.Update, leaveOpen: true); + + await InsertEntry(archive, name0, str64KB, async); + await InsertEntry(archive, name1, str64KB, async); + + await DisposeZipArchive(async, archive); // Open and verify items archiveStream.Seek(0, SeekOrigin.Begin); - using (ZipArchive archive = new ZipArchive(archiveStream, ZipArchiveMode.Read, leaveOpen: true)) - { - Assert.Equal(2, archive.Entries.Count); - VerifyValidEntry(archive.Entries[0], name0, ushort.MaxValue); - VerifyValidEntry(archive.Entries[1], name1, ushort.MaxValue); - } + archive = await CreateZipArchive(async, archiveStream, ZipArchiveMode.Read, leaveOpen: true); + + Assert.Equal(2, archive.Entries.Count); + await VerifyValidEntry(archive.Entries[0], name0, ushort.MaxValue, async); + await VerifyValidEntry(archive.Entries[1], name1, ushort.MaxValue, async); + + await DisposeZipArchive(async, archive); // Append 64KB of garbage archiveStream.Seek(0, SeekOrigin.End); - archiveStream.Write(byte64KB, 0, byte64KB.Length); + await archiveStream.WriteAsync(byte64KB, 0, byte64KB.Length); // Open should not be possible because we can't find the EOCD in the max search length from the end - Assert.Throws(() => + await Assert.ThrowsAsync(async () => { - ZipArchive archive = new ZipArchive(archiveStream, ZipArchiveMode.Read, leaveOpen: true); + ZipArchive archive = await CreateZipArchive(async, archiveStream, ZipArchiveMode.Read, leaveOpen: true); }); // Create stream with 64KB of prepended garbage, then the above stream appended // Attempting to create a ZipArchive should fail: no EOCD found using (MemoryStream prependStream = new MemoryStream()) { - prependStream.Write(byte64KB, 0, byte64KB.Length); + await prependStream.WriteAsync(byte64KB, 0, byte64KB.Length); archiveStream.WriteTo(prependStream); - Assert.Throws(() => + await Assert.ThrowsAsync(async () => { - ZipArchive archive = new ZipArchive(prependStream, ZipArchiveMode.Read); + ZipArchive archive = await CreateZipArchive(async, prependStream, ZipArchiveMode.Read); }); } } @@ -832,83 +969,106 @@ void VerifyValidEntry(ZipArchiveEntry entry, string expectedName, int expectedMi /// Although this contravenes the Zip spec, such files are created by common tools and are successfully read by Python, Go and Rust, and /// 7zip (albeit with a warning) /// - [Fact] - public void ReadArchive_WithUnexpectedZip64ExtraFieldSize() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task ReadArchive_WithUnexpectedZip64ExtraFieldSize(bool async) { - using ZipArchive archive = new (new MemoryStream(s_slightlyIncorrectZip64)); + ZipArchive archive = await CreateZipArchive(async, new MemoryStream(s_slightlyIncorrectZip64), ZipArchiveMode.Read); ZipArchiveEntry entry = archive.GetEntry("file.txt"); Assert.Equal(4, entry.Length); Assert.Equal(6, entry.CompressedLength); - using var stream = entry.Open(); - using StreamReader reader = new (stream); - string text = reader.ReadToEnd(); + + Stream stream = await OpenEntryStream(async, entry); + string text; + using (StreamReader reader = new(stream)) + { + text = await reader.ReadToEndAsync(); + } + await DisposeStream(async, stream); + Assert.Equal("test", text); + await DisposeZipArchive(async, archive); } /// /// As above, but the compressed size in the central directory record is less than 0xFFFFFFFF so the value in that location /// should be used instead of in the Zip64 extra field. 
/// - [Fact] - public void ReadArchive_WithUnexpectedZip64ExtraFieldSizeCompressedSizeIn32Bit() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task ReadArchive_WithUnexpectedZip64ExtraFieldSizeCompressedSizeIn32Bit(bool async) { byte[] input = (byte[])s_slightlyIncorrectZip64.Clone(); BinaryPrimitives.WriteInt32LittleEndian(input.AsSpan(120), 9); // change 32-bit compressed size from -1 - using var archive = new ZipArchive(new MemoryStream(input)); + ZipArchive archive = await CreateZipArchive(async, new MemoryStream(input), ZipArchiveMode.Read); ZipArchiveEntry entry = archive.GetEntry("file.txt"); Assert.Equal(4, entry.Length); Assert.Equal(9, entry.CompressedLength); // it should have used 32-bit size + await DisposeZipArchive(async, archive); } /// /// As above, but the uncompressed size in the central directory record is less than 0xFFFFFFFF so the value in that location /// should be used instead of in the Zip64 extra field. /// - [Fact] - public void ReadArchive_WithUnexpectedZip64ExtraFieldSizeUncompressedSizeIn32Bit() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task ReadArchive_WithUnexpectedZip64ExtraFieldSizeUncompressedSizeIn32Bit(bool async) { byte[] input = (byte[])s_slightlyIncorrectZip64.Clone(); BinaryPrimitives.WriteInt32LittleEndian(input.AsSpan(124), 9); // change 32-bit uncompressed size from -1 - using var archive = new ZipArchive(new MemoryStream(input)); + ZipArchive archive = await CreateZipArchive(async, new MemoryStream(input), ZipArchiveMode.Read); ZipArchiveEntry entry = archive.GetEntry("file.txt"); Assert.Equal(9, entry.Length); Assert.Equal(6, entry.CompressedLength); // it should have used 32-bit size + await DisposeZipArchive(async, archive); } /// /// This test checks behavior of ZipArchive when the startDiskNumber in the extraField is greater than IntMax /// - [Fact] - public void ReadArchive_WithDiskStartNumberGreaterThanIntMax() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task ReadArchive_WithDiskStartNumberGreaterThanIntMax(bool async) { byte[] input = (byte[])s_zip64WithBigStartDiskNumber.Clone(); - using var archive = new ZipArchive(new MemoryStream(input)); - + ZipArchive archive = await CreateZipArchive(async, new MemoryStream(input), ZipArchiveMode.Read); var exception = Record.Exception(() => archive.Entries.First()); - Assert.Null(exception); + await DisposeZipArchive(async, archive); } /// /// This test checks that an InvalidDataException will be thrown when consuming a zip with bad Huffman data. 
/// - [Fact] - public static async Task ZipArchive_InvalidHuffmanData() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task ZipArchive_InvalidHuffmanData(bool async) { string filename = bad("HuffmanTreeException.zip"); - using (ZipArchive archive = new ZipArchive(await StreamHelpers.CreateTempCopyStream(filename), ZipArchiveMode.Read)) + ZipArchive archive = await CreateZipArchive(async, await StreamHelpers.CreateTempCopyStream(filename), ZipArchiveMode.Read); + + ZipArchiveEntry e = archive.Entries[0]; + using (MemoryStream ms = new MemoryStream()) + using (Stream s = await OpenEntryStream(async, e)) { - ZipArchiveEntry e = archive.Entries[0]; - using (MemoryStream ms = new MemoryStream()) - using (Stream s = e.Open()) + //"Should throw on creating Huffman tree" + if (async) + { + await Assert.ThrowsAsync(() => s.CopyToAsync(ms)); + } + else { - Assert.Throws(() => s.CopyTo(ms)); //"Should throw on creating Huffman tree" + Assert.Throws(() => s.CopyTo(ms)); } } + + await DisposeZipArchive(async, archive); } + [Fact] public static void ZipArchive_InvalidVersionToExtract() { @@ -1008,6 +1168,105 @@ public static void ZipArchive_InvalidVersionToExtract() } } + [Fact] + public static async Task ZipArchive_InvalidVersionToExtract_Async() + { + await using (MemoryStream updatedStream = new MemoryStream()) + { + int originalLocalVersionToExtract = s_inconsistentVersionToExtract[4]; + int originalCentralDirectoryVersionToExtract = s_inconsistentVersionToExtract[57]; + + // The existing archive will have a "version to extract" of 0.0, but will contain entries + // with deflate compression (which has a minimum version to extract of 2.0.) + Assert.Equal(0x00, originalLocalVersionToExtract); + Assert.Equal(0x00, originalCentralDirectoryVersionToExtract); + + // Write the example data to the stream. We expect to be able to read it (and the entry contents) successfully. + await updatedStream.WriteAsync(s_inconsistentVersionToExtract); + updatedStream.Seek(0, SeekOrigin.Begin); + + await using (ZipArchive originalArchive = await ZipArchive.CreateAsync(updatedStream, ZipArchiveMode.Read, leaveOpen: true, entryNameEncoding: null)) + { + Assert.Equal(1, originalArchive.Entries.Count); + + ZipArchiveEntry firstEntry = originalArchive.Entries[0]; + + Assert.Equal("first.bin", firstEntry.Name); + Assert.Equal(s_existingSampleData.Length, firstEntry.Length); + + await using (Stream entryStream = await firstEntry.OpenAsync()) + { + byte[] uncompressedBytes = new byte[firstEntry.Length]; + int bytesRead = await entryStream.ReadAsync(uncompressedBytes); + + Assert.Equal(s_existingSampleData.Length, bytesRead); + Assert.Equal(s_existingSampleData, uncompressedBytes); + } + } + + updatedStream.Seek(0, SeekOrigin.Begin); + + // Create a new entry, forcing the central directory headers to be rewritten. The local file header + // for first.bin would normally be skipped (because it hasn't changed) but it needs to be rewritten + // because the central directory headers will be rewritten with a valid value and the local file header + // needs to match. 
+ await using (ZipArchive updatedArchive = await ZipArchive.CreateAsync(updatedStream, ZipArchiveMode.Update, leaveOpen: true, entryNameEncoding: null)) + { + ZipArchiveEntry newEntry = updatedArchive.CreateEntry("second.bin", CompressionLevel.NoCompression); + + // Add data to the new entry + await using (Stream entryStream = await newEntry.OpenAsync()) + { + await entryStream.WriteAsync(s_sampleDataToWrite); + } + } + + byte[] updatedContents = updatedStream.ToArray(); + // Verify that the local file header and the central directory headers have both been rewritten, and both have + // the correct value. + int updatedLocalVersionToExtract = updatedContents[4]; + int updatedCentralDirectoryVersionToExtract = updatedContents[101]; + + Assert.Equal(20, updatedCentralDirectoryVersionToExtract); + Assert.Equal(20, updatedLocalVersionToExtract); + + updatedStream.Seek(0, SeekOrigin.Begin); + // Following an update of the ZipArchive, reopen it in read-only mode. Make sure that both entries are correct. + + await using (ZipArchive updatedArchive = await ZipArchive.CreateAsync(updatedStream, ZipArchiveMode.Read, true, entryNameEncoding: null)) + { + Assert.Equal(2, updatedArchive.Entries.Count); + + ZipArchiveEntry firstEntry = updatedArchive.Entries[0]; + ZipArchiveEntry secondEntry = updatedArchive.Entries[1]; + + Assert.Equal("first.bin", firstEntry.Name); + Assert.Equal(s_existingSampleData.Length, firstEntry.Length); + + Assert.Equal("second.bin", secondEntry.Name); + Assert.Equal(s_sampleDataToWrite.Length, secondEntry.Length); + + await using (Stream entryStream = await firstEntry.OpenAsync()) + { + byte[] uncompressedBytes = new byte[firstEntry.Length]; + int bytesRead = await entryStream.ReadAsync(uncompressedBytes); + + Assert.Equal(s_existingSampleData.Length, bytesRead); + Assert.Equal(s_existingSampleData, uncompressedBytes); + } + + await using (Stream entryStream = await secondEntry.OpenAsync()) + { + byte[] uncompressedBytes = new byte[secondEntry.Length]; + int bytesRead = await entryStream.ReadAsync(uncompressedBytes); + + Assert.Equal(s_sampleDataToWrite.Length, bytesRead); + Assert.Equal(s_sampleDataToWrite, uncompressedBytes); + } + } + } + } + public static IEnumerable ZipArchive_InvalidExtraFieldData_Data() { // Parameter 1 is the version to extract. Parameter 2 is the total number of "extra data" bytes. @@ -1126,6 +1385,285 @@ public void ZipArchive_InvalidExtraFieldData(byte validVersionToExtract, ushort } } + [Theory] + [MemberData(nameof(ZipArchive_InvalidExtraFieldData_Data))] + public async Task ZipArchive_InvalidExtraFieldData_Async(byte validVersionToExtract, ushort extraFieldDataLength) + { + byte[] invalidExtraFieldData = GenerateInvalidExtraFieldData(validVersionToExtract, extraFieldDataLength, + out int lhOffset, out int cdOffset); + + await using MemoryStream updatedStream = new MemoryStream(); + + // Write the example data to the stream. We expect to be able to read it (and the entry contents) successfully. 
+ await updatedStream.WriteAsync(invalidExtraFieldData); + updatedStream.Seek(0, SeekOrigin.Begin); + + await using (ZipArchive originalArchive = await ZipArchive.CreateAsync(updatedStream, ZipArchiveMode.Read, leaveOpen: true, entryNameEncoding: null)) + { + Assert.Equal(1, originalArchive.Entries.Count); + + ZipArchiveEntry firstEntry = originalArchive.Entries[0]; + + Assert.Equal("first.bin", firstEntry.Name); + Assert.Equal(s_existingSampleData.Length, firstEntry.Length); + + await using (Stream entryStream = await firstEntry.OpenAsync()) + { + byte[] uncompressedBytes = new byte[firstEntry.Length]; + int bytesRead = await entryStream.ReadAsync(uncompressedBytes); + + Assert.Equal(s_existingSampleData.Length, bytesRead); + Assert.Equal(s_existingSampleData, uncompressedBytes); + } + } + + updatedStream.Seek(0, SeekOrigin.Begin); + + // Create a new entry, forcing the central directory headers to be rewritten. The local file header + // for first.bin would normally be skipped (because it hasn't changed) but it needs to be rewritten + // because the central directory headers will be rewritten with a valid value and the local file header + // needs to match. + await using (ZipArchive updatedArchive = await ZipArchive.CreateAsync(updatedStream, ZipArchiveMode.Update, leaveOpen: true, entryNameEncoding: null)) + { + ZipArchiveEntry newEntry = updatedArchive.CreateEntry("second.bin", CompressionLevel.NoCompression); + + // Add data to the new entry + await using (Stream entryStream = await newEntry.OpenAsync()) + { + await entryStream.WriteAsync(s_sampleDataToWrite); + } + } + + byte[] updatedContents = updatedStream.ToArray(); + // Verify that the local file header and the central directory headers have both been rewritten, and both have + // the correct value. The central directory offset will have moved forwards by 44 bytes - our new entry has been + // written in front of it. + int updatedLocalVersionToExtract = updatedContents[lhOffset]; + int updatedCentralDirectoryVersionToExtract = updatedContents[cdOffset + 44]; + + Assert.Equal(20, updatedLocalVersionToExtract); + Assert.Equal(20, updatedCentralDirectoryVersionToExtract); + + updatedStream.Seek(0, SeekOrigin.Begin); + // Following an update of the ZipArchive, reopen it in read-only mode. Make sure that both entries are correct. 
+ + await using (ZipArchive updatedArchive = await ZipArchive.CreateAsync(updatedStream, ZipArchiveMode.Read, true, entryNameEncoding: null)) + { + Assert.Equal(2, updatedArchive.Entries.Count); + + ZipArchiveEntry firstEntry = updatedArchive.Entries[0]; + ZipArchiveEntry secondEntry = updatedArchive.Entries[1]; + + Assert.Equal("first.bin", firstEntry.Name); + Assert.Equal(s_existingSampleData.Length, firstEntry.Length); + + Assert.Equal("second.bin", secondEntry.Name); + Assert.Equal(s_sampleDataToWrite.Length, secondEntry.Length); + + await using (Stream entryStream = await firstEntry.OpenAsync()) + { + byte[] uncompressedBytes = new byte[firstEntry.Length]; + int bytesRead = await entryStream.ReadAsync(uncompressedBytes); + + Assert.Equal(s_existingSampleData.Length, bytesRead); + Assert.Equal(s_existingSampleData, uncompressedBytes); + } + + await using (Stream entryStream = await secondEntry.OpenAsync()) + { + byte[] uncompressedBytes = new byte[secondEntry.Length]; + int bytesRead = await entryStream.ReadAsync(uncompressedBytes); + + Assert.Equal(s_sampleDataToWrite.Length, bytesRead); + Assert.Equal(s_sampleDataToWrite, uncompressedBytes); + } + } + } + + [Fact] + public static async Task NoAsyncCallsWhenUsingSync() + { + using MemoryStream ms = new(); + using NoAsyncCallsStream s = new(ms); // Only allows sync calls + + // Create mode + using (ZipArchive archive = new ZipArchive(s, ZipArchiveMode.Create, leaveOpen: true, entryNameEncoding: Encoding.UTF8)) + { + using MemoryStream normalZipStream = await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")); + normalZipStream.Position = 0; + + // Note this is not using NoAsyncCallsStream, so it can be opened in async mode + await using (ZipArchive normalZipArchive = await ZipArchive.CreateAsync(normalZipStream, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null)) + { + var normalZipEntries = normalZipArchive.Entries; + + foreach (ZipArchiveEntry normalEntry in normalZipEntries) + { + ZipArchiveEntry newEntry = archive.CreateEntry(normalEntry.FullName); + using (Stream newEntryStream = newEntry.Open()) + { + // Note the parent archive is not using NoAsyncCallsStream, so it can be opened in async mode + await using (Stream normalEntryStream = await normalEntry.OpenAsync()) + { + // Note the parent archive is not using NoAsyncCallsStream, so it can be copied in async mode + await normalEntryStream.CopyToAsync(newEntryStream); + } + } + } + } + } + + ms.Position = 0; + + // Read mode + using (ZipArchive archive = new ZipArchive(s, ZipArchiveMode.Read, leaveOpen: true, entryNameEncoding: Encoding.UTF8)) + { + _ = archive.Comment; + + // Entries is sync only + s.IsRestrictionEnabled = false; + var entries = archive.Entries; + s.IsRestrictionEnabled = true; + + foreach (var entry in entries) + { + _ = archive.GetEntry(entry.Name); + _ = entry.Archive; + _ = entry.Comment; + _ = entry.CompressedLength; + _ = entry.Crc32; + _ = entry.ExternalAttributes; + _ = entry.FullName; + _ = entry.IsEncrypted; + _ = entry.LastWriteTime; + _ = entry.Length; + _ = entry.Name; + using (var es = entry.Open()) + { + byte[] buffer = [0x0]; + + _ = es.Read(buffer, 0, buffer.Length); + _ = es.Read(buffer.AsSpan()); + _ = es.ReadByte(); + } + } + _ = archive.Mode; + } + + ms.Position = 0; + + // Update mode + using (ZipArchive archive = new ZipArchive(s, ZipArchiveMode.Update, leaveOpen: false, entryNameEncoding: Encoding.UTF8)) + { + // Entries is sync only + s.IsRestrictionEnabled = false; + ZipArchiveEntry entryToDelete = archive.Entries[0]; + 
s.IsRestrictionEnabled = true; + + entryToDelete.Delete(); + + ZipArchiveEntry entry = archive.CreateEntry("mynewentry.txt"); + using (var es = entry.Open()) + { + byte[] buffer = [0x0]; + es.Write(buffer, 0, buffer.Length); + es.Write(buffer.AsSpan()); + es.WriteByte(buffer[0]); + } + } + } + + [Fact] + public static async Task NoSyncCallsWhenUsingAsync() + { + using MemoryStream ms = new(); + using NoSyncCallsStream s = new(ms); // Only allows async calls + + // Create mode + await using (ZipArchive archive = await ZipArchive.CreateAsync(s, ZipArchiveMode.Create, leaveOpen: true, entryNameEncoding: Encoding.UTF8)) + { + await using MemoryStream normalZipStream = await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")); + normalZipStream.Position = 0; + + // Note this is not using NoSyncCallsStream, so it can be opened in sync mode + using (ZipArchive normalZipArchive = new ZipArchive(normalZipStream, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null)) + { + var normalZipEntries = normalZipArchive.Entries; + + foreach (ZipArchiveEntry normalEntry in normalZipEntries) + { + ZipArchiveEntry newEntry = archive.CreateEntry(normalEntry.FullName); + await using (Stream newEntryStream = await newEntry.OpenAsync()) + { + // Note the parent archive is not using NoSyncCallsStream, so it can be opened in sync mode + using (Stream normalEntryStream = normalEntry.Open()) + { + // Note the parent archive is not using NoSyncCallsStream, so it can be copied in sync mode + normalEntryStream.CopyTo(newEntryStream); + } + } + } + } + } + + ms.Position = 0; + + // Read mode + await using (ZipArchive archive = await ZipArchive.CreateAsync(s, ZipArchiveMode.Read, leaveOpen: true, entryNameEncoding: Encoding.UTF8)) + { + _ = archive.Comment; + + // Entries is sync only + s.IsRestrictionEnabled = false; + var entries = archive.Entries; + s.IsRestrictionEnabled = true; + + foreach (var entry in entries) + { + _ = archive.GetEntry(entry.Name); + _ = entry.Archive; + _ = entry.Comment; + _ = entry.CompressedLength; + _ = entry.Crc32; + _ = entry.ExternalAttributes; + _ = entry.FullName; + _ = entry.IsEncrypted; + _ = entry.LastWriteTime; + _ = entry.Length; + _ = entry.Name; + await using (var es = await entry.OpenAsync()) + { + byte[] buffer = [0x0]; + + _ = await es.ReadAsync(buffer); + _ = await es.ReadAsync(buffer.AsMemory()); + _ = await es.ReadByteAsync(); + } + } + _ = archive.Mode; + } + + ms.Position = 0; + + await using (ZipArchive archive = await ZipArchive.CreateAsync(s, ZipArchiveMode.Update, leaveOpen: false, entryNameEncoding: Encoding.UTF8)) + { + // Entries is sync only + s.IsRestrictionEnabled = false; + ZipArchiveEntry entryToDelete = archive.Entries[0]; + s.IsRestrictionEnabled = true; + + entryToDelete.Delete(); // Delete is async only + + ZipArchiveEntry entry = archive.CreateEntry("mynewentry.txt"); + await using (var es = await entry.OpenAsync()) + { + byte[] buffer = [0x0]; + await es.WriteAsync(buffer, 0, buffer.Length); + await es.WriteAsync(buffer.AsMemory()); + } + } + } + // Generates a copy of s_invalidExtraFieldData with a variable number of bytes as extra field data. 
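// Illustrative aside, not part of this patch: the NoAsyncCallsStream / NoSyncCallsStream helpers used by the
// two tests above are assumed to be thin Stream wrappers that throw whenever the "wrong" flavor of I/O is
// invoked while IsRestrictionEnabled is true. A minimal sketch of that idea follows; the type name and the
// exception choice are hypothetical, and the real helpers in the test tree may differ.
// (Assumes using System, System.IO, System.Threading and System.Threading.Tasks.)
internal sealed class SyncCallsForbiddenStream : Stream // hypothetical name, sketch only
{
    private readonly Stream _inner;
    public SyncCallsForbiddenStream(Stream inner) => _inner = inner;

    // Tests toggle this off around members that are documented as sync-only (e.g. ZipArchive.Entries).
    public bool IsRestrictionEnabled { get; set; } = true;

    private void ThrowIfRestricted()
    {
        if (IsRestrictionEnabled)
            throw new InvalidOperationException("Synchronous I/O was used while only async calls are allowed.");
    }

    public override bool CanRead => _inner.CanRead;
    public override bool CanSeek => _inner.CanSeek;
    public override bool CanWrite => _inner.CanWrite;
    public override long Length => _inner.Length;
    public override long Position { get => _inner.Position; set => _inner.Position = value; }

    // Synchronous members fail while the restriction is enabled...
    public override void Flush() { ThrowIfRestricted(); _inner.Flush(); }
    public override int Read(byte[] buffer, int offset, int count) { ThrowIfRestricted(); return _inner.Read(buffer, offset, count); }
    public override void Write(byte[] buffer, int offset, int count) { ThrowIfRestricted(); _inner.Write(buffer, offset, count); }

    // ...while members with no async counterpart, and the async members themselves, pass straight through.
    public override long Seek(long offset, SeekOrigin origin) => _inner.Seek(offset, origin);
    public override void SetLength(long value) => _inner.SetLength(value);
    public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) => _inner.ReadAsync(buffer, offset, count, cancellationToken);
    public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) => _inner.WriteAsync(buffer, offset, count, cancellationToken);
    public override Task FlushAsync(CancellationToken cancellationToken) => _inner.FlushAsync(cancellationToken);
}
// A NoAsyncCallsStream-style helper would presumably do the inverse: let the synchronous members through and
// throw from ReadAsync/WriteAsync/FlushAsync while the restriction is enabled.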
private static byte[] GenerateInvalidExtraFieldData(byte modifiedVersionToExtract, ushort extraFieldDataLength, out int lhVersionToExtractOffset, @@ -1501,7 +2039,7 @@ private static byte[] GenerateInvalidExtraFieldData(byte modifiedVersionToExtrac // uncompressed size 0xff, 0xff, 0xff, 0xff, // file name length - + 0x08, 0x00, // extra field length 0x20, 0x00, @@ -1515,10 +2053,10 @@ private static byte[] GenerateInvalidExtraFieldData(byte modifiedVersionToExtrac 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 8 byte Zip64 compressed size, index 50 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - // 8 byte Relative Header Offset + // 8 byte Relative Header Offset 0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Disk Start Number - 0xff, 0xff, 0xff, 0xfe, + 0xff, 0xff, 0xff, 0xfe, // ----- NTFS extra field tag 0x0a, 0x00, // size of extra field block diff --git a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_LargeFiles.cs b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_LargeFiles.cs index dbb91fc5523de0..1200707acd5708 100644 --- a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_LargeFiles.cs +++ b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_LargeFiles.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. using System.Reflection; +using System.Threading.Tasks; using Xunit; namespace System.IO.Compression.Tests; @@ -51,9 +52,8 @@ private static void FillWithHardToCompressData(byte[] buffer) [ConditionalTheory(typeof(PlatformDetection), nameof(PlatformDetection.IsNotMobile), nameof(PlatformDetection.Is64BitProcess))] // don't run it on slower runtimes [OuterLoop("It requires 5~6 GB of free disk space and a lot of CPU time for compressed tests")] - [InlineData(false)] - [InlineData(true)] - public static void CheckZIP64VersionIsSet_ForSmallFilesAfterBigFiles(bool isCompressed) + [MemberData(nameof(Get_Booleans_Data))] + public static async Task CheckZIP64VersionIsSet_ForSmallFilesAfterBigFiles_Async(bool isCompressed) { // issue #94899 @@ -71,11 +71,11 @@ public static void CheckZIP64VersionIsSet_ForSmallFilesAfterBigFiles(bool isComp using FileStream fs = File.Open(zipArchivePath, FileMode.Create, FileAccess.ReadWrite); // Create - using (ZipArchive archive = new(fs, ZipArchiveMode.Create, true)) + await using (ZipArchive archive = await ZipArchive.CreateAsync(fs, ZipArchiveMode.Create, true, entryNameEncoding: null)) { ZipArchiveEntry file = archive.CreateEntry(LargeFileName, compressLevel); - using (Stream stream = file.Open()) + await using (Stream stream = await file.OpenAsync()) { // Write 5GB of data for (var i = 0; i < 5; i++) @@ -85,22 +85,22 @@ public static void CheckZIP64VersionIsSet_ForSmallFilesAfterBigFiles(bool isComp FillWithHardToCompressData(largeBuffer); } - stream.Write(largeBuffer); + await stream.WriteAsync(largeBuffer); } } file = archive.CreateEntry(SmallFileName, compressLevel); - using (Stream stream = file.Open()) + await using (Stream stream = await file.OpenAsync()) { - stream.Write(smallBuffer); + await stream.WriteAsync(smallBuffer); } } fs.Position = 0; // Validate - using (ZipArchive archive = new(fs, ZipArchiveMode.Read)) + await using (ZipArchive archive = await ZipArchive.CreateAsync(fs, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null)) { using var reader = new BinaryReader(fs); diff --git a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_ManualAndCompatibilityTests.cs 
b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_ManualAndCompatibilityTests.cs index 3d0bdeef04a5a3..1acc67ffa64ba1 100644 --- a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_ManualAndCompatibilityTests.cs +++ b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_ManualAndCompatibilityTests.cs @@ -1,6 +1,8 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +using System.Collections; +using System.Collections.Generic; using System.Threading.Tasks; using Xunit; @@ -10,32 +12,52 @@ public class zip_ManualAndCompatibilityTests : ZipFileTestBase { public static bool IsUsingNewPathNormalization => !PathFeatures.IsUsingLegacyPathNormalization(); + public static IEnumerable Get_CompatibilityTests_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { "7zip.zip", "normal", true, true, async }; + yield return new object[] { "windows.zip", "normalWithoutEmptyDir", false, true, async }; + yield return new object[] { "dotnetzipstreaming.zip", "normal", false, false, async }; + yield return new object[] { "sharpziplib.zip", "normalWithoutEmptyDir", false, false, async }; + yield return new object[] { "xceedstreaming.zip", "normal", false, false, async }; + } + } + + [Theory] + [MemberData(nameof(Get_CompatibilityTests_Data))] + public static async Task CompatibilityTests(string zipFile, string zipFolder, bool requireExplicit, bool checkTimes, bool async) + { + MemoryStream ms = await StreamHelpers.CreateTempCopyStream(compat(zipFile)); + await IsZipSameAsDir(ms, zfolder(zipFolder), ZipArchiveMode.Update, requireExplicit, checkTimes, async); + } + [Theory] - [InlineData("7zip.zip", "normal", true, true)] - [InlineData("windows.zip", "normalWithoutEmptyDir", false, true)] - [InlineData("dotnetzipstreaming.zip", "normal", false, false)] - [InlineData("sharpziplib.zip", "normalWithoutEmptyDir", false, false)] - [InlineData("xceedstreaming.zip", "normal", false, false)] - public static async Task CompatibilityTests(string zipFile, string zipFolder, bool requireExplicit, bool checkTimes) + [MemberData(nameof(Get_Booleans_Data))] + public static async Task Deflate64Zip(bool async) { - IsZipSameAsDir(await StreamHelpers.CreateTempCopyStream(compat(zipFile)), zfolder(zipFolder), ZipArchiveMode.Update, requireExplicit, checkTimes); + MemoryStream ms = await StreamHelpers.CreateTempCopyStream(compat("deflate64.zip")); + await IsZipSameAsDir(ms, zfolder("normal"), ZipArchiveMode.Update, requireExplicit: true, checkTimes: true, async); } - [Fact] - public static async Task Deflate64Zip() + public static IEnumerable Get_CompatibilityTestsMsFiles_Data() { - IsZipSameAsDir(await StreamHelpers.CreateTempCopyStream(compat("deflate64.zip")), zfolder("normal"), ZipArchiveMode.Update, requireExplicit: true, checkTimes: true); + foreach (bool async in _bools) + { + yield return new object[] { "excel.xlsx", "excel", false, false, async }; + yield return new object[] { "powerpoint.pptx", "powerpoint", false, false, async }; + yield return new object[] { "word.docx", "word", false, false, async }; + yield return new object[] { "silverlight.xap", "silverlight", false, false, async }; + yield return new object[] { "packaging.package", "packaging", false, false, async }; + } } [Theory] - [InlineData("excel.xlsx", "excel", false, false)] - [InlineData("powerpoint.pptx", "powerpoint", false, false)] - [InlineData("word.docx", "word", false, false)] - [InlineData("silverlight.xap", "silverlight", 
false, false)] - [InlineData("packaging.package", "packaging", false, false)] - public static async Task CompatibilityTestsMsFiles(string withTrailing, string withoutTrailing, bool requireExplicit, bool checkTimes) + [MemberData(nameof(Get_CompatibilityTestsMsFiles_Data))] + public static async Task CompatibilityTestsMsFiles(string withTrailing, string withoutTrailing, bool requireExplicit, bool checkTimes, bool async) { - IsZipSameAsDir(await StreamHelpers.CreateTempCopyStream(compat(withTrailing)), compat(withoutTrailing), ZipArchiveMode.Update, requireExplicit, checkTimes); + MemoryStream ms = await StreamHelpers.CreateTempCopyStream(compat(withTrailing)); + await IsZipSameAsDir(ms, compat(withoutTrailing), ZipArchiveMode.Update, requireExplicit, checkTimes, async); } /// @@ -63,6 +85,17 @@ public static async Task ZipWithInvalidFileNames_ParsedBasedOnSourceOS(string zi } } + public static IEnumerable Get_ZipBinaryCompat_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { "net45_unicode.zip", "unicode", async }; + yield return new object[] { "net46_unicode.zip", "unicode", async }; + yield return new object[] { "net45_normal.zip", "normal", async }; + yield return new object[] { "net46_normal.zip", "normal", async }; + } + } + /// /// This test compares binary content of a zip produced by the current version with a zip produced by /// other frameworks. It does this by searching the two zips for the header signature and then @@ -88,11 +121,8 @@ public static async Task ZipWithInvalidFileNames_ParsedBasedOnSourceOS(string zi /// extra field(variable size) /// [Theory] - [InlineData("net45_unicode.zip", "unicode")] - [InlineData("net46_unicode.zip", "unicode")] - [InlineData("net45_normal.zip", "normal")] - [InlineData("net46_normal.zip", "normal")] - public static async Task ZipBinaryCompat_LocalFileHeaders(string zipFile, string zipFolder) + [MemberData(nameof(Get_ZipBinaryCompat_Data))] + public static async Task ZipBinaryCompat_LocalFileHeaders(string zipFile, string zipFolder, bool async) { using (MemoryStream actualArchiveStream = new MemoryStream()) using (MemoryStream expectedArchiveStream = await StreamHelpers.CreateTempCopyStream(compat(zipFile))) @@ -100,7 +130,7 @@ public static async Task ZipBinaryCompat_LocalFileHeaders(string zipFile, string byte[] localFileHeaderSignature = new byte[] { 0x50, 0x4b, 0x03, 0x04 }; // Produce a ZipFile - await CreateFromDir(zfolder(zipFolder), actualArchiveStream, ZipArchiveMode.Create); + await CreateFromDir(zfolder(zipFolder), actualArchiveStream, async, ZipArchiveMode.Create); // Read the streams to byte arrays byte[] actualBytes = actualArchiveStream.ToArray(); @@ -153,11 +183,8 @@ public static async Task ZipBinaryCompat_LocalFileHeaders(string zipFile, string /// file comment (variable size) /// [Theory] - [InlineData("net45_unicode.zip", "unicode")] - [InlineData("net46_unicode.zip", "unicode")] - [InlineData("net45_normal.zip", "normal")] - [InlineData("net46_normal.zip", "normal")] - public static async Task ZipBinaryCompat_CentralDirectoryHeaders(string zipFile, string zipFolder) + [MemberData(nameof(Get_ZipBinaryCompat_Data))] + public static async Task ZipBinaryCompat_CentralDirectoryHeaders(string zipFile, string zipFolder, bool async) { using (MemoryStream actualArchiveStream = new MemoryStream()) using (MemoryStream expectedArchiveStream = await StreamHelpers.CreateTempCopyStream(compat(zipFile))) @@ -165,7 +192,7 @@ public static async Task ZipBinaryCompat_CentralDirectoryHeaders(string zipFile, byte[] 
signature = new byte[] { 0x50, 0x4b, 0x03, 0x04 }; // Produce a ZipFile - await CreateFromDir(zfolder(zipFolder), actualArchiveStream, ZipArchiveMode.Create); + await CreateFromDir(zfolder(zipFolder), actualArchiveStream, async, ZipArchiveMode.Create); // Read the streams to byte arrays byte[] actualBytes = actualArchiveStream.ToArray(); diff --git a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_ReadTests.cs b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_ReadTests.cs index 5cbc859df0f5c2..d680df466586bd 100644 --- a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_ReadTests.cs +++ b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_ReadTests.cs @@ -12,74 +12,149 @@ namespace System.IO.Compression.Tests { public class zip_ReadTests : ZipFileTestBase { - [Theory] - [InlineData("normal.zip", "normal")] - [InlineData("fake64.zip", "small")] - [InlineData("empty.zip", "empty")] - [InlineData("appended.zip", "small")] - [InlineData("prepended.zip", "small")] - [InlineData("emptydir.zip", "emptydir")] - [InlineData("small.zip", "small")] - [InlineData("unicode.zip", "unicode")] - public static async Task ReadNormal(string zipFile, string zipFolder) + public static IEnumerable Get_ReadNormal_Data() { - await IsZipSameAsDirAsync(zfile(zipFile), zfolder(zipFolder), ZipArchiveMode.Read); + foreach (bool async in _bools) + { + yield return new object[] { "normal.zip", "normal", async }; + yield return new object[] { "fake64.zip", "small", async }; + yield return new object[] { "empty.zip", "empty", async }; + yield return new object[] { "appended.zip", "small", async }; + yield return new object[] { "prepended.zip", "small", async }; + yield return new object[] { "emptydir.zip", "emptydir", async }; + yield return new object[] { "small.zip", "small", async }; + yield return new object[] { "unicode.zip", "unicode", async }; + } } [Theory] - [InlineData("normal.zip", "normal")] - [InlineData("fake64.zip", "small")] - [InlineData("empty.zip", "empty")] - [InlineData("appended.zip", "small")] - [InlineData("prepended.zip", "small")] - [InlineData("emptydir.zip", "emptydir")] - [InlineData("small.zip", "small")] - [InlineData("unicode.zip", "unicode")] - public static async Task TestStreamingRead(string zipFile, string zipFolder) + [MemberData(nameof(Get_ReadNormal_Data))] + public static Task ReadNormal(string zipFile, string zipFolder, bool async) => IsZipSameAsDir(zfile(zipFile), zfolder(zipFolder), ZipArchiveMode.Read, async); + + [Theory] + [MemberData(nameof(Get_ReadNormal_Data))] + public static async Task TestStreamingRead(string zipFile, string zipFolder, bool async) { using (var stream = await StreamHelpers.CreateTempCopyStream(zfile(zipFile))) { Stream wrapped = new WrappedStream(stream, true, false, false, null); - IsZipSameAsDir(wrapped, zfolder(zipFolder), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true); + await IsZipSameAsDir(wrapped, zfolder(zipFolder), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true, async); Assert.False(wrapped.CanRead, "Wrapped stream should be closed at this point"); //check that it was closed } } + public static IEnumerable Get_TestPartialReads_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { "normal.zip", "normal", async }; + yield return new object[] { "fake64.zip", "small", async }; + yield return new object[] { "empty.zip", "empty", async }; + yield return new object[] { "appended.zip", "small", async }; + yield return new object[] { "prepended.zip", "small", async }; + 
yield return new object[] { "emptydir.zip", "emptydir", async }; + yield return new object[] { "small.zip", "small", async }; + yield return new object[] { "unicode.zip", "unicode", async }; + } + } + [Theory] - [InlineData("normal.zip", "normal")] - [InlineData("fake64.zip", "small")] - [InlineData("empty.zip", "empty")] - [InlineData("appended.zip", "small")] - [InlineData("prepended.zip", "small")] - [InlineData("emptydir.zip", "emptydir")] - [InlineData("small.zip", "small")] - [InlineData("unicode.zip", "unicode")] - public static async Task TestPartialReads(string zipFile, string zipFolder) + [MemberData(nameof(Get_TestPartialReads_Data))] + public static async Task TestPartialReads(string zipFile, string zipFolder, bool async) { - using (var stream = await StreamHelpers.CreateTempCopyStream(zfile(zipFile))) + using (MemoryStream stream = await StreamHelpers.CreateTempCopyStream(zfile(zipFile))) { Stream clamped = new ClampedReadStream(stream, readSizeLimit: 1); - - IsZipSameAsDir(clamped, zfolder(zipFolder), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true); + await IsZipSameAsDir(clamped, zfolder(zipFolder), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true, async); } } [Fact] - public static async Task ReadStreamOps() + public static async Task ReadInterleavedAsync() { - using (ZipArchive archive = new ZipArchive(await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")), ZipArchiveMode.Read)) + ZipArchive archive = await ZipArchive.CreateAsync(await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")), ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null); + + ZipArchiveEntry e1 = archive.GetEntry("first.txt"); + ZipArchiveEntry e2 = archive.GetEntry("notempty/second.txt"); + + //read all of e1 and e2's contents + byte[] e1readnormal = new byte[e1.Length]; + byte[] e2readnormal = new byte[e2.Length]; + byte[] e1interleaved = new byte[e1.Length]; + byte[] e2interleaved = new byte[e2.Length]; + + await using (Stream e1s = await e1.OpenAsync()) + { + await ReadBytes(e1s, e1readnormal, e1.Length, async: true); + } + await using (Stream e2s = await e2.OpenAsync()) + { + await ReadBytes(e2s, e2readnormal, e2.Length, async: true); + } + + //now read interleaved, assume we are working with < 4gb files + const int bytesAtATime = 15; + + await using (Stream e1s = await e1.OpenAsync(), e2s = await e2.OpenAsync()) { - foreach (ZipArchiveEntry e in archive.Entries) + int e1pos = 0; + int e2pos = 0; + + while (e1pos < e1.Length || e2pos < e2.Length) { - using (Stream s = e.Open()) + if (e1pos < e1.Length) + { + int e1bytesRead = await e1s.ReadAsync(e1interleaved, e1pos, + bytesAtATime + e1pos > e1.Length ? (int)e1.Length - e1pos : bytesAtATime); + e1pos += e1bytesRead; + } + + if (e2pos < e2.Length) { - Assert.True(s.CanRead, "Can read to read archive"); - Assert.False(s.CanWrite, "Can't write to read archive"); - Assert.False(s.CanSeek, "Can't seek on archive"); - Assert.Equal(LengthOfUnseekableStream(s), e.Length); //"Length is not correct on unseekable stream" + int e2bytesRead = await e2s.ReadAsync(e2interleaved, e2pos, + bytesAtATime + e2pos > e2.Length ? 
(int)e2.Length - e2pos : bytesAtATime); + e2pos += e2bytesRead; } } } + + //now compare to original read + ArraysEqual(e1readnormal, e1interleaved, e1readnormal.Length); + ArraysEqual(e2readnormal, e2interleaved, e2readnormal.Length); + + //now read one entry interleaved + byte[] e1selfInterleaved1 = new byte[e1.Length]; + byte[] e1selfInterleaved2 = new byte[e2.Length]; + + + await using (Stream s1 = await e1.OpenAsync(), s2 = await e1.OpenAsync()) + { + int s1pos = 0; + int s2pos = 0; + + while (s1pos < e1.Length || s2pos < e1.Length) + { + if (s1pos < e1.Length) + { + int s1bytesRead = s1.Read(e1interleaved, s1pos, + bytesAtATime + s1pos > e1.Length ? (int)e1.Length - s1pos : bytesAtATime); + s1pos += s1bytesRead; + } + + if (s2pos < e1.Length) + { + int s2bytesRead = s2.Read(e2interleaved, s2pos, + bytesAtATime + s2pos > e1.Length ? (int)e1.Length - s2pos : bytesAtATime); + s2pos += s2bytesRead; + } + } + } + + //now compare to original read + ArraysEqual(e1readnormal, e1selfInterleaved1, e1readnormal.Length); + ArraysEqual(e1readnormal, e1selfInterleaved2, e1readnormal.Length); + + await archive.DisposeAsync(); } [Fact] @@ -98,11 +173,11 @@ public static async Task ReadInterleaved() using (Stream e1s = e1.Open()) { - ReadBytes(e1s, e1readnormal, e1.Length); + await ReadBytes(e1s, e1readnormal, e1.Length, async: false); } using (Stream e2s = e2.Open()) { - ReadBytes(e2s, e2readnormal, e2.Length); + await ReadBytes(e2s, e2readnormal, e2.Length, async: false); } //now read interleaved, assume we are working with < 4gb files @@ -168,10 +243,14 @@ public static async Task ReadInterleaved() ArraysEqual(e1readnormal, e1selfInterleaved2, e1readnormal.Length); } } - [Fact] - public static async Task ReadModeInvalidOpsTest() + + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task ReadModeInvalidOpsTest(bool async) { - ZipArchive archive = new ZipArchive(await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")), ZipArchiveMode.Read); + await using MemoryStream ms = await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")); + + ZipArchive archive = await CreateZipArchive(async, ms, ZipArchiveMode.Read); ZipArchiveEntry e = archive.GetEntry("first.txt"); //should also do it on deflated stream @@ -185,39 +264,39 @@ public static async Task ReadModeInvalidOpsTest() Assert.Throws(() => e.LastWriteTime = new DateTimeOffset()); //"Should not be able to update time" //on stream - Stream s = e.Open(); + Stream s = await OpenEntryStream(async, e); Assert.Throws(() => s.Flush()); //"Should not be able to flush on read stream" Assert.Throws(() => s.WriteByte(25)); //"should not be able to write to read stream" Assert.Throws(() => s.Position = 4); //"should not be able to seek on read stream" Assert.Throws(() => s.Seek(0, SeekOrigin.Begin)); //"should not be able to seek on read stream" Assert.Throws(() => s.SetLength(0)); //"should not be able to resize read stream" - archive.Dispose(); + await DisposeZipArchive(async, archive); //after disposed Assert.Throws(() => { var x = archive.Entries; }); //"Should not be able to get entries on disposed archive" Assert.Throws(() => archive.CreateEntry("dirka")); //"should not be able to create on disposed archive" - Assert.Throws(() => e.Open()); //"should not be able to open on disposed archive" + await Assert.ThrowsAsync(() => OpenEntryStream(async, e)); //"should not be able to open on disposed archive" Assert.Throws(() => e.Delete()); //"should not be able to delete on disposed archive" Assert.Throws(() => { e.LastWriteTime = new 
DateTimeOffset(); }); //"Should not be able to update on disposed archive" Assert.Throws(() => s.ReadByte()); //"should not be able to read on disposed archive" - s.Dispose(); + await DisposeStream(async, s); } - [Fact] - public static void TestEmptyLastModifiedEntryValueNotThrowingInternalException() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task TestEmptyLastModifiedEntryValueNotThrowingInternalException(bool async) { var emptyDateIndicator = new DateTimeOffset(new DateTime(1980, 1, 1, 0, 0, 0)); var buffer = new byte[100];//empty archive we will make will have exact this size using var memoryStream = new MemoryStream(buffer); - using (var singleEntryArchive = new ZipArchive(memoryStream, ZipArchiveMode.Create, true)) - { - singleEntryArchive.CreateEntry("1"); - } + ZipArchive singleEntryArchive = await CreateZipArchive(async, memoryStream, ZipArchiveMode.Create, true); + singleEntryArchive.CreateEntry("1"); + await DisposeZipArchive(async, singleEntryArchive); //set LastWriteTime bits to 0 in this trivial archive const int lastWritePosition = 43; @@ -227,39 +306,48 @@ public static void TestEmptyLastModifiedEntryValueNotThrowingInternalException() buffer[lastWritePosition + 3] = 0; memoryStream.Seek(0, SeekOrigin.Begin); - using var archive = new ZipArchive(memoryStream, ZipArchiveMode.Read, true); + ZipArchive archive = await CreateZipArchive(async, memoryStream, ZipArchiveMode.Read, true); Assert.Equal(archive.Entries[0].LastWriteTime, emptyDateIndicator); + await DisposeZipArchive(async, archive); } [Theory] - [InlineData("normal.zip")] - [InlineData("small.zip")] - public static async Task EntriesNotEncryptedByDefault(string zipFile) + [InlineData("normal.zip", false)] + [InlineData("normal.zip", true)] + [InlineData("small.zip", false)] + [InlineData("small.zip", true)] + public static async Task EntriesNotEncryptedByDefault(string zipFile, bool async) { - using (ZipArchive archive = new ZipArchive(await StreamHelpers.CreateTempCopyStream(zfile(zipFile)), ZipArchiveMode.Read)) + ZipArchive archive = await CreateZipArchive(async, await StreamHelpers.CreateTempCopyStream(zfile(zipFile)), ZipArchiveMode.Read); + foreach (ZipArchiveEntry entry in archive.Entries) { - foreach (ZipArchiveEntry entry in archive.Entries) - { - Assert.False(entry.IsEncrypted); - } + Assert.False(entry.IsEncrypted); + } + await DisposeZipArchive(async, archive); + } + + public static IEnumerable Get_IdentifyEncryptedEntries_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { "encrypted_entries_weak.zip", async }; + yield return new object[] { "encrypted_entries_aes256.zip", async }; + yield return new object[] { "encrypted_entries_mixed.zip", async }; } } [Theory] - [InlineData("encrypted_entries_weak.zip")] - [InlineData("encrypted_entries_aes256.zip")] - [InlineData("encrypted_entries_mixed.zip")] - public static async Task IdentifyEncryptedEntries(string zipFile) + [MemberData(nameof(Get_IdentifyEncryptedEntries_Data))] + public static async Task IdentifyEncryptedEntries(string zipFile, bool async) { var entriesEncrypted = new Dictionary(); - using (ZipArchive archive = new ZipArchive(await StreamHelpers.CreateTempCopyStream(zfile(zipFile)), ZipArchiveMode.Read)) + ZipArchive archive = await CreateZipArchive(async, await StreamHelpers.CreateTempCopyStream(zfile(zipFile)), ZipArchiveMode.Read); + foreach (ZipArchiveEntry entry in archive.Entries) { - foreach (ZipArchiveEntry entry in archive.Entries) - { - entriesEncrypted.Add(entry.Name, 
entry.IsEncrypted); - } + entriesEncrypted.Add(entry.Name, entry.IsEncrypted); } + await DisposeZipArchive(async, archive); var expectedEntries = new Dictionary() { @@ -272,10 +360,19 @@ public static async Task IdentifyEncryptedEntries(string zipFile) Assert.Equal(expectedEntries, entriesEncrypted); } + public static IEnumerable Get_EnsureDisposeIsCalledAsExpectedOnTheUnderlyingStream_Data() + { + foreach (bool async in _bools) + { + // leaveOpen, expectedDisposeCalls, async + yield return new object[] { true, 0, async }; + yield return new object[] { false, 1, async }; + } + } + [Theory] - [InlineData(true, 0)] - [InlineData(false, 1)] - public static async Task EnsureDisposeIsCalledAsExpectedOnTheUnderlyingStream(bool leaveOpen, int expectedDisposeCalls) + [MemberData(nameof(Get_EnsureDisposeIsCalledAsExpectedOnTheUnderlyingStream_Data))] + public static async Task EnsureDisposeIsCalledAsExpectedOnTheUnderlyingStream(bool leaveOpen, int expectedDisposeCalls, bool async) { var disposeCallCountingStream = new DisposeCallCountingStream(); using (var tempStream = await StreamHelpers.CreateTempCopyStream(zfile("small.zip"))) @@ -283,20 +380,20 @@ public static async Task EnsureDisposeIsCalledAsExpectedOnTheUnderlyingStream(bo tempStream.CopyTo(disposeCallCountingStream); } - using (ZipArchive archive = new ZipArchive(disposeCallCountingStream, ZipArchiveMode.Read, leaveOpen)) + ZipArchive archive = await CreateZipArchive(async, disposeCallCountingStream, ZipArchiveMode.Read, leaveOpen); + // Iterate through entries to ensure read of zip file + foreach (ZipArchiveEntry entry in archive.Entries) { - // Iterate through entries to ensure read of zip file - foreach (ZipArchiveEntry entry in archive.Entries) - { - Assert.False(entry.IsEncrypted); - } + Assert.False(entry.IsEncrypted); } + await DisposeZipArchive(async, archive); Assert.Equal(expectedDisposeCalls, disposeCallCountingStream.NumberOfDisposeCalls); } - [Fact] - public static void CanReadLargeCentralDirectoryHeader() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task CanReadLargeCentralDirectoryHeader(bool async) { // A 19-character filename will result in a 65-byte central directory header. 64 of these will make the central directory // read process stretch into two 4KB buffers. 
@@ -305,121 +402,121 @@ public static void CanReadLargeCentralDirectoryHeader() using (MemoryStream archiveStream = new MemoryStream()) { - using (ZipArchive creationArchive = new ZipArchive(archiveStream, ZipArchiveMode.Create, true)) + ZipArchive creationArchive = await CreateZipArchive(async, archiveStream, ZipArchiveMode.Create, true); + for (int i = 0; i < count; i++) { - for (int i = 0; i < count; i++) - { - creationArchive.CreateEntry(string.Format(entryNameFormat, i)); - } + creationArchive.CreateEntry(string.Format(entryNameFormat, i)); } + await DisposeZipArchive(async, creationArchive); archiveStream.Seek(0, SeekOrigin.Begin); - using (ZipArchive readArchive = new ZipArchive(archiveStream, ZipArchiveMode.Read)) + ZipArchive readArchive = await CreateZipArchive(async, archiveStream, ZipArchiveMode.Read); + Assert.Equal(count, readArchive.Entries.Count); + for (int i = 0; i < count; i++) { - Assert.Equal(count, readArchive.Entries.Count); - - for (int i = 0; i < count; i++) - { - Assert.Equal(string.Format(entryNameFormat, i), readArchive.Entries[i].FullName); - Assert.Equal(0, readArchive.Entries[i].CompressedLength); - Assert.Equal(0, readArchive.Entries[i].Length); - } + Assert.Equal(string.Format(entryNameFormat, i), readArchive.Entries[i].FullName); + Assert.Equal(0, readArchive.Entries[i].CompressedLength); + Assert.Equal(0, readArchive.Entries[i].Length); } + await DisposeZipArchive(async, readArchive); } } - [Fact] - public static void ArchivesInOffsetOrder_UpdateMode() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task ArchivesInOffsetOrder_UpdateMode(bool async) { // When the ZipArchive which has been opened in Update mode is disposed of, its entries will be rewritten in order of their offset within the file. // This requires the entries to be sorted when the file is opened. 
byte[] sampleEntryContents = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]; - byte[] sampleZipFile = ReverseCentralDirectoryEntries(CreateZipFile(50, sampleEntryContents)); + byte[] sampleZipFile = ReverseCentralDirectoryEntries(await CreateZipFile(50, sampleEntryContents, async)); - using (MemoryStream ms = new MemoryStream()) - { - ms.Write(sampleZipFile); - ms.Seek(0, SeekOrigin.Begin); + using MemoryStream ms = new MemoryStream(); - ZipArchive source = new ZipArchive(ms, ZipArchiveMode.Update, leaveOpen: true); - long previousOffset = long.MinValue; - System.Reflection.FieldInfo offsetOfLocalHeader = typeof(ZipArchiveEntry).GetField("_offsetOfLocalHeader", System.Reflection.BindingFlags.NonPublic | Reflection.BindingFlags.Instance); + ms.Write(sampleZipFile); + ms.Seek(0, SeekOrigin.Begin); - for (int i = 0; i < source.Entries.Count; i++) - { - ZipArchiveEntry entry = source.Entries[i]; - long offset = (long)offsetOfLocalHeader.GetValue(entry); + ZipArchive source = await CreateZipArchive(async, ms, ZipArchiveMode.Update, leaveOpen: true); - Assert.True(offset > previousOffset); - previousOffset = offset; - } + long previousOffset = long.MinValue; + FieldInfo offsetOfLocalHeader = typeof(ZipArchiveEntry).GetField("_offsetOfLocalHeader", BindingFlags.NonPublic | BindingFlags.Instance); - source.Dispose(); + for (int i = 0; i < source.Entries.Count; i++) + { + ZipArchiveEntry entry = source.Entries[i]; + long offset = (long)offsetOfLocalHeader.GetValue(entry); + + Assert.True(offset > previousOffset); + previousOffset = offset; } + + await DisposeZipArchive(async, source); } - [Fact] - public static void ArchivesInCentralDirectoryOrder_ReadMode() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task ArchivesInCentralDirectoryOrder_ReadMode(bool async) { // When the ZipArchive is opened in Read mode, no sort is necessary. The entries will be added to the ZipArchive in the order // that they appear in the central directory (in this case, sorted by offset descending.) 
byte[] sampleEntryContents = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]; - byte[] sampleZipFile = ReverseCentralDirectoryEntries(CreateZipFile(50, sampleEntryContents)); + byte[] sampleZipFile = ReverseCentralDirectoryEntries(await CreateZipFile(50, sampleEntryContents, async)); - using (MemoryStream ms = new MemoryStream()) - { - ms.Write(sampleZipFile); - ms.Seek(0, SeekOrigin.Begin); + using MemoryStream ms = new MemoryStream(); - ZipArchive source = new ZipArchive(ms, ZipArchiveMode.Read, leaveOpen: true); - long previousOffset = long.MaxValue; - System.Reflection.FieldInfo offsetOfLocalHeader = typeof(ZipArchiveEntry).GetField("_offsetOfLocalHeader", System.Reflection.BindingFlags.NonPublic | Reflection.BindingFlags.Instance); + ms.Write(sampleZipFile); + ms.Seek(0, SeekOrigin.Begin); - for (int i = 0; i < source.Entries.Count; i++) - { - ZipArchiveEntry entry = source.Entries[i]; - long offset = (long)offsetOfLocalHeader.GetValue(entry); + ZipArchive source = await CreateZipArchive(async, ms, ZipArchiveMode.Read, true); - Assert.True(offset < previousOffset); - previousOffset = offset; - } + long previousOffset = long.MaxValue; + FieldInfo offsetOfLocalHeader = typeof(ZipArchiveEntry).GetField("_offsetOfLocalHeader", BindingFlags.NonPublic | BindingFlags.Instance); + + for (int i = 0; i < source.Entries.Count; i++) + { + ZipArchiveEntry entry = source.Entries[i]; + long offset = (long)offsetOfLocalHeader.GetValue(entry); - source.Dispose(); + Assert.True(offset < previousOffset); + previousOffset = offset; } + + await DisposeZipArchive(async, source); } - [Fact] - public static void EntriesMalformed_InvalidDataException() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task EntriesMalformed_InvalidDataException(bool async) { string entryName = "entry.txt"; var stream = new MemoryStream(); - using (var archiveWrite = new ZipArchive(stream, ZipArchiveMode.Create, leaveOpen: true)) - { - archiveWrite.CreateEntry(entryName); - } + ZipArchive archiveWrite = await CreateZipArchive(async, stream, ZipArchiveMode.Create, true); + archiveWrite.CreateEntry(entryName); + await DisposeZipArchive(async, archiveWrite); stream.Position = 0; // Malform the archive - using (var archiveRead = new ZipArchive(stream, ZipArchiveMode.Read, leaveOpen: true)) - { - var unused = archiveRead.Entries; + ZipArchive archiveRead = await CreateZipArchive(async, stream, ZipArchiveMode.Read, true); - // Read the last 22 bytes of stream to get the EOCD. - byte[] buffer = new byte[22]; - stream.Seek(-22, SeekOrigin.End); - stream.ReadExactly(buffer); + var unused = archiveRead.Entries; - var startCentralDir = (long)typeof(ZipArchive).GetField("_centralDirectoryStart", BindingFlags.Instance | BindingFlags.NonPublic).GetValue(archiveRead); - // Truncate to exactly 46 bytes after start. - stream.SetLength(startCentralDir + 46); + // Read the last 22 bytes of stream to get the EOCD. + byte[] buffer = new byte[22]; + stream.Seek(-22, SeekOrigin.End); + stream.ReadExactly(buffer); - // Write the EOCD back. - stream.Seek(-22, SeekOrigin.End); - stream.Write(buffer); - } + var startCentralDir = (long)typeof(ZipArchive).GetField("_centralDirectoryStart", BindingFlags.Instance | BindingFlags.NonPublic).GetValue(archiveRead); + // Truncate to exactly 46 bytes after start. + stream.SetLength(startCentralDir + 46); + + // Write the EOCD back. 
+ stream.Seek(-22, SeekOrigin.End); + stream.Write(buffer); + + await DisposeZipArchive(async, archiveRead); stream.Position = 0; @@ -428,6 +525,28 @@ public static void EntriesMalformed_InvalidDataException() Assert.Throws(() => _ = archive.Entries); } + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task ReadStreamOps(bool async) + { + MemoryStream ms = await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")); + ZipArchive archive = await CreateZipArchive(async, ms, ZipArchiveMode.Read); + + foreach (ZipArchiveEntry e in archive.Entries) + { + Stream s = await OpenEntryStream(async, e); + + Assert.True(s.CanRead, "Can read to read archive"); + Assert.False(s.CanWrite, "Can't write to read archive"); + Assert.False(s.CanSeek, "Can't seek on archive"); + Assert.Equal(await LengthOfUnseekableStream(s), e.Length); //"Length is not correct on unseekable stream" + + await DisposeStream(async, s); + } + + await DisposeZipArchive(async, archive); + } + private static byte[] ReverseCentralDirectoryEntries(byte[] zipFile) { byte[] destinationBuffer = new byte[zipFile.Length]; diff --git a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_UpdateTests.Comments.cs b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_UpdateTests.Comments.cs index 7195dd504e0755..9befa3f9a0cd60 100644 --- a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_UpdateTests.Comments.cs +++ b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_UpdateTests.Comments.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Linq; using System.Text; +using System.Threading.Tasks; using Xunit; namespace System.IO.Compression.Tests @@ -12,84 +13,80 @@ public partial class zip_UpdateTests : ZipFileTestBase { [Theory] [MemberData(nameof(Utf8Comment_Data))] - public static void Update_Comment_AsciiEntryName_NullEncoding(string originalComment, string expectedComment) => + public static Task Update_Comment_AsciiEntryName_NullEncoding(string originalComment, string expectedComment, bool async) => Update_Comment_EntryName_Encoding_Internal(AsciiFileName, originalComment, expectedComment, null, - ALettersUShortMaxValueMinusOneAndCopyRightChar, ALettersUShortMaxValueMinusOne); + ALettersUShortMaxValueMinusOneAndCopyRightChar, ALettersUShortMaxValueMinusOne, async); [Theory] [MemberData(nameof(Utf8Comment_Data))] - public static void Update_Comment_AsciiEntryName_Utf8Encoding(string originalComment, string expectedComment) => + public static Task Update_Comment_AsciiEntryName_Utf8Encoding(string originalComment, string expectedComment, bool async) => Update_Comment_EntryName_Encoding_Internal(AsciiFileName, originalComment, expectedComment, Encoding.UTF8, - ALettersUShortMaxValueMinusOneAndCopyRightChar, ALettersUShortMaxValueMinusOne); + ALettersUShortMaxValueMinusOneAndCopyRightChar, ALettersUShortMaxValueMinusOne, async); [Theory] [MemberData(nameof(Latin1Comment_Data))] - public static void Update_Comment_AsciiEntryName_Latin1Encoding(string originalComment, string expectedComment) => + public static Task Update_Comment_AsciiEntryName_Latin1Encoding(string originalComment, string expectedComment, bool async) => Update_Comment_EntryName_Encoding_Internal(AsciiFileName, originalComment, expectedComment, Encoding.Latin1, - ALettersUShortMaxValueMinusOneAndTwoCopyRightChars, ALettersUShortMaxValueMinusOneAndCopyRightChar); + ALettersUShortMaxValueMinusOneAndTwoCopyRightChars, ALettersUShortMaxValueMinusOneAndCopyRightChar, async); [Theory] [MemberData(nameof(Utf8Comment_Data))] 
- public static void Update_Comment_Utf8EntryName_NullEncoding(string originalComment, string expectedComment) => + public static Task Update_Comment_Utf8EntryName_NullEncoding(string originalComment, string expectedComment, bool async) => Update_Comment_EntryName_Encoding_Internal(Utf8FileName, originalComment, expectedComment, null, - ALettersUShortMaxValueMinusOneAndCopyRightChar, ALettersUShortMaxValueMinusOne); + ALettersUShortMaxValueMinusOneAndCopyRightChar, ALettersUShortMaxValueMinusOne, async); [Theory] [MemberData(nameof(Utf8Comment_Data))] - public static void Update_Comment_Utf8EntryName_Utf8Encoding(string originalComment, string expectedComment) => + public static Task Update_Comment_Utf8EntryName_Utf8Encoding(string originalComment, string expectedComment, bool async) => Update_Comment_EntryName_Encoding_Internal(Utf8FileName, originalComment, expectedComment, Encoding.UTF8, - ALettersUShortMaxValueMinusOneAndCopyRightChar, ALettersUShortMaxValueMinusOne); + ALettersUShortMaxValueMinusOneAndCopyRightChar, ALettersUShortMaxValueMinusOne, async); [Theory] [MemberData(nameof(Latin1Comment_Data))] - public static void Update_Comment_Utf8EntryName_Latin1Encoding(string originalComment, string expectedComment) => + public static Task Update_Comment_Utf8EntryName_Latin1Encoding(string originalComment, string expectedComment, bool async) => // Emoji is not supported/detected in latin1 Update_Comment_EntryName_Encoding_Internal(Utf8AndLatin1FileName, originalComment, expectedComment, Encoding.Latin1, - ALettersUShortMaxValueMinusOneAndTwoCopyRightChars, ALettersUShortMaxValueMinusOneAndCopyRightChar); + ALettersUShortMaxValueMinusOneAndTwoCopyRightChars, ALettersUShortMaxValueMinusOneAndCopyRightChar, async); - private static void Update_Comment_EntryName_Encoding_Internal(string entryName, + private static async Task Update_Comment_EntryName_Encoding_Internal(string entryName, string originalCreateComment, string expectedCreateComment, Encoding encoding, - string originalUpdateComment, string expectedUpdateComment) + string originalUpdateComment, string expectedUpdateComment, bool async) { using var ms = new MemoryStream(); - using (var zip = new ZipArchive(ms, ZipArchiveMode.Create, leaveOpen: true, encoding)) - { - ZipArchiveEntry entry = zip.CreateEntry(entryName, CompressionLevel.NoCompression); - entry.Comment = originalCreateComment; - Assert.Equal(expectedCreateComment, entry.Comment); - } + ZipArchive zip = await CreateZipArchive(async, ms, ZipArchiveMode.Create, leaveOpen: true, encoding); + ZipArchiveEntry entry1 = zip.CreateEntry(entryName, CompressionLevel.NoCompression); + entry1.Comment = originalCreateComment; + Assert.Equal(expectedCreateComment, entry1.Comment); + await DisposeZipArchive(async, zip); - using (var zip = new ZipArchive(ms, ZipArchiveMode.Read, leaveOpen: true, encoding)) + zip = await CreateZipArchive(async, ms, ZipArchiveMode.Read, leaveOpen: true, encoding); + foreach (ZipArchiveEntry entry2 in zip.Entries) { - foreach (ZipArchiveEntry entry in zip.Entries) - { - Assert.Equal(expectedCreateComment, entry.Comment); - } + Assert.Equal(expectedCreateComment, entry2.Comment); } + await DisposeZipArchive(async, zip); - using (var zip = new ZipArchive(ms, ZipArchiveMode.Update, leaveOpen: true, encoding)) + zip = await CreateZipArchive(async, ms, ZipArchiveMode.Update, leaveOpen: true, encoding); + foreach (ZipArchiveEntry entry3 in zip.Entries) { - foreach (ZipArchiveEntry entry in zip.Entries) - { - entry.Comment = originalUpdateComment; - 
Assert.Equal(expectedUpdateComment, entry.Comment); - } + entry3.Comment = originalUpdateComment; + Assert.Equal(expectedUpdateComment, entry3.Comment); } + await DisposeZipArchive(async, zip); - using (var zip = new ZipArchive(ms, ZipArchiveMode.Read, leaveOpen: false, encoding)) + zip = await CreateZipArchive(async, ms, ZipArchiveMode.Read, leaveOpen: false, encoding); + foreach (ZipArchiveEntry entry4 in zip.Entries) { - foreach (ZipArchiveEntry entry in zip.Entries) - { - Assert.Equal(expectedUpdateComment, entry.Comment); - } + Assert.Equal(expectedUpdateComment, entry4.Comment); } + await DisposeZipArchive(async, zip); } } -} \ No newline at end of file +} diff --git a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_UpdateTests.cs b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_UpdateTests.cs index 6b4dcadf01a280..f36b8653ecc334 100644 --- a/src/libraries/System.IO.Compression/tests/ZipArchive/zip_UpdateTests.cs +++ b/src/libraries/System.IO.Compression/tests/ZipArchive/zip_UpdateTests.cs @@ -11,311 +11,404 @@ namespace System.IO.Compression.Tests { public partial class zip_UpdateTests : ZipFileTestBase { + public static IEnumerable Get_UpdateReadNormal_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { "normal.zip", "normal", async }; + yield return new object[] { "fake64.zip", "small", async }; + yield return new object[] { "empty.zip", "empty", async }; + yield return new object[] { "appended.zip", "small", async }; + yield return new object[] { "prepended.zip", "small", async }; + yield return new object[] { "emptydir.zip", "emptydir", async }; + yield return new object[] { "small.zip", "small", async }; + yield return new object[] { "unicode.zip", "unicode", async }; + } + } + [Theory] - [InlineData("normal.zip", "normal")] - [InlineData("fake64.zip", "small")] - [InlineData("empty.zip", "empty")] - [InlineData("appended.zip", "small")] - [InlineData("prepended.zip", "small")] - [InlineData("emptydir.zip", "emptydir")] - [InlineData("small.zip", "small")] - [InlineData("unicode.zip", "unicode")] - public static async Task UpdateReadNormal(string zipFile, string zipFolder) + [MemberData(nameof(Get_UpdateReadNormal_Data))] + public static async Task UpdateReadNormal(string zipFile, string zipFolder, bool async) { - IsZipSameAsDir(await StreamHelpers.CreateTempCopyStream(zfile(zipFile)), zfolder(zipFolder), ZipArchiveMode.Update, requireExplicit: true, checkTimes: true); + MemoryStream ms = await StreamHelpers.CreateTempCopyStream(zfile(zipFile)); + await IsZipSameAsDir(ms, zfolder(zipFolder), ZipArchiveMode.Update, requireExplicit: true, checkTimes: true, async); } - [Fact] - public static async Task UpdateReadTwice() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task UpdateReadTwice(bool async) { - using (ZipArchive archive = new ZipArchive(await StreamHelpers.CreateTempCopyStream(zfile("small.zip")), ZipArchiveMode.Update)) + MemoryStream ms = await StreamHelpers.CreateTempCopyStream(zfile("small.zip")); + + ZipArchive archive = await CreateZipArchive(async, ms, ZipArchiveMode.Update); + + ZipArchiveEntry entry = archive.Entries[0]; + string contents1, contents2; + + Stream es = await OpenEntryStream(async, entry); + using (StreamReader s = new StreamReader(es)) { - ZipArchiveEntry entry = archive.Entries[0]; - string contents1, contents2; - using (StreamReader s = new StreamReader(entry.Open())) - { - contents1 = s.ReadToEnd(); - } - using (StreamReader s = new StreamReader(entry.Open())) - { - contents2 = 
s.ReadToEnd(); - } - Assert.Equal(contents1, contents2); + contents1 = s.ReadToEnd(); + } + + es = await OpenEntryStream(async, entry); + using (StreamReader s = new StreamReader(es)) + { + contents2 = s.ReadToEnd(); + } + + Assert.Equal(contents1, contents2); + + await DisposeZipArchive(async, archive); + } + + public static IEnumerable Get_UpdateCreate_Data() + { + foreach (bool async in _bools) + { + yield return new object[] { "normal", async }; + yield return new object[] { "empty", async }; + yield return new object[] { "unicode", async }; } } [Theory] - [InlineData("normal")] - [InlineData("empty")] - [InlineData("unicode")] - public static async Task UpdateCreate(string zipFolder) + [MemberData(nameof(Get_UpdateCreate_Data))] + public static async Task UpdateCreate(string zipFolder, bool async) { var zs = new LocalMemoryStream(); - await CreateFromDir(zfolder(zipFolder), zs, ZipArchiveMode.Update); - IsZipSameAsDir(zs.Clone(), zfolder(zipFolder), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true); + await CreateFromDir(zfolder(zipFolder), zs, async, ZipArchiveMode.Update); + await IsZipSameAsDir(zs.Clone(), zfolder(zipFolder), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true, async); } [Theory] - [InlineData(ZipArchiveMode.Create)] - [InlineData(ZipArchiveMode.Update)] - public static void EmptyEntryTest(ZipArchiveMode mode) + [InlineData(ZipArchiveMode.Create, false)] + [InlineData(ZipArchiveMode.Update, false)] + [InlineData(ZipArchiveMode.Create, true)] + [InlineData(ZipArchiveMode.Update, true)] + public static async Task EmptyEntryTest(ZipArchiveMode mode, bool async) { string data1 = "test data written to file."; string data2 = "more test data written to file."; DateTimeOffset lastWrite = new DateTimeOffset(1992, 4, 5, 12, 00, 30, new TimeSpan(-5, 0, 0)); var baseline = new LocalMemoryStream(); - using (ZipArchive archive = new ZipArchive(baseline, mode)) - { - AddEntry(archive, "data1.txt", data1, lastWrite); + ZipArchive archive = await CreateZipArchive(async, baseline, mode); - ZipArchiveEntry e = archive.CreateEntry("empty.txt"); - e.LastWriteTime = lastWrite; - using (Stream s = e.Open()) { } + await AddEntry(archive, "data1.txt", data1, lastWrite, async); - AddEntry(archive, "data2.txt", data2, lastWrite); - } + ZipArchiveEntry e = archive.CreateEntry("empty.txt"); + e.LastWriteTime = lastWrite; + + Stream s = await OpenEntryStream(async, e); + await DisposeStream(async, s); + + await AddEntry(archive, "data2.txt", data2, lastWrite, async); + + await DisposeZipArchive(async, archive); var test = new LocalMemoryStream(); - using (ZipArchive archive = new ZipArchive(test, mode)) - { - AddEntry(archive, "data1.txt", data1, lastWrite); + archive = await CreateZipArchive(async, test, mode); - ZipArchiveEntry e = archive.CreateEntry("empty.txt"); - e.LastWriteTime = lastWrite; + await AddEntry(archive, "data1.txt", data1, lastWrite, async); + + e = archive.CreateEntry("empty.txt"); + e.LastWriteTime = lastWrite; + + await AddEntry(archive, "data2.txt", data2, lastWrite, async); + + await DisposeZipArchive(async, archive); - AddEntry(archive, "data2.txt", data2, lastWrite); - } //compare Assert.True(ArraysEqual(baseline.ToArray(), test.ToArray()), "Arrays didn't match"); //second test, this time empty file at end baseline = baseline.Clone(); - using (ZipArchive archive = new ZipArchive(baseline, mode)) - { - AddEntry(archive, "data1.txt", data1, lastWrite); + archive = await CreateZipArchive(async, baseline, mode); - ZipArchiveEntry e = 
archive.CreateEntry("empty.txt"); - e.LastWriteTime = lastWrite; - using (Stream s = e.Open()) { } - } + await AddEntry(archive, "data1.txt", data1, lastWrite, async); + + e = archive.CreateEntry("empty.txt"); + e.LastWriteTime = lastWrite; + + s = await OpenEntryStream(async, e); + await DisposeStream(async, s); + + await DisposeZipArchive(async, archive); test = test.Clone(); - using (ZipArchive archive = new ZipArchive(test, mode)) - { - AddEntry(archive, "data1.txt", data1, lastWrite); + archive = await CreateZipArchive(async, test, mode); + + await AddEntry(archive, "data1.txt", data1, lastWrite, async); + + e = archive.CreateEntry("empty.txt"); + e.LastWriteTime = lastWrite; + + await DisposeZipArchive(async, archive); - ZipArchiveEntry e = archive.CreateEntry("empty.txt"); - e.LastWriteTime = lastWrite; - } //compare Assert.True(ArraysEqual(baseline.ToArray(), test.ToArray()), "Arrays didn't match after update"); } - [Fact] - public static async Task DeleteAndMoveEntries() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task DeleteAndMoveEntries(bool async) { //delete and move - var testArchive = await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")); + MemoryStream testArchive = await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")); + + ZipArchive archive = await CreateZipArchive(async, testArchive, ZipArchiveMode.Update, leaveOpen: true); + + ZipArchiveEntry toBeDeleted = archive.GetEntry("binary.wmv"); + toBeDeleted.Delete(); + toBeDeleted.Delete(); //delete twice should be okay + ZipArchiveEntry moved = archive.CreateEntry("notempty/secondnewname.txt"); + ZipArchiveEntry orig = archive.GetEntry("notempty/second.txt"); - using (ZipArchive archive = new ZipArchive(testArchive, ZipArchiveMode.Update, true)) + if (async) + { + await using (Stream origMoved = await orig.OpenAsync(), movedStream = await moved.OpenAsync()) + { + await origMoved.CopyToAsync(movedStream); + } + } + else { - ZipArchiveEntry toBeDeleted = archive.GetEntry("binary.wmv"); - toBeDeleted.Delete(); - toBeDeleted.Delete(); //delete twice should be okay - ZipArchiveEntry moved = archive.CreateEntry("notempty/secondnewname.txt"); - ZipArchiveEntry orig = archive.GetEntry("notempty/second.txt"); using (Stream origMoved = orig.Open(), movedStream = moved.Open()) { origMoved.CopyTo(movedStream); } - moved.LastWriteTime = orig.LastWriteTime; - orig.Delete(); } - IsZipSameAsDir(testArchive, zmodified("deleteMove"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true); + moved.LastWriteTime = orig.LastWriteTime; + orig.Delete(); + + await DisposeZipArchive(async, archive); + + await IsZipSameAsDir(testArchive, zmodified("deleteMove"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true, async); } + [Theory] - [InlineData(false)] - [InlineData(true)] - public static async Task AppendToEntry(bool writeWithSpans) + [InlineData(false, false)] + [InlineData(false, true)] + [InlineData(true, false)] + [InlineData(true, true)] + public static async Task AppendToEntry(bool writeWithSpans, bool async) { //append Stream testArchive = await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")); - using (ZipArchive archive = new ZipArchive(testArchive, ZipArchiveMode.Update, true)) + ZipArchive archive = await CreateZipArchive(async, testArchive, ZipArchiveMode.Update, true); + + ZipArchiveEntry e = archive.GetEntry("first.txt"); + + Stream s = await OpenEntryStream(async, e); + + s.Seek(0, SeekOrigin.End); + + byte[] data = "\r\n\r\nThe answer my friend, is blowin' in 
the wind."u8.ToArray();
+            if (writeWithSpans)
             {
-                ZipArchiveEntry e = archive.GetEntry("first.txt");
-                using (Stream s = e.Open())
+                if (async)
                 {
-                    s.Seek(0, SeekOrigin.End);
-
-                    byte[] data = "\r\n\r\nThe answer my friend, is blowin' in the wind."u8.ToArray();
-                    if (writeWithSpans)
-                    {
-                        s.Write(new ReadOnlySpan<byte>(data));
-                    }
-                    else
-                    {
-                        s.Write(data, 0, data.Length);
-                    }
+                    await s.WriteAsync(data);
+                }
+                else
+                {
+                    s.Write(new ReadOnlySpan<byte>(data));
+                }
+            }
+            else
+            {
+                if (async)
+                {
+                    await s.WriteAsync(data);
+                }
+                else
+                {
+                    s.Write(data, 0, data.Length);
                 }
-
-                var file = FileData.GetFile(zmodified(Path.Combine("append", "first.txt")));
-                e.LastWriteTime = file.LastModifiedDate;
             }
-            IsZipSameAsDir(testArchive, zmodified("append"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true);
+            await DisposeStream(async, s);
+
+            var file = FileData.GetFile(zmodified(Path.Combine("append", "first.txt")));
+            e.LastWriteTime = file.LastModifiedDate;
+
+            await DisposeZipArchive(async, archive);
+
+            await IsZipSameAsDir(testArchive, zmodified("append"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true, async);
         }
-        [Fact]
-        public static async Task OverwriteEntry()
+
+        [Theory]
+        [MemberData(nameof(Get_Booleans_Data))]
+        public static async Task OverwriteEntry(bool async)
         {
             //Overwrite file
             Stream testArchive = await StreamHelpers.CreateTempCopyStream(zfile("normal.zip"));
-            using (ZipArchive archive = new ZipArchive(testArchive, ZipArchiveMode.Update, true))
-            {
-                string fileName = zmodified(Path.Combine("overwrite", "first.txt"));
-                ZipArchiveEntry e = archive.GetEntry("first.txt");
+            ZipArchive archive = await CreateZipArchive(async, testArchive, ZipArchiveMode.Update, true);
-                var file = FileData.GetFile(fileName);
-                e.LastWriteTime = file.LastModifiedDate;
+            string fileName = zmodified(Path.Combine("overwrite", "first.txt"));
-                using (var stream = await StreamHelpers.CreateTempCopyStream(fileName))
-                {
-                    using (Stream es = e.Open())
-                    {
-                        es.SetLength(0);
-                        stream.CopyTo(es);
-                    }
-                }
-            }
+            string entryName = "first.txt";
+            ZipArchiveEntry e = archive.GetEntry(entryName);
+            await UpdateEntry(e, fileName, entryName, async);
-            IsZipSameAsDir(testArchive, zmodified("overwrite"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true);
+            await DisposeZipArchive(async, archive);
+
+            await IsZipSameAsDir(testArchive, zmodified("overwrite"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true, async);
         }
-        [Fact]
-        public static async Task AddFileToArchive()
+        [Theory]
+        [MemberData(nameof(Get_Booleans_Data))]
+        public static async Task AddFileToArchive(bool async)
         {
             //add file
             var testArchive = await StreamHelpers.CreateTempCopyStream(zfile("normal.zip"));
-            using (ZipArchive archive = new ZipArchive(testArchive, ZipArchiveMode.Update, true))
-            {
-                await updateArchive(archive, zmodified(Path.Combine("addFile", "added.txt")), "added.txt");
-            }
+            ZipArchive archive = await CreateZipArchive(async, testArchive, ZipArchiveMode.Update, true);
+
+            await CreateAndUpdateEntry(archive, zmodified(Path.Combine("addFile", "added.txt")), "added.txt", async);
+
+            await DisposeZipArchive(async, archive);
-            IsZipSameAsDir(testArchive, zmodified("addFile"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true);
+            await IsZipSameAsDir(testArchive, zmodified("addFile"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true, async);
         }
-        [Fact]
-        public static async Task AddFileToArchive_AfterReading()
+        [Theory]
+        [MemberData(nameof(Get_Booleans_Data))]
+        public static async
Task AddFileToArchive_AfterReading(bool async) { //add file and read entries before Stream testArchive = await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")); - using (ZipArchive archive = new ZipArchive(testArchive, ZipArchiveMode.Update, true)) - { - var x = archive.Entries; + ZipArchive archive = await CreateZipArchive(async, testArchive, ZipArchiveMode.Update, true); - await updateArchive(archive, zmodified(Path.Combine("addFile", "added.txt")), "added.txt"); - } + var x = archive.Entries; + + await CreateAndUpdateEntry(archive, zmodified(Path.Combine("addFile", "added.txt")), "added.txt", async); + + await DisposeZipArchive(async, archive); - IsZipSameAsDir(testArchive, zmodified("addFile"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true); + await IsZipSameAsDir(testArchive, zmodified("addFile"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true, async); } - [Fact] - public static async Task AddFileToArchive_ThenReadEntries() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task AddFileToArchive_ThenReadEntries(bool async) { //add file and read entries after Stream testArchive = await StreamHelpers.CreateTempCopyStream(zfile("normal.zip")); - using (ZipArchive archive = new ZipArchive(testArchive, ZipArchiveMode.Update, true)) - { - await updateArchive(archive, zmodified(Path.Combine("addFile", "added.txt")), "added.txt"); + ZipArchive archive = await CreateZipArchive(async, testArchive, ZipArchiveMode.Update, true); - var x = archive.Entries; - } + await CreateAndUpdateEntry(archive, zmodified(Path.Combine("addFile", "added.txt")), "added.txt", async); + + var x = archive.Entries; - IsZipSameAsDir(testArchive, zmodified("addFile"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true); + await DisposeZipArchive(async, archive); + + await IsZipSameAsDir(testArchive, zmodified("addFile"), ZipArchiveMode.Read, requireExplicit: true, checkTimes: true, async); } - private static async Task updateArchive(ZipArchive archive, string installFile, string entryName) + private static Task CreateAndUpdateEntry(ZipArchive archive, string installFile, string entryName, bool async) { ZipArchiveEntry e = archive.CreateEntry(entryName); + return UpdateEntry(e, installFile, entryName, async); + } - var file = FileData.GetFile(installFile); + private static async Task UpdateEntry(ZipArchiveEntry e, string installFile, string entryName, bool async) + { + FileData file = FileData.GetFile(installFile); e.LastWriteTime = file.LastModifiedDate; Assert.Equal(e.LastWriteTime, file.LastModifiedDate); using (var stream = await StreamHelpers.CreateTempCopyStream(installFile)) { - using (Stream es = e.Open()) + if (async) { + await using Stream es = await e.OpenAsync(); + es.SetLength(0); + await stream.CopyToAsync(es); + } + else + { + using Stream es = e.Open(); es.SetLength(0); stream.CopyTo(es); } } } - [Fact] - public static async Task UpdateModeInvalidOperations() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public static async Task UpdateModeInvalidOperations(bool async) { - using (LocalMemoryStream ms = await LocalMemoryStream.readAppFileAsync(zfile("normal.zip"))) - { - ZipArchive target = new ZipArchive(ms, ZipArchiveMode.Update, leaveOpen: true); + using LocalMemoryStream ms = await LocalMemoryStream.ReadAppFileAsync(zfile("normal.zip")); - ZipArchiveEntry edeleted = target.GetEntry("first.txt"); + ZipArchive target = await CreateZipArchive(async, ms, ZipArchiveMode.Update, true); - Stream s = edeleted.Open(); - //invalid ops 
while entry open - Assert.Throws(() => edeleted.Open()); - Assert.Throws(() => { var x = edeleted.Length; }); - Assert.Throws(() => { var x = edeleted.CompressedLength; }); - Assert.Throws(() => edeleted.Delete()); - s.Dispose(); + ZipArchiveEntry edeleted = target.GetEntry("first.txt"); - //invalid ops on stream after entry closed - Assert.Throws(() => s.ReadByte()); + Stream s = await OpenEntryStream(async, edeleted); - Assert.Throws(() => { var x = edeleted.Length; }); - Assert.Throws(() => { var x = edeleted.CompressedLength; }); + //invalid ops while entry open + await Assert.ThrowsAsync(() => OpenEntryStream(async, edeleted)); - edeleted.Delete(); - //invalid ops while entry deleted - Assert.Throws(() => edeleted.Open()); - Assert.Throws(() => { edeleted.LastWriteTime = new DateTimeOffset(); }); + Assert.Throws(() => { var x = edeleted.Length; }); + Assert.Throws(() => { var x = edeleted.CompressedLength; }); + Assert.Throws(() => edeleted.Delete()); - ZipArchiveEntry e = target.GetEntry("notempty/second.txt"); + await DisposeStream(async, s); - target.Dispose(); + //invalid ops on stream after entry closed + Assert.Throws(() => s.ReadByte()); - Assert.Throws(() => { var x = target.Entries; }); - Assert.Throws(() => target.CreateEntry("dirka")); - Assert.Throws(() => e.Open()); - Assert.Throws(() => e.Delete()); - Assert.Throws(() => { e.LastWriteTime = new DateTimeOffset(); }); - } + Assert.Throws(() => { var x = edeleted.Length; }); + Assert.Throws(() => { var x = edeleted.CompressedLength; }); + + edeleted.Delete(); + + //invalid ops while entry deleted + await Assert.ThrowsAsync(() => OpenEntryStream(async, edeleted)); + + Assert.Throws(() => { edeleted.LastWriteTime = new DateTimeOffset(); }); + + ZipArchiveEntry e = target.GetEntry("notempty/second.txt"); + + await DisposeZipArchive(async, target); + + Assert.Throws(() => { var x = target.Entries; }); + Assert.Throws(() => target.CreateEntry("dirka")); + + await Assert.ThrowsAsync(() => OpenEntryStream(async, e)); + + Assert.Throws(() => e.Delete()); + Assert.Throws(() => { e.LastWriteTime = new DateTimeOffset(); }); } - [Fact] - public void UpdateUncompressedArchive() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task UpdateUncompressedArchive(bool async) { - var utf8WithoutBom = new Text.UTF8Encoding(encoderShouldEmitUTF8Identifier: false); + var utf8WithoutBom = new UTF8Encoding(encoderShouldEmitUTF8Identifier: false); byte[] fileContent; using (var memStream = new MemoryStream()) { - using (var zip = new ZipArchive(memStream, ZipArchiveMode.Create)) + ZipArchive zip = await CreateZipArchive(async, memStream, ZipArchiveMode.Create); + + ZipArchiveEntry entry = zip.CreateEntry("testing", CompressionLevel.NoCompression); + using (var writer = new StreamWriter(entry.Open(), utf8WithoutBom)) { - ZipArchiveEntry entry = zip.CreateEntry("testing", CompressionLevel.NoCompression); - using (var writer = new StreamWriter(entry.Open(), utf8WithoutBom)) - { - writer.Write("hello"); - writer.Flush(); - } + writer.Write("hello"); + writer.Flush(); } + + await DisposeZipArchive(async, zip); + fileContent = memStream.ToArray(); } byte compressionMethod = fileContent[8]; @@ -324,44 +417,48 @@ public void UpdateUncompressedArchive() { memStream.Write(fileContent); memStream.Position = 0; - using (var archive = new ZipArchive(memStream, ZipArchiveMode.Update)) + ZipArchive archive = await CreateZipArchive(async, memStream, ZipArchiveMode.Update); + + ZipArchiveEntry entry = archive.GetEntry("testing"); + using (var 
writer = new StreamWriter(entry.Open(), utf8WithoutBom)) { - ZipArchiveEntry entry = archive.GetEntry("testing"); - using (var writer = new StreamWriter(entry.Open(), utf8WithoutBom)) - { - writer.Write("new"); - writer.Flush(); - } + writer.Write("new"); + writer.Flush(); } + + await DisposeZipArchive(async, archive); + byte[] modifiedTestContent = memStream.ToArray(); compressionMethod = modifiedTestContent[8]; Assert.Equal(0, compressionMethod); // stored => 0, deflate => 8 } } - [Fact] - public void Update_VerifyDuplicateEntriesAreAllowed() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task Update_VerifyDuplicateEntriesAreAllowed(bool async) { using var ms = new MemoryStream(); - using var archive = new ZipArchive(ms, ZipArchiveMode.Update); + ZipArchive archive = await CreateZipArchive(async, ms, ZipArchiveMode.Update); string entryName = "foo"; - AddEntry(archive, entryName, contents: "xxx", DateTimeOffset.Now); - AddEntry(archive, entryName, contents: "yyy", DateTimeOffset.Now); + await AddEntry(archive, entryName, contents: "xxx", DateTimeOffset.Now, async); + await AddEntry(archive, entryName, contents: "yyy", DateTimeOffset.Now, async); Assert.Equal(2, archive.Entries.Count); } - [Fact] - public static async Task Update_PerformMinimalWritesWhenNoFilesChanged() + [Theory] + [MemberData(nameof(Get_Booleans_Data))] + public async Task Update_PerformMinimalWritesWhenNoFilesChanged(bool async) { - using (LocalMemoryStream ms = await LocalMemoryStream.readAppFileAsync(zfile("normal.zip"))) + using (LocalMemoryStream ms = await LocalMemoryStream.ReadAppFileAsync(zfile("normal.zip"))) using (CallTrackingStream trackingStream = new CallTrackingStream(ms)) { int writesCalled = trackingStream.TimesCalled(nameof(trackingStream.Write)); int writeBytesCalled = trackingStream.TimesCalled(nameof(trackingStream.WriteByte)); - ZipArchive target = new ZipArchive(trackingStream, ZipArchiveMode.Update, leaveOpen: true); + ZipArchive target = await CreateZipArchive(async, trackingStream, ZipArchiveMode.Update, leaveOpen: true); int archiveEntries = target.Entries.Count; target.Dispose(); @@ -370,6 +467,8 @@ public static async Task Update_PerformMinimalWritesWhenNoFilesChanged() // No changes to the archive should result in no writes to the file. 
Assert.Equal(0, writesCalled + writeBytesCalled); + + await DisposeZipArchive(async, target); } } @@ -381,7 +480,7 @@ public static async Task Update_PerformMinimalWritesWhenNoFilesChanged() public void Update_PerformMinimalWritesWhenFixedLengthEntryHeaderFieldChanged(int startIndex, int entriesToModify, int step) { byte[] sampleEntryContents = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]; - byte[] sampleZipFile = CreateZipFile(50, sampleEntryContents); + byte[] sampleZipFile = CreateZipFile(50, sampleEntryContents, async: false).Result; using (MemoryStream ms = new MemoryStream()) { @@ -455,6 +554,88 @@ public void Update_PerformMinimalWritesWhenFixedLengthEntryHeaderFieldChanged(in } } + //[Theory] + //[InlineData(49, 1, 1)] + //[InlineData(40, 3, 2)] + //[InlineData(30, 5, 3)] + //[InlineData(0, 8, 1)] + //public async Task Update_PerformMinimalWritesWhenFixedLengthEntryHeaderFieldChanged_Async(int startIndex, int entriesToModify, int step) + //{ + // byte[] sampleEntryContents = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]; + // byte[] sampleZipFile = await CreateZipFile(50, sampleEntryContents, async: true); + + // await using (MemoryStream ms = new MemoryStream()) + // { + // await ms.WriteAsync(sampleZipFile); + // ms.Seek(0, SeekOrigin.Begin); + + // await using (CallTrackingStream trackingStream = new CallTrackingStream(ms)) + // { + // // Open the first archive in Update mode, then change the value of {entriesToModify} fixed-length entry headers + // // (LastWriteTime.) Verify the correct number of writes performed as a result, then reopen the same + // // archive, get the entries and make sure that the fields hold the expected value. + // int writesCalled = trackingStream.TimesCalled(nameof(trackingStream.Write)); + // int writeBytesCalled = trackingStream.TimesCalled(nameof(trackingStream.WriteByte)); + // ZipArchive target = await ZipArchive.CreateAsync(trackingStream, ZipArchiveMode.Update, leaveOpen: true, entryNameEncoding: null); + // List<(string EntryName, DateTimeOffset LastWriteTime)> updatedMetadata = new(entriesToModify); + + // for (int i = 0; i < entriesToModify; i++) + // { + // int modificationIndex = startIndex + (i * step); + // ZipArchiveEntry entryToModify = target.Entries[modificationIndex]; + // string entryName = entryToModify.FullName; + // DateTimeOffset expectedDateTimeOffset = entryToModify.LastWriteTime.AddHours(1.0); + + // entryToModify.LastWriteTime = expectedDateTimeOffset; + // updatedMetadata.Add((entryName, expectedDateTimeOffset)); + // } + + // await target.DisposeAsync(); + + // writesCalled = trackingStream.TimesCalled(nameof(trackingStream.Write)) - writesCalled; + // writeBytesCalled = trackingStream.TimesCalled(nameof(trackingStream.WriteByte)) - writeBytesCalled; + // // As above, check 1: the number of writes performed should be minimal. + // // 2 writes per archive entry for the local file header. + // // 2 writes per archive entry for the central directory header. + // // 1 write (sometimes 2, if there's a comment) for the end of central directory block. + // // The EOCD block won't change as a result of our modifications, so is excluded from the counts. 
+ // Assert.Equal(((2 + 2) * entriesToModify), writesCalled + writeBytesCalled); + + // trackingStream.Seek(0, SeekOrigin.Begin); + // target = await ZipArchive.CreateAsync(trackingStream, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null); + + // for (int i = 0; i < entriesToModify; i++) + // { + // int modificationIndex = startIndex + (i * step); + // var expectedValues = updatedMetadata[i]; + // ZipArchiveEntry verifiedEntry = target.Entries[modificationIndex]; + + // // Check 2: the field holds the expected value (and thus has been written to the file.) + // Assert.NotNull(verifiedEntry); + // Assert.Equal(expectedValues.EntryName, verifiedEntry.FullName); + // Assert.Equal(expectedValues.LastWriteTime, verifiedEntry.LastWriteTime); + // } + + // // Check 3: no other data has been corrupted as a result + // for (int i = 0; i < target.Entries.Count; i++) + // { + // ZipArchiveEntry entry = target.Entries[i]; + // byte[] expectedBuffer = [.. sampleEntryContents, (byte)(i % byte.MaxValue)]; + // byte[] readBuffer = new byte[expectedBuffer.Length]; + + // await using (Stream readStream = await entry.OpenAsync()) + // { + // await readStream.ReadAsync(readBuffer); + // } + + // Assert.Equal(expectedBuffer, readBuffer); + // } + + // await target.DisposeAsync(); + // } + // } + //} + [Theory] [InlineData(0)] [InlineData(10)] @@ -464,6 +645,14 @@ public void Update_PerformMinimalWritesWhenFixedLengthEntryHeaderFieldChanged(in public void Update_PerformMinimalWritesWhenEntryDataChanges(int index) => Update_PerformMinimalWritesWithDataAndHeaderChanges(index, -1); + //[Theory] + //[InlineData(0)] + //[InlineData(10)] + //[InlineData(20)] + //[InlineData(30)] + //[InlineData(49)] + //public Task Update_PerformMinimalWritesWhenEntryDataChanges_Async(int index) => Update_PerformMinimalWritesWithDataAndHeaderChanges_Async(index, -1); + [Theory] [InlineData(0, 0)] [InlineData(20, 40)] @@ -471,7 +660,7 @@ public void Update_PerformMinimalWritesWhenEntryDataChanges(int index) public void Update_PerformMinimalWritesWithDataAndHeaderChanges(int dataChangeIndex, int lastWriteTimeChangeIndex) { byte[] sampleEntryContents = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]; - byte[] sampleZipFile = CreateZipFile(50, sampleEntryContents); + byte[] sampleZipFile = CreateZipFile(50, sampleEntryContents, async: false).Result; byte[] expectedUpdatedEntryContents = [19, 18, 17, 16, 15, 14, 13, 12, 11, 10]; using (MemoryStream ms = new MemoryStream()) @@ -562,10 +751,110 @@ public void Update_PerformMinimalWritesWithDataAndHeaderChanges(int dataChangeIn } } + //[Theory] + //[InlineData(0, 0)] + //[InlineData(20, 40)] + //[InlineData(30, 10)] + //public async Task Update_PerformMinimalWritesWithDataAndHeaderChanges_Async(int dataChangeIndex, int lastWriteTimeChangeIndex) + //{ + // byte[] sampleEntryContents = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]; + // byte[] sampleZipFile = await CreateZipFile(50, sampleEntryContents, async: true); + // byte[] expectedUpdatedEntryContents = [19, 18, 17, 16, 15, 14, 13, 12, 11, 10]; + + // await using (MemoryStream ms = new MemoryStream()) + // { + // await ms.WriteAsync(sampleZipFile); + // ms.Seek(0, SeekOrigin.Begin); + + // await using (CallTrackingStream trackingStream = new CallTrackingStream(ms)) + // { + // // Open the archive in Update mode, then rewrite the data of the {dataChangeIndex}th entry + // // and set the LastWriteTime of the {lastWriteTimeChangeIndex}th entry. 
+ // // Verify the correct number of writes performed as a result, then reopen the same + // // archive, get the entries and make sure that the fields hold the expected value. + // int writesCalled = trackingStream.TimesCalled(nameof(trackingStream.Write)); + // int writeBytesCalled = trackingStream.TimesCalled(nameof(trackingStream.WriteByte)); + // ZipArchive target = await ZipArchive.CreateAsync(trackingStream, ZipArchiveMode.Update, leaveOpen: true, entryNameEncoding: null); + // ZipArchiveEntry entryToRewrite = target.Entries[dataChangeIndex]; + // int totalEntries = target.Entries.Count; + // int expectedEntriesToWrite = target.Entries.Count - dataChangeIndex; + // DateTimeOffset expectedWriteTime = default; + + // if (lastWriteTimeChangeIndex != -1) + // { + // ZipArchiveEntry entryToModify = target.Entries[lastWriteTimeChangeIndex]; + + // expectedWriteTime = entryToModify.LastWriteTime.AddHours(1.0); + // entryToModify.LastWriteTime = expectedWriteTime; + // } + + // await using (var entryStream = await entryToRewrite.OpenAsync()) + // { + // entryStream.SetLength(0); + // await entryStream.WriteAsync(expectedUpdatedEntryContents); + // } + + // await target.DisposeAsync(); + + // writesCalled = trackingStream.TimesCalled(nameof(trackingStream.Write)) - writesCalled; + // writeBytesCalled = trackingStream.TimesCalled(nameof(trackingStream.WriteByte)) - writeBytesCalled; + + // // If the data changed first, then every entry after it will be written in full. If the fixed-length + // // metadata changed first, some entries which won't have been fully written - just updated in place. + // // 2 writes per archive entry for the local file header. + // // 2 writes per archive entry for the central directory header. + // // 2 writes for the file data of the updated entry itself + // // 1 write per archive entry for the file data of other entries after this in the file + // // 1 write (sometimes 2, if there's a comment) for the end of central directory block. + // // All of the central directory headers must be rewritten after an entry's data has been modified. + // if (dataChangeIndex <= lastWriteTimeChangeIndex || lastWriteTimeChangeIndex == -1) + // { + // // dataChangeIndex -> totalEntries: rewrite in full + // // all central directories headers + // Assert.Equal(1 + 1 + ((2 + 1) * expectedEntriesToWrite) + (2 * totalEntries), writesCalled + writeBytesCalled); + // } + // else + // { + // // lastWriteTimeChangeIndex: partial rewrite + // // dataChangeIndex -> totalEntries: rewrite in full + // // all central directory headers + // Assert.Equal(1 + 1 + ((2 + 1) * expectedEntriesToWrite) + (2 * totalEntries) + 2, writesCalled + writeBytesCalled); + // } + + // trackingStream.Seek(0, SeekOrigin.Begin); + // target = await ZipArchive.CreateAsync(trackingStream, ZipArchiveMode.Read, leaveOpen: false, entryNameEncoding: null); + + // // Check 2: no other data has been corrupted as a result + // for (int i = 0; i < target.Entries.Count; i++) + // { + // ZipArchiveEntry entry = target.Entries[i]; + // byte[] expectedBuffer = i == dataChangeIndex + // ? expectedUpdatedEntryContents + // : [.. 
sampleEntryContents, (byte)(i % byte.MaxValue)]; + // byte[] readBuffer = new byte[expectedBuffer.Length]; + + // await using (Stream readStream = await entry.OpenAsync()) + // { + // await readStream.ReadAsync(readBuffer); + // } + + // Assert.Equal(expectedBuffer, readBuffer); + + // if (i == lastWriteTimeChangeIndex) + // { + // Assert.Equal(expectedWriteTime, entry.LastWriteTime); + // } + // } + + // await target.DisposeAsync(); + // } + // } + //} + [Fact] public async Task Update_PerformMinimalWritesWhenArchiveCommentChanged() { - using (LocalMemoryStream ms = await LocalMemoryStream.readAppFileAsync(zfile("normal.zip"))) + using (LocalMemoryStream ms = await LocalMemoryStream.ReadAppFileAsync(zfile("normal.zip"))) using (CallTrackingStream trackingStream = new CallTrackingStream(ms)) { int writesCalled = trackingStream.TimesCalled(nameof(trackingStream.Write)); @@ -598,7 +887,7 @@ public async Task Update_PerformMinimalWritesWhenArchiveCommentChanged() public void Update_PerformMinimalWritesWhenEntriesModifiedAndDeleted(int modifyIndex, int deleteIndex) { byte[] sampleEntryContents = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]; - byte[] sampleZipFile = CreateZipFile(50, sampleEntryContents); + byte[] sampleZipFile = CreateZipFile(50, sampleEntryContents, async: false).Result; byte[] expectedUpdatedEntryContents = [22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]; using (MemoryStream ms = new MemoryStream()) @@ -694,7 +983,7 @@ public void Update_PerformMinimalWritesWhenEntriesModifiedAndDeleted(int modifyI public void Update_PerformMinimalWritesWhenEntriesModifiedAndAdded(int entriesToCreate) { byte[] sampleEntryContents = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]; - byte[] sampleZipFile = CreateZipFile(50, sampleEntryContents); + byte[] sampleZipFile = CreateZipFile(50, sampleEntryContents, async: false).Result; using (MemoryStream ms = new MemoryStream()) {