Skip to content

Commit

Permalink
Correctly detect EOL in chunked uploads
Browse files Browse the repository at this point in the history
Even though the AWS documentation says that they always use "CRLF" as
the EOL indicator, it seems to depend on the system when using async
(native) clients.
Now, we use a BufferedReader to correctly detect and read full lines
where we need to.

MultiPartUploadV2IT#testMultipartUpload_asyncClient failed for files
larger than 16KB before this fix; it now succeeds.

Fixes #1818
  • Loading branch information
afranken committed May 3, 2024
1 parent a5bea08 commit dc01aff
Show file tree
Hide file tree
Showing 24 changed files with 907 additions and 456 deletions.
2 changes: 1 addition & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,7 @@ Version 3.x is JDK17 LTS bytecode compatible, with Docker and JUnit / direct Jav
* Version updates
* TBD

## 3.7.3 - PLANNED
## 3.7.3
3.x is JDK17 LTS bytecode compatible, with Docker and JUnit / direct Java integration.

* Features and fixes
Expand Down
129 changes: 129 additions & 0 deletions integration-tests/src/test/resources/sampleFile_large.txt

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,16 @@ public static ChecksumAlgorithm fromString(String value) {
};
}

/**
 * Resolves an {@code x-amz-checksum-*} header name to its {@link ChecksumAlgorithm}.
 *
 * @param value the lower-case checksum header name, e.g. "x-amz-checksum-sha256".
 * @return the matching algorithm, or {@code null} if the header is not a known checksum header.
 */
public static ChecksumAlgorithm fromHeader(String value) {
  // NOTE: like the original switch expression, this throws NPE for a null value.
  switch (value) {
    case "x-amz-checksum-sha256":
      return SHA256;
    case "x-amz-checksum-sha1":
      return SHA1;
    case "x-amz-checksum-crc32":
      return CRC32;
    case "x-amz-checksum-crc32c":
      return CRC32C;
    default:
      return null;
  }
}

public Algorithm toAlgorithm() {
return switch (this) {
case CRC32 -> Algorithm.CRC32;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,9 @@
import static com.adobe.testing.s3mock.S3Exception.NOT_MODIFIED;
import static com.adobe.testing.s3mock.S3Exception.NO_SUCH_KEY;
import static com.adobe.testing.s3mock.S3Exception.PRECONDITION_FAILED;
import static com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_CHECKSUM;
import static com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_DECODED_CONTENT_LENGTH;
import static com.adobe.testing.s3mock.util.AwsHttpHeaders.X_AMZ_TRAILER;
import static com.adobe.testing.s3mock.util.HeaderUtil.checksumAlgorithmFromSdk;
import static com.adobe.testing.s3mock.util.HeaderUtil.isChunked;
import static com.adobe.testing.s3mock.util.HeaderUtil.isChunkedAndV4Signed;
Expand All @@ -37,6 +39,7 @@
import com.adobe.testing.s3mock.dto.Delete;
import com.adobe.testing.s3mock.dto.DeleteResult;
import com.adobe.testing.s3mock.dto.DeletedS3Object;
import com.adobe.testing.s3mock.dto.Error;
import com.adobe.testing.s3mock.dto.LegalHold;
import com.adobe.testing.s3mock.dto.Owner;
import com.adobe.testing.s3mock.dto.Retention;
Expand All @@ -45,9 +48,7 @@
import com.adobe.testing.s3mock.store.BucketStore;
import com.adobe.testing.s3mock.store.ObjectStore;
import com.adobe.testing.s3mock.store.S3ObjectMetadata;
import com.adobe.testing.s3mock.util.AbstractAwsInputStream;
import com.adobe.testing.s3mock.util.AwsChunkedDecodingChecksumInputStream;
import com.adobe.testing.s3mock.util.AwsUnsignedChunkedDecodingChecksumInputStream;
import com.adobe.testing.s3mock.util.AwsChunkedInputStream;
import com.adobe.testing.s3mock.util.DigestUtil;
import java.io.IOException;
import java.io.InputStream;
Expand Down Expand Up @@ -161,7 +162,7 @@ public DeleteResult deleteObjects(String bucketName, Delete delete) {
response.addDeletedObject(DeletedS3Object.from(object));
} catch (IllegalStateException e) {
response.addError(
new com.adobe.testing.s3mock.dto.Error("InternalError",
new Error("InternalError",
object.key(),
"We encountered an internal error. Please try again.",
object.versionId()));
Expand Down Expand Up @@ -266,13 +267,13 @@ public void verifyRetention(Retention retention) {

public Pair<Path, String> toTempFile(InputStream inputStream, HttpHeaders httpHeaders) {
try {
var tempFile = Files.createTempFile("tempObject", "");
var tempFile = Files.createTempFile("ObjectService", "toTempFile");
try (OutputStream os = Files.newOutputStream(tempFile)) {
InputStream wrappedStream = wrapStream(inputStream, httpHeaders);
wrappedStream.transferTo(os);
ChecksumAlgorithm algorithmFromSdk = checksumAlgorithmFromSdk(httpHeaders);
if (algorithmFromSdk != null
&& wrappedStream instanceof AbstractAwsInputStream awsInputStream) {
&& wrappedStream instanceof AwsChunkedInputStream awsInputStream) {
return Pair.of(tempFile, awsInputStream.getChecksum());
}
return Pair.of(tempFile, null);
Expand All @@ -291,11 +292,11 @@ public void verifyChecksum(Path path, String checksum, ChecksumAlgorithm checksu

InputStream wrapStream(InputStream dataStream, HttpHeaders headers) {
var lengthHeader = headers.getFirst(X_AMZ_DECODED_CONTENT_LENGTH);
var trailHeader = headers.getOrEmpty(X_AMZ_TRAILER);
var hasChecksum = trailHeader.stream().anyMatch(h -> h.contains(X_AMZ_CHECKSUM));
var length = lengthHeader == null ? -1 : Long.parseLong(lengthHeader);
if (isChunkedAndV4Signed(headers)) {
return new AwsChunkedDecodingChecksumInputStream(dataStream, length);
} else if (isChunked(headers)) {
return new AwsUnsignedChunkedDecodingChecksumInputStream(dataStream, length);
if (isChunkedAndV4Signed(headers) || isChunked(headers)) {
return new AwsChunkedInputStream(dataStream, length, hasChecksum);
} else {
return dataStream;
}
Expand Down

This file was deleted.

This file was deleted.

Loading

0 comments on commit dc01aff

Please sign in to comment.