Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,6 @@ public class AwsJson1ProtocolTests {
@HttpClientRequestTests
@ProtocolTestFilter(
skipTests = {
// TODO: implement content-encoding
"SDKAppliedContentEncoding_awsJson1_0",
"SDKAppendsGzipAndIgnoresHttpProvidedEncoding_awsJson1_0",

// Skipping top-level input defaults isn't necessary in Smithy-Java given it uses builders and
// the defaults don't impact nullability. This applies to the following tests.
"AwsJson10ClientSkipsTopLevelDefaultValuesInInput",
Expand All @@ -42,8 +38,9 @@ public void requestTest(DataStream expected, DataStream actual) {
Node.parse(new String(ByteBufferUtils.getBytes(expected.asByteBuffer()),
StandardCharsets.UTF_8)));
}
assertEquals(expectedJson, new StringBuildingSubscriber(actual).getResult());

if (expected.contentType() != null) { // Skip request compression tests since they do not have expected body
assertEquals(expectedJson, new StringBuildingSubscriber(actual).getResult());
}
}

@HttpClientResponseTests
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,6 @@
skipOperations = {
// We don't ignore defaults on input shapes
"aws.protocoltests.restjson#OperationWithDefaults",
// TODO: support content-encoding
"aws.protocoltests.restjson#PutWithContentEncoding"
})
public class RestJson1ProtocolTests {
private static final String EMPTY_BODY = "";
Expand All @@ -50,7 +48,7 @@ public void requestTest(DataStream expected, DataStream actual) {
} else {
assertEquals(expectedStr, actualStr);
}
} else {
} else if (expected.contentType() != null) { // Skip request compression tests since they do not have expected body
assertEquals(EMPTY_BODY, actualStr);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
import software.amazon.smithy.java.protocoltests.harness.HttpClientRequestTests;
import software.amazon.smithy.java.protocoltests.harness.HttpClientResponseTests;
import software.amazon.smithy.java.protocoltests.harness.ProtocolTest;
import software.amazon.smithy.java.protocoltests.harness.ProtocolTestFilter;
import software.amazon.smithy.java.protocoltests.harness.StringBuildingSubscriber;
import software.amazon.smithy.java.protocoltests.harness.TestType;

Expand All @@ -34,11 +33,6 @@
testType = TestType.CLIENT)
public class RestXmlProtocolTests {
@HttpClientRequestTests
@ProtocolTestFilter(
skipTests = {
"SDKAppliedContentEncoding_restXml",
"SDKAppendedGzipAfterProvidedEncoding_restXml",
})
public void requestTest(DataStream expected, DataStream actual) {
if (expected.contentLength() != 0) {
var a = new String(ByteBufferUtils.getBytes(actual.asByteBuffer()), StandardCharsets.UTF_8);
Expand All @@ -51,7 +45,7 @@ public void requestTest(DataStream expected, DataStream actual) {
} else {
assertEquals(a, b);
}
} else {
} else if (expected.contentType() != null) { // Skip request compression tests since they do not have expected body
assertEquals("", new StringBuildingSubscriber(actual).getResult());
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
import software.amazon.smithy.java.client.http.mock.MockQueue;
import software.amazon.smithy.java.client.http.plugins.ApplyHttpRetryInfoPlugin;
import software.amazon.smithy.java.client.http.plugins.HttpChecksumPlugin;
import software.amazon.smithy.java.client.http.plugins.RequestCompressionPlugin;
import software.amazon.smithy.java.client.http.plugins.UserAgentPlugin;
import software.amazon.smithy.java.core.serde.document.Document;
import software.amazon.smithy.java.dynamicclient.DynamicClient;
Expand Down Expand Up @@ -83,6 +84,7 @@ public class ClientTest {
SimpleAuthDetectionPlugin.class,
UserAgentPlugin.class,
ApplyHttpRetryInfoPlugin.class,
RequestCompressionPlugin.class,
HttpChecksumPlugin.class,
FooPlugin.class);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,5 +27,17 @@ public final class HttpContext {
public static final Context.Key<HttpHeaders> ENDPOINT_RESOLVER_HTTP_HEADERS = Context.key(
"HTTP headers to use with the request returned from an endpoint resolver");

/**
* The minimum request body size, in bytes, required before the body is compressed. Defaults to 10240 bytes if not set.
*/
public static final Context.Key<Integer> REQUEST_MIN_COMPRESSION_SIZE_BYTES =
Context.key("Minimum bytes size for request compression");

/**
* Whether request compression is disabled.
*/
public static final Context.Key<Boolean> DISABLE_REQUEST_COMPRESSION =
Context.key("If request compression is disabled");

private HttpContext() {}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0
*/

package software.amazon.smithy.java.client.http.compression;

import software.amazon.smithy.java.io.datastream.DataStream;

/**
* Represents a compression algorithm that can be used to compress request
* bodies.
*/
public interface CompressionAlgorithm {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I wouldn't mind adding a new SPI for this

/**
* The ID of the checksum algorithm. This is matched against the algorithm
* names used in the trait e.g. "gzip"
*/
String algorithmId();

/**
* Compresses content of fixed length
*/
DataStream compress(DataStream data);
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0
*/

package software.amazon.smithy.java.client.http.compression;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.zip.GZIPOutputStream;
import software.amazon.smithy.java.io.ByteBufferOutputStream;
import software.amazon.smithy.java.io.datastream.DataStream;
import software.amazon.smithy.java.io.datastream.GzipInputStream;

public class Gzip implements CompressionAlgorithm {
@Override
public String algorithmId() {
return "gzip";
}

@Override
public DataStream compress(DataStream data) {
if (!data.hasKnownLength()) { // Using streaming
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why not always stream?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There are two different handling between streaming and non-streaming in request compression SEP, will discuss with Michael on this.

try {
return DataStream.ofInputStream(
new GzipInputStream(data.asInputStream()),
data.contentType(),
-1);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}

try (var bos = new ByteBufferOutputStream();
var gzip = new GZIPOutputStream(bos);
var in = data.asInputStream()) {
in.transferTo(gzip);
gzip.close();
return DataStream.ofBytes(bos.toByteBuffer().array());
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0
*/

package software.amazon.smithy.java.client.http.plugins;

import java.util.List;
import software.amazon.smithy.java.client.core.AutoClientPlugin;
import software.amazon.smithy.java.client.core.ClientConfig;
import software.amazon.smithy.java.client.core.interceptors.ClientInterceptor;
import software.amazon.smithy.java.client.core.interceptors.RequestHook;
import software.amazon.smithy.java.client.http.HttpContext;
import software.amazon.smithy.java.client.http.HttpMessageExchange;
import software.amazon.smithy.java.client.http.compression.CompressionAlgorithm;
import software.amazon.smithy.java.client.http.compression.Gzip;
import software.amazon.smithy.java.context.Context;
import software.amazon.smithy.java.core.schema.TraitKey;
import software.amazon.smithy.java.http.api.HttpRequest;
import software.amazon.smithy.java.io.datastream.DataStream;
import software.amazon.smithy.model.traits.RequestCompressionTrait;
import software.amazon.smithy.utils.ListUtils;
import software.amazon.smithy.utils.SmithyInternalApi;

/**
* Compress the request body using provided compression algorithm if @requestCompression trait is applied.
*/
@SmithyInternalApi
public final class RequestCompressionPlugin implements AutoClientPlugin {

@Override
public void configureClient(ClientConfig.Builder config) {
if (config.isUsingMessageExchange(HttpMessageExchange.INSTANCE)) {
config.addInterceptor(RequestCompressionInterceptor.INSTANCE);
config.putConfigIfAbsent(HttpContext.DISABLE_REQUEST_COMPRESSION, false);
}
}

static final class RequestCompressionInterceptor implements ClientInterceptor {

private static final int DEFAULT_MIN_COMPRESSION_SIZE_BYTES = 10240;
private static final int COMPRESSION_SIZE_CAP = 10485760;
private static final String CONTENT_ENCODING_HEADER = "Content-Encoding";
private static final ClientInterceptor INSTANCE = new RequestCompressionInterceptor();
private static final TraitKey<RequestCompressionTrait> REQUEST_COMPRESSION_TRAIT_KEY =
TraitKey.get(RequestCompressionTrait.class);
// Currently only Gzip is supported in Smithy model: https://smithy.io/2.0/spec/behavior-traits.html#requestcompression-trait
private static final List<CompressionAlgorithm> supportedAlgorithms = ListUtils.of(new Gzip());

@Override
public <RequestT> RequestT modifyBeforeTransmit(RequestHook<?, ?, RequestT> hook) {
return hook.mapRequest(HttpRequest.class, RequestCompressionInterceptor::processRequest);
}

private static HttpRequest processRequest(RequestHook<?, ?, HttpRequest> hook) {
if (shouldCompress(hook)) {
RequestCompressionTrait compressionTrait =
hook.operation().schema().getTrait(REQUEST_COMPRESSION_TRAIT_KEY);
var request = hook.request();
// Will pick the first supported algorithm to compress the body.
for (String algorithmId : compressionTrait.getEncodings()) {
for (CompressionAlgorithm algorithm : supportedAlgorithms) {
if (algorithmId.equals(algorithm.algorithmId())) {
var compressed = algorithm.compress(request.body());
return request.toBuilder()
.body(compressed)
.withAddedHeader(CONTENT_ENCODING_HEADER, algorithmId)
.build();
}
}
}
}
return hook.request();
}

private static boolean shouldCompress(RequestHook<?, ?, HttpRequest> hook) {
var context = hook.context();
var operation = hook.operation();
if (!operation.schema().hasTrait(REQUEST_COMPRESSION_TRAIT_KEY)
|| context.getOrDefault(HttpContext.DISABLE_REQUEST_COMPRESSION, false)) {
return false;
}
var requestBody = hook.request().body();
// Streaming should not have known length
if (operation.inputStreamMember() != null && !requestBody.hasKnownLength()) {
return true;
}
return isBodySizeValid(requestBody, context);
}

private static boolean isBodySizeValid(DataStream requestBody, Context context) {
var minCompressionSize = context.getOrDefault(HttpContext.REQUEST_MIN_COMPRESSION_SIZE_BYTES,
DEFAULT_MIN_COMPRESSION_SIZE_BYTES);
validateCompressionSize(minCompressionSize);
return requestBody.contentLength() >= minCompressionSize;
}

private static void validateCompressionSize(int minCompressionSize) {
if (minCompressionSize < 0 || minCompressionSize > COMPRESSION_SIZE_CAP) {
throw new IllegalArgumentException("Min compression size must be between 0 and 10485760");
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why the upper limit?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The request compression SEP required the minimum length should be between 0 and 10485760 bytes.

}
}
}
}
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
software.amazon.smithy.java.client.http.plugins.UserAgentPlugin
software.amazon.smithy.java.client.http.plugins.ApplyHttpRetryInfoPlugin
software.amazon.smithy.java.client.http.plugins.RequestCompressionPlugin
software.amazon.smithy.java.client.http.plugins.HttpChecksumPlugin
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0
*/

package software.amazon.smithy.java.client.http.compression;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.lessThan;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import org.junit.jupiter.api.Test;
import software.amazon.smithy.java.io.datastream.DataStream;

public class GzipTest {

    private final Gzip gzip = new Gzip();

    @Test
    public void algorithmIdReturnsGzip() {
        assertThat(gzip.algorithmId(), equalTo("gzip"));
    }

    @Test
    public void compressesKnownLengthData() throws Exception {
        // Repetitive input compresses well, so the output must shrink.
        String original = "Hello World! ".repeat(10);

        DataStream compressed = gzip.compress(DataStream.ofString(original));

        assertThat(compressed.contentLength(), lessThan((long) original.length()));

        // Round-trip through gunzip to recover the original text.
        assertThat(decompress(compressed.asByteBuffer().array()), equalTo(original));
    }

    @Test
    public void compressesLargeStreamInChunks() throws Exception {
        // 100KB of digits fed through the streaming (unknown length) path.
        String original = "0123456789".repeat(10000);
        byte[] payload = original.getBytes(StandardCharsets.UTF_8);

        DataStream compressed = gzip.compress(DataStream.ofInputStream(new ByteArrayInputStream(payload)));

        byte[] compressedBytes = compressed.asInputStream().readAllBytes();
        assertThat(decompress(compressedBytes), equalTo(original));
    }

    @Test
    public void compressesEmptyData() throws Exception {
        DataStream compressed = gzip.compress(DataStream.ofString(""));

        assertThat(decompress(compressed.asByteBuffer().array()), equalTo(""));
    }

    // Gunzips the given bytes and decodes them as UTF-8.
    private String decompress(byte[] compressed) throws Exception {
        try (GZIPInputStream gzipIn = new GZIPInputStream(new ByteArrayInputStream(compressed));
                ByteArrayOutputStream out = new ByteArrayOutputStream()) {
            gzipIn.transferTo(out);
            return out.toString(StandardCharsets.UTF_8);
        }
    }
}
Loading