diff --git a/.github/workflows/MavenCI.yml b/.github/workflows/MavenCI.yml index c05c20b6..0fc93ab1 100644 --- a/.github/workflows/MavenCI.yml +++ b/.github/workflows/MavenCI.yml @@ -6,9 +6,9 @@ on: pull_request: branches: [ master ] release: - branches: [ master ] - types: [ created ] - + branches: [ master ] + types: [ created ] + jobs: build: runs-on: ${{ matrix.operating-system }} @@ -19,31 +19,25 @@ jobs: fail-fast: false matrix: operating-system: [ubuntu-latest, windows-latest, macos-latest] - java-version: ['8', '9', '11', '13'] + java-version: ['8', '11', '17'] steps: - uses: actions/checkout@v2 - + - name: Set up JDK ${{ matrix.java-version }} uses: actions/setup-java@v1 with: java-version: ${{ matrix.java-version }} - + - name: Cache Maven packages uses: actions/cache@v2 with: path: ~/.m2 key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: ${{ runner.os }}-m2 - - - name: install build-tools - run: mvn --file build-tools/pom.xml clean install - - - name: Clean, build and install + + - name: Clean, build and install run: mvn --file pom.xml clean install - - - name: Run tests - run: mvn --file pom.xml test - + - name: Build package run: mvn -B package --file pom.xml diff --git a/.gitignore b/.gitignore index 011b0874..d9773299 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,13 @@ #Ignore IntelliJ files .idea/ +.DS_Store *.iml #Ignore Maven files **/target +**/.settings +**/*.project +**/*.classpath +**/*.factorypath +**/dependency-reduced-pom.xml \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 55e69564..ab748476 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -62,4 +62,25 @@ GlueSchemaRegistryKafkaSerializer/GlueSchemaRegistryKafkaDeserializer. ## Release 1.1.14 * Upgraded Protobuf dependency version to prevent a CVE -* Upgraded everit-json-schema dependency version to prevent a CVE \ No newline at end of file +* Upgraded everit-json-schema dependency version to prevent a CVE + +## Release 1.1.15 +* Upgrade Avro, Apicurio and Localhost utils versions + +## Release 1.1.16 +* Upgraded Wire version +* Excluded some transitive dependencies that are having vulnerabilities + +## Release 1.1.17 +* Upgraded kafka dependencies version + +## Release 1.1.18 +* Add a dummy class in the serializer-deserializer-msk-iam module for javadoc and source jar generation +* Upgraded Avro and Json dependencies version +* Upgraded AWS SDK v1 and v2 versions to fix vulnerabilities + +## Release 1.1.19 +* Upgraded dependency versions to remove ION dependencies + +## Release 1.1.20 +* Upgrade the dependency version to remove commons:compress dependency diff --git a/README.md b/README.md index 25cbff39..31476b1e 100644 --- a/README.md +++ b/README.md @@ -60,7 +60,7 @@ The recommended way to use the AWS Glue Schema Registry Library for Java is to c software.amazon.glue schema-registry-serde - 1.1.14 + 1.1.20 ``` ### Code Example @@ -381,7 +381,7 @@ Alternatively, a schema registry naming strategy implementation can be provided. properties.put(AWSSchemaRegistryConstants.SCHEMA_NAMING_GENERATION_CLASS, "com.amazonaws.services.schemaregistry.serializers.avro.CustomerProvidedSchemaNamingStrategy"); ``` -An example test implementation class is [here](https://github.com/awslabs/aws-glue-schema-registry/blob/master/avro-serializer-deserializer/src/test/java/com/amazonaws/services/schemaregistry/serializers/avro/CustomerProvidedSchemaNamingStrategy.java). 
+An example test implementation class is [here](https://github.com/awslabs/aws-glue-schema-registry/blob/master/serializer-deserializer/src/test/java/com/amazonaws/services/schemaregistry/serializers/avro/CustomerProvidedSchemaNamingStrategy.java). ### Providing Registry Description @@ -490,7 +490,7 @@ It should look like this * If using bash, run the below commands to set-up your CLASSPATH in your bash_profile. (For any other shell, update the environment accordingly.) ```bash echo 'export GSR_LIB_BASE_DIR=<>' >>~/.bash_profile - echo 'export GSR_LIB_VERSION=1.1.14' >>~/.bash_profile + echo 'export GSR_LIB_VERSION=1.1.20' >>~/.bash_profile echo 'export KAFKA_HOME=' >>~/.bash_profile echo 'export CLASSPATH=$CLASSPATH:$GSR_LIB_BASE_DIR/avro-kafkaconnect-converter/target/schema-registry-kafkaconnect-converter-$GSR_LIB_VERSION.jar:$GSR_LIB_BASE_DIR/common/target/schema-registry-common-$GSR_LIB_VERSION.jar:$GSR_LIB_BASE_DIR/avro-serializer-deserializer/target/schema-registry-serde-$GSR_LIB_VERSION.jar' >>~/.bash_profile source ~/.bash_profile @@ -549,7 +549,7 @@ It should look like this software.amazon.glue schema-registry-kafkastreams-serde - 1.1.14 + 1.1.20 ``` @@ -587,7 +587,7 @@ repository for the latest support: [Avro SerializationSchema and Deserialization software.amazon.glue schema-registry-flink-serde - 1.1.14 + 1.1.20 ``` ### Code Example diff --git a/avro-flink-serde/README.md b/avro-flink-serde/README.md index a47bc553..d803ac3b 100644 --- a/avro-flink-serde/README.md +++ b/avro-flink-serde/README.md @@ -20,7 +20,7 @@ inside Amazon VPC.](https://docs.aws.amazon.com/kinesisanalytics/latest/java/vpc software.amazon.glue schema-registry-flink-serde - 1.1.8/version> + 1.1.20/version> ``` ### Code Example @@ -67,4 +67,4 @@ inside Amazon VPC.](https://docs.aws.amazon.com/kinesisanalytics/latest/java/vpc GlueSchemaRegistryAvroDeserializationSchema.forGeneric(schema, configs), properties); DataStream stream = env.addSource(consumer); -``` \ No newline at end of file +``` diff --git a/avro-flink-serde/pom.xml b/avro-flink-serde/pom.xml index 1614ce08..1901cbed 100644 --- a/avro-flink-serde/pom.xml +++ b/avro-flink-serde/pom.xml @@ -21,7 +21,7 @@ software.amazon.glue schema-registry-flink-serde - 1.1.14 + 1.1.20 AWS Glue Schema Registry Flink Avro Serialization Deserialization Schema The AWS Glue Schema Registry Library for Apache Flink enables Java developers to easily integrate their Apache Flink applications with AWS Glue Schema Registry @@ -66,7 +66,13 @@ software.amazon.glue schema-registry-serde - 1.1.14 + 1.1.20 + + + org.projectlombok + lombok + 1.18.26 + provided @@ -143,11 +149,6 @@ - - org.apache.commons - commons-compress - 1.21 - org.apache.flink @@ -181,6 +182,7 @@ org.apache.maven.plugins maven-compiler-plugin + 3.10.1 1.8 1.8 @@ -217,7 +219,7 @@ org.jacoco jacoco-maven-plugin - 0.8.5 + 0.8.8 diff --git a/avro-kafkaconnect-converter/pom.xml b/avro-kafkaconnect-converter/pom.xml index 123cebdd..de996006 100644 --- a/avro-kafkaconnect-converter/pom.xml +++ b/avro-kafkaconnect-converter/pom.xml @@ -32,7 +32,7 @@ software.amazon.glue schema-registry-parent - 1.1.14 + 1.1.20 ../pom.xml @@ -69,6 +69,10 @@ org.apache.kafka connect-api + + org.projectlombok + lombok + org.mockito mockito-core diff --git a/build-tools/pom.xml b/build-tools/pom.xml index 20dec21b..e6033638 100644 --- a/build-tools/pom.xml +++ b/build-tools/pom.xml @@ -22,7 +22,7 @@ software.amazon.glue schema-registry-parent - 1.1.14 + 1.1.20 ../pom.xml @@ -62,7 +62,7 @@ org.jacoco jacoco-maven-plugin - 0.8.5 + 0.8.8 
verify diff --git a/common/pom.xml b/common/pom.xml index 9a883e0e..14267498 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -22,7 +22,7 @@ software.amazon.glue schema-registry-parent - 1.1.14 + 1.1.20 ../pom.xml @@ -89,10 +89,6 @@ - - org.apache.commons - commons-compress - org.slf4j slf4j-api @@ -109,10 +105,6 @@ org.projectlombok lombok - - org.projectlombok - lombok-utils - org.mockito mockito-core diff --git a/common/src/main/java/com/amazonaws/services/schemaregistry/common/AWSSchemaRegistryClient.java b/common/src/main/java/com/amazonaws/services/schemaregistry/common/AWSSchemaRegistryClient.java index b6157154..949e6563 100644 --- a/common/src/main/java/com/amazonaws/services/schemaregistry/common/AWSSchemaRegistryClient.java +++ b/common/src/main/java/com/amazonaws/services/schemaregistry/common/AWSSchemaRegistryClient.java @@ -31,6 +31,7 @@ import software.amazon.awssdk.core.interceptor.ExecutionAttributes; import software.amazon.awssdk.core.interceptor.ExecutionInterceptor; import software.amazon.awssdk.core.retry.RetryPolicy; +import software.amazon.awssdk.http.urlconnection.ProxyConfiguration; import software.amazon.awssdk.http.urlconnection.UrlConnectionHttpClient; import software.amazon.awssdk.regions.Region; import software.amazon.awssdk.services.glue.GlueClient; @@ -89,12 +90,18 @@ public AWSSchemaRegistryClient(@NonNull AwsCredentialsProvider credentialsProvid .retryPolicy(retryPolicy) .addExecutionInterceptor(new UserAgentRequestInterceptor()) .build(); + UrlConnectionHttpClient.Builder urlConnectionHttpClientBuilder = UrlConnectionHttpClient.builder(); + if (glueSchemaRegistryConfiguration.getProxyUrl() != null) { + log.debug("Creating http client using proxy {}", glueSchemaRegistryConfiguration.getProxyUrl().toString()); + ProxyConfiguration proxy = ProxyConfiguration.builder().endpoint(glueSchemaRegistryConfiguration.getProxyUrl()).build(); + urlConnectionHttpClientBuilder.proxyConfiguration(proxy); + } GlueClientBuilder glueClientBuilder = GlueClient .builder() .credentialsProvider(credentialsProvider) .overrideConfiguration(overrideConfiguration) - .httpClient(UrlConnectionHttpClient.builder().build()) + .httpClient(urlConnectionHttpClientBuilder.build()) .region(Region.of(glueSchemaRegistryConfiguration.getRegion())); if (glueSchemaRegistryConfiguration.getEndPoint() != null) { diff --git a/common/src/main/java/com/amazonaws/services/schemaregistry/common/configs/GlueSchemaRegistryConfiguration.java b/common/src/main/java/com/amazonaws/services/schemaregistry/common/configs/GlueSchemaRegistryConfiguration.java index 80982720..ce7d870d 100644 --- a/common/src/main/java/com/amazonaws/services/schemaregistry/common/configs/GlueSchemaRegistryConfiguration.java +++ b/common/src/main/java/com/amazonaws/services/schemaregistry/common/configs/GlueSchemaRegistryConfiguration.java @@ -25,8 +25,11 @@ import lombok.Data; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.EnumUtils; +import software.amazon.awssdk.core.exception.SdkClientException; +import software.amazon.awssdk.regions.providers.DefaultAwsRegionProviderChain; import software.amazon.awssdk.services.glue.model.Compatibility; +import java.net.URI; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -54,6 +57,7 @@ public class GlueSchemaRegistryConfiguration { private Map tags = new HashMap<>(); private Map metadata; private String secondaryDeserializer; + private URI proxyUrl; /** * Name of the application using the serializer/deserializer. 
@@ -99,6 +103,7 @@ private void buildSchemaRegistryConfigs(Map configs) { validateAndSetMetadata(configs); validateAndSetUserAgent(configs); validateAndSetSecondaryDeserializer(configs); + validateAndSetProxyUrl(configs); } private void validateAndSetSecondaryDeserializer(Map configs) { @@ -147,7 +152,15 @@ private void validateAndSetAWSRegion(Map configs) { if (isPresent(configs, AWSSchemaRegistryConstants.AWS_REGION)) { this.region = String.valueOf(configs.get(AWSSchemaRegistryConstants.AWS_REGION)); } else { - throw new AWSSchemaRegistryException("Region is not defined in the properties"); + try { + this.region = DefaultAwsRegionProviderChain + .builder() + .build() + .getRegion() + .id(); + } catch (SdkClientException ex) { + throw new AWSSchemaRegistryException("Region is not defined in the properties", ex); + } } } @@ -183,6 +196,18 @@ private void validateAndSetAWSEndpoint(Map configs) { } } + private void validateAndSetProxyUrl(Map configs) { + if (isPresent(configs, AWSSchemaRegistryConstants.PROXY_URL)) { + String value = (String) configs.get(AWSSchemaRegistryConstants.PROXY_URL); + try { + this.proxyUrl = URI.create(value); + } catch (IllegalArgumentException e) { + String message = String.format("Proxy URL property is not a valid URL: %s", value); + throw new AWSSchemaRegistryException(message, e); + } + } + } + private void validateAndSetDescription(Map configs) throws AWSSchemaRegistryException { if (isPresent(configs, AWSSchemaRegistryConstants.DESCRIPTION)) { this.description = String.valueOf(configs.get(AWSSchemaRegistryConstants.DESCRIPTION)); diff --git a/common/src/main/java/com/amazonaws/services/schemaregistry/utils/AWSSchemaRegistryConstants.java b/common/src/main/java/com/amazonaws/services/schemaregistry/utils/AWSSchemaRegistryConstants.java index 21a9b383..5ce30b06 100644 --- a/common/src/main/java/com/amazonaws/services/schemaregistry/utils/AWSSchemaRegistryConstants.java +++ b/common/src/main/java/com/amazonaws/services/schemaregistry/utils/AWSSchemaRegistryConstants.java @@ -18,6 +18,10 @@ import software.amazon.awssdk.services.glue.model.Compatibility; public final class AWSSchemaRegistryConstants { + /** + * Proxy URL to use while connecting to AWS endpoint. + */ + public static final String PROXY_URL = "proxyUrl"; /** * AWS endpoint to use while initializing the client for service. 
*/ diff --git a/common/src/test/java/com/amazonaws/services/schemaregistry/common/configs/GlueSchemaRegistryConfigurationTest.java b/common/src/test/java/com/amazonaws/services/schemaregistry/common/configs/GlueSchemaRegistryConfigurationTest.java index 656f7838..70613e06 100644 --- a/common/src/test/java/com/amazonaws/services/schemaregistry/common/configs/GlueSchemaRegistryConfigurationTest.java +++ b/common/src/test/java/com/amazonaws/services/schemaregistry/common/configs/GlueSchemaRegistryConfigurationTest.java @@ -25,12 +25,14 @@ import org.junit.jupiter.params.provider.EnumSource; import software.amazon.awssdk.services.glue.model.Compatibility; +import java.net.URI; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -95,6 +97,7 @@ public void testBuildConfig_fromMap_succeeds() { public void testBuildConfig_noRegionConfigsSupplied_throwsException() { Map configWithoutRegion = new HashMap<>(); configWithoutRegion.put(AWSSchemaRegistryConstants.AWS_ENDPOINT, "https://test/"); + System.setProperty("aws.profile", ""); Exception exception = assertThrows(AWSSchemaRegistryException.class, () -> new GlueSchemaRegistryConfiguration(configWithoutRegion)); @@ -102,12 +105,28 @@ public void testBuildConfig_noRegionConfigsSupplied_throwsException() { assertEquals("Region is not defined in the properties", exception.getMessage()); } + /** + * Tests configuration for region value via default AWS region provider chain + */ + @Test + public void testBuildConfig_regionConfigsSuppliedUsingAwsProvider_thenUseDefaultAwsRegionProviderChain() { + Map configWithoutRegion = new HashMap<>(); + configWithoutRegion.put(AWSSchemaRegistryConstants.AWS_ENDPOINT, "https://test/"); + System.setProperty("aws.region", "us-west-2"); + + GlueSchemaRegistryConfiguration glueSchemaRegistryConfiguration = new GlueSchemaRegistryConfiguration(configWithoutRegion); + + assertEquals("us-west-2", glueSchemaRegistryConfiguration.getRegion()); + + System.clearProperty("aws.region"); + } + /** * Tests configuration for region value */ @Test public void testBuildConfig_withRegionConfig_Instantiates() { - assertNotNull(new GlueSchemaRegistryConfiguration("us-west-1")); + assertDoesNotThrow(() -> new GlueSchemaRegistryConfiguration("us-west-1")); } /** @@ -368,6 +387,28 @@ public void testValidateAndSetRegistryName_withRegistryConfig_throwsException() assertEquals(expectedRegistryName, glueSchemaRegistryConfiguration.getRegistryName()); } -} + /** + * Tests valid proxy URL value. + */ + @Test + public void testBuildConfig_validProxyUrl_success() { + Properties props = createTestProperties(); + String proxy = "http://proxy.servers.url:8080"; + props.put(AWSSchemaRegistryConstants.PROXY_URL, proxy); + GlueSchemaRegistryConfiguration glueSchemaRegistryConfiguration = new GlueSchemaRegistryConfiguration(props); + assertEquals(URI.create(proxy), glueSchemaRegistryConfiguration.getProxyUrl()); + } + /** + * Tests invalid proxy URL value. 
+ */ + @Test + public void testBuildConfig_invalidProxyUrl_throwsException() { + Properties props = createTestProperties(); + String proxy = "http:// proxy.url: 8080"; + props.put(AWSSchemaRegistryConstants.PROXY_URL, "http:// proxy.url: 8080"); + Exception exception = assertThrows(AWSSchemaRegistryException.class, () -> new GlueSchemaRegistryConfiguration(props)); + assertEquals("Proxy URL property is not a valid URL: "+proxy, exception.getMessage()); + } +} \ No newline at end of file diff --git a/examples/pom.xml b/examples/pom.xml index c0221f1f..dbe548c3 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -31,7 +31,7 @@ software.amazon.glue schema-registry-parent - 1.1.14 + 1.1.20 ../pom.xml @@ -77,10 +77,6 @@ org.projectlombok lombok - - org.projectlombok - lombok-utils - commons-cli commons-cli diff --git a/integration-tests/docker-compose.yml b/integration-tests/docker-compose.yml index 37256812..a7d133aa 100644 --- a/integration-tests/docker-compose.yml +++ b/integration-tests/docker-compose.yml @@ -9,7 +9,7 @@ services: - ALLOW_ANONYMOUS_LOGIN=yes kafka: - image: 'public.ecr.aws/bitnami/kafka:latest' + image: 'public.ecr.aws/bitnami/kafka:2.8' ports: - '9092:9092' links: @@ -20,4 +20,19 @@ services: - KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181 - KAFKA_LISTENERS=PLAINTEXT://0.0.0.0:9092 - KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 - - ALLOW_PLAINTEXT_LISTENER=yes \ No newline at end of file + - ALLOW_PLAINTEXT_LISTENER=yes + + localstack: + container_name: "${LOCALSTACK_DOCKER_NAME-localstack_main}" + image: 'public.ecr.aws/localstack/localstack:latest' + ports: + - "127.0.0.1:4566:4566" + environment: + - SERVICES=cloudwatch, dynamodb, kinesis + - DEBUG=1 + - DOCKER_HOST=unix:///var/run/docker.sock + - DEFAULT_REGION=us-east-2 + - PARITY_AWS_ACCESS_KEY_ID=1 + volumes: + - "${LOCALSTACK_VOLUME_DIR:-./volume}:/var/lib/localstack" + - "/var/run/docker.sock:/var/run/docker.sock" diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 057237bc..05ea2450 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -31,7 +31,7 @@ software.amazon.glue schema-registry-parent - 1.1.14 + 1.1.20 ../pom.xml @@ -71,6 +71,10 @@ schema-registry-kafkastreams-serde ${parent.version} + + org.projectlombok + lombok + software.amazon.awssdk kinesis @@ -90,7 +94,13 @@ com.amazonaws - amazon-kinesis-producer + amazon-kinesis-producer + + + software.amazon.ion + ion-java + + software.amazon.awssdk diff --git a/integration-tests/run-local-tests.sh b/integration-tests/run-local-tests.sh index d9b6cf42..66c797cf 100644 --- a/integration-tests/run-local-tests.sh +++ b/integration-tests/run-local-tests.sh @@ -115,7 +115,7 @@ cleanUpConnectFiles() { cleanUpDockerResources || true # Start Kafka using docker command asynchronously -docker-compose up & +docker-compose up --no-attach localstack & sleep 10 ## Run mvn tests for Kafka and Kinesis Platforms cd .. && mvn --file integration-tests/pom.xml verify -Psurefire -X && cd integration-tests @@ -131,7 +131,7 @@ downloadMongoDBConnector copyGSRConverters runConnectTests() { - docker-compose up & + docker-compose up --no-attach localstack & setUpMongoDBLocal startKafkaConnectTasks ${1} echo "Waiting for Sink task to pick up data.." 
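
Reviewer note: the changes above add a `proxyUrl` client property and a fallback to the default AWS region provider chain when no region is configured. The following is a minimal usage sketch based only on the configuration keys and constructors exercised in the new tests; the proxy host/port and the decision to omit the region are illustrative assumptions, not part of this diff.

```java
import com.amazonaws.services.schemaregistry.common.configs.GlueSchemaRegistryConfiguration;
import com.amazonaws.services.schemaregistry.utils.AWSSchemaRegistryConstants;

import java.util.HashMap;
import java.util.Map;

public class ProxyConfigSketch {
    public static void main(String[] args) {
        Map<String, Object> configs = new HashMap<>();

        // New in this PR: route Glue Schema Registry calls through an HTTP proxy.
        // The proxy address below is a hypothetical example.
        configs.put(AWSSchemaRegistryConstants.PROXY_URL, "http://proxy.example.com:8080");

        // Also new: AWS_REGION may be omitted. The configuration then resolves the
        // region via DefaultAwsRegionProviderChain (env vars, system properties,
        // profile, etc.) and only throws if no region can be found.
        // configs.put(AWSSchemaRegistryConstants.AWS_REGION, "us-west-2");

        GlueSchemaRegistryConfiguration config = new GlueSchemaRegistryConfiguration(configs);
        System.out.println("Proxy: " + config.getProxyUrl());
        System.out.println("Region: " + config.getRegion());
    }
}
```

An invalid value (for example one containing spaces) fails fast: `validateAndSetProxyUrl` wraps the `IllegalArgumentException` from `URI.create` in an `AWSSchemaRegistryException`, as covered by the new `testBuildConfig_invalidProxyUrl_throwsException` test.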
diff --git a/integration-tests/src/test/java/com/amazonaws/services/schemaregistry/integrationtests/kafka/KafkaHelper.java b/integration-tests/src/test/java/com/amazonaws/services/schemaregistry/integrationtests/kafka/KafkaHelper.java index 18baeb51..83c0996d 100644 --- a/integration-tests/src/test/java/com/amazonaws/services/schemaregistry/integrationtests/kafka/KafkaHelper.java +++ b/integration-tests/src/test/java/com/amazonaws/services/schemaregistry/integrationtests/kafka/KafkaHelper.java @@ -229,7 +229,7 @@ public void doKafkaStreamsProcess(final ProducerProperties producerProperties) t final KafkaStreams streams = new KafkaStreams(builder.build(), properties); streams.cleanUp(); streams.start(); - Thread.sleep(1000L); + Thread.sleep(5000L); streams.close(); log.info("Finish processing {} message streaming via Kafka.", producerProperties.getDataFormat()); @@ -360,9 +360,9 @@ private Properties getKafkaConsumerProperties(final ConsumerProperties consumerP private Properties getKafkaStreamsProperties(final ProducerProperties producerProperties) { Properties properties = new Properties(); - properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "kafka-streams-test"); + properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "kafka-streams-test-"+producerProperties.getDataFormat()); properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapBrokers); - properties.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0); + properties.put(StreamsConfig.STATESTORE_CACHE_MAX_BYTES_CONFIG, 0); properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, GlueSchemaRegistryKafkaStreamsSerde.class); properties.put(AWSSchemaRegistryConstants.DATA_FORMAT, producerProperties.getDataFormat()); diff --git a/integration-tests/src/test/java/com/amazonaws/services/schemaregistry/integrationtests/kinesis/GlueSchemaRegistryKinesisIntegrationTest.java b/integration-tests/src/test/java/com/amazonaws/services/schemaregistry/integrationtests/kinesis/GlueSchemaRegistryKinesisIntegrationTest.java index 68a03098..608b8eaf 100644 --- a/integration-tests/src/test/java/com/amazonaws/services/schemaregistry/integrationtests/kinesis/GlueSchemaRegistryKinesisIntegrationTest.java +++ b/integration-tests/src/test/java/com/amazonaws/services/schemaregistry/integrationtests/kinesis/GlueSchemaRegistryKinesisIntegrationTest.java @@ -17,9 +17,6 @@ import cloud.localstack.Constants; import cloud.localstack.ServiceName; -import cloud.localstack.awssdkv2.TestUtils; -import cloud.localstack.docker.LocalstackDockerExtension; -import cloud.localstack.docker.annotation.LocalstackDockerProperties; import com.amazonaws.services.kinesis.producer.KinesisProducer; import com.amazonaws.services.kinesis.producer.KinesisProducerConfiguration; import com.amazonaws.services.kinesis.producer.UserRecordResult; @@ -49,7 +46,6 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; @@ -106,15 +102,11 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; -@ExtendWith(LocalstackDockerExtension.class) -@LocalstackDockerProperties(services = {ServiceName.KINESIS, ServiceName.DYNAMO, ServiceName.CLOUDWATCH}, imageName 
= - "public.ecr.aws/d4c7g6k3/localstack", imageTag = "0.12.10") public class GlueSchemaRegistryKinesisIntegrationTest { private static final Logger LOGGER = LogManager.getLogger(GlueSchemaRegistryKinesisIntegrationTest.class); - private static final DynamoDbAsyncClient dynamoClient = TestUtils.getClientDyanamoAsyncV2(); - private static final CloudWatchAsyncClient cloudWatchClient = TestUtils.getClientCloudWatchAsyncV2(); private static final String LOCALSTACK_HOSTNAME = "localhost"; private static final int LOCALSTACK_KINESIS_PORT = 4566; + private static final String LOCALSTACK_ENDPOINT = String.format("http://%s:%d",LOCALSTACK_HOSTNAME,LOCALSTACK_KINESIS_PORT); private static final int LOCALSTACK_CLOUDWATCH_PORT = Constants.DEFAULT_PORTS.get(ServiceName.CLOUDWATCH) .intValue(); private static final int KCL_SCHEDULER_START_UP_WAIT_TIME_SECONDS = 15; @@ -140,6 +132,26 @@ public class GlueSchemaRegistryKinesisIntegrationTest { private static AwsCredentialsProvider awsCredentialsProvider = DefaultCredentialsProvider.builder() .build(); + private static final DynamoDbAsyncClient dynamoClient; + private static final CloudWatchAsyncClient cloudWatchClient; + + static { + try { + dynamoClient = DynamoDbAsyncClient.builder() + .endpointOverride(new URI(LOCALSTACK_ENDPOINT)) + .region(Region.of(GlueSchemaRegistryConnectionProperties.REGION)) + .credentialsProvider(awsCredentialsProvider) + .build(); + cloudWatchClient = CloudWatchAsyncClient.builder() + .endpointOverride(new URI(LOCALSTACK_ENDPOINT)) + .region(Region.of(GlueSchemaRegistryConnectionProperties.REGION)) + .credentialsProvider(awsCredentialsProvider) + .build(); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + private static List schemasToCleanUp = new ArrayList<>(); private final TestDataGeneratorFactory testDataGeneratorFactory = new TestDataGeneratorFactory(); private final GlueSchemaRegistrySerializerFactory glueSchemaRegistrySerializerFactory = @@ -219,9 +231,12 @@ private static Stream testSingleKCLKPLDataProvider() { } @BeforeEach - public void setUp() throws InterruptedException, ExecutionException { + public void setUp() throws InterruptedException, ExecutionException, URISyntaxException { System.setProperty(SdkSystemSetting.CBOR_ENABLED.property(), "false"); - kinesisClient = TestUtils.getClientKinesisAsyncV2(); + kinesisClient = KinesisAsyncClient.builder() + .endpointOverride(new URI(LOCALSTACK_ENDPOINT)) + .region(Region.of(GlueSchemaRegistryConnectionProperties.REGION)) + .build(); streamName = String.format("%s%s", TEST_KINESIS_STREAM_PREFIX, RandomStringUtils.randomAlphanumeric(4)); LOGGER.info("Creating Kinesis Stream : {} with {} shards on localStack..", streamName, SHARD_COUNT); @@ -496,7 +511,7 @@ private String produceRecordsWithKPL(String streamName, byte[] serializedBytes = dataFormatSerializer.serialize(record); putFutures.add(producer.addUserRecord(streamName, Long.toString(timestamp.toEpochMilli()), null, - ByteBuffer.wrap(serializedBytes), gsrSchema)); + ByteBuffer.wrap(serializedBytes),gsrSchema)); } String shardId = null; diff --git a/jsonschema-kafkaconnect-converter/pom.xml b/jsonschema-kafkaconnect-converter/pom.xml index 00f2d3a6..b9e480bd 100644 --- a/jsonschema-kafkaconnect-converter/pom.xml +++ b/jsonschema-kafkaconnect-converter/pom.xml @@ -32,7 +32,7 @@ software.amazon.glue schema-registry-parent - 1.1.14 + 1.1.20 ../pom.xml @@ -65,6 +65,10 @@ schema-registry-serde ${parent.version} + + org.projectlombok + lombok + org.apache.kafka connect-api diff --git 
a/kafkastreams-serde/pom.xml b/kafkastreams-serde/pom.xml index dde8db89..859eb466 100644 --- a/kafkastreams-serde/pom.xml +++ b/kafkastreams-serde/pom.xml @@ -32,7 +32,7 @@ software.amazon.glue schema-registry-parent - 1.1.14 + 1.1.20 ../pom.xml @@ -65,6 +65,10 @@ schema-registry-serde ${parent.version} + + org.projectlombok + lombok + org.mockito mockito-core diff --git a/pom.xml b/pom.xml index 34802a52..dbfe18bb 100644 --- a/pom.xml +++ b/pom.xml @@ -21,7 +21,7 @@ software.amazon.glue schema-registry-parent - 1.1.14 + 1.1.20 pom AWS Glue Schema Registry Library The AWS Glue Schema Registry Library for Java enables Java developers to easily integrate their @@ -66,6 +66,7 @@ build-tools common serializer-deserializer + serializer-deserializer-msk-iam kafkastreams-serde avro-kafkaconnect-converter avro-flink-serde @@ -79,43 +80,41 @@ UTF-8 UTF-8 software.amazon.glue - 2.17.122 - 1.12.151 + 2.22.12 + 1.12.660 2.12 - 2.8.1 - 1.11.0 + 3.6.1 + 1.11.3 1.0.39 - 1.14.1 + 1.14.2 4.8.120 - 1.21 3.8.1 2.12.2 0.6.1 3.19.6 1.7.10 - 3.7.1 + 4.3.0 2.1.3.Final - 1.7.30 2.17.1 - 5.6.2 + 5.6.3 3.3.3 - 1.18.20 - 1.18.12 + 1.18.26 1.1 2.2.9 - LATEST + + 0.15.8 1.6.2 1.3.2 - 30.0-jre + 32.0.0-jre 3.8.1 1.2 3.0.0 - 0.2.11 3.19.6 - 0.2.11 + 0.2.23 + 2.0.3 @@ -168,12 +167,18 @@ org.apache.avro avro ${avro.version} + + + org.apache.commons + commons-compress + + - + org.apache.commons commons-compress - ${commons.compress.version} + provided org.apache.avro @@ -234,11 +239,7 @@ org.projectlombok lombok ${lombok.version} - - - org.projectlombok - lombok-utils - ${lombok.utils.version} + provided com.kjetland @@ -288,6 +289,12 @@ amazon-kinesis-producer ${kinesis.producer.version} test + + + software.amazon.ion + ion-java + + org.awaitility @@ -353,7 +360,12 @@ protobuf-java ${protobuf.java.version} - + + + org.json + json + 20231013 + @@ -374,6 +386,7 @@ org.apache.maven.plugins maven-compiler-plugin + 3.10.1 1.8 1.8 @@ -398,13 +411,6 @@ org.apache.maven.plugins maven-checkstyle-plugin 3.1.2 - - - ${project.groupId} - schema-registry-build-tools - ${project.version} - - **/apicurio/**, **/additionalTypes/**, **/metadata/** checkstyle.suppression.filter @@ -445,7 +451,7 @@ org.jacoco jacoco-maven-plugin - 0.8.5 + 0.8.8 diff --git a/protobuf-kafkaconnect-converter/pom.xml b/protobuf-kafkaconnect-converter/pom.xml index 5c55ea23..911ef558 100644 --- a/protobuf-kafkaconnect-converter/pom.xml +++ b/protobuf-kafkaconnect-converter/pom.xml @@ -22,7 +22,7 @@ software.amazon.glue schema-registry-parent - 1.1.14 + 1.1.20 ../pom.xml @@ -66,7 +66,10 @@ schema-registry-serde ${parent.version} - + + org.projectlombok + lombok + org.apache.kafka connect-api @@ -190,7 +193,7 @@ org.jacoco jacoco-maven-plugin - 0.8.5 + 0.8.8 diff --git a/serializer-deserializer-msk-iam/lombok.config b/serializer-deserializer-msk-iam/lombok.config new file mode 100644 index 00000000..44c1de6d --- /dev/null +++ b/serializer-deserializer-msk-iam/lombok.config @@ -0,0 +1,3 @@ +lombok.addLombokGeneratedAnnotation = true +config.stopBubbling = true +lombok.nonNull.exceptionType = IllegalArgumentException \ No newline at end of file diff --git a/serializer-deserializer-msk-iam/pom.xml b/serializer-deserializer-msk-iam/pom.xml new file mode 100644 index 00000000..bc96ba7f --- /dev/null +++ b/serializer-deserializer-msk-iam/pom.xml @@ -0,0 +1,199 @@ + + + 4.0.0 + + ${parent.groupId} + schema-registry-serde-msk-iam + AWS Glue Schema Registry Serializer Deserializer with MSK IAM Authentication client + The AWS Glue Schema Registry Serializer/Deserializer 
enables Java developers to easily integrate + their Apache Kafka and AWS Kinesis applications with AWS Glue Schema Registry. MSK IAM Authentication client allows + clients to authenticate to Amazon MSK using IAM. + + https://aws.amazon.com/glue + ${parent.version} + jar + + + software.amazon.glue + schema-registry-parent + 1.1.20 + ../pom.xml + + + + + ossrh + https://aws.oss.sonatype.org/content/repositories/snapshots + + + ossrh + https://aws.oss.sonatype.org/service/local/staging/deploy/maven2/ + + + + + Apache License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + scm:git:https://github.com/aws/aws-glue-schema-registry.git + scm:git:git@github.com:aws/aws-glue-schema-registry.git + https://github.com/awslabs/aws-glue-schema-registry.git + + + + + software.amazon.msk + aws-msk-iam-auth + ${aws.msk.iam.auth} + + + ${parent.groupId} + schema-registry-serde + ${parent.version} + + + + + + maven-shade-plugin + 3.2.1 + + + + + package + + shade + + + + + + kr.motd.maven + os-maven-plugin + 1.6.2 + + + initialize + + detect + + + + + + + org.apache.maven.plugins + maven-jar-plugin + 2.4 + + + + test-jar + + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + ${proto-plugin.version} + true + + + gencode + generate-sources + + compile + test-compile + + + + com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} + + + + + + + org.apache.maven.plugins + maven-resources-plugin + + + copy-dist + prepare-package + + copy-resources + + + ${project.build.outputDirectory} + + + ${project.basedir}/target/generated-test-sources/protobuf/ + false + + + + + + + + + + + + + + publishing + + + + org.apache.maven.plugins + maven-gpg-plugin + 1.6 + + + sign-artifacts + verify + + sign + + + + + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.8 + true + + sonatype-nexus-staging + https://aws.oss.sonatype.org + false + + + + + + + diff --git a/serializer-deserializer-msk-iam/src/main/java/com/amazonaws/services/schemaregistry/Dummy.java b/serializer-deserializer-msk-iam/src/main/java/com/amazonaws/services/schemaregistry/Dummy.java new file mode 100644 index 00000000..565876f6 --- /dev/null +++ b/serializer-deserializer-msk-iam/src/main/java/com/amazonaws/services/schemaregistry/Dummy.java @@ -0,0 +1,7 @@ +package com.amazonaws.services.schemaregistry; + +/** + * This is just a dummy file created so the Javadoc jar and Source jar will be successfully generated for release + * */ +public class Dummy { +} diff --git a/serializer-deserializer/pom.xml b/serializer-deserializer/pom.xml index 6a5a1038..c4b9f4aa 100644 --- a/serializer-deserializer/pom.xml +++ b/serializer-deserializer/pom.xml @@ -32,7 +32,7 @@ software.amazon.glue schema-registry-parent - 1.1.14 + 1.1.20 ../pom.xml @@ -83,6 +83,10 @@ org.apache.kafka kafka-clients + + org.projectlombok + lombok + com.kjetland mbknor-jackson-jsonschema_2.12 @@ -100,6 +104,13 @@ com.github.erosb everit-json-schema + 1.14.2 + + + commons-collections + commons-collections + + com.google.protobuf @@ -131,6 +142,16 @@ kotlin-scripting-compiler-embeddable ${kotlin.version} + + com.squareup.okio + okio + 3.4.0 + + + com.squareup.okio + okio-fakefilesystem + 3.2.0 + org.jetbrains.kotlinx kotlinx-serialization-core-jvm @@ -152,6 +173,10 @@ com.squareup.wire wire-grpc-client + + com.charleskorn.kaml + kaml + @@ -207,6 +232,12 @@ com.google.protobuf protobuf-java + + + org.apache.commons + commons-collections4 + 4.4 + diff --git 
a/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/deserializers/json/JsonDeserializer.java b/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/deserializers/json/JsonDeserializer.java index 31130e5c..5c0c60a5 100644 --- a/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/deserializers/json/JsonDeserializer.java +++ b/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/deserializers/json/JsonDeserializer.java @@ -29,7 +29,7 @@ import lombok.NonNull; import lombok.Setter; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import java.io.IOException; import java.nio.ByteBuffer; diff --git a/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/serializers/json/JsonSerializer.java b/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/serializers/json/JsonSerializer.java index dd92ae4c..063bbf11 100644 --- a/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/serializers/json/JsonSerializer.java +++ b/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/serializers/json/JsonSerializer.java @@ -27,7 +27,7 @@ import lombok.NonNull; import lombok.Setter; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections4.CollectionUtils; import java.nio.charset.StandardCharsets; diff --git a/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/utils/apicurio/FileDescriptorUtils.java b/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/utils/apicurio/FileDescriptorUtils.java index 3a8b10c7..c0a41ff6 100644 --- a/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/utils/apicurio/FileDescriptorUtils.java +++ b/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/utils/apicurio/FileDescriptorUtils.java @@ -329,6 +329,7 @@ private static FileDescriptorProto toFileDescriptorProto(String schemaDefinition /** * When schema loader links the schema, it also includes google.protobuf types in it. * We want to ignore all the other types except for the ones that are present in the current file. + * * @return true if a type is a parent type, false otherwise. 
*/ private static boolean isParentLevelType(ProtoType protoType, Optional optionalPackageName) { @@ -865,13 +866,20 @@ private static MessageElement toMessage(FileDescriptorProto file, DescriptorProt descriptor.getOptions().getNoStandardDescriptorAccessor(), false); options.add(option); } - return new MessageElement(DEFAULT_LOCATION, name, "", nested.build(), options.build(), - reserved.build(), fields.build(), + return new MessageElement( + DEFAULT_LOCATION, + name, "", + nested.build(), + options.build(), + reserved.build(), + fields.build(), oneofs.stream() //Ignore oneOfs with no fields (like Proto3 Optional) .filter(e -> e.getValue().build().size() != 0) .map(e -> toOneof(e.getKey(), e.getValue())).collect(Collectors.toList()), - extensions.build(), Collections.emptyList()); + extensions.build(), + Collections.emptyList() + ); } private static OneOfElement toOneof(String name, ImmutableList.Builder fields) { @@ -881,18 +889,24 @@ private static OneOfElement toOneof(String name, ImmutableList.Builder constants = ImmutableList.builder(); + ImmutableList.Builder reserved = ImmutableList.builder(); for (EnumValueDescriptorProto ev : ed.getValueList()) { ImmutableList.Builder options = ImmutableList.builder(); constants.add(new EnumConstantElement(DEFAULT_LOCATION, ev.getName(), ev.getNumber(), "", options.build())); } + for (String reservedName : ed.getReservedNameList()) { + ReservedElement reservedElem = new ReservedElement(DEFAULT_LOCATION, "", + Collections.singletonList(reservedName)); + reserved.add(reservedElem); + } ImmutableList.Builder options = ImmutableList.builder(); if (ed.getOptions().hasAllowAlias()) { OptionElement option = new OptionElement(ALLOW_ALIAS_OPTION, booleanKind, ed.getOptions().getAllowAlias(), false); options.add(option); } - return new EnumElement(DEFAULT_LOCATION, name, "", options.build(), constants.build()); + return new EnumElement(DEFAULT_LOCATION, name, "", options.build(), constants.build(), reserved.build()); } private static ServiceElement toService(ServiceDescriptorProto sv) { diff --git a/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/utils/apicurio/ProtobufSchemaLoader.java b/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/utils/apicurio/ProtobufSchemaLoader.java index 6ab668ec..40b510d9 100644 --- a/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/utils/apicurio/ProtobufSchemaLoader.java +++ b/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/utils/apicurio/ProtobufSchemaLoader.java @@ -22,10 +22,6 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.io.CharStreams; -import com.google.common.jimfs.Configuration; -import com.google.common.jimfs.Feature; -import com.google.common.jimfs.Jimfs; -import com.google.common.jimfs.PathType; import com.squareup.wire.schema.Location; import com.squareup.wire.schema.ProtoFile; import com.squareup.wire.schema.Schema; @@ -34,13 +30,15 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; -import java.nio.file.FileSystem; -import java.nio.file.Files; -import java.nio.file.Path; import java.util.Collections; import java.util.Optional; import java.util.Set; +import okio.Buffer; +import okio.Path; +import okio.Sink; +import okio.fakefilesystem.FakeFileSystem; + public class ProtobufSchemaLoader { private static final String GOOGLE_API_PATH = "google/type/"; @@ -85,15 +83,10 @@ public class 
ProtobufSchemaLoader { private final static String METADATA_PROTO = "metadata.proto"; private final static String DECIMAL_PROTO = "decimal.proto"; - private static FileSystem getFileSystem() throws IOException { - final FileSystem inMemoryFileSystem = - Jimfs.newFileSystem( - Configuration.builder(PathType.unix()) - .setRoots("/") - .setWorkingDirectory("/") - .setAttributeViews("basic") - .setSupportedFeatures(Feature.SYMBOLIC_LINKS) - .build()); + private static FakeFileSystem getFileSystem() throws IOException { + final FakeFileSystem inMemoryFileSystem = new FakeFileSystem(); + inMemoryFileSystem.setWorkingDirectory(Path.get("/")); + inMemoryFileSystem.setAllowSymlinks(true); final ClassLoader classLoader = ProtobufSchemaLoader.class.getClassLoader(); @@ -112,35 +105,47 @@ private static FileSystem getFileSystem() throws IOException { return inMemoryFileSystem; } - private static void loadProtoFiles(FileSystem inMemoryFileSystem, ClassLoader classLoader, Set protos, + + private static void loadProtoFiles(FakeFileSystem inMemoryFileSystem, ClassLoader classLoader, Set protos, String protoPath) throws IOException { for (String proto : protos) { //Loads the proto file resource files. final InputStream inputStream = classLoader.getResourceAsStream(protoPath + proto); final String fileContents = CharStreams.toString(new InputStreamReader(inputStream, Charsets.UTF_8)); - final Path path = inMemoryFileSystem.getPath("/", protoPath, proto); - Files.write(path, fileContents.getBytes()); + final Path dir = Path.get("/").resolve(protoPath); + inMemoryFileSystem.createDirectories(dir); + byte[] bytes = fileContents.getBytes(); + Path path = dir.resolve(proto); + writeToFakeFileSystem(inMemoryFileSystem, bytes, path); } } - private static String createDirectory(String[] dirs, FileSystem fileSystem) throws IOException { - String dirPath = ""; + private static void writeToFakeFileSystem(FakeFileSystem inMemoryFileSystem, byte[] bytes, Path path) throws IOException { + Buffer buffer = new Buffer(); + buffer.write(bytes); + Sink sink = inMemoryFileSystem.sink(path); + sink.write( + buffer, + bytes.length); + sink.flush(); + sink.close(); + } + + private static Path createDirectory(String[] dirs, FakeFileSystem fileSystem) throws IOException { + Path path = Path.get("/"); for (String dir: dirs) { - dirPath = dirPath + "/" + dir; - Path path = fileSystem.getPath(dirPath); - if (Files.notExists(path)) { - Files.createDirectory(path); - } + path = path.resolve(dir); } - - return dirPath; + fileSystem.createDirectories(path); + return path; } /** * Creates a schema loader using a in-memory file system. This is required for square wire schema parser and linker * to load the types correctly. See https://github.com/square/wire/issues/2024# * As of now this only supports reading one .proto file but can be extended to support reading multiple files. + * * @param packageName Package name for the .proto if present * @param fileName Name of the .proto file. * @param schemaDefinition Schema Definition to parse. 
@@ -148,7 +153,7 @@ private static String createDirectory(String[] dirs, FileSystem fileSystem) thro */ public static ProtobufSchemaLoaderContext loadSchema(Optional packageName, String fileName, String schemaDefinition) throws IOException { - final FileSystem inMemoryFileSystem = getFileSystem(); + final FakeFileSystem inMemoryFileSystem = getFileSystem(); String [] dirs = {}; if (packageName.isPresent()) { @@ -156,11 +161,11 @@ public static ProtobufSchemaLoaderContext loadSchema(Optional packageNam } String protoFileName = fileName.endsWith(".proto") ? fileName : fileName + ".proto"; try { - String dirPath = createDirectory(dirs, inMemoryFileSystem); - Path path = inMemoryFileSystem.getPath(dirPath, protoFileName); - Files.write(path, schemaDefinition.getBytes()); + Path dirPath = createDirectory(dirs, inMemoryFileSystem); + Path path = dirPath.resolve(protoFileName); + writeToFakeFileSystem(inMemoryFileSystem, schemaDefinition.getBytes(), path); - try (SchemaLoader schemaLoader = new SchemaLoader(inMemoryFileSystem)) { + SchemaLoader schemaLoader = new SchemaLoader(inMemoryFileSystem); schemaLoader.initRoots(Lists.newArrayList(Location.get("/")), Lists.newArrayList(Location.get("/"))); Schema schema = schemaLoader.loadSchema(); @@ -171,11 +176,8 @@ public static ProtobufSchemaLoaderContext loadSchema(Optional packageNam } return new ProtobufSchemaLoaderContext(schema, protoFile); - } } catch (Exception e) { throw e; - } finally { - inMemoryFileSystem.close(); } }
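
For context on the Jimfs-to-okio swap in `ProtobufSchemaLoader`: wire's `SchemaLoader` accepts an okio `FileSystem` directly, so the loader can hand it the in-memory `FakeFileSystem` without going through `java.nio`. Below is a standalone sketch of the write pattern the loader now uses (create directories, open a sink, write a buffer); it relies only on the okio calls that appear in the diff, while the package, file name, and schema contents are made up for illustration.

```java
import okio.Buffer;
import okio.Path;
import okio.Sink;
import okio.fakefilesystem.FakeFileSystem;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class FakeFileSystemSketch {
    public static void main(String[] args) throws IOException {
        // In-memory file system, mirroring ProtobufSchemaLoader.getFileSystem().
        FakeFileSystem fs = new FakeFileSystem();
        fs.setWorkingDirectory(Path.get("/"));
        fs.setAllowSymlinks(true);

        // Hypothetical .proto file written the same way loadProtoFiles()/loadSchema() do it.
        Path dir = Path.get("/").resolve("com/example");
        fs.createDirectories(dir);
        Path protoPath = dir.resolve("example.proto");

        byte[] bytes = "syntax = \"proto3\";".getBytes(StandardCharsets.UTF_8);
        Buffer buffer = new Buffer();
        buffer.write(bytes);

        // Equivalent of writeToFakeFileSystem(): sink -> write -> flush -> close.
        Sink sink = fs.sink(protoPath);
        sink.write(buffer, bytes.length);
        sink.flush();
        sink.close();

        System.out.println("Written: " + fs.exists(protoPath));
    }
}
```

Dropping Jimfs also removes the Guava-based in-memory file system from the serializer's dependency tree, which is consistent with the okio/okio-fakefilesystem dependencies added to serializer-deserializer/pom.xml earlier in this diff.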