
Commit

Merge pull request #28 from emeraldhieu/integration-test-with-testcontainers

Integration-test with testcontainers
emeraldhieu committed Jun 29, 2024
2 parents af6be41 + faad7f4 commit 960a6d1
Showing 11 changed files with 270 additions and 2 deletions.
1 change: 1 addition & 0 deletions .github/workflows/maven.yml
@@ -19,6 +19,7 @@ jobs:
java-version: '17'
distribution: 'temurin'
cache: maven

- name: Build bom
run: mvn clean install
working-directory: bom
16 changes: 16 additions & 0 deletions bom/pom.xml
@@ -84,6 +84,7 @@
<graphqlCodeGenMavenPluginVersion>1.24</graphqlCodeGenMavenPluginVersion>
<graphDateTimeDgsStarterVersion>6.0.0</graphDateTimeDgsStarterVersion>
<graphqlDatetimeDgsStarterVersion>6.0.0</graphqlDatetimeDgsStarterVersion>
<testContainersVersion>1.19.8</testContainersVersion>
</properties>

<dependencyManagement>
@@ -332,6 +333,15 @@
<artifactId>graphql-datetime-dgs-starter</artifactId>
<version>${graphqlDatetimeDgsStarterVersion}</version>
</dependency>

<!-- Testcontainers dependency management -->
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>testcontainers-bom</artifactId>
<version>${testContainersVersion}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>

@@ -567,6 +577,12 @@
<!-- Bind the goal to the package lifecycle -->
<goal>build-image-no-fork</goal>
</goals>
<!--
Bind this execution to the "post-integration-test" phase, after the failsafe plugin's integration tests.
Otherwise, the repackaging triggered by this goal runs in the "package" phase, before "integration-test",
which breaks integration testing.
See https://stackoverflow.com/questions/50705270/mvn-spring-boot-plugin-breaks-integration-testing#59677153
-->
<phase>post-integration-test</phase>
</execution>
</executions>
<configuration>
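The comment in the execution above refers to the failsafe plugin, which is not shown in this diff. For context, a minimal failsafe configuration that runs *IT classes during the "integration-test" phase might look like the sketch below; the plugin version is an assumption for illustration and is not taken from this repository:

<plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-failsafe-plugin</artifactId>
    <!-- Version chosen for illustration only; the repository may manage it elsewhere. -->
    <version>3.2.5</version>
    <executions>
        <execution>
            <goals>
                <!-- "integration-test" runs *IT classes; "verify" fails the build if any of them failed. -->
                <goal>integration-test</goal>
                <goal>verify</goal>
            </goals>
        </execution>
    </executions>
</plugin>

With both plugins in place, the lifecycle order is package, integration-test, post-integration-test, verify, so the image is built only after the integration tests have run.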
28 changes: 28 additions & 0 deletions order/pom.xml
@@ -178,6 +178,34 @@
<groupId>com.tailrocks.graphql</groupId>
<artifactId>graphql-datetime-dgs-starter</artifactId>
</dependency>

<!-- Testcontainers for Postgres -->
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>postgresql</artifactId>
<scope>test</scope>
</dependency>

<!-- Testcontainers for Kafka -->
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>kafka</artifactId>
<scope>test</scope>
</dependency>

<!-- JUnit for Testcontainers -->
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>junit-jupiter</artifactId>
<scope>test</scope>
</dependency>

<!-- Spring Boot with Testcontainers integration -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-testcontainers</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

<build>
2 changes: 0 additions & 2 deletions order/src/main/resources/config/application.yml
@@ -1,6 +1,4 @@
spring:
profiles:
active: local # Activate profile "local" by default
jackson:
default-property-inclusion: non_null
messages:
59 changes: 59 additions & 0 deletions order/src/test/java/com/emeraldhieu/vinci/order/logic/BaseTestContainersTest.java
@@ -0,0 +1,59 @@
package com.emeraldhieu.vinci.order.logic;

import com.emeraldhieu.vinci.order.logic.config.KafkaTestConfiguration;
import org.springframework.boot.testcontainers.service.connection.ServiceConnection;
import org.springframework.context.annotation.Import;
import org.springframework.test.context.DynamicPropertyRegistry;
import org.springframework.test.context.DynamicPropertySource;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.KafkaContainer;
import org.testcontainers.containers.Network;
import org.testcontainers.containers.PostgreSQLContainer;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import org.testcontainers.utility.DockerImageName;

@Testcontainers
@Import({
KafkaTestConfiguration.class,
})
public class BaseTestContainersTest {

@Container
@ServiceConnection
private static PostgreSQLContainer<?> postgres =
new PostgreSQLContainer<>(DockerImageName.parse("postgres:15.3-alpine"))
.withReuse(true);

private static Network network = Network.newNetwork();

// Cluster ID is created by "kafka-storage random-uuid"
private static String clusterId = "qYoMEZXcS_SKP2PzAl8-WA";

@Container
@ServiceConnection
private static KafkaContainer kafka =
new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.4.0"))
.withNetwork(network)
.withKraft()
.withClusterId(clusterId)
.withReuse(true);

@Container
private static GenericContainer schemaRegistry =
new GenericContainer(DockerImageName.parse("confluentinc/cp-schema-registry:7.4.0"))
.withNetwork(network)
.withExposedPorts(8081) // Exposed port is used to get a mapped port. Otherwise, the error "Container doesn't expose any ports" occurs.
.withEnv("SCHEMA_REGISTRY_HOST_NAME", "schema-registry") // To be resolved by Docker
.withEnv("SCHEMA_REGISTRY_LISTENERS", "http://0.0.0.0:8081") // Seems optional
.withEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS",
"PLAINTEXT://" + kafka.getNetworkAliases().get(0) + ":9092") // A list of Kafka brokers to connect to
.dependsOn(kafka)
.withReuse(true);

@DynamicPropertySource
private static void properties(DynamicPropertyRegistry registry) {
registry.add("spring.kafka.properties.schema.registry.url",
() -> "http://" + schemaRegistry.getHost() + ":" + schemaRegistry.getFirstMappedPort());
}
}
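For context, @ServiceConnection (available since Spring Boot 3.1) derives the datasource and Kafka connection settings directly from the annotated containers, which is why only the schema-registry URL has to be registered through @DynamicPropertySource above. A sketch of what the base class would otherwise register by hand (for illustration only, not code from this repository):

@DynamicPropertySource
private static void containerProperties(DynamicPropertyRegistry registry) {
    // What @ServiceConnection on the Postgres container effectively contributes
    registry.add("spring.datasource.url", postgres::getJdbcUrl);
    registry.add("spring.datasource.username", postgres::getUsername);
    registry.add("spring.datasource.password", postgres::getPassword);
    // What @ServiceConnection on the Kafka container effectively contributes
    registry.add("spring.kafka.bootstrap-servers", kafka::getBootstrapServers);
}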
41 changes: 41 additions & 0 deletions order/src/test/java/com/emeraldhieu/vinci/order/logic/OrderServiceIT.java
@@ -0,0 +1,41 @@
package com.emeraldhieu.vinci.order.logic;

import com.emeraldhieu.vinci.order.OrderApp;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ActiveProfiles;

import java.util.List;

import static org.assertj.core.api.AssertionsForClassTypes.assertThat;

@ActiveProfiles("test")
@SpringBootTest(
classes = OrderApp.class
)
class OrderServiceIT extends BaseTestContainersTest {

@Autowired
private OrderService orderService;

@Test
public void givenRequest_whenCreate_thenReturnResponse() {
// GIVEN
var products = List.of("pepperoni", "margherita", "marinara");
var request = OrderRequest.builder()
.products(products)
.build();

// WHEN
var createdOrder = orderService.create(request);

// THEN
assertThat(createdOrder.getId()).isNotNull();
assertThat(createdOrder.getProducts()).isEqualTo(products);
assertThat(createdOrder.getCreatedBy()).isNotNull();
assertThat(createdOrder.getCreatedAt()).isNotNull();
assertThat(createdOrder.getUpdatedBy()).isNotNull();
assertThat(createdOrder.getUpdatedAt()).isNotNull();
}
}
30 changes: 30 additions & 0 deletions order/src/test/java/com/emeraldhieu/vinci/order/logic/config/KafkaTestConfiguration.java
@@ -0,0 +1,30 @@
package com.emeraldhieu.vinci.order.logic.config;

import lombok.RequiredArgsConstructor;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.springframework.boot.autoconfigure.kafka.DefaultKafkaConsumerFactoryCustomizer;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;

import java.util.Map;

/**
* A class that customizes Kafka for testing.
*/
@TestConfiguration
@RequiredArgsConstructor
public class KafkaTestConfiguration {

private final KafkaTestProperties kafkaTestProperties;

@Bean
DefaultKafkaConsumerFactoryCustomizer defaultKafkaConsumerFactoryCustomizer() {
return consumerFactory -> {
Map<String, Object> additionalConfigs = Map.of(
// Set the consumer's group id, which isn't configured anywhere else.
ConsumerConfig.GROUP_ID_CONFIG, kafkaTestProperties.getGroupId()
);
consumerFactory.updateConfigs(additionalConfigs);
};
}
}
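For context, Spring Boot's Kafka auto-configuration collects every DefaultKafkaConsumerFactoryCustomizer bean and applies it to the consumer factory it builds, which is how the group id above reaches the consumer. A simplified sketch of that mechanism (not the actual auto-configuration source; the class and method names below exist only for this illustration):

import java.util.List;
import java.util.Map;
import org.springframework.boot.autoconfigure.kafka.DefaultKafkaConsumerFactoryCustomizer;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

class ConsumerFactoryCustomizationSketch {

    /**
     * Roughly what the auto-configuration does: build the factory from Spring Boot's
     * own KafkaProperties (not the project's KafkaProperties class), then let each
     * customizer bean, such as the one defined above, adjust the consumer configs.
     */
    static DefaultKafkaConsumerFactory<Object, Object> consumerFactory(
        KafkaProperties kafkaProperties,
        List<DefaultKafkaConsumerFactoryCustomizer> customizers) {
        Map<String, Object> configs = kafkaProperties.buildConsumerProperties();
        DefaultKafkaConsumerFactory<Object, Object> factory = new DefaultKafkaConsumerFactory<>(configs);
        customizers.forEach(customizer -> customizer.customize(factory));
        return factory;
    }
}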
21 changes: 21 additions & 0 deletions order/src/test/java/com/emeraldhieu/vinci/order/logic/config/KafkaTestProperties.java
@@ -0,0 +1,21 @@
package com.emeraldhieu.vinci.order.logic.config;

import com.emeraldhieu.vinci.order.config.KafkaProperties;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.ConfigurationPropertiesScan;

/**
* A properties class used for testing because it contains some properties that do not exist in {@link KafkaProperties}.
* ---
* {@link ConfigurationProperties} is scanned by {@link ConfigurationPropertiesScan}.
*/
@ConfigurationProperties(prefix = "application.kafka")
@Data
public class KafkaTestProperties {
private String bootstrapAddress;
private String topic;
private int partitions;
private int replicationFactor;
private String groupId;
}
9 changes: 9 additions & 0 deletions order/src/test/resources/config/application-test.yml
@@ -0,0 +1,9 @@
spring:
jpa:
hibernate:
ddl-auto: create-drop

logging:
level:
org.springframework.core.io.support: DEBUG
org.springframework.context.annotation: DEBUG
64 changes: 64 additions & 0 deletions order/src/test/resources/config/application.yml
@@ -0,0 +1,64 @@
spring:
profiles:
active: test # Activate profile "test" by default
jackson:
default-property-inclusion: non_null
messages:
basename: i18n/messages
mvc:
# Throw NoHandlerFoundException
# See https://stackoverflow.com/questions/36733254/spring-boot-rest-how-to-configure-404-resource-not-found#36734193
throw-exception-if-no-handler-found: true
problemdetails:
enabled: true
web:
resources:
# Throw NoHandlerFoundException
# See https://stackoverflow.com/questions/36733254/spring-boot-rest-how-to-configure-404-resource-not-found#36734193
add-mappings: false
datasource:
type: com.zaxxer.hikari.HikariDataSource
hikari:
poolName: Hikari
auto-commit: false
jpa:
database-platform: org.hibernate.dialect.PostgreSQLDialect
open-in-view: false
liquibase:
change-log: classpath:liquibase/master.yml
kafka:
producer:
key-serializer: io.confluent.kafka.serializers.KafkaAvroSerializer
value-serializer: io.confluent.kafka.serializers.KafkaAvroSerializer
consumer:
key-deserializer: io.confluent.kafka.serializers.KafkaAvroDeserializer
value-deserializer: io.confluent.kafka.serializers.KafkaAvroDeserializer
# Determine which offset to start reading from if the offset doesn't exist on the server.
# See https://docs.confluent.io/platform/current/installation/configuration/consumer-configs.html#auto-offset-reset
auto-offset-reset: latest
properties:
# Deserialize to the generated Avro class rather than a GenericRecord type
specific.avro.reader: true
server:
tomcat:
# Allow pipe character in URL to support passing commas such as "createdAt,asc"
relaxed-query-chars: "|"
management:
endpoints:
web:
exposure:
# Expose all actuator endpoints
include: "*"
endpoint:
health:
# Enable liveness and readiness probes
probes:
enabled: true
# Show full health details
show-details: always
application:
kafka:
topic: orders
partitions: 1
replicationFactor: 1
groupId: consumer
@@ -0,0 +1 @@
testcontainers.reuse.enable=true
