From fdc00276b2fb6fceadbb7cf3edfbe4b3afcc29f7 Mon Sep 17 00:00:00 2001
From: Dean Dalianis <dean.dalianis@cern.ch>
Date: Thu, 25 Jan 2024 11:06:10 +0200
Subject: [PATCH 01/15] Usage of new common-lib-ci & updated pom.xml

---
 .gitlab-ci.yml  |  49 ++------------------
 ci_settings.xml |  16 -------
 pom.xml         | 119 ++++++++++++++++++++++++++++++++++++++++--------
 3 files changed, 105 insertions(+), 79 deletions(-)
 delete mode 100644 ci_settings.xml

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ab61b35..f80b94e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,45 +1,4 @@
-variables:
-  MAVEN_OPTS: >-
-    -Dhttps.protocols=TLSv1.2
-    -Dmaven.repo.local=$CI_PROJECT_DIR/.m2/repository
-    -Dorg.slf4j.simpleLogger.showDateTime=true
-    -Djava.awt.headless=true
-
-  MAVEN_CLI_OPTS: >-
-    --batch-mode
-    --errors
-    --fail-at-end
-    --show-version
-    --no-transfer-progress
-    -DinstallAtEnd=true
-    -DdeployAtEnd=true
-
-image: maven:3.8-openjdk-11
-
-cache:
-  paths:
-    - .m2/repository
-
-before_script:
-  - VERSION=$(mvn -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
-  - if [ "$CI_COMMIT_BRANCH" == "qa" ]; then
-      export VERSION="${VERSION}-QA";
-    elif [ "$CI_COMMIT_BRANCH" != "master" ]; then
-      export VERSION="${VERSION}-${CI_COMMIT_BRANCH}-SNAPSHOT";
-    fi
-  - mvn versions:set -DnewVersion=$VERSION
-
-.verify:
-  stage: test
-  script:
-    - 'mvn $MAVEN_CLI_OPTS test'
-    - if [ ! -f ci_settings.xml ]; then
-        echo "CI settings missing! Please create ci_settings.xml file.";
-        exit 1;
-      fi
-
-deploy:jdk11:
-  stage: deploy
-  script:
-    - 'mvn $MAVEN_CLI_OPTS deploy --settings ci_settings.xml'
-  when: on_success
+include:
+  - project: nile/java-build-tools
+    ref: master
+    file: common-lib-ci.yml
diff --git a/ci_settings.xml b/ci_settings.xml
deleted file mode 100644
index 57951e9..0000000
--- a/ci_settings.xml
+++ /dev/null
@@ -1,16 +0,0 @@
-<settings xmlns="http://maven.apache.org/SETTINGS/1.1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-          xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.1.0 http://maven.apache.org/xsd/settings-1.1.0.xsd">
-  <servers>
-    <server>
-      <id>gitlab-maven</id>
-      <configuration>
-        <httpHeaders>
-          <property>
-            <name>Job-Token</name>
-            <value>${CI_JOB_TOKEN}</value>
-          </property>
-        </httpHeaders>
-      </configuration>
-    </server>
-  </servers>
-</settings>
diff --git a/pom.xml b/pom.xml
index f8c16da..790b2e3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,27 +6,12 @@
 
     <groupId>ch.cern.nile</groupId>
     <artifactId>nile-common</artifactId>
-    <version>1.0.1</version>
-
-    <repositories>
-        <repository>
-            <id>gitlab-maven</id>
-            <url>${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/maven</url>
-        </repository>
-    </repositories>
-    <distributionManagement>
-        <repository>
-            <id>gitlab-maven</id>
-            <url>${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/maven</url>
-        </repository>
-        <snapshotRepository>
-            <id>gitlab-maven</id>
-            <url>${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/maven</url>
-        </snapshotRepository>
-    </distributionManagement>
+    <version>1.0.0</version>
 
     <properties>
         <maven.compiler.release>11</maven.compiler.release>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
         <kafka.version>2.0.0</kafka.version>
     </properties>
 
@@ -83,11 +68,75 @@
 
     <build>
         <plugins>
+            <!-- Checkstyle plugin -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-checkstyle-plugin</artifactId>
+                <version>3.3.1</version>
+                <configuration>
+                    <configLocation>https://gitlab.cern.ch/nile/java-build-tools/-/raw/master/src/main/resources/checkstyle.xml?ref_type=heads</configLocation>
+                    <suppressionsLocation>https://gitlab.cern.ch/nile/java-build-tools/-/raw/master/src/main/resources/checkstyle-suppressions.xml?ref_type=heads</suppressionsLocation>
+                    <consoleOutput>true</consoleOutput>
+                    <failsOnError>true</failsOnError>
+                    <linkXRef>false</linkXRef>
+                </configuration>
+            </plugin>
+
+            <!-- PMD plugin -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-pmd-plugin</artifactId>
+                <version>3.21.1-pmd-7.0.0-SNAPSHOT</version>
+                <dependencies>
+                    <dependency>
+                        <groupId>net.sourceforge.pmd</groupId>
+                        <artifactId>pmd-core</artifactId>
+                        <version>7.0.0-rc4</version>
+                    </dependency>
+                    <dependency>
+                        <groupId>net.sourceforge.pmd</groupId>
+                        <artifactId>pmd-java</artifactId>
+                        <version>7.0.0-rc4</version>
+                    </dependency>
+                </dependencies>
+                <configuration>
+                    <linkXRef>false</linkXRef>
+                    <rulesets>
+                        <ruleset>https://gitlab.cern.ch/nile/java-build-tools/-/raw/master/src/main/resources/pmd_java_ruleset.xml?ref_type=heads</ruleset>
+                    </rulesets>
+                    <includeTests>true</includeTests>
+                    <failOnViolation>true</failOnViolation>
+                    <printFailingErrors>true</printFailingErrors>
+                </configuration>
+            </plugin>
+
+            <!-- SpotBugs plugin -->
+            <plugin>
+                <groupId>com.github.spotbugs</groupId>
+                <artifactId>spotbugs-maven-plugin</artifactId>
+                <version>4.8.1.0</version>
+                <configuration>
+                    <effort>Max</effort>
+                    <xmlOutput>false</xmlOutput>
+                    <htmlOutput>true</htmlOutput>
+                    <plugins>
+                        <plugin>
+                            <groupId>com.h3xstream.findsecbugs</groupId>
+                            <artifactId>findsecbugs-plugin</artifactId>
+                            <version>1.12.0</version>
+                        </plugin>
+                    </plugins>
+                </configuration>
+            </plugin>
+
+            <!-- Compiler plugin -->
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-compiler-plugin</artifactId>
                 <version>3.8.0</version>
             </plugin>
+
+            <!-- Surefire plugin for testing -->
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-surefire-plugin</artifactId>
@@ -95,4 +144,38 @@
             </plugin>
         </plugins>
     </build>
+
+
+    <repositories>
+        <repository>
+            <id>gitlab-maven</id>
+            <url>${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/maven</url>
+        </repository>
+    </repositories>
+
+    <pluginRepositories>
+        <pluginRepository>
+            <id>apache.snapshots</id>
+            <name>Apache Snapshot Repository</name>
+            <url>https://repository.apache.org/snapshots</url>
+            <releases>
+                <enabled>false</enabled>
+            </releases>
+            <snapshots>
+                <enabled>true</enabled>
+            </snapshots>
+        </pluginRepository>
+    </pluginRepositories>
+
+    <distributionManagement>
+        <repository>
+            <id>gitlab-maven</id>
+            <url>${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/maven</url>
+        </repository>
+        <snapshotRepository>
+            <id>gitlab-maven</id>
+            <url>${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/maven</url>
+        </snapshotRepository>
+    </distributionManagement>
+
 </project>
-- 
GitLab

From f5189b7a016205422c3cbb4f89e0909464d15937 Mon Sep 17 00:00:00 2001
From: Dean Dalianis <dean.dalianis@cern.ch>
Date: Thu, 25 Jan 2024 12:46:18 +0200
Subject: [PATCH 02/15] Basic formatting, test renaming, new tests

---
 pom.xml                                       |   6 +
 src/main/java/ch/cern/nile/common/Main.java   |  91 +++---
 .../common/clients/KafkaStreamsClient.java    | 129 ++++----
 .../cern/nile/common/configs/Configure.java   |   2 +-
 .../nile/common/configs/PropertiesCheck.java  |  96 +++---
 .../nile/common/configs/StreamConfig.java     | 129 ++++----
 .../cern/nile/common/configs/StreamType.java  |   6 +-
 .../common/exceptions/DecodingException.java  |  12 +-
 .../InvalidStreamTypeException.java           |   9 -
 .../exceptions/MissingPropertyException.java  |   8 +-
 .../exceptions/ReverseDnsLookupException.java |   8 +-
 .../UnknownStreamTypeException.java           |   8 +-
 .../common/json/JsonPojoDeserializer.java     |  72 ++---
 .../nile/common/json/JsonPojoSerializer.java  |  60 ++--
 .../ch/cern/nile/common/json/JsonSerde.java   |  52 ++--
 .../cern/nile/common/models/Application.java  |   4 +-
 .../ch/cern/nile/common/models/Topic.java     |   2 +-
 .../ch/cern/nile/common/probes/Health.java    |  97 +++---
 .../nile/common/probes/HttpServerFactory.java |  13 +
 .../ch/cern/nile/common/schema/JsonType.java  |  57 ++--
 .../nile/common/schema/SchemaInjector.java    | 110 +++----
 .../nile/common/streams/AbstractStream.java   | 277 +++++++++++-------
 .../cern/nile/common/streams/Streaming.java   |   4 +-
 src/main/resources/log4j.properties           |   3 +-
 .../clients/KafkaStreamsClientTest.java       | 115 ++++++++
 .../common/configs/PropertiesCheckTest.java   | 117 +++++---
 .../nile/common/configs/StreamConfigTest.java | 105 ++++---
 .../nile/common/configs/StreamTypeTest.java   |  44 +--
 .../common/json/JsonPojoDeserializerTest.java |  90 +++---
 .../common/json/JsonPojoSerializerTest.java   |  50 ++--
 .../cern/nile/common/json/JsonSerdeTest.java  |  24 +-
 .../cern/nile/common/probes/HealthTest.java   |  96 ++++++
 .../common/schema/SchemaInjectorTest.java     | 103 ++++---
 .../nile/common/schema/SchemaTestBase.java    |  33 ++-
 34 files changed, 1221 insertions(+), 811 deletions(-)
 delete mode 100644 src/main/java/ch/cern/nile/common/exceptions/InvalidStreamTypeException.java
 create mode 100644 src/main/java/ch/cern/nile/common/probes/HttpServerFactory.java
 create mode 100644 src/test/java/ch/cern/nile/common/clients/KafkaStreamsClientTest.java
 create mode 100644 src/test/java/ch/cern/nile/common/probes/HealthTest.java

diff --git a/pom.xml b/pom.xml
index 790b2e3..faef136 100644
--- a/pom.xml
+++ b/pom.xml
@@ -64,6 +64,12 @@
             <version>${kafka.version}</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-core</artifactId>
+            <version>5.2.0</version>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
     <build>
diff --git a/src/main/java/ch/cern/nile/common/Main.java b/src/main/java/ch/cern/nile/common/Main.java
index 6724ee6..2d0fb8d 100644
--- a/src/main/java/ch/cern/nile/common/Main.java
+++ b/src/main/java/ch/cern/nile/common/Main.java
@@ -1,58 +1,59 @@
 package ch.cern.nile.common;
 
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Properties;
+
 import ch.cern.nile.common.clients.KafkaStreamsClient;
 import ch.cern.nile.common.configs.PropertiesCheck;
 import ch.cern.nile.common.configs.StreamConfig;
 import ch.cern.nile.common.configs.StreamType;
 import ch.cern.nile.common.streams.Streaming;
 
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-import java.util.Properties;
-
 public class Main {
 
-  /**
-   * Main method.
-   *
-   * @param args the properties files
-   */
-  public static void main(String[] args) {
-    // Check if properties file was passed
-    if (args.length < 1) {
-      throw new RuntimeException("Expecting args[0] to be the path to the configuration file");
-    }
-
-    // Loading properties file
-    String configsPath = args[0];
-    final Properties configs = new Properties();
-    try {
-      configs.load(new FileInputStream(configsPath));
-    } catch (IOException e) {
-      e.printStackTrace();
-      throw new RuntimeException(e);
-    }
-
-    StreamType sType = StreamType.valueOf(configs.getProperty(StreamConfig.CommonProperties.STREAM_TYPE.getValue(), null));
-
-    PropertiesCheck.validateProperties(configs, sType);
-
-    // Initialize Kafka Client
-    final KafkaStreamsClient client = new KafkaStreamsClient();
-    client.configure(configs);
-
-    // Start Streaming
-    try {
-      Class<?> clazz = Class.forName(configs.getProperty(StreamConfig.CommonProperties.STREAM_CLASS.getValue()));
-      final Streaming streaming;
-      streaming = (Streaming) clazz.getDeclaredConstructor().newInstance();
-      streaming.configure(configs);
-      streaming.stream(client);
-    } catch (ClassNotFoundException | IllegalAccessException | InstantiationException | ClassCastException
-        | InvocationTargetException | NoSuchMethodException e) {
-      e.printStackTrace();
+    /**
+     * Main method.
+     *
+     * @param args the properties files
+     */
+    public static void main(String[] args) {
+        // Check if properties file was passed
+        if (args.length < 1) {
+            throw new RuntimeException("Expecting args[0] to be the path to the configuration file");
+        }
+
+        // Loading properties file
+        String configsPath = args[0];
+        final Properties configs = new Properties();
+        try {
+            configs.load(new FileInputStream(configsPath));
+        } catch (IOException e) {
+            e.printStackTrace();
+            throw new RuntimeException(e);
+        }
+
+        StreamType sType =
+                StreamType.valueOf(configs.getProperty(StreamConfig.CommonProperties.STREAM_TYPE.getValue(), null));
+
+        PropertiesCheck.validateProperties(configs, sType);
+
+        // Initialize Kafka Client
+        final KafkaStreamsClient client = new KafkaStreamsClient();
+        client.configure(configs);
+
+        // Start Streaming
+        try {
+            Class<?> clazz = Class.forName(configs.getProperty(StreamConfig.CommonProperties.STREAM_CLASS.getValue()));
+            final Streaming streaming;
+            streaming = (Streaming) clazz.getDeclaredConstructor().newInstance();
+            streaming.configure(configs);
+            streaming.stream(client);
+        } catch (ClassNotFoundException | IllegalAccessException | InstantiationException | ClassCastException
+                | InvocationTargetException | NoSuchMethodException e) {
+            e.printStackTrace();
+        }
+    }
-  }
 }
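
[Editor's sketch, not part of the patch: Main resolves the stream implementation reflectively from the stream.class key, so a deployment only supplies a properties file. All values below are hypothetical examples; the keys are the ones PropertiesCheck validates. bootstrap.servers is only read when kafka.cluster is "test", while truststore.location is always required because it is a ClientProperties key.

    # example.properties (hypothetical values)
    stream.type=DECODING
    stream.class=ch.cern.nile.example.ExampleStream
    client.id=example-decoder
    kafka.cluster=test
    bootstrap.servers=localhost:9092
    source.topic=example-raw
    sink.topic=example-decoded
    truststore.location=/path/to/truststore.jks

    $ java -cp app.jar ch.cern.nile.common.Main example.properties
]
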
diff --git a/src/main/java/ch/cern/nile/common/clients/KafkaStreamsClient.java b/src/main/java/ch/cern/nile/common/clients/KafkaStreamsClient.java
index 74b490d..af07197 100644
--- a/src/main/java/ch/cern/nile/common/clients/KafkaStreamsClient.java
+++ b/src/main/java/ch/cern/nile/common/clients/KafkaStreamsClient.java
@@ -4,8 +4,6 @@ import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.Properties;
 
-import ch.cern.nile.common.configs.Configure;
-import ch.cern.nile.common.exceptions.ReverseDnsLookupException;
 import org.apache.kafka.common.config.SaslConfigs;
 import org.apache.kafka.common.config.SslConfigs;
 import org.apache.kafka.common.serialization.Serdes;
@@ -14,72 +12,93 @@ import org.apache.kafka.streams.StreamsConfig;
 import org.apache.kafka.streams.Topology;
 import org.apache.kafka.streams.errors.DefaultProductionExceptionHandler;
 import org.apache.kafka.streams.errors.LogAndContinueExceptionHandler;
+
+import ch.cern.nile.common.configs.Configure;
 import ch.cern.nile.common.configs.StreamConfig;
+import ch.cern.nile.common.exceptions.ReverseDnsLookupException;
 import ch.cern.nile.common.json.JsonSerde;
 
 /**
- * This class is responsible for creating and configuring KafkaStreams instances.
+ * A client for creating KafkaStreams instances.
  */
 public class KafkaStreamsClient implements Configure {
 
-  private Properties properties;
+    private Properties properties;
+
+    /**
+     * Configures the KafkaStreams instance using the provided properties.
+     *
+     * @param configs the properties to be used for the configuration
+     */
+    @Override
+    public void configure(Properties configs) {
+        final String clientId = configs.getProperty(StreamConfig.ClientProperties.CLIENT_ID.getValue());
+        properties = new Properties();
+        properties.put(StreamsConfig.APPLICATION_ID_CONFIG, clientId);
+        properties.put(StreamsConfig.CLIENT_ID_CONFIG, clientId);
 
-  @Override
-  public void configure(Properties configs) {
-    final String clientId = configs.getProperty(StreamConfig.ClientProperties.CLIENT_ID.getValue());
-    properties = new Properties();
-    properties.put(StreamsConfig.APPLICATION_ID_CONFIG, clientId);
-    properties.put(StreamsConfig.CLIENT_ID_CONFIG, clientId);
+        String kafkaCluster = configs.getProperty(StreamConfig.ClientProperties.KAFKA_CLUSTER.getValue());
 
-    String kafkaCluster = configs.getProperty(StreamConfig.ClientProperties.KAFKA_CLUSTER.getValue());
+        if (!kafkaCluster.equals("test")) {
+            properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, this.reverseDnsLookup(kafkaCluster));
+            properties.put(StreamsConfig.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");
+            properties.put(SaslConfigs.SASL_MECHANISM, "GSSAPI");
+            properties.put(SaslConfigs.SASL_KERBEROS_SERVICE_NAME, "kafka");
+            properties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,
+                    configs.getProperty(StreamConfig.ClientProperties.TRUSTSTORE_LOCATION.getValue()));
+        } else {
+            properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, configs.getProperty("bootstrap.servers"));
+        }
 
-    if (!kafkaCluster.equals("test")) {
-      properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, this.reverseDnsLookup(kafkaCluster));
-      properties.put(StreamsConfig.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");
-      properties.put(SaslConfigs.SASL_MECHANISM, "GSSAPI");
-      properties.put(SaslConfigs.SASL_KERBEROS_SERVICE_NAME, "kafka");
-      properties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, configs.getProperty(StreamConfig.ClientProperties.TRUSTSTORE_LOCATION.getValue()));
-    } else {
-      properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, configs.getProperty("bootstrap.servers"));
+        properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
+        properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class.getName());
+        properties.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
+                LogAndContinueExceptionHandler.class.getName());
+        properties.put(StreamsConfig.DEFAULT_PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG,
+                DefaultProductionExceptionHandler.class.getName());
     }
 
-    properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
-    properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class.getName());
-    properties.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG, LogAndContinueExceptionHandler.class.getName());
-    properties.put(StreamsConfig.DEFAULT_PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG, DefaultProductionExceptionHandler.class.getName());
-  }
 
-  /**
-   * Creates a KafkaStreams instance using the provided topology.
-   *
-   * @param topology the topology to be used for the KafkaStreams instance
-   * @return a configured KafkaStreams instance
-   */
-  public KafkaStreams create(Topology topology) {
-    return new KafkaStreams(topology, properties);
-  }
+    /**
+     * Creates a KafkaStreams instance using the provided topology.
+     *
+     * @param topology the topology to be used for the KafkaStreams instance
+     * @return a configured KafkaStreams instance
+     */
+    public KafkaStreams create(Topology topology) {
+        return new KafkaStreams(topology, properties);
+    }
 
-  /**
-   * Resolves the provided Kafka cluster domain to a comma-separated list of
-   * hostnames with port 9093.
-   *
-   * @param kafkaCluster the domain of the Kafka cluster
-   * @return a comma-separated list of hostnames with port 9093
-   * @throws RuntimeException if the hostname resolution fails
-   */
-  private String reverseDnsLookup(String kafkaCluster) {
-    try {
-      StringBuilder sb = new StringBuilder();
-      InetAddress[] address = InetAddress.getAllByName(kafkaCluster);
-      for (InetAddress host : address) {
-        final String hostName = InetAddress.getByName(host.getHostAddress()).getHostName();
-        // FIXME: add configuration for the port
-        sb.append(hostName).append(":9093,");
-      }
-      sb.deleteCharAt(sb.length() - 1);
-      return sb.toString();
-    } catch (UnknownHostException e) {
-      throw new ReverseDnsLookupException("Failed to perform reverse DNS lookup for the Kafka cluster: " + kafkaCluster, e);
+    /**
+     * Resolves the provided Kafka cluster domain to a comma-separated list of hostnames with port 9093.
+     *
+     * @param kafkaCluster the domain of the Kafka cluster
+     * @return a comma-separated list of hostnames with port 9093
+     * @throws RuntimeException if the hostname resolution fails
+     */
+    private String reverseDnsLookup(String kafkaCluster) {
+        try {
+            return performDnsLookup(kafkaCluster);
+        } catch (UnknownHostException e) {
+            throw new ReverseDnsLookupException(
+                    "Failed to perform reverse DNS lookup for the Kafka cluster: " + kafkaCluster, e);
+        }
+    }
+
+    /**
+     * Perform the DNS lookup. This method can be overridden in tests.
+     *
+     * @param kafkaCluster the domain of the Kafka cluster
+     * @return a comma-separated list of hostnames with port 9093
+     * @throws UnknownHostException if the hostname resolution fails
+     */
+    protected String performDnsLookup(String kafkaCluster) throws UnknownHostException {
+        StringBuilder sb = new StringBuilder();
+        InetAddress[] address = InetAddress.getAllByName(kafkaCluster);
+        for (InetAddress host : address) {
+            final String hostName = InetAddress.getByName(host.getHostAddress()).getHostName();
+            sb.append(hostName).append(":9093,");
+        }
+        sb.deleteCharAt(sb.length() - 1);
+        return sb.toString();
     }
-  }
 }
diff --git a/src/main/java/ch/cern/nile/common/configs/Configure.java b/src/main/java/ch/cern/nile/common/configs/Configure.java
index b0b3c4d..f520461 100644
--- a/src/main/java/ch/cern/nile/common/configs/Configure.java
+++ b/src/main/java/ch/cern/nile/common/configs/Configure.java
@@ -6,5 +6,5 @@ import java.util.Properties;
  * Interface for classes that can be configured with a Properties object.
  */
 public interface Configure {
 
-  void configure(Properties configs);
+    void configure(Properties configs);
 
 }
diff --git a/src/main/java/ch/cern/nile/common/configs/PropertiesCheck.java b/src/main/java/ch/cern/nile/common/configs/PropertiesCheck.java
index bb95779..19f10c3 100644
--- a/src/main/java/ch/cern/nile/common/configs/PropertiesCheck.java
+++ b/src/main/java/ch/cern/nile/common/configs/PropertiesCheck.java
@@ -12,60 +12,62 @@ import ch.cern.nile.common.exceptions.UnknownStreamTypeException;
  */
 public final class PropertiesCheck {
 
-  private PropertiesCheck(){}
+    private PropertiesCheck() {
+    }
 
-  private static final Set<String> CLIENT_PROPERTIES = StreamConfig.ClientProperties.getValues();
-  private static final Set<String> COMMON_PROPERTIES = StreamConfig.CommonProperties.getValues();
-  private static final Set<String> DECODING_PROPERTIES = StreamConfig.DecodingProperties.getValues();
-  private static final Set<String> ROUTING_PROPERTIES = StreamConfig.RoutingProperties.getValues();
-  private static final Set<String> ENRICHMENT_PROPERTIES = StreamConfig.EnrichmentProperties.getValues();
+    private static final Set<String> CLIENT_PROPERTIES = StreamConfig.ClientProperties.getValues();
+    private static final Set<String> COMMON_PROPERTIES = StreamConfig.CommonProperties.getValues();
+    private static final Set<String> DECODING_PROPERTIES = StreamConfig.DecodingProperties.getValues();
+    private static final Set<String> ROUTING_PROPERTIES = StreamConfig.RoutingProperties.getValues();
+    private static final Set<String> ENRICHMENT_PROPERTIES = StreamConfig.EnrichmentProperties.getValues();
 
-  /**
-   * Validates the properties file based on the type of stream.
-   *
-   * @param properties - properties already loaded from file into java.util.Properties object.
-   * @param streamType - type of stream defined in the properties file.
-   * @throws MissingPropertyException if a required property is missing from the properties object.
-   * @throws UnknownStreamTypeException if the stream type is unknown.
-   */
-  public static void validateProperties(Properties properties, StreamType streamType) {
-    Objects.requireNonNull(properties, "Properties object cannot be null");
-    Objects.requireNonNull(streamType, "Properties file is missing stream.type property");
+    /**
+     * Validates the properties file based on the type of stream.
+     *
+     * @param properties - properties already loaded from file into java.util.Properties object.
+     * @param streamType - type of stream defined in the properties file.
+     * @throws MissingPropertyException if a required property is missing from the properties object.
+     * @throws UnknownStreamTypeException if the stream type is unknown.
+     */
+    public static void validateProperties(Properties properties, StreamType streamType) {
+        Objects.requireNonNull(properties, "Properties object cannot be null");
+        Objects.requireNonNull(streamType, "Properties file is missing stream.type property");
 
-    validateRequiredProperties(properties, CLIENT_PROPERTIES);
-    validateRequiredProperties(properties, COMMON_PROPERTIES);
+        validateRequiredProperties(properties, CLIENT_PROPERTIES);
+        validateRequiredProperties(properties, COMMON_PROPERTIES);
 
-    switch (streamType) {
-      case DECODING:
-        validateRequiredProperties(properties, DECODING_PROPERTIES);
-        break;
-      case ROUTING:
-        validateRequiredProperties(properties, ROUTING_PROPERTIES);
-        break;
-      case ENRICHMENT:
-        validateRequiredProperties(properties, ENRICHMENT_PROPERTIES);
-        break;
-      default:
-        throw new UnknownStreamTypeException(String.format("Stream type unknown: %s.", streamType));
+        switch (streamType) {
+            case DECODING:
+                validateRequiredProperties(properties, DECODING_PROPERTIES);
+                break;
+            case ROUTING:
+                validateRequiredProperties(properties, ROUTING_PROPERTIES);
+                break;
+            case ENRICHMENT:
+                validateRequiredProperties(properties, ENRICHMENT_PROPERTIES);
+                break;
+            default:
+                // Cannot happen as the stream type is validated before this switch statement.
+                throw new UnknownStreamTypeException(String.format("Stream type unknown: %s.", streamType));
+        }
     }
-  }
 
-  /**
-   * Validates the required properties within the given properties object.
-   *
-   * @param props - properties object to check for required properties.
-   * @param propsToCheck - set of required property keys.
-   * @throws MissingPropertyException if a required property is missing from the properties object.
-   */
-  private static void validateRequiredProperties(Properties props, Set<String> propsToCheck) {
-    Objects.requireNonNull(props, "Properties object cannot be null");
-    Objects.requireNonNull(propsToCheck, "Properties to check cannot be null");
+    /**
+     * Validates the required properties within the given properties object.
+     *
+     * @param props - properties object to check for required properties.
+     * @param propsToCheck - set of required property keys.
+     * @throws MissingPropertyException if a required property is missing from the properties object.
+     */
+    private static void validateRequiredProperties(Properties props, Set<String> propsToCheck) {
+        Objects.requireNonNull(props, "Properties object cannot be null");
+        Objects.requireNonNull(propsToCheck, "Properties to check cannot be null");
 
-    for (String prop : propsToCheck) {
-      if (!props.containsKey(prop)) {
-        throw new MissingPropertyException(String.format("Properties file is missing: %s property.", prop));
-      }
+        for (String prop : propsToCheck) {
+            if (!props.containsKey(prop)) {
+                throw new MissingPropertyException(String.format("Properties file is missing: %s property.", prop));
+            }
+        }
     }
-  }
 }
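
[Editor's sketch, not part of the patch: a self-contained example of the validation contract above. The property values are invented; DECODING additionally requires sink.topic on top of the client and common keys, so this call throws.

    import java.util.Properties;

    import ch.cern.nile.common.configs.PropertiesCheck;
    import ch.cern.nile.common.configs.StreamType;

    public class PropertiesCheckExample {

        public static void main(String[] args) {
            Properties props = new Properties();
            props.setProperty("client.id", "example-decoder");
            props.setProperty("kafka.cluster", "test");
            props.setProperty("source.topic", "example-raw");
            props.setProperty("truststore.location", "/tmp/truststore.jks");
            props.setProperty("stream.type", "DECODING");
            props.setProperty("stream.class", "ch.cern.nile.example.ExampleStream");

            // Throws MissingPropertyException: "Properties file is missing: sink.topic property."
            PropertiesCheck.validateProperties(props, StreamType.DECODING);
        }
    }
]
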
diff --git a/src/main/java/ch/cern/nile/common/configs/StreamConfig.java b/src/main/java/ch/cern/nile/common/configs/StreamConfig.java
index e0312f9..c332c99 100644
--- a/src/main/java/ch/cern/nile/common/configs/StreamConfig.java
+++ b/src/main/java/ch/cern/nile/common/configs/StreamConfig.java
@@ -1,5 +1,7 @@
 package ch.cern.nile.common.configs;
 
+import lombok.Getter;
+
 import java.util.Arrays;
 import java.util.Set;
 import java.util.stream.Collectors;
@@ -9,101 +11,86 @@
  */
 public class StreamConfig {
 
-  public enum ClientProperties {
-    SOURCE_TOPIC("source.topic"),
-    KAFKA_CLUSTER("kafka.cluster"),
-    CLIENT_ID("client.id"),
-    TRUSTSTORE_LOCATION("truststore.location");
-
-    private final String value;
+    @Getter
+    public enum ClientProperties {
+        SOURCE_TOPIC("source.topic"),
+        KAFKA_CLUSTER("kafka.cluster"),
+        CLIENT_ID("client.id"),
+        TRUSTSTORE_LOCATION("truststore.location");
 
-    ClientProperties(String value) {
-      this.value = value;
-    }
+        private final String value;
 
-    public static Set<String> getValues() {
-      return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet());
-    }
+        ClientProperties(String value) {
+            this.value = value;
+        }
 
-    public String getValue() {
-      return value;
+        public static Set<String> getValues() {
+            return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet());
+        }
     }
-  }
 
-  public enum CommonProperties {
-    STREAM_TYPE("stream.type"),
-    STREAM_CLASS("stream.class");
-
-    private final String value;
+    @Getter
+    public enum CommonProperties {
+        STREAM_TYPE("stream.type"),
+        STREAM_CLASS("stream.class");
 
-    CommonProperties(String value) {
-      this.value = value;
-    }
+        private final String value;
 
-    public static Set<String> getValues() {
-      return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet());
-    }
+        CommonProperties(String value) {
+            this.value = value;
+        }
 
-    public String getValue() {
-      return value;
+        public static Set<String> getValues() {
+            return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet());
+        }
     }
-  }
 
-  public enum DecodingProperties {
-    SINK_TOPIC("sink.topic");
-
-    private final String value;
+    @Getter
+    public enum DecodingProperties {
+        SINK_TOPIC("sink.topic");
 
-    DecodingProperties(String value) {
-      this.value = value;
-    }
+        private final String value;
 
-    public static Set<String> getValues() {
-      return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet());
-    }
+        DecodingProperties(String value) {
+            this.value = value;
+        }
 
-    public String getValue() {
-      return value;
+        public static Set<String> getValues() {
+            return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet());
+        }
     }
-  }
 
-  public enum RoutingProperties {
-    ROUTING_CONFIG_PATH("routing.config.path"),
-    DLQ_TOPIC("dlq.topic");
-
-    private final String value;
+    @Getter
+    public enum RoutingProperties {
+        ROUTING_CONFIG_PATH("routing.config.path"),
+        DLQ_TOPIC("dlq.topic");
 
-    RoutingProperties(String value) {
-      this.value = value;
-    }
+        private final String value;
 
-    public static Set<String> getValues() {
-      return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet());
-    }
+        RoutingProperties(String value) {
+            this.value = value;
+        }
 
-    public String getValue() {
-      return value;
+        public static Set<String> getValues() {
+            return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet());
+        }
     }
-  }
 
-  public enum EnrichmentProperties {
-    ENRICHMENT_CONFIG_PATH("enrichment.config.path"),
-    SINK_TOPIC("sink.topic");
-
-    private final String value;
+    @Getter
+    public enum EnrichmentProperties {
+        ENRICHMENT_CONFIG_PATH("enrichment.config.path"),
+        SINK_TOPIC("sink.topic");
 
-    EnrichmentProperties(String value) {
-      this.value = value;
-    }
+        private final String value;
 
-    public static Set<String> getValues() {
-      return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet());
-    }
+        EnrichmentProperties(String value) {
+            this.value = value;
+        }
 
-    public String getValue() {
-      return value;
+        public static Set<String> getValues() {
+            return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet());
+        }
     }
-  }
 }
diff --git a/src/main/java/ch/cern/nile/common/configs/StreamType.java b/src/main/java/ch/cern/nile/common/configs/StreamType.java
index b3b3565..7f9dfdc 100644
--- a/src/main/java/ch/cern/nile/common/configs/StreamType.java
+++ b/src/main/java/ch/cern/nile/common/configs/StreamType.java
@@ -5,8 +5,8 @@ package ch.cern.nile.common.configs;
  */
 public enum StreamType {
 
-  ROUTING,
-  DECODING,
-  ENRICHMENT
+    ROUTING,
+    DECODING,
+    ENRICHMENT
 
 }
diff --git a/src/main/java/ch/cern/nile/common/exceptions/DecodingException.java b/src/main/java/ch/cern/nile/common/exceptions/DecodingException.java
index 7b02130..67808ad 100644
--- a/src/main/java/ch/cern/nile/common/exceptions/DecodingException.java
+++ b/src/main/java/ch/cern/nile/common/exceptions/DecodingException.java
@@ -2,12 +2,12 @@ package ch.cern.nile.common.exceptions;
 
 public class DecodingException extends RuntimeException {
 
-  public DecodingException(String message, Throwable err) {
-    super(message, err);
-  }
+    public DecodingException(String message, Throwable err) {
+        super(message, err);
+    }
 
-  public DecodingException(String message) {
-    super(message);
-  }
+    public DecodingException(String message) {
+        super(message);
+    }
 
 }
diff --git a/src/main/java/ch/cern/nile/common/exceptions/InvalidStreamTypeException.java b/src/main/java/ch/cern/nile/common/exceptions/InvalidStreamTypeException.java
deleted file mode 100644
index 47c5651..0000000
--- a/src/main/java/ch/cern/nile/common/exceptions/InvalidStreamTypeException.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package ch.cern.nile.common.exceptions;
-
-public class InvalidStreamTypeException extends IllegalArgumentException {
-
-  public InvalidStreamTypeException(String message) {
-    super(message);
-  }
-
-}
\ No newline at end of file
diff --git a/src/main/java/ch/cern/nile/common/exceptions/MissingPropertyException.java b/src/main/java/ch/cern/nile/common/exceptions/MissingPropertyException.java
index 1765166..41d6006 100644
--- a/src/main/java/ch/cern/nile/common/exceptions/MissingPropertyException.java
+++ b/src/main/java/ch/cern/nile/common/exceptions/MissingPropertyException.java
@@ -2,8 +2,8 @@ package ch.cern.nile.common.exceptions;
 
 public class MissingPropertyException extends RuntimeException {
 
-  public MissingPropertyException(String message) {
-    super(message);
-  }
+    public MissingPropertyException(String message) {
+        super(message);
+    }
 
-}
\ No newline at end of file
+}
diff --git a/src/main/java/ch/cern/nile/common/exceptions/ReverseDnsLookupException.java b/src/main/java/ch/cern/nile/common/exceptions/ReverseDnsLookupException.java
index e97b759..b104a0a 100644
--- a/src/main/java/ch/cern/nile/common/exceptions/ReverseDnsLookupException.java
+++ b/src/main/java/ch/cern/nile/common/exceptions/ReverseDnsLookupException.java
@@ -2,8 +2,8 @@ package ch.cern.nile.common.exceptions;
 
 public class ReverseDnsLookupException extends RuntimeException {
 
-  public ReverseDnsLookupException(String message, Throwable cause) {
-    super(message, cause);
-  }
+    public ReverseDnsLookupException(String message, Throwable cause) {
+        super(message, cause);
+    }
 
-}
\ No newline at end of file
+}
diff --git a/src/main/java/ch/cern/nile/common/exceptions/UnknownStreamTypeException.java b/src/main/java/ch/cern/nile/common/exceptions/UnknownStreamTypeException.java
index 4de9951..1748730 100644
--- a/src/main/java/ch/cern/nile/common/exceptions/UnknownStreamTypeException.java
+++ b/src/main/java/ch/cern/nile/common/exceptions/UnknownStreamTypeException.java
@@ -2,8 +2,8 @@ package ch.cern.nile.common.exceptions;
 
 public class UnknownStreamTypeException extends RuntimeException {
 
-  public UnknownStreamTypeException(String message) {
-    super(message);
-  }
+    public UnknownStreamTypeException(String message) {
+        super(message);
+    }
 
-}
\ No newline at end of file
+}
diff --git a/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java b/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java
index d54fe36..f5203a7 100644
--- a/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java
+++ b/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java
@@ -1,50 +1,52 @@
 package ch.cern.nile.common.json;
 
-import com.google.gson.Gson;
 import java.nio.charset.StandardCharsets;
 import java.util.Map;
+
+import com.google.gson.Gson;
+
 import org.apache.kafka.common.serialization.Deserializer;
 
 public class JsonPojoDeserializer<T> implements Deserializer<T> {
 
-  private static final Gson gson = new Gson();
-  Class<T> tClass;
+    private static final Gson gson = new Gson();
+    Class<T> tClass;
 
-  /**
-   * Default constructor needed by Kafka.
-   */
-  public JsonPojoDeserializer() {
-  }
+    /**
+     * Default constructor needed by Kafka.
+     */
+    public JsonPojoDeserializer() {
+    }
 
-  JsonPojoDeserializer(Class<T> clazz) {
-    this.tClass = clazz;
-  }
+    JsonPojoDeserializer(Class<T> clazz) {
+        this.tClass = clazz;
+    }
 
-  @Override
-  @SuppressWarnings("unchecked")
-  public void configure(Map<String, ?> props, boolean isKey) {
-    if (tClass == null) {
-      tClass = (Class<T>) props.get("JsonPOJOClass");
+    @Override
+    @SuppressWarnings("unchecked")
+    public void configure(Map<String, ?> props, boolean isKey) {
+        if (tClass == null) {
+            tClass = (Class<T>) props.get("JsonPOJOClass");
+        }
     }
-  }
-
-  /**
-   * Deserialize the provided byte array into an object of type T.
-   *
-   * @param topic The topic associated with the data.
-   * @param bytes The byte array to be deserialized.
-   * @return The deserialized object of type T or null if the byte array is null.
-   */
-  @Override
-  public T deserialize(String topic, byte[] bytes) {
-    if (bytes == null) {
-      return null;
+
+    /**
+     * Deserialize the provided byte array into an object of type T.
+     *
+     * @param topic The topic associated with the data
+     * @param bytes The byte array to be deserialized
+     * @return The deserialized object of type T or null if the byte array is null
+     */
+    @Override
+    public T deserialize(String topic, byte[] bytes) {
+        if (bytes == null) {
+            return null;
+        }
+        return gson.fromJson(new String(bytes, StandardCharsets.UTF_8), tClass);
     }
-    return gson.fromJson(new String(bytes, StandardCharsets.UTF_8), tClass);
-  }
 
-  @Override
-  public void close() {
-  }
+    @Override
+    public void close() {
+    }
 
-}
\ No newline at end of file
+}
diff --git a/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java b/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java
index b11221e..dae6338 100644
--- a/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java
+++ b/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java
@@ -1,41 +1,43 @@
 package ch.cern.nile.common.json;
 
-import com.google.gson.Gson;
 import java.nio.charset.StandardCharsets;
 import java.util.Map;
+
+import com.google.gson.Gson;
+
 import org.apache.kafka.common.serialization.Serializer;
 
 public class JsonPojoSerializer<T> implements Serializer<T> {
 
-  private static final Gson gson = new Gson();
-
-  /**
-   * Default constructor needed by Kafka.
-   */
-  public JsonPojoSerializer() {
-  }
-
-  @Override
-  public void configure(Map<String, ?> props, boolean isKey) {
-  }
-
-  /**
-   * Serialize the provided data as a JSON string and convert it to bytes.
-   *
-   * @param topic The topic associated with the data.
-   * @param data The data to be serialized.
-   * @return The serialized data as bytes or null if the data is null.
-   */
-  @Override
-  public byte[] serialize(String topic, T data) {
-    if (data == null) {
-      return null;
+    private static final Gson gson = new Gson();
+
+    /**
+     * Default constructor needed by Kafka.
+     */
+    public JsonPojoSerializer() {
     }
-    return gson.toJson(data).getBytes(StandardCharsets.UTF_8);
-  }
 
-  @Override
-  public void close() {
-  }
+    @Override
+    public void configure(Map<String, ?> props, boolean isKey) {
+    }
+
+    /**
+     * Serialize the provided data as a JSON string and convert it to bytes.
+     *
+     * @param topic The topic associated with the data.
+     * @param data The data to be serialized.
+     * @return The serialized data as bytes or null if the data is null.
+     */
+    @Override
+    public byte[] serialize(String topic, T data) {
+        if (data == null) {
+            return null;
+        }
+        return gson.toJson(data).getBytes(StandardCharsets.UTF_8);
+    }
+
+    @Override
+    public void close() {
+    }
 
 }
diff --git a/src/main/java/ch/cern/nile/common/json/JsonSerde.java b/src/main/java/ch/cern/nile/common/json/JsonSerde.java
index 1afd583..1d8fcc9 100644
--- a/src/main/java/ch/cern/nile/common/json/JsonSerde.java
+++ b/src/main/java/ch/cern/nile/common/json/JsonSerde.java
@@ -1,34 +1,36 @@
 package ch.cern.nile.common.json;
 
-import com.google.gson.JsonObject;
 import java.util.Map;
+
+import com.google.gson.JsonObject;
+
 import org.apache.kafka.common.serialization.Deserializer;
 import org.apache.kafka.common.serialization.Serde;
 import org.apache.kafka.common.serialization.Serializer;
 
 public class JsonSerde implements Serde<JsonObject> {
 
-  final JsonPojoSerializer<JsonObject> serializer = new JsonPojoSerializer<>();
-  final JsonPojoDeserializer<JsonObject> deserializer = new JsonPojoDeserializer<>(JsonObject.class);
-
-  @Override
-  public void configure(Map<String, ?> configs, boolean isKey) {
-    serializer.configure(configs, isKey);
-    deserializer.configure(configs, isKey);
-  }
-
-  @Override
-  public void close() {
-    serializer.close();
-    deserializer.close();
-  }
-
-  @Override
-  public Serializer<JsonObject> serializer() {
-    return serializer;
-  }
-
-  @Override
-  public Deserializer<JsonObject> deserializer() {
-    return deserializer;
-  }
+    final JsonPojoSerializer<JsonObject> serializer = new JsonPojoSerializer<>();
+    final JsonPojoDeserializer<JsonObject> deserializer = new JsonPojoDeserializer<>(JsonObject.class);
+
+    @Override
+    public void configure(Map<String, ?> configs, boolean isKey) {
+        serializer.configure(configs, isKey);
+        deserializer.configure(configs, isKey);
+    }
+
+    @Override
+    public void close() {
+        serializer.close();
+        deserializer.close();
+    }
+
+    @Override
+    public Serializer<JsonObject> serializer() {
+        return serializer;
+    }
+
+    @Override
+    public Deserializer<JsonObject> deserializer() {
+        return deserializer;
+    }
 }
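
[Editor's sketch, not part of the patch: a round-trip through the serde pair above. Standalone and hypothetical; note the topic argument is ignored by both sides, and the deserializer is pre-bound to JsonObject.class by the JsonSerde constructor.

    import com.google.gson.JsonObject;

    import ch.cern.nile.common.json.JsonSerde;

    public class JsonSerdeExample {

        public static void main(String[] args) {
            JsonSerde serde = new JsonSerde();

            JsonObject message = new JsonObject();
            message.addProperty("deviceName", "example-sensor");
            message.addProperty("data", "AQIDBA==");

            // serialize -> UTF-8 JSON bytes, deserialize -> an equal JsonObject
            byte[] bytes = serde.serializer().serialize("any-topic", message);
            JsonObject roundTripped = serde.deserializer().deserialize("any-topic", bytes);

            System.out.println(message.equals(roundTripped)); // true
        }
    }
]
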
diff --git a/src/main/java/ch/cern/nile/common/models/Application.java b/src/main/java/ch/cern/nile/common/models/Application.java
index b91c0af..ed573ca 100644
--- a/src/main/java/ch/cern/nile/common/models/Application.java
+++ b/src/main/java/ch/cern/nile/common/models/Application.java
@@ -11,7 +11,7 @@ import lombok.ToString;
 @ToString
 public class Application {
 
-  private String name;
-  private Topic topic;
+    private String name;
+    private Topic topic;
 
 }
diff --git a/src/main/java/ch/cern/nile/common/models/Topic.java b/src/main/java/ch/cern/nile/common/models/Topic.java
index a65ec39..cfe06db 100644
--- a/src/main/java/ch/cern/nile/common/models/Topic.java
+++ b/src/main/java/ch/cern/nile/common/models/Topic.java
@@ -11,6 +11,6 @@ import lombok.ToString;
 @ToString
 public class Topic {
 
-  private String name;
+    private String name;
 
 }
diff --git a/src/main/java/ch/cern/nile/common/probes/Health.java b/src/main/java/ch/cern/nile/common/probes/Health.java
index 532cba0..5dff849 100644
--- a/src/main/java/ch/cern/nile/common/probes/Health.java
+++ b/src/main/java/ch/cern/nile/common/probes/Health.java
@@ -1,45 +1,74 @@
 package ch.cern.nile.common.probes;
 
-import com.sun.net.httpserver.HttpServer;
 import java.io.IOException;
 import java.net.InetSocketAddress;
+
+import com.sun.net.httpserver.HttpServer;
+
 import org.apache.kafka.streams.KafkaStreams;
 
+/**
+ * A simple HTTP server that responds to health checks with a 200 if the KafkaStreams instance is running,
+ * or a 500 if it is not running.
+ */
 public class Health {
 
-  private static final int OK = 200;
-  private static final int ERROR = 500;
-  private static final int PORT = 8899;
-
-  private final KafkaStreams streams;
-  private HttpServer server;
-
-  public Health(KafkaStreams streams) {
-    this.streams = streams;
-  }
-
-  /**
-   * Start the Health http server.
-   */
-  public void start() {
-    try {
-      server = HttpServer.create(new InetSocketAddress(PORT), 0);
-    } catch (IOException ioe) {
-      throw new RuntimeException("Could not setup http server: ", ioe);
+    private static final int OK = 200;
+    private static final int ERROR = 500;
+    private static final int PORT = 8899;
+
+    private final KafkaStreams streams;
+    private HttpServer server;
+    private final HttpServerFactory httpServerFactory;
+
+    /**
+     * Creates a new Health instance that will respond to health checks on port 8899.
+     *
+     * @param streams the KafkaStreams instance to check the state of
+     */
+    public Health(KafkaStreams streams) {
+        this(streams, new DefaultHttpServerFactory());
     }
-    server.createContext("/health", exchange -> {
-      int responseCode = streams.state().isRunning() ? OK : ERROR;
-      exchange.sendResponseHeaders(responseCode, 0);
-      exchange.close();
-    });
-    server.start();
-  }
-
-  /**
-   * Stops the Health HTTP server.
-   */
-  public void stop() {
-    server.stop(0);
-  }
 
+    /**
+     * Creates a new Health instance that will respond to health checks on port 8899. To be used for testing.
+     *
+     * @param streams the KafkaStreams instance to check the state of
+     * @param httpServerFactory the factory to use to create the HttpServer instance
+     */
+    public Health(KafkaStreams streams, HttpServerFactory httpServerFactory) {
+        this.streams = streams;
+        this.httpServerFactory = httpServerFactory;
+    }
+
+    /**
+     * Start the Health http server.
+     */
+    public void start() {
+        try {
+            server = httpServerFactory.createHttpServer(new InetSocketAddress(PORT), 0);
+        } catch (IOException ioe) {
+            throw new RuntimeException("Could not setup http server: ", ioe);
+        }
+        server.createContext("/health", exchange -> {
+            int responseCode = streams.state().isRunning() ? OK : ERROR;
+            exchange.sendResponseHeaders(responseCode, 0);
+            exchange.close();
+        });
+        server.start();
+    }
+
+    /**
+     * Stops the Health HTTP server.
+     */
+    public void stop() {
+        server.stop(0);
+    }
+
+    private static class DefaultHttpServerFactory implements HttpServerFactory {
+        @Override
+        public HttpServer createHttpServer(InetSocketAddress address, int backlog) throws IOException {
+            return HttpServer.create(address, backlog);
+        }
+    }
 }
diff --git a/src/main/java/ch/cern/nile/common/probes/HttpServerFactory.java b/src/main/java/ch/cern/nile/common/probes/HttpServerFactory.java
new file mode 100644
index 0000000..4744b2e
--- /dev/null
+++ b/src/main/java/ch/cern/nile/common/probes/HttpServerFactory.java
@@ -0,0 +1,13 @@
+package ch.cern.nile.common.probes;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import com.sun.net.httpserver.HttpServer;
+
+/**
+ * Factory for creating HttpServer instances. Used to allow mocking of HttpServer in tests.
+ */
+public interface HttpServerFactory {
+    HttpServer createHttpServer(InetSocketAddress address, int backlog) throws IOException;
+}
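
[Editor's sketch, not part of the patch: the factory seam above exists so tests can inject a mock server, while production wiring stays one line. Hypothetical usage, assuming a KafkaStreams instance created elsewhere (e.g. via KafkaStreamsClient.create(topology)):

    import org.apache.kafka.streams.KafkaStreams;

    import ch.cern.nile.common.probes.Health;

    public class HealthWiringExample {

        static Health exposeProbe(KafkaStreams streams) {
            Health health = new Health(streams); // uses the built-in HttpServer.create(...) factory
            health.start();                      // GET localhost:8899/health -> 200 while streams.state().isRunning()
            return health;                       // caller invokes stop() on shutdown
        }
    }
]
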
diff --git a/src/main/java/ch/cern/nile/common/schema/JsonType.java b/src/main/java/ch/cern/nile/common/schema/JsonType.java
index f157543..d2a4fc0 100644
--- a/src/main/java/ch/cern/nile/common/schema/JsonType.java
+++ b/src/main/java/ch/cern/nile/common/schema/JsonType.java
@@ -1,41 +1,36 @@
 package ch.cern.nile.common.schema;
 
+import lombok.Getter;
+
 import java.util.Date;
 
+@Getter
 enum JsonType {
 
-  BYTE(Byte.class, "int8"),
-  SHORT(Short.class, "int16"),
-  INTEGER(Integer.class, "int32"),
-  LONG(Long.class, "int64"),
-  FLOAT(Float.class, "float"),
-  DOUBLE(Double.class, "double"),
-  BOOLEAN(Boolean.class, "boolean"),
-  STRING(String.class, "string"),
-  DATE(Date.class, "int64"),
-  BYTE_ARRAY(byte[].class, "bytes");
-
-  private final Class<?> clazz;
-  private final String type;
+    BYTE(Byte.class, "int8"),
+    SHORT(Short.class, "int16"),
+    INTEGER(Integer.class, "int32"),
+    LONG(Long.class, "int64"),
+    FLOAT(Float.class, "float"),
+    DOUBLE(Double.class, "double"),
+    BOOLEAN(Boolean.class, "boolean"),
+    STRING(String.class, "string"),
+    DATE(Date.class, "int64"),
+    BYTE_ARRAY(byte[].class, "bytes");
 
-  JsonType(Class<?> clazz, String type) {
-    this.clazz = clazz;
-    this.type = type;
-  }
+    private final Class<?> clazz;
+    private final String type;
 
-  public Class<?> getClazz() {
-    return clazz;
-  }
-
-  public String getType() {
-    return type;
-  }
+    JsonType(Class<?> clazz, String type) {
+        this.clazz = clazz;
+        this.type = type;
+    }
 
-  public static JsonType fromClass(Class<?> clazz) {
-    for (JsonType jsonType : JsonType.values()) {
-      if (jsonType.getClazz().equals(clazz)) {
-        return jsonType;
-      }
+    public static JsonType fromClass(Class<?> clazz) {
+        for (JsonType jsonType : JsonType.values()) {
+            if (jsonType.getClazz().equals(clazz)) {
+                return jsonType;
+            }
+        }
+        throw new IllegalArgumentException("Unsupported class: " + clazz.getSimpleName());
     }
-    throw new IllegalArgumentException("Unsupported class: " + clazz.getSimpleName());
-  }
 }
diff --git a/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java b/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java
index be2b36e..c48f319 100644
--- a/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java
+++ b/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java
@@ -7,72 +7,72 @@ import java.util.stream.Collectors;
 
 public final class SchemaInjector {
 
-  private SchemaInjector() {
-  }
+    private SchemaInjector() {
+    }
 
-  /**
-   * Injects a Connect schema into the given data.
-   *
-   * @param data Data to inject the schema into.
-   * @return Data with the schema injected.
-   */
-  public static Map<String, Object> inject(Map<String, Object> data) {
-    Map<String, Object> dataCopy = new HashMap<>(data);
-    Map<String, Object> schemaMap = generateSchemaMap(dataCopy);
-
-    Map<String, Object> result = new HashMap<>();
-    result.put("schema", schemaMap);
-    result.put("payload", dataCopy);
+    /**
+     * Injects a Connect schema into the given data.
+     *
+     * @param data Data to inject the schema into
+     * @return Data with the schema injected
+     */
+    public static Map<String, Object> inject(Map<String, Object> data) {
+        Map<String, Object> dataCopy = new HashMap<>(data);
+        Map<String, Object> schemaMap = generateSchemaMap(dataCopy);
+
+        Map<String, Object> result = new HashMap<>();
+        result.put("schema", schemaMap);
+        result.put("payload", dataCopy);
+
+        return result;
+    }
 
-    return result;
-  }
+    private static Map<String, Object> generateSchemaMap(Map<String, Object> data) {
+        Map<String, Object> schemaMap = new HashMap<>();
+        schemaMap.put("type", "struct");
+        schemaMap.put("fields", generateFieldMaps(data));
 
-  private static Map<String, Object> generateSchemaMap(Map<String, Object> data) {
-    Map<String, Object> schemaMap = new HashMap<>();
-    schemaMap.put("type", "struct");
-    schemaMap.put("fields", generateFieldMaps(data));
+        return schemaMap;
+    }
 
-    return schemaMap;
-  }
+    private static Iterable<Map<String, Object>> generateFieldMaps(Map<String, Object> data) {
+        return data.entrySet().stream().map(SchemaInjector::generateFieldMap).collect(Collectors.toList());
+    }
 
-  private static Iterable<Map<String, Object>> generateFieldMaps(Map<String, Object> data) {
-    return data.entrySet().stream().map(SchemaInjector::generateFieldMap).collect(Collectors.toList());
-  }
+    private static Map<String, Object> generateFieldMap(Map.Entry<String, Object> entry) {
+        Map<String, Object> fieldMap = new HashMap<>();
+        String key = entry.getKey();
+        Object value = entry.getValue();
 
-  private static Map<String, Object> generateFieldMap(Map.Entry<String, Object> entry) {
-    Map<String, Object> fieldMap = new HashMap<>();
-    String key = entry.getKey();
-    Object value = entry.getValue();
+        validateValue(value);
 
-    validateValue(value);
+        JsonType type = JsonType.fromClass(value.getClass());
 
-    JsonType type = JsonType.fromClass(value.getClass());
+        fieldMap.put("field", key);
+        fieldMap.put("type", type.getType());
+        fieldMap.put("optional", !key.toLowerCase().contains("timestamp"));
 
-    fieldMap.put("field", key);
-    fieldMap.put("type", type.getType());
-    fieldMap.put("optional", !key.toLowerCase().contains("timestamp"));
+        addTimestampAndDateFields(fieldMap, key, type);
 
-    addTimestampAndDateFields(fieldMap, key, type);
+        return fieldMap;
+    }
 
-    return fieldMap;
-  }
+    private static void validateValue(Object value) {
+        if (value == null) {
+            throw new IllegalArgumentException("Null values are not allowed in the data map.");
+        }
+    }
 
-  private static void validateValue(Object value) {
-    if (value == null) {
-      throw new IllegalArgumentException("Null values are not allowed in the data map.");
+    private static void addTimestampAndDateFields(Map<String, Object> fieldMap, String key, JsonType type) {
+        boolean isTimestampField = key.toLowerCase().contains("timestamp");
+        boolean isDateType = type.getClazz().equals(Date.class);
+
+        if (isTimestampField) {
+            fieldMap.put("name", "org.apache.kafka.connect.data.Timestamp");
+            fieldMap.put("version", 1);
+        } else if (isDateType) {
"org.apache.kafka.connect.data.Date"); + fieldMap.put("version", 1); + } } - } } diff --git a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java index 8ba7298..9ade1ba 100644 --- a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java +++ b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java @@ -1,144 +1,209 @@ package ch.cern.nile.common.streams; -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; import java.time.DateTimeException; import java.time.Instant; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.concurrent.CountDownLatch; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; + import org.apache.kafka.streams.KafkaStreams; import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.Topology; import org.apache.kafka.streams.kstream.ValueTransformer; import org.apache.kafka.streams.processor.ProcessorContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import ch.cern.nile.common.clients.KafkaStreamsClient; import ch.cern.nile.common.configs.StreamConfig; import ch.cern.nile.common.probes.Health; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; + +import lombok.Getter; +import lombok.Setter; public abstract class AbstractStream implements Streaming { - private final Logger LOGGER = LoggerFactory.getLogger(getClass()); - - KafkaStreams streams; - protected Properties configs; - protected String sourceTopic; - protected String sinkTopic; - protected long lastReadOffset = -2; - private Health health; - private CountDownLatch latch; - - @Override - public void configure(Properties configs) { - this.configs = configs; - } - - @Override - public void stream(KafkaStreamsClient kafkaStreamsClient) { - init(kafkaStreamsClient); - Runtime.getRuntime().addShutdownHook(new Thread(this::shutDown, "streams-shutdown-hook")); - start(); - System.exit(0); - } - - public String getProperty(String key) { - return configs.getProperty(key); - } - - protected static void addTimestamp(JsonArray gatewayInfo, Map<String, Object> map) throws DateTimeException { - final String timestampKey = "timestamp"; - final String timeKey = "time"; - - for (JsonElement element : gatewayInfo) { - if (element.isJsonObject()) { - JsonObject entry = element.getAsJsonObject(); - if (entry.has(timeKey)) { - map.put(timestampKey, Instant.parse(entry.get(timeKey).getAsString()).toEpochMilli()); - break; - } - } - } - if (!map.containsKey(timestampKey)) { - throw new DateTimeException(String.format("No '%s' field found in gateway info (dropping the message): %s", timeKey, gatewayInfo)); - } - } + private final Logger LOGGER = LoggerFactory.getLogger(getClass()); + + @Getter + private KafkaStreams streams; + @Getter + @Setter + private String sourceTopic; - protected static boolean filterNull(String k, Object v) { - return v != null; - } + @Getter + @Setter + private String sinkTopic; - protected static boolean filterEmpty(String k, Object v) { - return !(v instanceof List && ((List<?>) v).isEmpty()); - } + @Getter + @Setter + private long lastReadOffset = -2; + + private Properties configs; + private Health health; + private CountDownLatch latch; + + /** + * Configure the stream with the given properties. 
diff --git a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java
index 8ba7298..9ade1ba 100644
--- a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java
+++ b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java
@@ -1,144 +1,209 @@
 package ch.cern.nile.common.streams;
 
-import com.google.gson.JsonArray;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
 import java.time.DateTimeException;
 import java.time.Instant;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 import java.util.concurrent.CountDownLatch;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+
 import org.apache.kafka.streams.KafkaStreams;
 import org.apache.kafka.streams.StreamsBuilder;
 import org.apache.kafka.streams.Topology;
 import org.apache.kafka.streams.kstream.ValueTransformer;
 import org.apache.kafka.streams.processor.ProcessorContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import ch.cern.nile.common.clients.KafkaStreamsClient;
 import ch.cern.nile.common.configs.StreamConfig;
 import ch.cern.nile.common.probes.Health;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+
+import lombok.Getter;
+import lombok.Setter;
 
 public abstract class AbstractStream implements Streaming {
 
-  private final Logger LOGGER = LoggerFactory.getLogger(getClass());
-
-  KafkaStreams streams;
-  protected Properties configs;
-  protected String sourceTopic;
-  protected String sinkTopic;
-  protected long lastReadOffset = -2;
-  private Health health;
-  private CountDownLatch latch;
-
-  @Override
-  public void configure(Properties configs) {
-    this.configs = configs;
-  }
-
-  @Override
-  public void stream(KafkaStreamsClient kafkaStreamsClient) {
-    init(kafkaStreamsClient);
-    Runtime.getRuntime().addShutdownHook(new Thread(this::shutDown, "streams-shutdown-hook"));
-    start();
-    System.exit(0);
-  }
-
-  public String getProperty(String key) {
-    return configs.getProperty(key);
-  }
-
-  protected static void addTimestamp(JsonArray gatewayInfo, Map<String, Object> map) throws DateTimeException {
-    final String timestampKey = "timestamp";
-    final String timeKey = "time";
-
-    for (JsonElement element : gatewayInfo) {
-      if (element.isJsonObject()) {
-        JsonObject entry = element.getAsJsonObject();
-        if (entry.has(timeKey)) {
-          map.put(timestampKey, Instant.parse(entry.get(timeKey).getAsString()).toEpochMilli());
-          break;
-        }
-      }
-    }
-    if (!map.containsKey(timestampKey)) {
-      throw new DateTimeException(String.format("No '%s' field found in gateway info (dropping the message): %s", timeKey, gatewayInfo));
-    }
-  }
+    private final Logger LOGGER = LoggerFactory.getLogger(getClass());
+
+    @Getter
+    private KafkaStreams streams;
+
+    @Getter
+    @Setter
+    private String sourceTopic;
 
-  protected static boolean filterNull(String k, Object v) {
-    return v != null;
-  }
+    @Getter
+    @Setter
+    private String sinkTopic;
 
-  protected static boolean filterEmpty(String k, Object v) {
-    return !(v instanceof List && ((List<?>) v).isEmpty());
-  }
+    @Getter
+    @Setter
+    private long lastReadOffset = -2;
+
+    private Properties configs;
+    private Health health;
+    private CountDownLatch latch;
+
+    /**
+     * Configure the stream with the given properties.
+     *
+     * @param configs the properties to configure the stream with
+     */
+    @Override
+    public void configure(final Properties configs) {
+        this.configs = configs;
+    }
+
+    /**
+     * Start the stream.
+     *
+     * @param kafkaStreamsClient the client to use to create the stream
+     */
+    @Override
+    public void stream(final KafkaStreamsClient kafkaStreamsClient) {
+        init(kafkaStreamsClient);
+        Runtime.getRuntime().addShutdownHook(new Thread(this::shutDown, "streams-shutdown-hook"));
+        start();
+        System.exit(0);
+    }
 
-  protected boolean filterRecord(String k, JsonObject v) {
-    return v != null && v.get("applicationID") != null && v.get("applicationName") != null && v.get("deviceName") != null && v.get("devEUI") != null
-        && v.get("data") != null;
-  }
+    /**
+     * Get a property from the stream's configuration.
+     *
+     * @param key the key of the property to get
+     * @return the value of the property
+     */
+    public String getProperty(final String key) {
+        return configs.getProperty(key);
+    }
 
-  protected void logStreamsException(Exception e) {
-    LOGGER.warn(String.format("Error reading from topic %s. Last read offset %s:", sourceTopic, lastReadOffset), e);
-    if (streams != null) {
-      LOGGER.info(String.format("Streams state is: %s", streams.state().toString()));
+    protected static void addTimestamp(final JsonArray gatewayInfo, final Map<String, Object> map)
+            throws DateTimeException {
+        final String timestampKey = "timestamp";
+        final String timeKey = "time";
+
+        for (JsonElement element : gatewayInfo) {
+            if (element.isJsonObject()) {
+                JsonObject entry = element.getAsJsonObject();
+                if (entry.has(timeKey)) {
+                    map.put(timestampKey, Instant.parse(entry.get(timeKey).getAsString()).toEpochMilli());
+                    break;
+                }
+            }
+        }
+        if (!map.containsKey(timestampKey)) {
+            throw new DateTimeException(
+                    String.format("No '%s' field found in gateway info (dropping the message): %s", timeKey,
+                            gatewayInfo));
+        }
     }
-  }
 
-  public abstract void createTopology(StreamsBuilder builder);
+    protected static boolean filterNull(final String k, final Object v) {
+        return v != null;
+    }
 
-  private void init(KafkaStreamsClient kafkaStreamsClient) {
-    final StreamsBuilder builder = new StreamsBuilder();
-    sourceTopic = configs.getProperty(StreamConfig.ClientProperties.SOURCE_TOPIC.getValue());
-    sinkTopic = configs.getProperty(StreamConfig.DecodingProperties.SINK_TOPIC.getValue());
-    createTopology(builder);
-    final Topology topology = builder.build();
-    streams = kafkaStreamsClient.create(topology);
-    health = new Health(streams);
-    latch = new CountDownLatch(1);
-  }
+    protected static boolean filterEmpty(final String k, final Object v) {
+        if (v instanceof List) {
+            return !((List<?>) v).isEmpty();
+        } else if (v instanceof Map) {
+            return !((Map<?, ?>) v).isEmpty();
+        }
+        return false;
+    }
 
+    /**
+     * Filter out records that do not have the required fields.
+ * + * @param k the key + * @param v the value + * @return true if the record has the required fields, false otherwise + */ + protected boolean filterRecord(String k, JsonObject v) { + return v != null && v.get("applicationID") != null && v.get("applicationName") != null && + v.get("deviceName") != null && v.get("devEUI") != null + && v.get("data") != null; + } - private ProcessorContext context; + /** + * Log an exception that occurred while reading from the source topic. + * + * @param e the exception + */ + protected void logStreamsException(Exception e) { + LOGGER.warn(String.format("Error reading from topic %s. Last read offset %s:", sourceTopic, lastReadOffset), e); + if (streams != null) { + LOGGER.info(String.format("Streams state is: %s", streams.state().toString())); + } + } - @Override - public void init(ProcessorContext context) { - this.context = context; + public abstract void createTopology(StreamsBuilder builder); + + private void init(KafkaStreamsClient kafkaStreamsClient) { + final StreamsBuilder builder = new StreamsBuilder(); + sourceTopic = configs.getProperty(StreamConfig.ClientProperties.SOURCE_TOPIC.getValue()); + sinkTopic = configs.getProperty(StreamConfig.DecodingProperties.SINK_TOPIC.getValue()); + createTopology(builder); + final Topology topology = builder.build(); + streams = kafkaStreamsClient.create(topology); + health = new Health(streams); + latch = new CountDownLatch(1); } - @Override - public JsonObject transform(JsonObject value) { - value.addProperty("offset", context.offset()); - return value; + private void start() { + LOGGER.info("Starting streams..."); + try { + streams.start(); + health.start(); + latch.await(); + } catch (Exception e) { + LOGGER.error("Could not start streams.", e); + System.exit(1); + } } - @Override - public void close() { + private void shutDown() { + LOGGER.info("Shutting down streams..."); + streams.close(); + health.stop(); + latch.countDown(); } - } + public static class InjectOffsetTransformer implements ValueTransformer<JsonObject, JsonObject> { + + private ProcessorContext context; + + /** + * Initialize this transformer. + * + * @param context the context of this processor + */ + @Override + public void init(final ProcessorContext context) { + this.context = context; + } + + /** + * Transform the given value. 
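+         * <p>
+         * Example with made-up values: a record {@code {"data":"AQ=="}} read at offset 42
+         * is forwarded as {@code {"data":"AQ==","offset":42}}.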
+ * + * @param value the value to be transformed + * @return the transformed value + */ + @Override + public JsonObject transform(final JsonObject value) { + value.addProperty("offset", context.offset()); + return value; + } + + @Override + public void close() { + } + + } } diff --git a/src/main/java/ch/cern/nile/common/streams/Streaming.java b/src/main/java/ch/cern/nile/common/streams/Streaming.java index 1aa171e..36fb7a3 100644 --- a/src/main/java/ch/cern/nile/common/streams/Streaming.java +++ b/src/main/java/ch/cern/nile/common/streams/Streaming.java @@ -1,10 +1,10 @@ package ch.cern.nile.common.streams; -import ch.cern.nile.common.configs.Configure; import ch.cern.nile.common.clients.KafkaStreamsClient; +import ch.cern.nile.common.configs.Configure; public interface Streaming extends Configure { - void stream(KafkaStreamsClient kafkaStreamsClient); + void stream(KafkaStreamsClient kafkaStreamsClient); } diff --git a/src/main/resources/log4j.properties b/src/main/resources/log4j.properties index 9ba3a46..a5459ae 100644 --- a/src/main/resources/log4j.properties +++ b/src/main/resources/log4j.properties @@ -1,8 +1,7 @@ # Root logger option log4j.rootLogger=INFO, stdout - # Direct log messages to stdout log4j.appender.stdout=org.apache.log4j.ConsoleAppender log4j.appender.stdout.Target=System.out log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1} - %m%n \ No newline at end of file +log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1} - %m%n diff --git a/src/test/java/ch/cern/nile/common/clients/KafkaStreamsClientTest.java b/src/test/java/ch/cern/nile/common/clients/KafkaStreamsClientTest.java new file mode 100644 index 0000000..5c5c63d --- /dev/null +++ b/src/test/java/ch/cern/nile/common/clients/KafkaStreamsClientTest.java @@ -0,0 +1,115 @@ +package ch.cern.nile.common.clients; + +import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import java.net.UnknownHostException; +import java.util.Properties; + +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.StreamsConfig; +import org.apache.kafka.streams.Topology; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; + +import ch.cern.nile.common.configs.StreamConfig; +import ch.cern.nile.common.exceptions.ReverseDnsLookupException; + +class KafkaStreamsClientTest { + + private KafkaStreamsClient client; + private Properties properties; + private Topology topology; + + @Mock + private KafkaStreams kafkaStreams; + + private AutoCloseable closeable; + + + @BeforeEach + public void setup() { + closeable = MockitoAnnotations.openMocks(this); + client = new KafkaStreamsClient() { + + @Override + @SuppressWarnings("checkstyle:HiddenField") + public KafkaStreams create(Topology topology) { + return kafkaStreams; + } + + @Override + protected String performDnsLookup(String kafkaCluster) throws UnknownHostException { + if (kafkaCluster.equals("invalidCluster")) { + throw new UnknownHostException("Invalid cluster"); + } + return "localhost:9092"; + } + }; + properties = new Properties(); + topology = Mockito.mock(Topology.class); + } + + @AfterEach + public void tearDown() throws Exception { + closeable.close(); + } + + @Test + void 
givenNonTestCluster_whenConfigure_thenKafkaStreamsCreated() { + properties.setProperty(StreamConfig.ClientProperties.CLIENT_ID.getValue(), "testClientId"); + properties.setProperty(StreamConfig.ClientProperties.KAFKA_CLUSTER.getValue(), "nonTestCluster"); + properties.setProperty(StreamConfig.ClientProperties.TRUSTSTORE_LOCATION.getValue(), "/path/to/truststore"); + properties.setProperty(StreamsConfig.SECURITY_PROTOCOL_CONFIG, "PLAINTEXT"); + + client.configure(properties); + + KafkaStreams streams = client.create(topology); + assertNotNull(streams, "KafkaStreams object should not be null"); + } + + @Test + void givenTestCluster_whenConfigure_thenKafkaStreamsCreated() { + properties.setProperty(StreamConfig.ClientProperties.CLIENT_ID.getValue(), "testClientId"); + properties.setProperty(StreamConfig.ClientProperties.KAFKA_CLUSTER.getValue(), "test"); + properties.setProperty("bootstrap.servers", "localhost:9092"); + + client.configure(properties); + + KafkaStreams streams = client.create(topology); + assertNotNull(streams, "KafkaStreams object should not be null"); + } + + @Test + void givenInvalidCluster_whenConfigure_thenReverseDnsLookupExceptionThrown() { + properties.setProperty(StreamConfig.ClientProperties.CLIENT_ID.getValue(), "testClientId"); + properties.setProperty(StreamConfig.ClientProperties.KAFKA_CLUSTER.getValue(), "invalidCluster"); + + assertThrows(ReverseDnsLookupException.class, () -> client.configure(properties), + "Should throw ReverseDnsLookupException"); + } + + @Test + void givenKnownDomain_whenPerformDnsLookup_thenResultContainsPort9093() throws UnknownHostException { + String domain = "www.google.com"; + String result = new KafkaStreamsClient().performDnsLookup(domain); + + assertNotNull(result, "Result should not be null"); + assertTrue("Result should contain port 9093", result.contains(":9093")); + } + + @Test + void givenLocalhost_whenPerformDnsLookup_thenResultContainsPort9093() throws UnknownHostException { + String domain = "localhost"; + String result = new KafkaStreamsClient().performDnsLookup(domain); + + assertNotNull(result); + assertTrue("Result should contain port 9093", result.contains(":9093")); + } + +} diff --git a/src/test/java/ch/cern/nile/common/configs/PropertiesCheckTest.java b/src/test/java/ch/cern/nile/common/configs/PropertiesCheckTest.java index 523fa22..2c50497 100644 --- a/src/test/java/ch/cern/nile/common/configs/PropertiesCheckTest.java +++ b/src/test/java/ch/cern/nile/common/configs/PropertiesCheckTest.java @@ -1,58 +1,81 @@ package ch.cern.nile.common.configs; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertThrows; import java.util.Properties; import org.junit.jupiter.api.Test; +import ch.cern.nile.common.exceptions.MissingPropertyException; + class PropertiesCheckTest { - @Test - void validateProperties_ThrowsRuntimeException_forIllegalArguments() { - final Properties properties = new Properties(); - - assertThrows(RuntimeException.class, - () -> PropertiesCheck.validateProperties(null, StreamType.ROUTING), - "Properties object cannot be null"); - - assertThrows(RuntimeException.class, - () -> PropertiesCheck.validateProperties(properties, null), - "Properties file is missing stream.type property"); - } - - @Test - void validateProperties_PassesValidation_forDecoding() { - final Properties properties = new Properties(); - initClientAndCommonProperties(properties); - properties.put(StreamConfig.DecodingProperties.SINK_TOPIC.getValue(), ""); - 
PropertiesCheck.validateProperties(properties, StreamType.DECODING); - } - - @Test - void validateProperties_PassesValidation_forRouting() { - final Properties properties = new Properties(); - initClientAndCommonProperties(properties); - properties.put(StreamConfig.RoutingProperties.ROUTING_CONFIG_PATH.getValue(), ""); - properties.put(StreamConfig.RoutingProperties.DLQ_TOPIC.getValue(), ""); - PropertiesCheck.validateProperties(properties, StreamType.ROUTING); - } - - @Test - void validateProperties_PassesValidation_forEnrichment() { - final Properties properties = new Properties(); - initClientAndCommonProperties(properties); - properties.put(StreamConfig.EnrichmentProperties.ENRICHMENT_CONFIG_PATH.getValue(), ""); - properties.put(StreamConfig.EnrichmentProperties.SINK_TOPIC.getValue(), ""); - PropertiesCheck.validateProperties(properties, StreamType.ENRICHMENT); - } - - private void initClientAndCommonProperties(Properties properties) { - properties.put(StreamConfig.ClientProperties.CLIENT_ID.getValue(), ""); - properties.put(StreamConfig.ClientProperties.KAFKA_CLUSTER.getValue(), ""); - properties.put(StreamConfig.ClientProperties.SOURCE_TOPIC.getValue(), ""); - properties.put(StreamConfig.ClientProperties.TRUSTSTORE_LOCATION.getValue(), ""); - properties.put(StreamConfig.CommonProperties.STREAM_CLASS.getValue(), ""); - properties.put(StreamConfig.CommonProperties.STREAM_TYPE.getValue(), ""); - } + @Test + void givenNullProperties_whenValidateProperties_thenThrowsRuntimeException() { + assertThrows(RuntimeException.class, () -> PropertiesCheck.validateProperties(null, StreamType.ROUTING), + "Properties object cannot be null"); + } + + @Test + void givenNullStreamType_whenValidateProperties_thenThrowsRuntimeException() { + final Properties properties = new Properties(); + + assertThrows(RuntimeException.class, () -> PropertiesCheck.validateProperties(properties, null), + "Properties file is missing stream.type property"); + } + + @Test + void givenValidDecodingProperties_whenValidateProperties_thenPassesValidation() { + final Properties properties = new Properties(); + initClientAndCommonProperties(properties); + properties.put(StreamConfig.DecodingProperties.SINK_TOPIC.getValue(), ""); + + assertDoesNotThrow(() -> PropertiesCheck.validateProperties(properties, StreamType.DECODING), + "Should not throw exception"); + } + + @Test + void givenValidRoutingProperties_whenValidateProperties_thenPassesValidation() { + final Properties properties = new Properties(); + initClientAndCommonProperties(properties); + properties.put(StreamConfig.RoutingProperties.ROUTING_CONFIG_PATH.getValue(), ""); + properties.put(StreamConfig.RoutingProperties.DLQ_TOPIC.getValue(), ""); + + assertDoesNotThrow(() -> PropertiesCheck.validateProperties(properties, StreamType.ROUTING), + "Should not throw exception"); + } + + @Test + void givenValidEnrichmentProperties_whenValidateProperties_thenPassesValidation() { + final Properties properties = new Properties(); + initClientAndCommonProperties(properties); + properties.put(StreamConfig.EnrichmentProperties.ENRICHMENT_CONFIG_PATH.getValue(), ""); + properties.put(StreamConfig.EnrichmentProperties.SINK_TOPIC.getValue(), ""); + + assertDoesNotThrow(() -> PropertiesCheck.validateProperties(properties, StreamType.ENRICHMENT), + "Should not throw exception"); + } + + @Test + void givenMissingRequiredProperty_whenValidateProperties_thenThrowsMissingPropertyException() { + final Properties properties = new Properties(); + initClientAndCommonProperties(properties); + // 
Remove a required property for routing, for example + properties.remove(StreamConfig.RoutingProperties.ROUTING_CONFIG_PATH.getValue()); + + assertThrows(MissingPropertyException.class, + () -> PropertiesCheck.validateProperties(properties, StreamType.ROUTING), + "Properties file is missing: routing.config.path property."); + } + + private void initClientAndCommonProperties(Properties properties) { + properties.put(StreamConfig.ClientProperties.CLIENT_ID.getValue(), ""); + properties.put(StreamConfig.ClientProperties.KAFKA_CLUSTER.getValue(), ""); + properties.put(StreamConfig.ClientProperties.SOURCE_TOPIC.getValue(), ""); + properties.put(StreamConfig.ClientProperties.TRUSTSTORE_LOCATION.getValue(), ""); + properties.put(StreamConfig.CommonProperties.STREAM_CLASS.getValue(), ""); + properties.put(StreamConfig.CommonProperties.STREAM_TYPE.getValue(), ""); + } + } diff --git a/src/test/java/ch/cern/nile/common/configs/StreamConfigTest.java b/src/test/java/ch/cern/nile/common/configs/StreamConfigTest.java index e2a7674..728fa31 100644 --- a/src/test/java/ch/cern/nile/common/configs/StreamConfigTest.java +++ b/src/test/java/ch/cern/nile/common/configs/StreamConfigTest.java @@ -4,43 +4,80 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import java.util.Set; + import org.junit.jupiter.api.Test; class StreamConfigTest { - @Test - void testClientProperties() { - Set<String> expectedConfigs = Set.of("source.topic", "kafka.cluster", "client.id", "truststore.location"); - assertEquals(expectedConfigs, StreamConfig.ClientProperties.getValues()); - assertThrows(IllegalArgumentException.class, () -> StreamConfig.ClientProperties.valueOf("unknown.property")); - } - - @Test - void testCommonProperties() { - Set<String> expectedConfigs = Set.of("stream.type", "stream.class"); - assertEquals(expectedConfigs, StreamConfig.CommonProperties.getValues()); - assertThrows(IllegalArgumentException.class, () -> StreamConfig.CommonProperties.valueOf("unknown.property")); - } - - @Test - void testDecodingProperties() { - Set<String> expectedConfigs = Set.of("sink.topic"); - assertEquals(expectedConfigs, StreamConfig.DecodingProperties.getValues()); - assertThrows(IllegalArgumentException.class, () -> StreamConfig.DecodingProperties.valueOf("unknown.property")); - } - - @Test - void testRoutingProperties() { - Set<String> expectedConfigs = Set.of("routing.config.path", "dlq.topic"); - assertEquals(expectedConfigs, StreamConfig.RoutingProperties.getValues()); - assertThrows(IllegalArgumentException.class, () -> StreamConfig.RoutingProperties.valueOf("unknown.property")); - } - - @Test - void testEnrichmentProperties() { - Set<String> expectedConfigs = Set.of("enrichment.config.path", "sink.topic"); - assertEquals(expectedConfigs, StreamConfig.EnrichmentProperties.getValues()); - assertThrows(IllegalArgumentException.class, () -> StreamConfig.EnrichmentProperties.valueOf("unknown.property")); - } + @Test + void givenClientPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { + Set<String> expectedConfigs = Set.of("source.topic", "kafka.cluster", "client.id", "truststore.location"); + Set<String> actualConfigs = StreamConfig.ClientProperties.getValues(); + + assertEquals(expectedConfigs, actualConfigs, "Should return expected set of configs"); + } + + @Test + void givenClientPropertiesEnum_whenValueOfWithUnknownProperty_thenThrowsIllegalArgumentException() { + assertThrows(IllegalArgumentException.class, () -> 
StreamConfig.ClientProperties.valueOf("unknown.property"), + "Should throw IllegalArgumentException"); + } + + @Test + void givenCommonPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { + Set<String> expectedConfigs = Set.of("stream.type", "stream.class"); + Set<String> actualConfigs = StreamConfig.CommonProperties.getValues(); + + assertEquals(expectedConfigs, actualConfigs, "Should return expected set of configs"); + } + + @Test + void givenCommonPropertiesEnum_whenValueOfWithUnknownProperty_thenThrowsIllegalArgumentException() { + assertThrows(IllegalArgumentException.class, () -> StreamConfig.CommonProperties.valueOf("unknown.property"), + "Should throw IllegalArgumentException"); + } + + @Test + void givenDecodingPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { + Set<String> expectedConfigs = Set.of("sink.topic"); + Set<String> actualConfigs = StreamConfig.DecodingProperties.getValues(); + + assertEquals(expectedConfigs, actualConfigs, "Should return expected set of configs"); + } + + @Test + void givenDecodingPropertiesEnum_whenValueOfWithUnknownProperty_thenThrowsIllegalArgumentException() { + assertThrows(IllegalArgumentException.class, () -> StreamConfig.DecodingProperties.valueOf("unknown.property"), + "Should throw IllegalArgumentException"); + } + + @Test + void givenRoutingPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { + Set<String> expectedConfigs = Set.of("routing.config.path", "dlq.topic"); + Set<String> actualConfigs = StreamConfig.RoutingProperties.getValues(); + + assertEquals(expectedConfigs, actualConfigs, "Should return expected set of configs"); + } + + @Test + void givenRoutingPropertiesEnum_whenValueOfWithUnknownProperty_thenThrowsIllegalArgumentException() { + assertThrows(IllegalArgumentException.class, () -> StreamConfig.RoutingProperties.valueOf("unknown.property"), + "Should throw IllegalArgumentException"); + } + + @Test + void givenEnrichmentPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { + Set<String> expectedConfigs = Set.of("enrichment.config.path", "sink.topic"); + Set<String> actualConfigs = StreamConfig.EnrichmentProperties.getValues(); + + assertEquals(expectedConfigs, actualConfigs, "Should return expected set of configs"); + } + + @Test + void givenEnrichmentPropertiesEnum_whenValueOfWithUnknownProperty_thenThrowsIllegalArgumentException() { + assertThrows(IllegalArgumentException.class, + () -> StreamConfig.EnrichmentProperties.valueOf("unknown.property"), + "Should throw IllegalArgumentException"); + } } diff --git a/src/test/java/ch/cern/nile/common/configs/StreamTypeTest.java b/src/test/java/ch/cern/nile/common/configs/StreamTypeTest.java index 5dd6bf9..974c7d2 100644 --- a/src/test/java/ch/cern/nile/common/configs/StreamTypeTest.java +++ b/src/test/java/ch/cern/nile/common/configs/StreamTypeTest.java @@ -7,23 +7,29 @@ import org.junit.jupiter.api.Test; class StreamTypeTest { - @Test - void findByValue_MapsToRouting_forValueRouting() { - assertEquals(StreamType.ROUTING, StreamType.valueOf("ROUTING")); - } - - @Test - void findByValue_MapsToDecoding_forValueDecoding() { - assertEquals(StreamType.DECODING, StreamType.valueOf("DECODING")); - } - - @Test - void findByValue_MapsToEnrichment_forValueEnrichment() { - assertEquals(StreamType.ENRICHMENT, StreamType.valueOf("ENRICHMENT")); - } - - @Test - void findByValue_ThrowsRuntimeException_forUnknownStreamType() { - assertThrows(IllegalArgumentException.class, () -> StreamType.valueOf("Unknown")); - } + @Test + void 
givenKnownStreamTypeRouting_whenFindByValue_thenMapsToRouting() { + StreamType result = StreamType.valueOf("ROUTING"); + + assertEquals(StreamType.ROUTING, result, "Should return expected stream type"); + } + + @Test + void givenKnownStreamTypeDecoding_whenFindByValue_thenMapsToDecoding() { + StreamType result = StreamType.valueOf("DECODING"); + + assertEquals(StreamType.DECODING, result, "Should return expected stream type"); + } + + @Test + void givenKnownStreamTypeEnrichment_whenFindByValue_thenMapsToEnrichment() { + StreamType result = StreamType.valueOf("ENRICHMENT"); + + assertEquals(StreamType.ENRICHMENT, result, "Should return expected stream type"); + } + + @Test + void givenUnknownStreamType_whenFindByValue_thenThrowsIllegalArgumentException() { + assertThrows(IllegalArgumentException.class, () -> StreamType.valueOf("Unknown"), "Should throw exception"); + } } diff --git a/src/test/java/ch/cern/nile/common/json/JsonPojoDeserializerTest.java b/src/test/java/ch/cern/nile/common/json/JsonPojoDeserializerTest.java index f5f1261..9375943 100644 --- a/src/test/java/ch/cern/nile/common/json/JsonPojoDeserializerTest.java +++ b/src/test/java/ch/cern/nile/common/json/JsonPojoDeserializerTest.java @@ -4,52 +4,60 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; import java.util.Map; + +import org.junit.jupiter.api.Test; + import ch.cern.nile.common.models.Application; import ch.cern.nile.common.models.Topic; -import org.junit.jupiter.api.Test; class JsonPojoDeserializerTest { - private final JsonPojoDeserializer<Application> applicationDeserializer = new JsonPojoDeserializer<>(Application.class); - private final JsonPojoDeserializer<Topic> topicDeserializer = new JsonPojoDeserializer<>(Topic.class); - - @Test - void deserialize_Application_ReturnsApplication() { - String json = "{\"name\":\"my-app\",\"topic\":{\"name\":\"my-topic\"}}"; - Application expected = new Application(); - expected.setName("my-app"); - expected.setTopic(new Topic()); - expected.getTopic().setName("my-topic"); - Application actual = applicationDeserializer.deserialize("test-topic", json.getBytes()); - assertEquals(expected.toString(), actual.toString()); - } - - @Test - void deserialize_Topic_ReturnsTopic() { - String json = "{\"name\":\"my-topic\"}"; - Topic expected = new Topic(); - expected.setName("my-topic"); - Topic actual = topicDeserializer.deserialize("test-topic", json.getBytes()); - assertEquals(expected.toString(), actual.toString()); - } - - @Test - void deserialize_NullBytes_ReturnsNull() { - assertNull(applicationDeserializer.deserialize("test-topic", null)); - } - - @Test - void deserialize_NullJson_ReturnsNull() { - assertNull(applicationDeserializer.deserialize("test-topic", "null".getBytes())); - } - - @Test - void configure_SetJsonPOJOClass_SetsClass() { - try (JsonPojoDeserializer<Topic> deserializer = new JsonPojoDeserializer<>()) { - assertNull(deserializer.tClass); - deserializer.configure(Map.of("JsonPOJOClass", Topic.class), true); - assertEquals(Topic.class, deserializer.tClass); + private final JsonPojoDeserializer<Application> applicationDeserializer = + new JsonPojoDeserializer<>(Application.class); + private final JsonPojoDeserializer<Topic> topicDeserializer = new JsonPojoDeserializer<>(Topic.class); + + @Test + void givenJsonWithApplication_whenDeserialize_thenReturnsApplication() { + String json = "{\"name\":\"my-app\",\"topic\":{\"name\":\"my-topic\"}}"; + + Application expected = new Application(); + 
expected.setName("my-app"); + expected.setTopic(new Topic()); + expected.getTopic().setName("my-topic"); + Application actual = applicationDeserializer.deserialize("test-topic", json.getBytes()); + + assertEquals(expected.toString(), actual.toString(), "Application deserialized incorrectly"); } - } + @Test + void givenJsonWithTopic_whenDeserialize_thenReturnsTopic() { + String json = "{\"name\":\"my-topic\"}"; + + Topic expected = new Topic(); + expected.setName("my-topic"); + Topic actual = topicDeserializer.deserialize("test-topic", json.getBytes()); + + assertEquals(expected.toString(), actual.toString(), "Topic deserialized incorrectly"); + } + + @Test + void givenNullBytes_whenDeserialize_thenReturnsNull() { + assertNull(applicationDeserializer.deserialize("test-topic", null), "Null bytes should return null"); + } + + @Test + void givenNullJson_whenDeserialize_thenReturnsNull() { + assertNull(applicationDeserializer.deserialize("test-topic", "null".getBytes()), + "Null json should return null"); + } + + @Test + void givenConfigureWithJsonPOJOClass_whenConfigure_thenSetsClass() { + try (JsonPojoDeserializer<Topic> deserializer = new JsonPojoDeserializer<>()) { + assertNull(deserializer.tClass, "Class should be null"); + deserializer.configure(Map.of("JsonPOJOClass", Topic.class), true); + + assertEquals(Topic.class, deserializer.tClass, "Class not set correctly"); + } + } } diff --git a/src/test/java/ch/cern/nile/common/json/JsonPojoSerializerTest.java b/src/test/java/ch/cern/nile/common/json/JsonPojoSerializerTest.java index 4f7d9b7..d995c9b 100644 --- a/src/test/java/ch/cern/nile/common/json/JsonPojoSerializerTest.java +++ b/src/test/java/ch/cern/nile/common/json/JsonPojoSerializerTest.java @@ -6,40 +6,42 @@ import static org.junit.jupiter.api.Assertions.assertNull; import java.util.Collections; import java.util.HashMap; import java.util.Map; + import org.junit.jupiter.api.Test; class JsonPojoSerializerTest { - @Test - void configure_doesNotThrowException() { - try (JsonPojoSerializer<Object> serializer = new JsonPojoSerializer<>()) { - serializer.configure(Collections.emptyMap(), true); + @Test + void givenEmptyConfig_whenConfigure_thenDoesNotThrowException() { + try (JsonPojoSerializer<Object> serializer = new JsonPojoSerializer<>()) { + serializer.configure(Collections.emptyMap(), true); + } } - } - @Test - void serialize_withNullData_ReturnsNull() { - try (JsonPojoSerializer<Object> serializer = new JsonPojoSerializer<>()) { - assertNull(serializer.serialize("topic", null)); + @Test + void givenNullData_whenSerialize_thenReturnsNull() { + try (JsonPojoSerializer<Object> serializer = new JsonPojoSerializer<>()) { + assertNull(serializer.serialize("topic", null)); + } } - } - @Test - void serialize_withNonNullData_ReturnsJsonBytes() { - Map<String, String> data = new HashMap<>(); - data.put("key", "value"); + @Test + void givenNonNullData_whenSerialize_thenReturnsJsonBytes() { + Map<String, String> data = new HashMap<>(); + data.put("key", "value"); - byte[] expectedBytes = "{\"key\":\"value\"}".getBytes(); + byte[] expectedBytes = "{\"key\":\"value\"}".getBytes(); - try (JsonPojoSerializer<Map<String, String>> serializer = new JsonPojoSerializer<>()) { - assertArrayEquals(expectedBytes, serializer.serialize("topic", data)); - } - } + try (JsonPojoSerializer<Map<String, String>> serializer = new JsonPojoSerializer<>()) { + byte[] actualBytes = serializer.serialize("topic", data); - @Test - void close_doesNotThrowException() { - JsonPojoSerializer<Object> serializer = new 
JsonPojoSerializer<>(); - serializer.close(); - } + assertArrayEquals(expectedBytes, actualBytes); + } + } + @Test + void givenSerializer_whenClosed_thenDoesNotThrowException() { + JsonPojoSerializer<Object> serializer = new JsonPojoSerializer<>(); + serializer.close(); + } } diff --git a/src/test/java/ch/cern/nile/common/json/JsonSerdeTest.java b/src/test/java/ch/cern/nile/common/json/JsonSerdeTest.java index 228333c..5f8658d 100644 --- a/src/test/java/ch/cern/nile/common/json/JsonSerdeTest.java +++ b/src/test/java/ch/cern/nile/common/json/JsonSerdeTest.java @@ -4,19 +4,21 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import java.util.HashMap; import java.util.Map; + import org.junit.jupiter.api.Test; -public class JsonSerdeTest { +class JsonSerdeTest { - @Test - public void testConfigure() { - try (JsonSerde jsonSerde = new JsonSerde()) { - Map<String, Object> configs = new HashMap<>(); - configs.put("config-key", "config-value"); - jsonSerde.configure(configs, true); - assertNotNull(jsonSerde.serializer()); - assertNotNull(jsonSerde.deserializer()); - } - } + @Test + void givenEmptyConfigs_whenConfigure_thenSerializerAndDeserializerNotNull() { + try (JsonSerde jsonSerde = new JsonSerde()) { + Map<String, Object> configs = new HashMap<>(); + configs.put("config-key", "config-value"); + jsonSerde.configure(configs, true); + + assertNotNull(jsonSerde.serializer()); + assertNotNull(jsonSerde.deserializer()); + } + } } diff --git a/src/test/java/ch/cern/nile/common/probes/HealthTest.java b/src/test/java/ch/cern/nile/common/probes/HealthTest.java new file mode 100644 index 0000000..793fbc4 --- /dev/null +++ b/src/test/java/ch/cern/nile/common/probes/HealthTest.java @@ -0,0 +1,96 @@ +package ch.cern.nile.common.probes; + +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.anyInt; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.net.InetSocketAddress; + +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpHandler; +import com.sun.net.httpserver.HttpServer; + +import org.apache.kafka.streams.KafkaStreams; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; + +class HealthTest { + + private static final int PORT = 8899; + private static final int OK = 200; + private static final int ERROR = 500; + + private KafkaStreams mockStreams; + private HttpServer mockServer; + private HttpServerFactory mockFactory; + + private Health health; + + @BeforeEach + void before() throws IOException { + mockStreams = mock(KafkaStreams.class); + mockServer = mock(HttpServer.class); + mockFactory = mock(HttpServerFactory.class); + when(mockFactory.createHttpServer(any(InetSocketAddress.class), anyInt())).thenReturn(mockServer); + + health = new Health(mockStreams, mockFactory); + } + + @Test + void givenHealthServer_whenStart_thenServerStartsAndCreatesHealthContext() throws IOException { + health.start(); + + verify(mockFactory).createHttpServer(new InetSocketAddress(PORT), 0); + verify(mockServer).createContext(eq("/health"), any(HttpHandler.class)); + verify(mockServer).start(); + } + + @Test + void givenHealthServer_whenStop_thenServerStops() { + health.start(); + health.stop(); + + verify(mockServer).stop(0); + } + + @Test + void 
givenKafkaStreamsRunning_whenHealthCheck_thenResponseStatus200() throws IOException { + when(mockStreams.state()).thenReturn(KafkaStreams.State.RUNNING); + health.start(); + + ArgumentCaptor<HttpHandler> handlerCaptor = ArgumentCaptor.forClass(HttpHandler.class); + verify(mockServer).createContext(eq("/health"), handlerCaptor.capture()); + + HttpExchange mockExchange = mock(HttpExchange.class); + handlerCaptor.getValue().handle(mockExchange); + verify(mockExchange).sendResponseHeaders(OK, 0); + verify(mockExchange).close(); + } + + @Test + void givenKafkaStreamsNotRunning_whenHealthCheck_thenResponseStatus500() throws IOException { + when(mockStreams.state()).thenReturn(KafkaStreams.State.NOT_RUNNING); + health.start(); + + ArgumentCaptor<HttpHandler> handlerCaptor = ArgumentCaptor.forClass(HttpHandler.class); + verify(mockServer).createContext(eq("/health"), handlerCaptor.capture()); + + HttpExchange mockExchange = mock(HttpExchange.class); + handlerCaptor.getValue().handle(mockExchange); + verify(mockExchange).sendResponseHeaders(ERROR, 0); + verify(mockExchange).close(); + } + + @Test + void givenHttpServerCreationFails_whenStart_thenThrowsRuntimeException() throws IOException { + when(mockFactory.createHttpServer(any(InetSocketAddress.class), anyInt())).thenThrow(IOException.class); + + assertThrows(RuntimeException.class, () -> health.start(), "Should throw RuntimeException"); + } +} diff --git a/src/test/java/ch/cern/nile/common/schema/SchemaInjectorTest.java b/src/test/java/ch/cern/nile/common/schema/SchemaInjectorTest.java index 0b0ae4b..c8d83fa 100644 --- a/src/test/java/ch/cern/nile/common/schema/SchemaInjectorTest.java +++ b/src/test/java/ch/cern/nile/common/schema/SchemaInjectorTest.java @@ -1,60 +1,67 @@ package ch.cern.nile.common.schema; -import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.List; import java.util.Map; -import static org.junit.jupiter.api.Assertions.*; +import org.junit.jupiter.api.Test; class SchemaInjectorTest extends SchemaTestBase { - @Test - @SuppressWarnings("unchecked") - void inject_ReturnsCorrectSchemaAndPayload_WhenInputDataIsValid() { - final Map<String, Object> result = SchemaInjector.inject(data); - assertNotNull(result); - assertTrue(result.containsKey("schema")); - assertTrue(result.containsKey("payload")); - final Map<String, Object> schema = (Map<String, Object>) result.get("schema"); - assertEquals("struct", schema.get("type")); - final List<Map<String, Object>> fields = (List<Map<String, Object>>) schema.get("fields"); - assertEquals(data.size(), fields.size()); - - for (Map<String, Object> field : fields) { - final String fieldName = (String) field.get("field"); - assertTrue(data.containsKey(fieldName)); - assertNotNull(field.get("type")); - - - if (fieldName.equals("timestamp_col")) { - assertFalse(Boolean.parseBoolean(field.get("optional").toString())); - } else { - assertTrue(Boolean.parseBoolean(field.get("optional").toString())); - } - - if (fieldName.equals("timestamp_col")) { - assertEquals("org.apache.kafka.connect.data.Timestamp", field.get("name")); - assertEquals(1, field.get("version")); - } else if (fieldName.equals("date_col")) { - assertEquals("org.apache.kafka.connect.data.Date", field.get("name")); - assertEquals(1, 
field.get("version")); - } + @Test + void givenValidInputData_whenInject_thenReturnsCorrectSchemaAndPayload() { + final Map<String, Object> result = SchemaInjector.inject(data); + + assertNotNull(result); + + assertTrue(result.containsKey("schema")); + assertTrue(result.containsKey("payload")); + + final Map<String, Object> schema = (Map<String, Object>) result.get("schema"); + assertEquals("struct", schema.get("type")); + final List<Map<String, Object>> fields = (List<Map<String, Object>>) schema.get("fields"); + assertEquals(data.size(), fields.size()); + + for (Map<String, Object> field : fields) { + final String fieldName = (String) field.get("field"); + assertTrue(data.containsKey(fieldName)); + assertNotNull(field.get("type")); + + if (fieldName.equals("timestamp_col")) { + assertFalse(Boolean.parseBoolean(field.get("optional").toString())); + } else { + assertTrue(Boolean.parseBoolean(field.get("optional").toString())); + } + + if (fieldName.equals("timestamp_col")) { + assertEquals("org.apache.kafka.connect.data.Timestamp", field.get("name")); + assertEquals(1, field.get("version")); + } else if (fieldName.equals("date_col")) { + assertEquals("org.apache.kafka.connect.data.Date", field.get("name")); + assertEquals(1, field.get("version")); + } + } + + final Map<String, Object> payload = (Map<String, Object>) result.get("payload"); + assertEquals(data, payload); } - final Map<String, Object> payload = (Map<String, Object>) result.get("payload"); - assertEquals(data, payload); - } - - @Test - void inject_ThrowsIllegalArgumentException_WhenNullValuePresent() { - data.put("nullValue", null); - assertThrows(IllegalArgumentException.class, () -> SchemaInjector.inject(data)); - } - - @Test - void inject_ThrowsIllegalArgumentException_WhenUnsupportedTypePresent() { - data.put("unsupportedType", new Object()); - assertThrows(IllegalArgumentException.class, () -> SchemaInjector.inject(data)); - } + @Test + void givenDataWithNullValue_whenInject_thenThrowsIllegalArgumentException() { + data.put("nullValue", null); + + assertThrows(IllegalArgumentException.class, () -> SchemaInjector.inject(data)); + } + + @Test + void givenDataWithUnsupportedType_whenInject_thenThrowsIllegalArgumentException() { + data.put("unsupportedType", new Object()); + + assertThrows(IllegalArgumentException.class, () -> SchemaInjector.inject(data)); + } } diff --git a/src/test/java/ch/cern/nile/common/schema/SchemaTestBase.java b/src/test/java/ch/cern/nile/common/schema/SchemaTestBase.java index 9eca0fa..e9e358f 100644 --- a/src/test/java/ch/cern/nile/common/schema/SchemaTestBase.java +++ b/src/test/java/ch/cern/nile/common/schema/SchemaTestBase.java @@ -3,24 +3,25 @@ package ch.cern.nile.common.schema; import java.util.Date; import java.util.HashMap; import java.util.Map; + import org.junit.jupiter.api.BeforeEach; public class SchemaTestBase { - public Map<String, Object> data; + public Map<String, Object> data; - @BeforeEach - void setUp() { - data = new HashMap<>(); - data.put("byte_col", (byte) 1); - data.put("short_col", (short) 2); - data.put("int_col", 3); - data.put("long_col", (long) 4); - data.put("float_col", 5.0f); - data.put("double_col", 6.0); - data.put("boolean_col", true); - data.put("string_col", "test"); - data.put("timestamp_col", 1501834166000L); - data.put("date_col", new Date()); - data.put("bytes_col", new byte[]{1, 2, 3}); - } + @BeforeEach + void setUp() { + data = new HashMap<>(); + data.put("byte_col", (byte) 1); + data.put("short_col", (short) 2); + data.put("int_col", 3); + 
data.put("long_col", (long) 4); + data.put("float_col", 5.0f); + data.put("double_col", 6.0); + data.put("boolean_col", true); + data.put("string_col", "test"); + data.put("timestamp_col", 1501834166000L); + data.put("date_col", new Date()); + data.put("bytes_col", new byte[]{1, 2, 3}); + } } -- GitLab From 458469f8267ca5a682f72104bb4694d58052bbef Mon Sep 17 00:00:00 2001 From: Dean Dalianis <dean.dalianis@cern.ch> Date: Fri, 26 Jan 2024 10:45:12 +0200 Subject: [PATCH 03/15] Checkstyle fixes --- src/main/java/ch/cern/nile/common/Main.java | 23 ++++---- .../nile/common/configs/StreamConfig.java | 4 +- .../common/exceptions/DecodingException.java | 4 +- .../common/exceptions/StreamingException.java | 12 ++++ .../common/json/JsonPojoDeserializer.java | 20 ++++++- .../nile/common/json/JsonPojoSerializer.java | 11 +++- .../ch/cern/nile/common/json/JsonSerde.java | 27 ++++++++- .../ch/cern/nile/common/schema/JsonType.java | 4 +- .../nile/common/streams/AbstractStream.java | 58 +++++++++++++------ .../common/json/JsonPojoDeserializerTest.java | 11 ---- 10 files changed, 123 insertions(+), 51 deletions(-) create mode 100644 src/main/java/ch/cern/nile/common/exceptions/StreamingException.java diff --git a/src/main/java/ch/cern/nile/common/Main.java b/src/main/java/ch/cern/nile/common/Main.java index 2d0fb8d..3722908 100644 --- a/src/main/java/ch/cern/nile/common/Main.java +++ b/src/main/java/ch/cern/nile/common/Main.java @@ -10,29 +10,30 @@ import ch.cern.nile.common.clients.KafkaStreamsClient; import ch.cern.nile.common.configs.PropertiesCheck; import ch.cern.nile.common.configs.StreamConfig; import ch.cern.nile.common.configs.StreamType; +import ch.cern.nile.common.exceptions.StreamingException; import ch.cern.nile.common.streams.Streaming; public class Main { + private Main() { + } + /** * Main method. 
* - * @param args the properties files + * @param args the properties file */ public static void main(String[] args) { - // Check if properties file was passed if (args.length < 1) { - throw new RuntimeException("Expecting args[0] to be the path to the configuration file"); + throw new IllegalArgumentException("Properties file not passed"); } - // Loading properties file - String configsPath = args[0]; + String configPath = args[0]; final Properties configs = new Properties(); try { - configs.load(new FileInputStream(configsPath)); + configs.load(new FileInputStream(configPath)); } catch (IOException e) { - e.printStackTrace(); - throw new RuntimeException(e); + throw new StreamingException(e); } StreamType sType = @@ -40,11 +41,9 @@ public class Main { PropertiesCheck.validateProperties(configs, sType); - // Initialize Kafka Client final KafkaStreamsClient client = new KafkaStreamsClient(); client.configure(configs); - // Start Streaming try { Class<?> clazz = Class.forName(configs.getProperty(StreamConfig.CommonProperties.STREAM_CLASS.getValue())); final Streaming streaming; @@ -53,7 +52,9 @@ public class Main { streaming.stream(client); } catch (ClassNotFoundException | IllegalAccessException | InstantiationException | ClassCastException | InvocationTargetException | NoSuchMethodException e) { - e.printStackTrace(); + String message = "Error while starting the stream"; + throw new StreamingException(message, e); } } + } diff --git a/src/main/java/ch/cern/nile/common/configs/StreamConfig.java b/src/main/java/ch/cern/nile/common/configs/StreamConfig.java index c332c99..f4da20f 100644 --- a/src/main/java/ch/cern/nile/common/configs/StreamConfig.java +++ b/src/main/java/ch/cern/nile/common/configs/StreamConfig.java @@ -1,11 +1,11 @@ package ch.cern.nile.common.configs; -import lombok.Getter; - import java.util.Arrays; import java.util.Set; import java.util.stream.Collectors; +import lombok.Getter; + /** * A class containing enums representing various stream configuration property categories. 
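+ * <p>
+ * For example, {@code StreamConfig.ClientProperties.SOURCE_TOPIC.getValue()} returns the
+ * property key {@code "source.topic"} that is expected in the stream's properties file.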
*/ diff --git a/src/main/java/ch/cern/nile/common/exceptions/DecodingException.java b/src/main/java/ch/cern/nile/common/exceptions/DecodingException.java index 67808ad..c24feaa 100644 --- a/src/main/java/ch/cern/nile/common/exceptions/DecodingException.java +++ b/src/main/java/ch/cern/nile/common/exceptions/DecodingException.java @@ -2,8 +2,8 @@ package ch.cern.nile.common.exceptions; public class DecodingException extends RuntimeException { - public DecodingException(String message, Throwable err) { - super(message, err); + public DecodingException(String message, Throwable cause) { + super(message, cause); } public DecodingException(String message) { diff --git a/src/main/java/ch/cern/nile/common/exceptions/StreamingException.java b/src/main/java/ch/cern/nile/common/exceptions/StreamingException.java new file mode 100644 index 0000000..aa9ecdd --- /dev/null +++ b/src/main/java/ch/cern/nile/common/exceptions/StreamingException.java @@ -0,0 +1,12 @@ +package ch.cern.nile.common.exceptions; + +public class StreamingException extends RuntimeException { + + public StreamingException(Throwable cause) { + super(cause); + } + + public StreamingException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java b/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java index f5203a7..6e36502 100644 --- a/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java +++ b/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java @@ -10,7 +10,11 @@ import org.apache.kafka.common.serialization.Deserializer; public class JsonPojoDeserializer<T> implements Deserializer<T> { private static final Gson gson = new Gson(); - Class<T> tClass; + + /** + * Class type for the deserialization. + */ + private Class<T> tClass; /** * Default constructor needed by Kafka. @@ -18,10 +22,21 @@ public class JsonPojoDeserializer<T> implements Deserializer<T> { public JsonPojoDeserializer() { } + /** + * Constructor for the deserializer. + * + * @param clazz Class type for the deserialization + */ JsonPojoDeserializer(Class<T> clazz) { this.tClass = clazz; } + /** + * Configure this class. + * + * @param props Properties from the consumer configuration + * @param isKey Ignored + */ @Override @SuppressWarnings("unchecked") public void configure(Map<String, ?> props, boolean isKey) { @@ -45,6 +60,9 @@ public class JsonPojoDeserializer<T> implements Deserializer<T> { return gson.fromJson(new String(bytes, StandardCharsets.UTF_8), tClass); } + /** + * Needed due to the implementation of the Serializer interface. + */ @Override public void close() { } diff --git a/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java b/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java index dae6338..08ed1d4 100644 --- a/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java +++ b/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java @@ -17,6 +17,12 @@ public class JsonPojoSerializer<T> implements Serializer<T> { public JsonPojoSerializer() { } + /** + * Needed due to the implementation of the Serializer interface. + * + * @param props Ignored + * @param isKey Ignored + */ @Override public void configure(Map<String, ?> props, boolean isKey) { } @@ -26,7 +32,7 @@ public class JsonPojoSerializer<T> implements Serializer<T> { * * @param topic The topic associated with the data. * @param data The data to be serialized. - * @return The serialized data as bytes or null if the data is null. 
+ * @return The serialized data as bytes or null if the data is null */ @Override public byte[] serialize(String topic, T data) { @@ -36,6 +42,9 @@ public class JsonPojoSerializer<T> implements Serializer<T> { return gson.toJson(data).getBytes(StandardCharsets.UTF_8); } + /** + * Needed due to the implementation of the Serializer interface. + */ @Override public void close() { } diff --git a/src/main/java/ch/cern/nile/common/json/JsonSerde.java b/src/main/java/ch/cern/nile/common/json/JsonSerde.java index 1d8fcc9..5e1fdfc 100644 --- a/src/main/java/ch/cern/nile/common/json/JsonSerde.java +++ b/src/main/java/ch/cern/nile/common/json/JsonSerde.java @@ -8,27 +8,50 @@ import org.apache.kafka.common.serialization.Deserializer; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.serialization.Serializer; +/** + * A Serde for JSON objects. + */ public class JsonSerde implements Serde<JsonObject> { - final JsonPojoSerializer<JsonObject> serializer = new JsonPojoSerializer<>(); - final JsonPojoDeserializer<JsonObject> deserializer = new JsonPojoDeserializer<>(JsonObject.class); + private final JsonPojoSerializer<JsonObject> serializer = new JsonPojoSerializer<>(); + private final JsonPojoDeserializer<JsonObject> deserializer = new JsonPojoDeserializer<>(JsonObject.class); + + /** + * Configure this class. + * + * @param configs Properties from the consumer configuration + * @param isKey Ignored + */ @Override public void configure(Map<String, ?> configs, boolean isKey) { serializer.configure(configs, isKey); deserializer.configure(configs, isKey); } + /** + * Close this class. + */ @Override public void close() { serializer.close(); deserializer.close(); } + /** + * Get the serializer. + * + * @return The serializer + */ @Override public Serializer<JsonObject> serializer() { return serializer; } + /** + * Get the deserializer. + * + * @return The deserializer + */ @Override public Deserializer<JsonObject> deserializer() { return deserializer; diff --git a/src/main/java/ch/cern/nile/common/schema/JsonType.java b/src/main/java/ch/cern/nile/common/schema/JsonType.java index d2a4fc0..828f423 100644 --- a/src/main/java/ch/cern/nile/common/schema/JsonType.java +++ b/src/main/java/ch/cern/nile/common/schema/JsonType.java @@ -1,9 +1,9 @@ package ch.cern.nile.common.schema; -import lombok.Getter; - import java.util.Date; +import lombok.Getter; + @Getter enum JsonType { BYTE(Byte.class, "int8"), diff --git a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java index 9ade1ba..152fe05 100644 --- a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java +++ b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java @@ -28,7 +28,7 @@ import lombok.Setter; public abstract class AbstractStream implements Streaming { - private final Logger LOGGER = LoggerFactory.getLogger(getClass()); + private static final Logger LOGGER = LoggerFactory.getLogger(AbstractStream.class); @Getter private KafkaStreams streams; @@ -55,6 +55,7 @@ public abstract class AbstractStream implements Streaming { * @param configs the properties to configure the stream with */ @Override + @SuppressWarnings("HiddenField") public void configure(final Properties configs) { this.configs = configs; } @@ -103,16 +104,29 @@ public abstract class AbstractStream implements Streaming { } } - - protected static boolean filterNull(final String k, final Object v) { - return v != null; + /** + * Filter out records that are null. 
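+     * <p>
+     * Intended for use as a topology predicate; a sketch with a hypothetical
+     * {@code stream} variable: {@code stream.filter(AbstractStream::filterNull)}.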
+ * + * @param key is ignored + * @param value the value + * @return true if the record is not null, false otherwise + */ + protected static boolean filterNull(final String key, final Object value) { + return value != null; } - protected static boolean filterEmpty(final String k, final Object v) { - if (v instanceof List) { - return !((List<?>) v).isEmpty(); - } else if (v instanceof Map) { - return !((Map<?, ?>) v).isEmpty(); + /** + * Filter out records that are empty. + * + * @param key is ignored + * @param value the value + * @return true if the record is not empty, false otherwise + */ + protected static boolean filterEmpty(final String key, final Object value) { + if (value instanceof List) { + return !((List<?>) value).isEmpty(); + } else if (value instanceof Map) { + return !((Map<?, ?>) value).isEmpty(); } return false; } @@ -120,28 +134,33 @@ public abstract class AbstractStream implements Streaming { /** * Filter out records that do not have the required fields. * - * @param k the key - * @param v the value + * @param key the key + * @param value the value * @return true if the record has the required fields, false otherwise */ - protected boolean filterRecord(String k, JsonObject v) { - return v != null && v.get("applicationID") != null && v.get("applicationName") != null && - v.get("deviceName") != null && v.get("devEUI") != null - && v.get("data") != null; + protected boolean filterRecord(String key, JsonObject value) { + return value != null && value.get("applicationID") != null && value.get("applicationName") != null + && value.get("deviceName") != null && value.get("devEUI") != null + && value.get("data") != null; } /** * Log an exception that occurred while reading from the source topic. * - * @param e the exception + * @param exception the exception */ - protected void logStreamsException(Exception e) { - LOGGER.warn(String.format("Error reading from topic %s. Last read offset %s:", sourceTopic, lastReadOffset), e); + protected void logStreamsException(Exception exception) { + LOGGER.warn("Error reading from topic {}. Last read offset {}", sourceTopic, lastReadOffset, exception); if (streams != null) { - LOGGER.info(String.format("Streams state is: %s", streams.state().toString())); + LOGGER.info("Streams state is: {}", streams.state().toString()); } } + /** + * Implement this method to create the topology. 
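+     * <p>
+     * A minimal sketch of an implementation (topic wiring and serde choice are
+     * illustrative, not prescribed by this class):
+     * <pre>{@code
+     * @Override
+     * public void createTopology(StreamsBuilder builder) {
+     *     builder.stream(getSourceTopic(), Consumed.with(Serdes.String(), new JsonSerde()))
+     *            .filter(AbstractStream::filterNull)
+     *            .to(getSinkTopic(), Produced.with(Serdes.String(), new JsonSerde()));
+     * }
+     * }</pre>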
+ * + * @param builder the streams builder + */ public abstract void createTopology(StreamsBuilder builder); private void init(KafkaStreamsClient kafkaStreamsClient) { @@ -184,6 +203,7 @@ public abstract class AbstractStream implements Streaming { * @param context the context of this processor */ @Override + @SuppressWarnings("HiddenField") public void init(final ProcessorContext context) { this.context = context; } diff --git a/src/test/java/ch/cern/nile/common/json/JsonPojoDeserializerTest.java b/src/test/java/ch/cern/nile/common/json/JsonPojoDeserializerTest.java index 9375943..6b455a5 100644 --- a/src/test/java/ch/cern/nile/common/json/JsonPojoDeserializerTest.java +++ b/src/test/java/ch/cern/nile/common/json/JsonPojoDeserializerTest.java @@ -3,8 +3,6 @@ package ch.cern.nile.common.json; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; -import java.util.Map; - import org.junit.jupiter.api.Test; import ch.cern.nile.common.models.Application; @@ -51,13 +49,4 @@ class JsonPojoDeserializerTest { "Null json should return null"); } - @Test - void givenConfigureWithJsonPOJOClass_whenConfigure_thenSetsClass() { - try (JsonPojoDeserializer<Topic> deserializer = new JsonPojoDeserializer<>()) { - assertNull(deserializer.tClass, "Class should be null"); - deserializer.configure(Map.of("JsonPOJOClass", Topic.class), true); - - assertEquals(Topic.class, deserializer.tClass, "Class not set correctly"); - } - } } -- GitLab From 425931389541b702ca3be78e341f3f198f39ef63 Mon Sep 17 00:00:00 2001 From: Konstantinos Dalianis <dean.dalianis@cern.ch> Date: Fri, 26 Jan 2024 20:58:28 +0200 Subject: [PATCH 04/15] PMD Fixes --- CHANGELOG.MD | 235 ++++++++++++------ pom.xml | 18 +- src/main/java/ch/cern/nile/common/Main.java | 60 ----- .../nile/common/StreamingApplication.java | 77 ++++++ .../common/clients/KafkaStreamsClient.java | 64 ++--- .../cern/nile/common/configs/Configure.java | 10 - .../nile/common/configs/StreamConfig.java | 96 ------- .../nile/common/configuration/Configure.java | 16 ++ .../PropertiesCheck.java | 25 +- .../StreamType.java | 2 +- .../properties/ClientProperties.java | 33 +++ .../properties/CommonProperties.java | 31 +++ .../properties/DecodingProperties.java | 30 +++ .../properties/EnrichmentProperties.java | 31 +++ .../properties/RoutingProperties.java | 31 +++ .../common/exceptions/DecodingException.java | 6 +- .../exceptions/HealthProbeException.java | 11 + .../exceptions/MissingPropertyException.java | 4 +- .../exceptions/ReverseDnsLookupException.java | 4 +- .../common/exceptions/StreamingException.java | 6 +- .../UnknownStreamTypeException.java | 4 +- .../common/json/JsonPojoDeserializer.java | 27 +- .../nile/common/json/JsonPojoSerializer.java | 26 +- .../ch/cern/nile/common/json/JsonSerde.java | 18 +- .../cern/nile/common/models/Application.java | 3 + .../ch/cern/nile/common/models/Topic.java | 3 + .../ch/cern/nile/common/probes/Health.java | 23 +- .../nile/common/probes/HttpServerFactory.java | 9 + .../ch/cern/nile/common/schema/JsonType.java | 21 +- .../nile/common/schema/SchemaInjector.java | 39 +-- .../nile/common/streams/AbstractStream.java | 191 ++++---------- .../streams/InjectOffsetTransformer.java | 64 +++++ .../cern/nile/common/streams/StreamUtils.java | 106 ++++++++ .../cern/nile/common/streams/Streaming.java | 15 +- .../clients/KafkaStreamsClientTest.java | 50 ++-- .../nile/common/configs/StreamConfigTest.java | 83 ------- .../PropertiesCheckTest.java | 33 +-- .../StreamTypeTest.java | 8 +- 
.../properties/StreamConfigTest.java | 87 +++++++ .../common/json/JsonPojoDeserializerTest.java | 18 +- .../common/json/JsonPojoSerializerTest.java | 21 +- .../cern/nile/common/json/JsonSerdeTest.java | 6 +- .../cern/nile/common/probes/HealthTest.java | 22 +- .../common/schema/SchemaInjectorTest.java | 69 +++-- .../nile/common/schema/SchemaTestBase.java | 27 -- .../nile/common/streams/StreamUtilsTest.java | 95 +++++++ 46 files changed, 1154 insertions(+), 704 deletions(-) delete mode 100644 src/main/java/ch/cern/nile/common/Main.java create mode 100644 src/main/java/ch/cern/nile/common/StreamingApplication.java delete mode 100644 src/main/java/ch/cern/nile/common/configs/Configure.java delete mode 100644 src/main/java/ch/cern/nile/common/configs/StreamConfig.java create mode 100644 src/main/java/ch/cern/nile/common/configuration/Configure.java rename src/main/java/ch/cern/nile/common/{configs => configuration}/PropertiesCheck.java (68%) rename src/main/java/ch/cern/nile/common/{configs => configuration}/StreamType.java (77%) create mode 100644 src/main/java/ch/cern/nile/common/configuration/properties/ClientProperties.java create mode 100644 src/main/java/ch/cern/nile/common/configuration/properties/CommonProperties.java create mode 100644 src/main/java/ch/cern/nile/common/configuration/properties/DecodingProperties.java create mode 100644 src/main/java/ch/cern/nile/common/configuration/properties/EnrichmentProperties.java create mode 100644 src/main/java/ch/cern/nile/common/configuration/properties/RoutingProperties.java create mode 100644 src/main/java/ch/cern/nile/common/exceptions/HealthProbeException.java create mode 100644 src/main/java/ch/cern/nile/common/streams/InjectOffsetTransformer.java create mode 100644 src/main/java/ch/cern/nile/common/streams/StreamUtils.java delete mode 100644 src/test/java/ch/cern/nile/common/configs/StreamConfigTest.java rename src/test/java/ch/cern/nile/common/{configs => configuration}/PropertiesCheckTest.java (64%) rename src/test/java/ch/cern/nile/common/{configs => configuration}/StreamTypeTest.java (79%) create mode 100644 src/test/java/ch/cern/nile/common/configuration/properties/StreamConfigTest.java delete mode 100644 src/test/java/ch/cern/nile/common/schema/SchemaTestBase.java create mode 100644 src/test/java/ch/cern/nile/common/streams/StreamUtilsTest.java diff --git a/CHANGELOG.MD b/CHANGELOG.MD index a9545d5..7fda231 100644 --- a/CHANGELOG.MD +++ b/CHANGELOG.MD @@ -1,4 +1,5 @@ # Changelog of nile-common + - Product: nile/streams/libs/nile-common - Origin: Refactored from `kafka-streams` version `2.7.2`. - All notable changes to this project will be documented in this file. 
@@ -6,178 +7,254 @@ - Entry types: Added, Changed, Deprecated, Removed, Fixed, Security ## [1.0.1] - 2023-12-07 + ## Added - - Added Main class from `kafka-streams` version `2.7.2` - - Temporarily removed checkstyle -- will be added back in a future release + +- Added Main class from `kafka-streams` version `2.7.2` +- Temporarily removed checkstyle -- will be added back in a future release ## [1.0.0] - 2023-10-04 + ## Added - - Initial release of nile-common - - Extracted from `kafka-streams` version `2.7.2`: - - clients - - configs - - json - - models - - probes - - streams/AbstractStream - - streams/Streaming - - resources/log4j.properties - - schema (excluding `schema/db`) + +- Initial release of nile-common +- Extracted from `kafka-streams` version `2.7.2`: + - clients + - configs + - json + - models + - probes + - streams/AbstractStream + - streams/Streaming + - resources/log4j.properties + - schema (excluding `schema/db`) ________________________________________________________________________________ # Old Changelog of kafka-streams ## [2.7.2] - 2023-09-21 + ### Fixed - - Fixed GenerateTableCreationCommandMain DEVEUI type + +- Fixed GenerateTableCreationCommandMain DEVEUI type ## [2.7.1] - 2023-08-09 + ### Fixed - - [NILE-946] SchemaInjector should match upper & lowe case for the timestamp field + +- [NILE-946] SchemaInjector should match upper & lowe case for the timestamp field ## [2.7.0] - 2023-08-09 + ### Added - - [NILE-964] New ConnectSchemaToTableCreationCommandGenerator to generate SQL commands for creating tables - - [NILE-964] New schema injector to inject schemas in streaming records (if required) - - [NILE-964] Containerized tests for the SchemaInjector with the Aiven JDBC connector and Oracle + +- [NILE-964] New ConnectSchemaToTableCreationCommandGenerator to generate SQL commands for creating tables +- [NILE-964] New schema injector to inject schemas in streaming records (if required) +- [NILE-964] Containerized tests for the SchemaInjector with the Aiven JDBC connector and Oracle + ### Changed - - [NILE-946] LoraEnAccessControlDecode now uses the new schema injector - - [NILE-946] New Kaitai struct for LoraEnAccessControlDecode - - [NILE-946] Added new main class for table creation command generation + +- [NILE-946] LoraEnAccessControlDecode now uses the new schema injector +- [NILE-946] New Kaitai struct for LoraEnAccessControlDecode +- [NILE-946] Added new main class for table creation command generation ## [2.6.9] - 2023-07-12 + ### Fixed - - [NILE-948] DifferentialPressure should be signed instead of unsigned + +- [NILE-948] DifferentialPressure should be signed instead of unsigned ## [2.6.8] - 2023-07-12 + ### Fixed - - [NILE-948] Ignore unsupported frames for LoraItComputerCenterTempCayenneLpp + +- [NILE-948] Ignore unsupported frames for LoraItComputerCenterTempCayenneLpp ## [2.6.7] - 2023-07-12 + ### Added - - [NILE-948] New decoder and streaming for LoraItComputerCenterTempCayenneLpp sensors + +- [NILE-948] New decoder and streaming for LoraItComputerCenterTempCayenneLpp sensors ## [2.6.6] - 2023-07-07 + ### Fixed - - Fixed division by zero in LoraHumTempBatmon + +- Fixed division by zero in LoraHumTempBatmon ## [2.6.5] - 2023-06-29 + ### Added - - [NILE-977] Add StatusFlag in LoraHumTempBatmon + +- [NILE-977] Add StatusFlag in LoraHumTempBatmon ## [2.6.4] - 2023-05-12 + ### Fixed - - LoraHumTempBatmon fix (changed temperature_gain_adc_conv calculation formula) + +- LoraHumTempBatmon fix (changed temperature_gain_adc_conv calculation formula) ## [2.6.3] - 
2023-05-12 + ### Fixed - - LoraHumTempBatmon fix (changed field from int to double) + +- LoraHumTempBatmon fix (changed field from int to double) ## [2.6.2] - 2023-05-11 + ### Fixed - - LoraHumTempBatmon fix (removed deviceName) + +- LoraHumTempBatmon fix (removed deviceName) ## [2.6.1] - 2023-05-11 + ### Fixed - - LoraRisingHfDecoder fix (rssi should be float to match the InfluxDb measurements) + +- LoraRisingHfDecoder fix (rssi should be float to match the InfluxDb measurements) ## [2.6.0] - 2023-05-11 + ### Added - - New abstract test class for Kaitai Structs - - New generic streams decoder for Kaitai Structs - - New Lora Humidity Temperature Batmon sensor routing & decoding + +- New abstract test class for Kaitai Structs +- New generic streams decoder for Kaitai Structs +- New Lora Humidity Temperature Batmon sensor routing & decoding + ### Changed - - Updated LoraRouting to use the new generic Kaitai Struct Decoder - - Updated tests to use the new generic test class - - Schema injection for LoraEnAccessControl - - artifactId from cerndb-nile-kafka-streams to nile-kafka-streams - - deployment image path from gitlab-registry.cern.ch/db/kafka-streams to gitlab-registry.cern.ch/nile/kafka-streams - - QA lora-routing from lora-mqtt-qa to lora-mqtt - - QA topic names that didn't fit the convention production-topic-name-qa -### Fixed - - Access Control sensors decoding + +- Updated LoraRouting to use the new generic Kaitai Struct Decoder +- Updated tests to use the new generic test class +- Schema injection for LoraEnAccessControl +- artifactId from cerndb-nile-kafka-streams to nile-kafka-streams +- deployment image path from gitlab-registry.cern.ch/db/kafka-streams to gitlab-registry.cern.ch/nile/kafka-streams +- QA lora-routing from lora-mqtt-qa to lora-mqtt +- QA topic names that didn't fit the convention production-topic-name-qa + +### Fixed + +- Access Control sensors decoding + ### Removed - - Custom decoders: LoraBatmonDecoder + +- Custom decoders: LoraBatmonDecoder ## [2.5.0] - 2023-04-05 + ### Added - - New generic Kaitai Struct Decoder - - New abstract test class for Kaitai Structs - - New generic streams decoder for Kaitai Structs - - New LoraEnAccessControl routing & decoding - - New LoraEnParkingControl routing & decoding + +- New generic Kaitai Struct Decoder +- New abstract test class for Kaitai Structs +- New generic streams decoder for Kaitai Structs +- New LoraEnAccessControl routing & decoding +- New LoraEnParkingControl routing & decoding + ### Changed - - Updated already existing Kaitai Structs to do the decoding themselves - - Updated LoraRouting to use the new generic Kaitai Struct Decoder - - Updated tests to use the new generic test class - - RpCalibration now uses Kaitai Structs and the new generic streams decoder + +- Updated already existing Kaitai Structs to do the decoding themselves +- Updated LoraRouting to use the new generic Kaitai Struct Decoder +- Updated tests to use the new generic test class +- RpCalibration now uses Kaitai Structs and the new generic streams decoder + ### Removed - - Custom decoders: RpCalibration, LoraBatmonDecoder, LoraLoraBeItTempRisingHfDecoder, LoraCrackSensorsDecoder + +- Custom decoders: RpCalibration, LoraBatmonDecoder, LoraLoraBeItTempRisingHfDecoder, LoraCrackSensorsDecoder ## [2.4.6] - 2023-01-26 + ### Fixed - - Bugfix: LoraBatmonDecode: maxSnr first value set to Double.MIN_VALUE instead of 0.0 + +- Bugfix: LoraBatmonDecode: maxSnr first value set to Double.MIN_VALUE instead of 0.0 ## [2.4.6]-[2.4.7] - 2023-01-26 + ### Fixed 
- - Bugfix: LoraBatmonDecode: maxSnr first value set to Double.MIN_VALUE instead of 0.0 + +- Bugfix: LoraBatmonDecode: maxSnr first value set to Double.MIN_VALUE instead of 0.0 ## [2.4.5] - 2022-11-30 + ### Added - - [NILE-926] Added GM ASG's crack sensors routing & decoding + +- [NILE-926] Added GM ASG's crack sensors routing & decoding + ### Fixed - - Bugfix: [NILE-913] Stream applications shouldn't return null on failure + +- Bugfix: [NILE-913] Stream applications shouldn't return null on failure ## [2.4.4] - 2022-09-22 + ### Fixed - - Fixed GeolocationDecode null pointer exception bug (missing offset injection) + +- Fixed GeolocationDecode null pointer exception bug (missing offset injection) ## [2.4.3] - 2022-09-21 + ### Fixed - - Fixed null pointer exception in LoraContact + +- Fixed null pointer exception in LoraContact ## [2.4.2] - 2022-09-21 + ### Fixed - - Fixed constructors access modifier + +- Fixed constructors access modifier ## [2.4.1] - 2022-09-21 + ### Fixed - - Added empty constructor in streams classes that had one with arguments -- keeps Main.java:49 happy + +- Added empty constructor in streams classes that had one with arguments -- keeps Main.java:49 happy ## [2.4.0] - 2022-09-21 + ### Added - - [NILE-885] Added offset logging when there is a failure - + +- [NILE-885] Added offset logging when there is a failure + ## [2.3.1] - 2022-07-18 + ### Added - - Added more information in Batmon streams application + +- Added more information in Batmon streams application ## [2.3.0] - 2022-05-24 + ### Added - - [NILE-887] Added Batmon routing & decoding + +- [NILE-887] Added Batmon routing & decoding ## [2.2.0] - 2022-06-04 + ### Added + - Routing for new applications (lora-SY-temp-humi-isolde & BE-it-temp) - [NILE-861] Added LoRaWAN environmental sensors decoding + ### Removed - - Removed Cranes project + +- Removed Cranes project ## [1.0.0] - 2021-11-25 + ### Added - - [NILE-846] Cranes project in routing - - [NILE-846] Cranes decoder and CranesDecode - - [NILE-692] - - JUnit5, kafka-streams-test-utils test & mockito dependencies, - - CHANGELOG.MD, - - Tests & test resources for LoraRouting topology - - Tests for Health - - Tests for configs - - Overload NewDecoder#decode(JsonObject, long) -> NewDecoder#decode(JsonObject) + +- [NILE-846] Cranes project in routing +- [NILE-846] Cranes decoder and CranesDecode +- [NILE-692] + - JUnit5, kafka-streams-test-utils test & mockito dependencies, + - CHANGELOG.MD, + - Tests & test resources for LoraRouting topology + - Tests for Health + - Tests for configs + - Overload NewDecoder#decode(JsonObject, long) -> NewDecoder#decode(JsonObject) + ### Changed - - [NILE-692] - - Updated LoraRouting & Utils, to route using the applicationName instead of deviceName, - - Updated some already existing tests with JUnit + +- [NILE-692] + - Updated LoraRouting & Utils, to route using the applicationName instead of deviceName, + - Updated some already existing tests with JUnit + ### Removed - - Unused decoders (Decoder.java, NewDecoder.java) and the corresponding streams + +- Unused decoders (Decoder.java, NewDecoder.java) and the corresponding streams diff --git a/pom.xml b/pom.xml index faef136..ff61604 100644 --- a/pom.xml +++ b/pom.xml @@ -70,6 +70,12 @@ <version>5.2.0</version> <scope>test</scope> </dependency> + <dependency> + <groupId>com.github.spotbugs</groupId> + <artifactId>spotbugs-annotations</artifactId> + <version>4.8.1</version> + <scope>test</scope> + </dependency> </dependencies> <build> @@ -80,8 +86,12 @@ 
<artifactId>maven-checkstyle-plugin</artifactId> <version>3.3.1</version> <configuration> - <configLocation>https://gitlab.cern.ch/nile/java-build-tools/-/raw/master/src/main/resources/checkstyle.xml?ref_type=heads</configLocation> - <suppressionsLocation>https://gitlab.cern.ch/nile/java-build-tools/-/raw/master/src/main/resources/checkstyle-suppressions.xml?ref_type=heads</suppressionsLocation> + <configLocation> + https://gitlab.cern.ch/nile/java-build-tools/-/raw/master/src/main/resources/checkstyle.xml?ref_type=heads + </configLocation> + <suppressionsLocation> + https://gitlab.cern.ch/nile/java-build-tools/-/raw/master/src/main/resources/checkstyle-suppressions.xml?ref_type=heads + </suppressionsLocation> <consoleOutput>true</consoleOutput> <failsOnError>true</failsOnError> <linkXRef>false</linkXRef> @@ -108,7 +118,9 @@ <configuration> <linkXRef>false</linkXRef> <rulesets> - <ruleset>https://gitlab.cern.ch/nile/java-build-tools/-/raw/master/src/main/resources/pmd_java_ruleset.xml?ref_type=heads</ruleset> + <ruleset> + https://gitlab.cern.ch/nile/java-build-tools/-/raw/master/src/main/resources/pmd_java_ruleset.xml?ref_type=heads + </ruleset> </rulesets> <includeTests>true</includeTests> <failOnViolation>true</failOnViolation> diff --git a/src/main/java/ch/cern/nile/common/Main.java b/src/main/java/ch/cern/nile/common/Main.java deleted file mode 100644 index 3722908..0000000 --- a/src/main/java/ch/cern/nile/common/Main.java +++ /dev/null @@ -1,60 +0,0 @@ -package ch.cern.nile.common; - - -import java.io.FileInputStream; -import java.io.IOException; -import java.lang.reflect.InvocationTargetException; -import java.util.Properties; - -import ch.cern.nile.common.clients.KafkaStreamsClient; -import ch.cern.nile.common.configs.PropertiesCheck; -import ch.cern.nile.common.configs.StreamConfig; -import ch.cern.nile.common.configs.StreamType; -import ch.cern.nile.common.exceptions.StreamingException; -import ch.cern.nile.common.streams.Streaming; - -public class Main { - - private Main() { - } - - /** - * Main method. 
- * - * @param args the properties file - */ - public static void main(String[] args) { - if (args.length < 1) { - throw new IllegalArgumentException("Properties file not passed"); - } - - String configPath = args[0]; - final Properties configs = new Properties(); - try { - configs.load(new FileInputStream(configPath)); - } catch (IOException e) { - throw new StreamingException(e); - } - - StreamType sType = - StreamType.valueOf(configs.getProperty(StreamConfig.CommonProperties.STREAM_TYPE.getValue(), null)); - - PropertiesCheck.validateProperties(configs, sType); - - final KafkaStreamsClient client = new KafkaStreamsClient(); - client.configure(configs); - - try { - Class<?> clazz = Class.forName(configs.getProperty(StreamConfig.CommonProperties.STREAM_CLASS.getValue())); - final Streaming streaming; - streaming = (Streaming) clazz.getDeclaredConstructor().newInstance(); - streaming.configure(configs); - streaming.stream(client); - } catch (ClassNotFoundException | IllegalAccessException | InstantiationException | ClassCastException - | InvocationTargetException | NoSuchMethodException e) { - String message = "Error while starting the stream"; - throw new StreamingException(message, e); - } - } - -} diff --git a/src/main/java/ch/cern/nile/common/StreamingApplication.java b/src/main/java/ch/cern/nile/common/StreamingApplication.java new file mode 100644 index 0000000..ff57de6 --- /dev/null +++ b/src/main/java/ch/cern/nile/common/StreamingApplication.java @@ -0,0 +1,77 @@ +package ch.cern.nile.common; + + +import java.io.IOException; +import java.lang.reflect.InvocationTargetException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Properties; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import ch.cern.nile.common.clients.KafkaStreamsClient; +import ch.cern.nile.common.configuration.PropertiesCheck; +import ch.cern.nile.common.configuration.StreamType; +import ch.cern.nile.common.configuration.properties.CommonProperties; +import ch.cern.nile.common.exceptions.StreamingException; +import ch.cern.nile.common.streams.Streaming; + +/** + * {@link StreamingApplication} is the entry point for initializing and starting a Kafka Streams application. + */ +public final class StreamingApplication { + + private static final Logger LOGGER = LoggerFactory.getLogger(StreamingApplication.class); + private static final int MIN_ARGS_LENGTH = 1; + + private StreamingApplication() { + } + + /** + * The main method for the StreamingApplication. It is the entry point of the application. + * + * @param args Command-line arguments, expecting the path to the properties file as the first argument. + * @throws IllegalArgumentException If the properties file path is not provided. + * @throws StreamingException If there are issues loading the properties file, validating properties, + * or starting the streaming process. 
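+     *
+     *                            <p>Invocation sketch (the jar name and config path below
+     *                            are hypothetical):
+     *                            <pre>{@code
+     * java -cp nile-common.jar ch.cern.nile.common.StreamingApplication /etc/nile/stream.properties
+     *                            }</pre>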
+ */ + public static void main(final String[] args) { + if (args.length < MIN_ARGS_LENGTH) { + throw new IllegalArgumentException("Properties file not passed"); + } + + final String configPath = args[0]; + final Properties configs = new Properties(); + try { + configs.load(Files.newInputStream(Paths.get(configPath))); + } catch (IOException e) { + final String message = "Error while loading the properties file"; + LOGGER.error(message, e); + throw new StreamingException(message, e); + } + + final StreamType sType = + StreamType.valueOf(configs.getProperty(CommonProperties.STREAM_TYPE.getValue(), null)); + + PropertiesCheck.validateProperties(configs, sType); + + final KafkaStreamsClient client = new KafkaStreamsClient(); + client.configure(configs); + + try { + final Class<?> clazz = + Class.forName(configs.getProperty(CommonProperties.STREAM_CLASS.getValue())); + final Streaming streaming; + streaming = (Streaming) clazz.getDeclaredConstructor().newInstance(); + streaming.configure(configs); + streaming.stream(client); + } catch (ClassNotFoundException | IllegalAccessException | InstantiationException | ClassCastException + | InvocationTargetException | NoSuchMethodException e) { + final String message = "Error while starting the stream"; + LOGGER.error(message, e); + throw new StreamingException(message, e); + } + } + +} diff --git a/src/main/java/ch/cern/nile/common/clients/KafkaStreamsClient.java b/src/main/java/ch/cern/nile/common/clients/KafkaStreamsClient.java index af07197..099b34d 100644 --- a/src/main/java/ch/cern/nile/common/clients/KafkaStreamsClient.java +++ b/src/main/java/ch/cern/nile/common/clients/KafkaStreamsClient.java @@ -13,8 +13,8 @@ import org.apache.kafka.streams.Topology; import org.apache.kafka.streams.errors.DefaultProductionExceptionHandler; import org.apache.kafka.streams.errors.LogAndContinueExceptionHandler; -import ch.cern.nile.common.configs.Configure; -import ch.cern.nile.common.configs.StreamConfig; +import ch.cern.nile.common.configuration.Configure; +import ch.cern.nile.common.configuration.properties.ClientProperties; import ch.cern.nile.common.exceptions.ReverseDnsLookupException; import ch.cern.nile.common.json.JsonSerde; @@ -23,38 +23,40 @@ import ch.cern.nile.common.json.JsonSerde; */ public class KafkaStreamsClient implements Configure { + private static final String TEST_CLUSTER_NAME = "test"; + private Properties properties; /** * Configures the KafkaStreams instance using the provided properties. 
* - * @param configs the properties to be used for the configuration + * @param props the properties to be used for the configuration */ @Override - public void configure(Properties configs) { - final String clientId = configs.getProperty(StreamConfig.ClientProperties.CLIENT_ID.getValue()); - properties = new Properties(); - properties.put(StreamsConfig.APPLICATION_ID_CONFIG, clientId); - properties.put(StreamsConfig.CLIENT_ID_CONFIG, clientId); + public void configure(final Properties props) { + final String clientId = props.getProperty(ClientProperties.CLIENT_ID.getValue()); + this.properties = new Properties(); + this.properties.put(StreamsConfig.APPLICATION_ID_CONFIG, clientId); + this.properties.put(StreamsConfig.CLIENT_ID_CONFIG, clientId); - String kafkaCluster = configs.getProperty(StreamConfig.ClientProperties.KAFKA_CLUSTER.getValue()); + final String kafkaCluster = props.getProperty(ClientProperties.KAFKA_CLUSTER.getValue()); - if (!kafkaCluster.equals("test")) { - properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, this.reverseDnsLookup(kafkaCluster)); - properties.put(StreamsConfig.SECURITY_PROTOCOL_CONFIG, "SASL_SSL"); - properties.put(SaslConfigs.SASL_MECHANISM, "GSSAPI"); - properties.put(SaslConfigs.SASL_KERBEROS_SERVICE_NAME, "kafka"); - properties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, - configs.getProperty(StreamConfig.ClientProperties.TRUSTSTORE_LOCATION.getValue())); + if (TEST_CLUSTER_NAME.equals(kafkaCluster)) { + this.properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, props.getProperty("bootstrap.servers")); } else { - properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, configs.getProperty("bootstrap.servers")); + this.properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, this.reverseDnsLookup(kafkaCluster)); + this.properties.put(StreamsConfig.SECURITY_PROTOCOL_CONFIG, "SASL_SSL"); + this.properties.put(SaslConfigs.SASL_MECHANISM, "GSSAPI"); + this.properties.put(SaslConfigs.SASL_KERBEROS_SERVICE_NAME, "kafka"); + this.properties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, + props.getProperty(ClientProperties.TRUSTSTORE_LOCATION.getValue())); } - properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); - properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class.getName()); - properties.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG, + this.properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); + this.properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, JsonSerde.class.getName()); + this.properties.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG, LogAndContinueExceptionHandler.class.getName()); - properties.put(StreamsConfig.DEFAULT_PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG, + this.properties.put(StreamsConfig.DEFAULT_PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG, DefaultProductionExceptionHandler.class.getName()); } @@ -64,7 +66,7 @@ public class KafkaStreamsClient implements Configure { * @param topology the topology to be used for the KafkaStreams instance * @return a configured KafkaStreams instance */ - public KafkaStreams create(Topology topology) { + public KafkaStreams create(final Topology topology) { return new KafkaStreams(topology, properties); } @@ -75,7 +77,7 @@ public class KafkaStreamsClient implements Configure { * @return a comma-separated list of hostnames with port 9093 * @throws RuntimeException if the hostname resolution fails */ - private String 
reverseDnsLookup(String kafkaCluster) { + private String reverseDnsLookup(final String kafkaCluster) { try { return performDnsLookup(kafkaCluster); } catch (UnknownHostException e) { @@ -85,20 +87,20 @@ public class KafkaStreamsClient implements Configure { } /** - * Perform the DNS lookup. This method can be overridden in tests. + * Performs the actual DNS lookup. * * @param kafkaCluster the domain of the Kafka cluster * @return a comma-separated list of hostnames with port 9093 * @throws UnknownHostException if the hostname resolution fails */ - protected String performDnsLookup(String kafkaCluster) throws UnknownHostException { - StringBuilder sb = new StringBuilder(); - InetAddress[] address = InetAddress.getAllByName(kafkaCluster); - for (InetAddress host : address) { + protected String performDnsLookup(final String kafkaCluster) throws UnknownHostException { + final StringBuilder stringBuilder = new StringBuilder(); + final InetAddress[] address = InetAddress.getAllByName(kafkaCluster); + for (final InetAddress host : address) { final String hostName = InetAddress.getByName(host.getHostAddress()).getHostName(); - sb.append(hostName).append(":9093,"); + stringBuilder.append(hostName).append(":9093,"); } - sb.deleteCharAt(sb.length() - 1); - return sb.toString(); + stringBuilder.deleteCharAt(stringBuilder.length() - 1); + return stringBuilder.toString(); } } diff --git a/src/main/java/ch/cern/nile/common/configs/Configure.java b/src/main/java/ch/cern/nile/common/configs/Configure.java deleted file mode 100644 index f520461..0000000 --- a/src/main/java/ch/cern/nile/common/configs/Configure.java +++ /dev/null @@ -1,10 +0,0 @@ -package ch.cern.nile.common.configs; - -import java.util.Properties; - -/** - * Interface for classes that can be configured with a Properties object. - */ -public interface Configure { - void configure(Properties configs); -} diff --git a/src/main/java/ch/cern/nile/common/configs/StreamConfig.java b/src/main/java/ch/cern/nile/common/configs/StreamConfig.java deleted file mode 100644 index f4da20f..0000000 --- a/src/main/java/ch/cern/nile/common/configs/StreamConfig.java +++ /dev/null @@ -1,96 +0,0 @@ -package ch.cern.nile.common.configs; - -import java.util.Arrays; -import java.util.Set; -import java.util.stream.Collectors; - -import lombok.Getter; - -/** - * A class containing enums representing various stream configuration property categories. 
- */ -public class StreamConfig { - - @Getter - public enum ClientProperties { - SOURCE_TOPIC("source.topic"), - KAFKA_CLUSTER("kafka.cluster"), - CLIENT_ID("client.id"), - TRUSTSTORE_LOCATION("truststore.location"); - - private final String value; - - ClientProperties(String value) { - this.value = value; - } - - public static Set<String> getValues() { - return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet()); - } - } - - @Getter - public enum CommonProperties { - STREAM_TYPE("stream.type"), - STREAM_CLASS("stream.class"); - - private final String value; - - CommonProperties(String value) { - this.value = value; - } - - public static Set<String> getValues() { - return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet()); - } - } - - - @Getter - public enum DecodingProperties { - SINK_TOPIC("sink.topic"); - - private final String value; - - DecodingProperties(String value) { - this.value = value; - } - - public static Set<String> getValues() { - return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet()); - } - } - - @Getter - public enum RoutingProperties { - ROUTING_CONFIG_PATH("routing.config.path"), - DLQ_TOPIC("dlq.topic"); - - private final String value; - - RoutingProperties(String value) { - this.value = value; - } - - public static Set<String> getValues() { - return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet()); - } - } - - @Getter - public enum EnrichmentProperties { - ENRICHMENT_CONFIG_PATH("enrichment.config.path"), - SINK_TOPIC("sink.topic"); - - private final String value; - - EnrichmentProperties(String value) { - this.value = value; - } - - public static Set<String> getValues() { - return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet()); - } - } - -} diff --git a/src/main/java/ch/cern/nile/common/configuration/Configure.java b/src/main/java/ch/cern/nile/common/configuration/Configure.java new file mode 100644 index 0000000..72c2784 --- /dev/null +++ b/src/main/java/ch/cern/nile/common/configuration/Configure.java @@ -0,0 +1,16 @@ +package ch.cern.nile.common.configuration; + +import java.util.Properties; + +/** + * Interface for classes that can be configured with a Properties object. + */ +public interface Configure { + + /** + * Configure this class. 
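+     *
+     * <p>Callers are expected to invoke this before any other operation, e.g.
+     * (sketch): {@code streaming.configure(props); streaming.stream(client);}.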
+ * + * @param properties Configuration properties + */ + void configure(Properties properties); +} diff --git a/src/main/java/ch/cern/nile/common/configs/PropertiesCheck.java b/src/main/java/ch/cern/nile/common/configuration/PropertiesCheck.java similarity index 68% rename from src/main/java/ch/cern/nile/common/configs/PropertiesCheck.java rename to src/main/java/ch/cern/nile/common/configuration/PropertiesCheck.java index 19f10c3..0504f99 100644 --- a/src/main/java/ch/cern/nile/common/configs/PropertiesCheck.java +++ b/src/main/java/ch/cern/nile/common/configuration/PropertiesCheck.java @@ -1,9 +1,14 @@ -package ch.cern.nile.common.configs; +package ch.cern.nile.common.configuration; import java.util.Objects; import java.util.Properties; import java.util.Set; +import ch.cern.nile.common.configuration.properties.ClientProperties; +import ch.cern.nile.common.configuration.properties.CommonProperties; +import ch.cern.nile.common.configuration.properties.DecodingProperties; +import ch.cern.nile.common.configuration.properties.EnrichmentProperties; +import ch.cern.nile.common.configuration.properties.RoutingProperties; import ch.cern.nile.common.exceptions.MissingPropertyException; import ch.cern.nile.common.exceptions.UnknownStreamTypeException; @@ -12,15 +17,15 @@ import ch.cern.nile.common.exceptions.UnknownStreamTypeException; */ public final class PropertiesCheck { + private static final Set<String> CLIENT_PROPERTIES = ClientProperties.getValues(); + private static final Set<String> COMMON_PROPERTIES = CommonProperties.getValues(); + private static final Set<String> DECODING_PROPERTIES = DecodingProperties.getValues(); + private static final Set<String> ROUTING_PROPERTIES = RoutingProperties.getValues(); + private static final Set<String> ENRICHMENT_PROPERTIES = EnrichmentProperties.getValues(); + private PropertiesCheck() { } - private static final Set<String> CLIENT_PROPERTIES = StreamConfig.ClientProperties.getValues(); - private static final Set<String> COMMON_PROPERTIES = StreamConfig.CommonProperties.getValues(); - private static final Set<String> DECODING_PROPERTIES = StreamConfig.DecodingProperties.getValues(); - private static final Set<String> ROUTING_PROPERTIES = StreamConfig.RoutingProperties.getValues(); - private static final Set<String> ENRICHMENT_PROPERTIES = StreamConfig.EnrichmentProperties.getValues(); - /** * Validates the properties file based on the type of stream. * @@ -29,7 +34,7 @@ public final class PropertiesCheck { * @throws MissingPropertyException if a required property is missing from the properties object. * @throws UnknownStreamTypeException if the stream type is unknown. */ - public static void validateProperties(Properties properties, StreamType streamType) { + public static void validateProperties(final Properties properties, final StreamType streamType) { Objects.requireNonNull(properties, "Properties object cannot be null"); Objects.requireNonNull(streamType, "Properties file is missing stream.type property"); @@ -59,11 +64,11 @@ public final class PropertiesCheck { * @param propsToCheck - set of required property keys. * @throws MissingPropertyException if a required property is missing from the properties object. 
*/ - private static void validateRequiredProperties(Properties props, Set<String> propsToCheck) { + private static void validateRequiredProperties(final Properties props, final Set<String> propsToCheck) { Objects.requireNonNull(props, "Properties object cannot be null"); Objects.requireNonNull(propsToCheck, "Properties to check cannot be null"); - for (String prop : propsToCheck) { + for (final String prop : propsToCheck) { if (!props.containsKey(prop)) { throw new MissingPropertyException(String.format("Properties file is missing: %s property.", prop)); } diff --git a/src/main/java/ch/cern/nile/common/configs/StreamType.java b/src/main/java/ch/cern/nile/common/configuration/StreamType.java similarity index 77% rename from src/main/java/ch/cern/nile/common/configs/StreamType.java rename to src/main/java/ch/cern/nile/common/configuration/StreamType.java index 7f9dfdc..5b14d01 100644 --- a/src/main/java/ch/cern/nile/common/configs/StreamType.java +++ b/src/main/java/ch/cern/nile/common/configuration/StreamType.java @@ -1,4 +1,4 @@ -package ch.cern.nile.common.configs; +package ch.cern.nile.common.configuration; /** * Enum representing the types of streams supported by the application. diff --git a/src/main/java/ch/cern/nile/common/configuration/properties/ClientProperties.java b/src/main/java/ch/cern/nile/common/configuration/properties/ClientProperties.java new file mode 100644 index 0000000..80b12cd --- /dev/null +++ b/src/main/java/ch/cern/nile/common/configuration/properties/ClientProperties.java @@ -0,0 +1,33 @@ +package ch.cern.nile.common.configuration.properties; + +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; + +import lombok.Getter; + +/** + * Enum representing Client properties. + */ +@Getter +public enum ClientProperties { + SOURCE_TOPIC("source.topic"), + KAFKA_CLUSTER("kafka.cluster"), + CLIENT_ID("client.id"), + TRUSTSTORE_LOCATION("truststore.location"); + + private final String value; + + ClientProperties(final String value) { + this.value = value; + } + + /** + * Get the values of all the enum constants. + * + * @return a set of the values of all the enum constants + */ + public static Set<String> getValues() { + return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet()); + } +} diff --git a/src/main/java/ch/cern/nile/common/configuration/properties/CommonProperties.java b/src/main/java/ch/cern/nile/common/configuration/properties/CommonProperties.java new file mode 100644 index 0000000..700bd18 --- /dev/null +++ b/src/main/java/ch/cern/nile/common/configuration/properties/CommonProperties.java @@ -0,0 +1,31 @@ +package ch.cern.nile.common.configuration.properties; + +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; + +import lombok.Getter; + +/** + * Enum representing Common properties. + */ +@Getter +public enum CommonProperties { + STREAM_TYPE("stream.type"), + STREAM_CLASS("stream.class"); + + private final String value; + + CommonProperties(final String value) { + this.value = value; + } + + /** + * Get the values of all the enum constants. 
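+     *
+     * <p>For this enum the result is {@code {"stream.type", "stream.class"}}.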
+ * + * @return a set of the values of all the enum constants + */ + public static Set<String> getValues() { + return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet()); + } +} diff --git a/src/main/java/ch/cern/nile/common/configuration/properties/DecodingProperties.java b/src/main/java/ch/cern/nile/common/configuration/properties/DecodingProperties.java new file mode 100644 index 0000000..17817c7 --- /dev/null +++ b/src/main/java/ch/cern/nile/common/configuration/properties/DecodingProperties.java @@ -0,0 +1,30 @@ +package ch.cern.nile.common.configuration.properties; + +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; + +import lombok.Getter; + +/** + * Enum representing Decoding properties. + */ +@Getter +public enum DecodingProperties { + SINK_TOPIC("sink.topic"); + + private final String value; + + DecodingProperties(final String value) { + this.value = value; + } + + /** + * Get the values of all the enum constants. + * + * @return a set of the values of all the enum constants + */ + public static Set<String> getValues() { + return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet()); + } +} diff --git a/src/main/java/ch/cern/nile/common/configuration/properties/EnrichmentProperties.java b/src/main/java/ch/cern/nile/common/configuration/properties/EnrichmentProperties.java new file mode 100644 index 0000000..489da1b --- /dev/null +++ b/src/main/java/ch/cern/nile/common/configuration/properties/EnrichmentProperties.java @@ -0,0 +1,31 @@ +package ch.cern.nile.common.configuration.properties; + +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; + +import lombok.Getter; + +/** + * Enum representing the Enrichment properties. + */ +@Getter +public enum EnrichmentProperties { + ENRICHMENT_CONFIG_PATH("enrichment.config.path"), + SINK_TOPIC("sink.topic"); + + private final String value; + + EnrichmentProperties(final String value) { + this.value = value; + } + + /** + * Get the values of all the enum constants. + * + * @return a set of the values of all the enum constants + */ + public static Set<String> getValues() { + return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet()); + } +} diff --git a/src/main/java/ch/cern/nile/common/configuration/properties/RoutingProperties.java b/src/main/java/ch/cern/nile/common/configuration/properties/RoutingProperties.java new file mode 100644 index 0000000..e6511e6 --- /dev/null +++ b/src/main/java/ch/cern/nile/common/configuration/properties/RoutingProperties.java @@ -0,0 +1,31 @@ +package ch.cern.nile.common.configuration.properties; + +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; + +import lombok.Getter; + +/** + * Enum representing the Routing properties. + */ +@Getter +public enum RoutingProperties { + ROUTING_CONFIG_PATH("routing.config.path"), + DLQ_TOPIC("dlq.topic"); + + private final String value; + + RoutingProperties(final String value) { + this.value = value; + } + + /** + * Get the values of all the enum constants. 
+ * + * @return a set of the values of all the enum constants + */ + public static Set<String> getValues() { + return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet()); + } +} diff --git a/src/main/java/ch/cern/nile/common/exceptions/DecodingException.java b/src/main/java/ch/cern/nile/common/exceptions/DecodingException.java index c24feaa..628c86a 100644 --- a/src/main/java/ch/cern/nile/common/exceptions/DecodingException.java +++ b/src/main/java/ch/cern/nile/common/exceptions/DecodingException.java @@ -2,11 +2,13 @@ package ch.cern.nile.common.exceptions; public class DecodingException extends RuntimeException { - public DecodingException(String message, Throwable cause) { + private static final long serialVersionUID = 1L; + + public DecodingException(final String message, final Throwable cause) { super(message, cause); } - public DecodingException(String message) { + public DecodingException(final String message) { super(message); } diff --git a/src/main/java/ch/cern/nile/common/exceptions/HealthProbeException.java b/src/main/java/ch/cern/nile/common/exceptions/HealthProbeException.java new file mode 100644 index 0000000..7cca9b2 --- /dev/null +++ b/src/main/java/ch/cern/nile/common/exceptions/HealthProbeException.java @@ -0,0 +1,11 @@ +package ch.cern.nile.common.exceptions; + +public class HealthProbeException extends RuntimeException { + + private static final long serialVersionUID = 1L; + + public HealthProbeException(final String message, final Throwable cause) { + super(message, cause); + } + +} diff --git a/src/main/java/ch/cern/nile/common/exceptions/MissingPropertyException.java b/src/main/java/ch/cern/nile/common/exceptions/MissingPropertyException.java index 41d6006..380bfda 100644 --- a/src/main/java/ch/cern/nile/common/exceptions/MissingPropertyException.java +++ b/src/main/java/ch/cern/nile/common/exceptions/MissingPropertyException.java @@ -2,7 +2,9 @@ package ch.cern.nile.common.exceptions; public class MissingPropertyException extends RuntimeException { - public MissingPropertyException(String message) { + private static final long serialVersionUID = 1L; + + public MissingPropertyException(final String message) { super(message); } diff --git a/src/main/java/ch/cern/nile/common/exceptions/ReverseDnsLookupException.java b/src/main/java/ch/cern/nile/common/exceptions/ReverseDnsLookupException.java index b104a0a..fd31391 100644 --- a/src/main/java/ch/cern/nile/common/exceptions/ReverseDnsLookupException.java +++ b/src/main/java/ch/cern/nile/common/exceptions/ReverseDnsLookupException.java @@ -2,7 +2,9 @@ package ch.cern.nile.common.exceptions; public class ReverseDnsLookupException extends RuntimeException { - public ReverseDnsLookupException(String message, Throwable cause) { + private static final long serialVersionUID = 1L; + + public ReverseDnsLookupException(final String message, final Throwable cause) { super(message, cause); } diff --git a/src/main/java/ch/cern/nile/common/exceptions/StreamingException.java b/src/main/java/ch/cern/nile/common/exceptions/StreamingException.java index aa9ecdd..0bc33ae 100644 --- a/src/main/java/ch/cern/nile/common/exceptions/StreamingException.java +++ b/src/main/java/ch/cern/nile/common/exceptions/StreamingException.java @@ -2,11 +2,13 @@ package ch.cern.nile.common.exceptions; public class StreamingException extends RuntimeException { - public StreamingException(Throwable cause) { + private static final long serialVersionUID = 1L; + + public StreamingException(final Throwable cause) { super(cause); } - public 
StreamingException(String message, Throwable cause) { + public StreamingException(final String message, final Throwable cause) { super(message, cause); } } diff --git a/src/main/java/ch/cern/nile/common/exceptions/UnknownStreamTypeException.java b/src/main/java/ch/cern/nile/common/exceptions/UnknownStreamTypeException.java index 1748730..6fb32f5 100644 --- a/src/main/java/ch/cern/nile/common/exceptions/UnknownStreamTypeException.java +++ b/src/main/java/ch/cern/nile/common/exceptions/UnknownStreamTypeException.java @@ -2,7 +2,9 @@ package ch.cern.nile.common.exceptions; public class UnknownStreamTypeException extends RuntimeException { - public UnknownStreamTypeException(String message) { + private static final long serialVersionUID = 1L; + + public UnknownStreamTypeException(final String message) { super(message); } diff --git a/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java b/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java index 6e36502..3d35381 100644 --- a/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java +++ b/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java @@ -7,27 +7,26 @@ import com.google.gson.Gson; import org.apache.kafka.common.serialization.Deserializer; +/** + * Deserializer for JSON POJOs. + * + * @param <T> Type of the POJO to be deserialized + */ public class JsonPojoDeserializer<T> implements Deserializer<T> { - private static final Gson gson = new Gson(); + private static final Gson GSON = new Gson(); /** * Class type for the deserialization. */ private Class<T> tClass; - /** - * Default constructor needed by Kafka. - */ - public JsonPojoDeserializer() { - } - /** * Constructor for the deserializer. * * @param clazz Class type for the deserialization */ - JsonPojoDeserializer(Class<T> clazz) { + JsonPojoDeserializer(final Class<T> clazz) { this.tClass = clazz; } @@ -39,7 +38,7 @@ public class JsonPojoDeserializer<T> implements Deserializer<T> { */ @Override @SuppressWarnings("unchecked") - public void configure(Map<String, ?> props, boolean isKey) { + public void configure(final Map<String, ?> props, final boolean isKey) { if (tClass == null) { tClass = (Class<T>) props.get("JsonPOJOClass"); } @@ -53,11 +52,12 @@ public class JsonPojoDeserializer<T> implements Deserializer<T> { * @return The deserialized object of type T or null if the byte array is null */ @Override - public T deserialize(String topic, byte[] bytes) { - if (bytes == null) { - return null; + public T deserialize(final String topic, final byte[] bytes) { + T deserializedData = null; + if (bytes != null) { + deserializedData = GSON.fromJson(new String(bytes, StandardCharsets.UTF_8), tClass); } - return gson.fromJson(new String(bytes, StandardCharsets.UTF_8), tClass); + return deserializedData; } /** @@ -65,6 +65,7 @@ public class JsonPojoDeserializer<T> implements Deserializer<T> { */ @Override public void close() { + // Nothing to do } } diff --git a/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java b/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java index 08ed1d4..075f77b 100644 --- a/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java +++ b/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java @@ -7,15 +7,14 @@ import com.google.gson.Gson; import org.apache.kafka.common.serialization.Serializer; +/** + * Serializer for JSON POJOs. 
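+ *
+ * <p>Usage sketch (the topic name and POJO instance are illustrative):
+ * <pre>{@code
+ * try (JsonPojoSerializer<Application> serializer = new JsonPojoSerializer<>()) {
+ *     final byte[] bytes = serializer.serialize("my-topic", application);
+ * }
+ * }</pre>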
+ * + * @param <T> Type of the POJO to be serialized + */ public class JsonPojoSerializer<T> implements Serializer<T> { - private static final Gson gson = new Gson(); - - /** - * Default constructor needed by Kafka. - */ - public JsonPojoSerializer() { - } + private static final Gson GSON = new Gson(); /** * Needed due to the implementation of the Serializer interface. @@ -24,7 +23,8 @@ public class JsonPojoSerializer<T> implements Serializer<T> { * @param isKey Ignored */ @Override - public void configure(Map<String, ?> props, boolean isKey) { + public void configure(final Map<String, ?> props, final boolean isKey) { + // Nothing to do } /** @@ -35,11 +35,12 @@ public class JsonPojoSerializer<T> implements Serializer<T> { * @return The serialized data as bytes or null if the data is null */ @Override - public byte[] serialize(String topic, T data) { - if (data == null) { - return null; + public byte[] serialize(final String topic, final T data) { + byte[] serializedData = null; + if (data != null) { + serializedData = GSON.toJson(data).getBytes(StandardCharsets.UTF_8); } - return gson.toJson(data).getBytes(StandardCharsets.UTF_8); + return serializedData; } /** @@ -47,6 +48,7 @@ public class JsonPojoSerializer<T> implements Serializer<T> { */ @Override public void close() { + // Nothing to do } } diff --git a/src/main/java/ch/cern/nile/common/json/JsonSerde.java b/src/main/java/ch/cern/nile/common/json/JsonSerde.java index 5e1fdfc..10d82f0 100644 --- a/src/main/java/ch/cern/nile/common/json/JsonSerde.java +++ b/src/main/java/ch/cern/nile/common/json/JsonSerde.java @@ -13,8 +13,8 @@ import org.apache.kafka.common.serialization.Serializer; */ public class JsonSerde implements Serde<JsonObject> { - private final JsonPojoSerializer<JsonObject> serializer = new JsonPojoSerializer<>(); - private final JsonPojoDeserializer<JsonObject> deserializer = new JsonPojoDeserializer<>(JsonObject.class); + private final JsonPojoSerializer<JsonObject> jsonSerializer = new JsonPojoSerializer<>(); + private final JsonPojoDeserializer<JsonObject> jsonDeserializer = new JsonPojoDeserializer<>(JsonObject.class); /** * Configure this class. 
@@ -23,9 +23,9 @@ public class JsonSerde implements Serde<JsonObject> { * @param isKey Ignored */ @Override - public void configure(Map<String, ?> configs, boolean isKey) { - serializer.configure(configs, isKey); - deserializer.configure(configs, isKey); + public void configure(final Map<String, ?> configs, final boolean isKey) { + jsonSerializer.configure(configs, isKey); + jsonDeserializer.configure(configs, isKey); } /** @@ -33,8 +33,8 @@ public class JsonSerde implements Serde<JsonObject> { */ @Override public void close() { - serializer.close(); - deserializer.close(); + jsonSerializer.close(); + jsonDeserializer.close(); } /** @@ -44,7 +44,7 @@ public class JsonSerde implements Serde<JsonObject> { */ @Override public Serializer<JsonObject> serializer() { - return serializer; + return jsonSerializer; } /** @@ -54,6 +54,6 @@ public class JsonSerde implements Serde<JsonObject> { */ @Override public Deserializer<JsonObject> deserializer() { - return deserializer; + return jsonDeserializer; } } diff --git a/src/main/java/ch/cern/nile/common/models/Application.java b/src/main/java/ch/cern/nile/common/models/Application.java index ed573ca..fcede13 100644 --- a/src/main/java/ch/cern/nile/common/models/Application.java +++ b/src/main/java/ch/cern/nile/common/models/Application.java @@ -5,6 +5,9 @@ import lombok.NoArgsConstructor; import lombok.Setter; import lombok.ToString; +/** + * Application model. + */ @NoArgsConstructor @Getter @Setter diff --git a/src/main/java/ch/cern/nile/common/models/Topic.java b/src/main/java/ch/cern/nile/common/models/Topic.java index cfe06db..2807e7b 100644 --- a/src/main/java/ch/cern/nile/common/models/Topic.java +++ b/src/main/java/ch/cern/nile/common/models/Topic.java @@ -5,6 +5,9 @@ import lombok.NoArgsConstructor; import lombok.Setter; import lombok.ToString; +/** + * Topic model. + */ @NoArgsConstructor @Getter @Setter diff --git a/src/main/java/ch/cern/nile/common/probes/Health.java b/src/main/java/ch/cern/nile/common/probes/Health.java index 5dff849..3d60006 100644 --- a/src/main/java/ch/cern/nile/common/probes/Health.java +++ b/src/main/java/ch/cern/nile/common/probes/Health.java @@ -7,14 +7,16 @@ import com.sun.net.httpserver.HttpServer; import org.apache.kafka.streams.KafkaStreams; +import ch.cern.nile.common.exceptions.HealthProbeException; + /** * A simple HTTP server that responds to health checks with a 200 if the KafkaStreams instance is running, * or a 500 if it is not running. 
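+ *
+ * <p>Usage sketch: {@code new Health(streams).start();} exposes
+ * {@code GET http://localhost:8899/health} while the application runs.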
*/ public class Health { - private static final int OK = 200; - private static final int ERROR = 500; + private static final int OK_RESPONSE = 200; + private static final int ERROR_RESPONSE = 500; private static final int PORT = 8899; private final KafkaStreams streams; @@ -26,7 +28,7 @@ public class Health { * * @param streams the KafkaStreams instance to check the state of */ - public Health(KafkaStreams streams) { + public Health(final KafkaStreams streams) { this(streams, new DefaultHttpServerFactory()); } @@ -36,7 +38,7 @@ public class Health { * @param streams the KafkaStreams instance to check the state of * @param httpServerFactory the factory to use to create the HttpServer instance */ - public Health(KafkaStreams streams, HttpServerFactory httpServerFactory) { + public Health(final KafkaStreams streams, final HttpServerFactory httpServerFactory) { this.streams = streams; this.httpServerFactory = httpServerFactory; } @@ -47,11 +49,11 @@ public class Health { public void start() { try { server = httpServerFactory.createHttpServer(new InetSocketAddress(PORT), 0); - } catch (IOException ioe) { - throw new RuntimeException("Could not setup http server: ", ioe); + } catch (IOException ex) { + throw new HealthProbeException("Failed to create HTTP server", ex); } server.createContext("/health", exchange -> { - int responseCode = streams.state().isRunning() ? OK : ERROR; + final int responseCode = streams.state().isRunning() ? OK_RESPONSE : ERROR_RESPONSE; exchange.sendResponseHeaders(responseCode, 0); exchange.close(); }); @@ -65,9 +67,12 @@ public class Health { server.stop(0); } - private static class DefaultHttpServerFactory implements HttpServerFactory { + /** + * The default HttpServerFactory implementation. + */ + private static final class DefaultHttpServerFactory implements HttpServerFactory { @Override - public HttpServer createHttpServer(InetSocketAddress address, int backlog) throws IOException { + public HttpServer createHttpServer(final InetSocketAddress address, final int backlog) throws IOException { return HttpServer.create(address, backlog); } } diff --git a/src/main/java/ch/cern/nile/common/probes/HttpServerFactory.java b/src/main/java/ch/cern/nile/common/probes/HttpServerFactory.java index 4744b2e..65fbc42 100644 --- a/src/main/java/ch/cern/nile/common/probes/HttpServerFactory.java +++ b/src/main/java/ch/cern/nile/common/probes/HttpServerFactory.java @@ -9,5 +9,14 @@ import com.sun.net.httpserver.HttpServer; * Factory for creating HttpServer instances. Used to allow mocking of HttpServer in tests. */ public interface HttpServerFactory { + + /** + * Creates a new HttpServer instance. + * + * @param address the address to bind the server to + * @param backlog the maximum number of pending connections + * @return the HttpServer instance + * @throws IOException if an I/O error occurs when creating the HttpServer + */ HttpServer createHttpServer(InetSocketAddress address, int backlog) throws IOException; } diff --git a/src/main/java/ch/cern/nile/common/schema/JsonType.java b/src/main/java/ch/cern/nile/common/schema/JsonType.java index 828f423..7c7f36e 100644 --- a/src/main/java/ch/cern/nile/common/schema/JsonType.java +++ b/src/main/java/ch/cern/nile/common/schema/JsonType.java @@ -4,6 +4,9 @@ import java.util.Date; import lombok.Getter; +/** + * Enum for JSON types for Connect schema(s). 
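+ * <p>For example, {@code JsonType.fromClass(Byte.class)} maps to the Connect type {@code "int8"}.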
+ */ @Getter enum JsonType { BYTE(Byte.class, "int8"), @@ -20,13 +23,25 @@ enum JsonType { private final Class<?> clazz; private final String type; - JsonType(Class<?> clazz, String type) { + /** + * Constructor. + * + * @param clazz Class for the {@link JsonType} + * @param type Type for the {@link JsonType} + */ + JsonType(final Class<?> clazz, final String type) { this.clazz = clazz; this.type = type; } - public static JsonType fromClass(Class<?> clazz) { - for (JsonType jsonType : JsonType.values()) { + /** + * Returns the {@link JsonType} for the given class. + * + * @param clazz Class to get the {@link JsonType} for + * @return {@link JsonType} for the given class + */ + static JsonType fromClass(final Class<?> clazz) { + for (final JsonType jsonType : values()) { if (jsonType.getClazz().equals(clazz)) { return jsonType; } diff --git a/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java b/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java index c48f319..724d120 100644 --- a/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java +++ b/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java @@ -2,9 +2,13 @@ package ch.cern.nile.common.schema; import java.util.Date; import java.util.HashMap; +import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; +/** + * Injects a Connect schema into the given data. + */ public final class SchemaInjector { private SchemaInjector() { @@ -16,56 +20,57 @@ public final class SchemaInjector { * @param data Data to inject the schema into * @return Data with the schema injected */ - public static Map<String, Object> inject(Map<String, Object> data) { - Map<String, Object> dataCopy = new HashMap<>(data); - Map<String, Object> schemaMap = generateSchemaMap(dataCopy); + public static Map<String, Object> inject(final Map<String, Object> data) { + final Map<String, Object> dataCopy = new HashMap<>(data); + final Map<String, Object> schemaMap = generateSchemaMap(dataCopy); - Map<String, Object> result = new HashMap<>(); + final Map<String, Object> result = new HashMap<>(); result.put("schema", schemaMap); result.put("payload", dataCopy); return result; } - private static Map<String, Object> generateSchemaMap(Map<String, Object> data) { - Map<String, Object> schemaMap = new HashMap<>(); + private static Map<String, Object> generateSchemaMap(final Map<String, Object> data) { + final Map<String, Object> schemaMap = new HashMap<>(); schemaMap.put("type", "struct"); schemaMap.put("fields", generateFieldMaps(data)); return schemaMap; } - private static Iterable<Map<String, Object>> generateFieldMaps(Map<String, Object> data) { + private static Iterable<Map<String, Object>> generateFieldMaps(final Map<String, Object> data) { return data.entrySet().stream().map(SchemaInjector::generateFieldMap).collect(Collectors.toList()); } - private static Map<String, Object> generateFieldMap(Map.Entry<String, Object> entry) { - Map<String, Object> fieldMap = new HashMap<>(); - String key = entry.getKey(); - Object value = entry.getValue(); + private static Map<String, Object> generateFieldMap(final Map.Entry<String, Object> entry) { + final Map<String, Object> fieldMap = new HashMap<>(); + final String key = entry.getKey(); + final Object value = entry.getValue(); validateValue(value); - JsonType type = JsonType.fromClass(value.getClass()); + final JsonType type = JsonType.fromClass(value.getClass()); fieldMap.put("field", key); fieldMap.put("type", type.getType()); - fieldMap.put("optional", 
!key.toLowerCase().contains("timestamp")); + fieldMap.put("optional", !key.toLowerCase(Locale.ENGLISH).contains("timestamp")); addTimestampAndDateFields(fieldMap, key, type); return fieldMap; } - private static void validateValue(Object value) { + private static void validateValue(final Object value) { if (value == null) { throw new IllegalArgumentException("Null values are not allowed in the data map."); } } - private static void addTimestampAndDateFields(Map<String, Object> fieldMap, String key, JsonType type) { - boolean isTimestampField = key.toLowerCase().contains("timestamp"); - boolean isDateType = type.getClazz().equals(Date.class); + private static void addTimestampAndDateFields(final Map<String, Object> fieldMap, final String key, + final JsonType type) { + final boolean isTimestampField = key.toLowerCase(Locale.ENGLISH).contains("timestamp"); + final boolean isDateType = type.getClazz().equals(Date.class); if (isTimestampField) { fieldMap.put("name", "org.apache.kafka.connect.data.Timestamp"); diff --git a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java index 152fe05..094054e 100644 --- a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java +++ b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java @@ -1,38 +1,40 @@ package ch.cern.nile.common.streams; -import java.time.DateTimeException; -import java.time.Instant; -import java.util.List; -import java.util.Map; import java.util.Properties; import java.util.concurrent.CountDownLatch; -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; - import org.apache.kafka.streams.KafkaStreams; import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.Topology; -import org.apache.kafka.streams.kstream.ValueTransformer; -import org.apache.kafka.streams.processor.ProcessorContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ch.cern.nile.common.clients.KafkaStreamsClient; -import ch.cern.nile.common.configs.StreamConfig; +import ch.cern.nile.common.configuration.properties.ClientProperties; +import ch.cern.nile.common.configuration.properties.DecodingProperties; +import ch.cern.nile.common.exceptions.StreamingException; import ch.cern.nile.common.probes.Health; import lombok.Getter; import lombok.Setter; +/** + * AbstractStream is an abstract class implementing the {@link Streaming} interface, providing a framework + * for building and managing Kafka Streams applications. It encapsulates common functionality such + * as configuring the stream, handling its lifecycle, and managing health checks. Implementations of + * this class should provide the specific logic for creating the Kafka Streams topology by overriding + * the createTopology method. + * <p> + * Usage: + * Subclasses should implement the {@link AbstractStream#createTopology(StreamsBuilder) CreateTopology} + * method to define their specific processing topology. + * They can then use this abstract class to handle common streaming functionalities + * such as starting, stopping, and monitoring the Kafka Streams application. 
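+ * <p>
+ * Minimal subclass sketch (topic names and topology are illustrative only):
+ * <pre>{@code
+ * public final class PassThroughStream extends AbstractStream {
+ *     @Override
+ *     protected void createTopology(final StreamsBuilder builder) {
+ *         builder.<String, JsonObject>stream("source-topic").to("sink-topic");
+ *     }
+ * }
+ * }</pre>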
+ */ public abstract class AbstractStream implements Streaming { private static final Logger LOGGER = LoggerFactory.getLogger(AbstractStream.class); - @Getter - private KafkaStreams streams; - @Getter @Setter private String sourceTopic; @@ -41,31 +43,35 @@ public abstract class AbstractStream implements Streaming { @Setter private String sinkTopic; - @Getter @Setter private long lastReadOffset = -2; - private Properties configs; + private Properties properties; + private KafkaStreams streams; private Health health; private CountDownLatch latch; /** - * Configure the stream with the given properties. + * Configures the Kafka Streams application with provided settings. * - * @param configs the properties to configure the stream with + * @param configs the configuration settings for the Kafka Streams application */ @Override - @SuppressWarnings("HiddenField") public void configure(final Properties configs) { - this.configs = configs; + this.properties = configs; } /** - * Start the stream. + * Starts the Kafka Streams application using the provided KafkaStreamsClient. + * <p> + * Initializes and manages the Kafka Streams application lifecycle, including graceful shutdown. + * Note: This method terminates the JVM upon completion. * - * @param kafkaStreamsClient the client to use to create the stream + * @param kafkaStreamsClient the client used to create and manage the Kafka Streams instance + * @throws StreamingException if an error occurs during streaming */ @Override + @SuppressWarnings("PMD.DoNotTerminateVM") public void stream(final KafkaStreamsClient kafkaStreamsClient) { init(kafkaStreamsClient); Runtime.getRuntime().addShutdownHook(new Thread(this::shutDown, "streams-shutdown-hook")); @@ -74,99 +80,32 @@ public abstract class AbstractStream implements Streaming { } /** - * Get a property from the stream's configuration. + * Implement this method to define the topology of the Kafka Streams application. * - * @param key the key of the property to get - * @return the value of the property + * @param builder the {@link StreamsBuilder} to use to create the topology */ - public String getProperty(final String key) { - return configs.getProperty(key); - } - - protected static void addTimestamp(final JsonArray gatewayInfo, final Map<String, Object> map) - throws DateTimeException { - final String timestampKey = "timestamp"; - final String timeKey = "time"; - - for (JsonElement element : gatewayInfo) { - if (element.isJsonObject()) { - JsonObject entry = element.getAsJsonObject(); - if (entry.has(timeKey)) { - map.put(timestampKey, Instant.parse(entry.get(timeKey).getAsString()).toEpochMilli()); - break; - } - } - } - if (!map.containsKey(timestampKey)) { - throw new DateTimeException( - String.format("No '%s' field found in gateway info (dropping the message): %s", timeKey, - gatewayInfo)); - } - } + protected abstract void createTopology(StreamsBuilder builder); /** - * Filter out records that are null. + * Use this method to log any exceptions that occur while streaming. * - * @param key is ignored - * @param value the value - * @return true if the record is not null, false otherwise + * @param exception the exception to log */ - protected static boolean filterNull(final String key, final Object value) { - return value != null; - } - - /** - * Filter out records that are empty. 
-     *
-     * @param key is ignored
-     * @param value the value
-     * @return true if the record is not empty, false otherwise
-     */
-    protected static boolean filterEmpty(final String key, final Object value) {
-        if (value instanceof List) {
-            return !((List<?>) value).isEmpty();
-        } else if (value instanceof Map) {
-            return !((Map<?, ?>) value).isEmpty();
+    protected void logStreamsException(final Exception exception) {
+        if (LOGGER.isWarnEnabled()) {
+            LOGGER.warn(
+                    String.format("Error reading from topic %s. Last read offset %s:", sourceTopic, lastReadOffset),
+                    exception);
         }
-        return false;
-    }
-
-    /**
-     * Filter out records that do not have the required fields.
-     *
-     * @param key the key
-     * @param value the value
-     * @return true if the record has the required fields, false otherwise
-     */
-    protected boolean filterRecord(String key, JsonObject value) {
-        return value != null && value.get("applicationID") != null && value.get("applicationName") != null
-                && value.get("deviceName") != null && value.get("devEUI") != null
-                && value.get("data") != null;
-    }
-
-    /**
-     * Log an exception that occurred while reading from the source topic.
-     *
-     * @param exception the exception
-     */
-    protected void logStreamsException(Exception exception) {
-        LOGGER.warn("Error reading from topic {}. Last read offset {}", sourceTopic, lastReadOffset, exception);
-        if (streams != null) {
-            LOGGER.info("Streams state is: {}", streams.state().toString());
+        if (streams != null && LOGGER.isInfoEnabled()) {
+            LOGGER.info("Current state of streams: {}", streams.state());
         }
     }
 
-    /**
-     * Implement this method to create the topology.
-     *
-     * @param builder the streams builder
-     */
-    public abstract void createTopology(StreamsBuilder builder);
-
-    private void init(KafkaStreamsClient kafkaStreamsClient) {
+    private void init(final KafkaStreamsClient kafkaStreamsClient) {
         final StreamsBuilder builder = new StreamsBuilder();
-        sourceTopic = configs.getProperty(StreamConfig.ClientProperties.SOURCE_TOPIC.getValue());
-        sinkTopic = configs.getProperty(StreamConfig.DecodingProperties.SINK_TOPIC.getValue());
+        sourceTopic = properties.getProperty(ClientProperties.SOURCE_TOPIC.getValue());
+        sinkTopic = properties.getProperty(DecodingProperties.SINK_TOPIC.getValue());
         createTopology(builder);
         final Topology topology = builder.build();
         streams = kafkaStreamsClient.create(topology);
@@ -176,14 +115,15 @@
 
     private void start() {
         LOGGER.info("Starting streams...");
+        streams.start();
+        health.start();
         try {
-            streams.start();
-            health.start();
             latch.await();
-        } catch (Exception e) {
-            LOGGER.error("Could not start streams.", e);
-            System.exit(1);
+        } catch (InterruptedException e) {
+            LOGGER.error("Error while waiting for latch", e);
+            throw new StreamingException("Error while waiting for latch", e);
         }
+
     }
 
     private void shutDown() {
@@ -193,37 +133,4 @@
         latch.countDown();
     }
 
-    public static class InjectOffsetTransformer implements ValueTransformer<JsonObject, JsonObject> {
-
-        private ProcessorContext context;
-
-        /**
-         * Initialize this transformer.
-         *
-         * @param context the context of this processor
-         */
-        @Override
-        @SuppressWarnings("HiddenField")
-        public void init(final ProcessorContext context) {
-            this.context = context;
-        }
-
-        /**
-         * Transform the given value.
- * - * @param value the value to be transformed - * @return the transformed value - */ - @Override - public JsonObject transform(final JsonObject value) { - value.addProperty("offset", context.offset()); - return value; - } - - @Override - public void close() { - } - - } - } diff --git a/src/main/java/ch/cern/nile/common/streams/InjectOffsetTransformer.java b/src/main/java/ch/cern/nile/common/streams/InjectOffsetTransformer.java new file mode 100644 index 0000000..4733c46 --- /dev/null +++ b/src/main/java/ch/cern/nile/common/streams/InjectOffsetTransformer.java @@ -0,0 +1,64 @@ +package ch.cern.nile.common.streams; + +import com.google.gson.JsonObject; + +import org.apache.kafka.streams.kstream.ValueTransformer; +import org.apache.kafka.streams.processor.ProcessorContext; + + +/** + * The {@link InjectOffsetTransformer} is a Kafka Streams ValueTransformer that enriches each input JsonObject + * with the offset information of the current record being processed. This transformer is typically used + * in Kafka Streams topologies where tracking the position of records within a Kafka topic is necessary. + * <p> + * Usage: + * This transformer should be used within a Kafka Streams topology, typically in a transformValues() operation. + * It adds an "offset" property to the input JsonObject, which contains the offset value of the record + * in the source topic. The enhanced JsonObject can then be further processed in the stream. + * <p> + * Example: + * <pre>{@code + * StreamsBuilder builder = new StreamsBuilder(); + * builder.<String, JsonObject>stream("source-topic") + * .transformValues(InjectOffsetTransformer::new) + * .to("sink-topic"); + * }</pre> + */ +public class InjectOffsetTransformer implements ValueTransformer<JsonObject, JsonObject> { + + private ProcessorContext context; + + /** + * Initialize the transformer with the given processor context. This method is called when the + * transformer is instantiated and provides access to the ProcessorContext for retrieving metadata + * such as the record's offset. + * + * @param processorContext the processor context provided by the Kafka Streams framework + */ + @Override + public void init(final ProcessorContext processorContext) { + this.context = processorContext; + } + + /** + * Transform the input JsonObject by injecting the current record's offset. The offset is added as + * a property to the JsonObject, enabling downstream processors to access this metadata. + * + * @param value the input JsonObject to be transformed + * @return the transformed JsonObject with the offset property added + */ + @Override + public JsonObject transform(final JsonObject value) { + value.addProperty("offset", context.offset()); + return value; + } + + /** + * Close the transformer. + */ + @Override + public void close() { + // Nothing to do + } + +} diff --git a/src/main/java/ch/cern/nile/common/streams/StreamUtils.java b/src/main/java/ch/cern/nile/common/streams/StreamUtils.java new file mode 100644 index 0000000..09a7dbe --- /dev/null +++ b/src/main/java/ch/cern/nile/common/streams/StreamUtils.java @@ -0,0 +1,106 @@ +package ch.cern.nile.common.streams; + +import java.time.Instant; +import java.util.List; +import java.util.Map; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; + +import ch.cern.nile.common.exceptions.DecodingException; + +/** + * {@link StreamUtils} is a utility class providing static methods to assist in stream processing. 
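+ * <p>
+ * For example, the filter helpers can be plugged straight into a topology as predicates
+ * (topic names are illustrative):
+ * <pre>{@code
+ * builder.<String, JsonObject>stream("source-topic")
+ *         .filter(StreamUtils::filterNull)
+ *         .filter(StreamUtils::filterRecord)
+ *         .to("sink-topic");
+ * }</pre>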
+ */ +public final class StreamUtils { + + private StreamUtils() { + } + + /** + * Adds the most recent timestamp found in the gatewayInfo JsonArray to the provided map. + * The timestamp is added as an epoch millisecond value under the key "timestamp". + * + * @param gatewayInfo the JsonArray containing gateway information, each entry expected to + * have a "time" field with an ISO-8601 formatted timestamp + * @param map the map to which the most recent timestamp will be added + * @throws DecodingException if no valid timestamp is found in the gatewayInfo + */ + public static void addTimestamp(final JsonArray gatewayInfo, final Map<String, Object> map) { + final String timeKey = "time"; + + Instant mostRecentTimestamp = null; + for (final JsonElement element : gatewayInfo) { + if (!element.isJsonObject()) { + continue; + } + + final JsonObject entry = element.getAsJsonObject(); + if (!entry.has(timeKey)) { + continue; + } + + final Instant currentTimestamp = Instant.parse(entry.get(timeKey).getAsString()); + if (mostRecentTimestamp == null || currentTimestamp.isAfter(mostRecentTimestamp)) { + mostRecentTimestamp = currentTimestamp; + } + } + + if (mostRecentTimestamp == null) { + throw new DecodingException("No timestamp found in gateway info."); + } + + map.put("timestamp", mostRecentTimestamp.toEpochMilli()); + } + + /** + * Filters out null values. + * + * @param ignored ignored parameter (unused in current implementation) + * @param value the value to be checked for null + * @return true if the value is not null, false otherwise + */ + public static boolean filterNull(final String ignored, final Object value) { + return value != null; + } + + /** + * Filters out empty lists and maps. + * Returns true if the value is neither an empty list nor an empty map, otherwise false. + * <p> + * This method is useful in stream processing scenarios where empty collections (lists or maps) are considered + * irrelevant or need to be filtered out. + * + * @param ignored ignored parameter (unused in current implementation) + * @param value the value to be checked, expected to be a List or Map + * @return true if the value is not an empty list or map, false otherwise + */ + public static boolean filterEmpty(final String ignored, final Object value) { + boolean isNotEmpty = true; + + if (value instanceof List) { + isNotEmpty = !((List<?>) value).isEmpty(); + } else if (value instanceof Map) { + isNotEmpty = !((Map<?, ?>) value).isEmpty(); + } + + return isNotEmpty; + } + + /** + * Filters records based on the presence of required fields in a JsonObject. + * Returns true if all required fields ("applicationID", "applicationName", "deviceName", + * "devEUI", and "data") are present, otherwise false. 
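+ * <p>
+ * For instance, a record like the following passes the filter (field values are illustrative):
+ * <pre>{@code
+ * {"applicationID": "1", "applicationName": "my-app", "deviceName": "my-device",
+ *  "devEUI": "0102030405060708", "data": "AQID"}
+ * }</pre>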
+ * + * @param ignored ignored parameter (unused in current implementation) + * @param value the JsonObject to be checked for required fields + * @return true if all required fields are present, false otherwise + */ + public static boolean filterRecord(final String ignored, final JsonObject value) { + return value != null && value.get("applicationID") != null && value.get("applicationName") != null + && value.get("deviceName") != null && value.get("devEUI") != null + && value.get("data") != null; + } + +} diff --git a/src/main/java/ch/cern/nile/common/streams/Streaming.java b/src/main/java/ch/cern/nile/common/streams/Streaming.java index 36fb7a3..aefd2c2 100644 --- a/src/main/java/ch/cern/nile/common/streams/Streaming.java +++ b/src/main/java/ch/cern/nile/common/streams/Streaming.java @@ -1,10 +1,23 @@ package ch.cern.nile.common.streams; import ch.cern.nile.common.clients.KafkaStreamsClient; -import ch.cern.nile.common.configs.Configure; +import ch.cern.nile.common.configuration.Configure; +/** + * The Streaming interface defines the essential functions for a streaming application. + * It extends the Configure interface, allowing for configuration setup. Implementations + * of this interface are responsible for defining the streaming behavior, including + * how streams are created and managed using a KafkaStreamsClient. + */ public interface Streaming extends Configure { + /** + * Initializes and starts the streaming process. This method should define the setup and + * execution of the stream, utilizing the provided KafkaStreamsClient to create and manage + * Kafka Streams. + * + * @param kafkaStreamsClient the KafkaStreamsClient used to create and manage the stream. + */ void stream(KafkaStreamsClient kafkaStreamsClient); } diff --git a/src/test/java/ch/cern/nile/common/clients/KafkaStreamsClientTest.java b/src/test/java/ch/cern/nile/common/clients/KafkaStreamsClientTest.java index 5c5c63d..02c9d5c 100644 --- a/src/test/java/ch/cern/nile/common/clients/KafkaStreamsClientTest.java +++ b/src/test/java/ch/cern/nile/common/clients/KafkaStreamsClientTest.java @@ -1,8 +1,8 @@ package ch.cern.nile.common.clients; -import static org.junit.Assert.assertTrue; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.net.UnknownHostException; import java.util.Properties; @@ -17,11 +17,15 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.MockitoAnnotations; -import ch.cern.nile.common.configs.StreamConfig; +import ch.cern.nile.common.configuration.properties.ClientProperties; import ch.cern.nile.common.exceptions.ReverseDnsLookupException; +import lombok.SneakyThrows; + class KafkaStreamsClientTest { + private static final String INVALID_CLUSTER = "invalidCluster"; + private KafkaStreamsClient client; private Properties properties; private Topology topology; @@ -31,7 +35,6 @@ class KafkaStreamsClientTest { private AutoCloseable closeable; - @BeforeEach public void setup() { closeable = MockitoAnnotations.openMocks(this); @@ -39,13 +42,13 @@ class KafkaStreamsClientTest { @Override @SuppressWarnings("checkstyle:HiddenField") - public KafkaStreams create(Topology topology) { + public KafkaStreams create(final Topology topology) { return kafkaStreams; } @Override - protected String performDnsLookup(String kafkaCluster) throws UnknownHostException { - if (kafkaCluster.equals("invalidCluster")) { + protected String performDnsLookup(final String 
kafkaCluster) throws UnknownHostException { + if (INVALID_CLUSTER.equals(kafkaCluster)) { throw new UnknownHostException("Invalid cluster"); } return "localhost:9092"; @@ -56,39 +59,40 @@ class KafkaStreamsClientTest { } @AfterEach - public void tearDown() throws Exception { + @SneakyThrows + public void tearDown() { closeable.close(); } @Test void givenNonTestCluster_whenConfigure_thenKafkaStreamsCreated() { - properties.setProperty(StreamConfig.ClientProperties.CLIENT_ID.getValue(), "testClientId"); - properties.setProperty(StreamConfig.ClientProperties.KAFKA_CLUSTER.getValue(), "nonTestCluster"); - properties.setProperty(StreamConfig.ClientProperties.TRUSTSTORE_LOCATION.getValue(), "/path/to/truststore"); + properties.setProperty(ClientProperties.CLIENT_ID.getValue(), "testClientId"); + properties.setProperty(ClientProperties.KAFKA_CLUSTER.getValue(), "nonTestCluster"); + properties.setProperty(ClientProperties.TRUSTSTORE_LOCATION.getValue(), "/path/to/truststore"); properties.setProperty(StreamsConfig.SECURITY_PROTOCOL_CONFIG, "PLAINTEXT"); client.configure(properties); - KafkaStreams streams = client.create(topology); + final KafkaStreams streams = client.create(topology); assertNotNull(streams, "KafkaStreams object should not be null"); } @Test void givenTestCluster_whenConfigure_thenKafkaStreamsCreated() { - properties.setProperty(StreamConfig.ClientProperties.CLIENT_ID.getValue(), "testClientId"); - properties.setProperty(StreamConfig.ClientProperties.KAFKA_CLUSTER.getValue(), "test"); + properties.setProperty(ClientProperties.CLIENT_ID.getValue(), "testClientId"); + properties.setProperty(ClientProperties.KAFKA_CLUSTER.getValue(), "test"); properties.setProperty("bootstrap.servers", "localhost:9092"); client.configure(properties); - KafkaStreams streams = client.create(topology); + final KafkaStreams streams = client.create(topology); assertNotNull(streams, "KafkaStreams object should not be null"); } @Test void givenInvalidCluster_whenConfigure_thenReverseDnsLookupExceptionThrown() { - properties.setProperty(StreamConfig.ClientProperties.CLIENT_ID.getValue(), "testClientId"); - properties.setProperty(StreamConfig.ClientProperties.KAFKA_CLUSTER.getValue(), "invalidCluster"); + properties.setProperty(ClientProperties.CLIENT_ID.getValue(), "testClientId"); + properties.setProperty(ClientProperties.KAFKA_CLUSTER.getValue(), "invalidCluster"); assertThrows(ReverseDnsLookupException.class, () -> client.configure(properties), "Should throw ReverseDnsLookupException"); @@ -96,20 +100,20 @@ class KafkaStreamsClientTest { @Test void givenKnownDomain_whenPerformDnsLookup_thenResultContainsPort9093() throws UnknownHostException { - String domain = "www.google.com"; - String result = new KafkaStreamsClient().performDnsLookup(domain); + final String domain = "www.google.com"; + final String result = new KafkaStreamsClient().performDnsLookup(domain); assertNotNull(result, "Result should not be null"); - assertTrue("Result should contain port 9093", result.contains(":9093")); + assertTrue(result.contains(":9093"), "Result should contain port 9093"); } @Test void givenLocalhost_whenPerformDnsLookup_thenResultContainsPort9093() throws UnknownHostException { - String domain = "localhost"; - String result = new KafkaStreamsClient().performDnsLookup(domain); + final String domain = "localhost"; + final String result = new KafkaStreamsClient().performDnsLookup(domain); - assertNotNull(result); - assertTrue("Result should contain port 9093", result.contains(":9093")); + assertNotNull(result, "Result 
should not be null"); + assertTrue(result.contains(":9093"), "Result should contain port 9093"); } } diff --git a/src/test/java/ch/cern/nile/common/configs/StreamConfigTest.java b/src/test/java/ch/cern/nile/common/configs/StreamConfigTest.java deleted file mode 100644 index 728fa31..0000000 --- a/src/test/java/ch/cern/nile/common/configs/StreamConfigTest.java +++ /dev/null @@ -1,83 +0,0 @@ -package ch.cern.nile.common.configs; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import java.util.Set; - -import org.junit.jupiter.api.Test; - -class StreamConfigTest { - - @Test - void givenClientPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { - Set<String> expectedConfigs = Set.of("source.topic", "kafka.cluster", "client.id", "truststore.location"); - Set<String> actualConfigs = StreamConfig.ClientProperties.getValues(); - - assertEquals(expectedConfigs, actualConfigs, "Should return expected set of configs"); - } - - @Test - void givenClientPropertiesEnum_whenValueOfWithUnknownProperty_thenThrowsIllegalArgumentException() { - assertThrows(IllegalArgumentException.class, () -> StreamConfig.ClientProperties.valueOf("unknown.property"), - "Should throw IllegalArgumentException"); - } - - @Test - void givenCommonPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { - Set<String> expectedConfigs = Set.of("stream.type", "stream.class"); - Set<String> actualConfigs = StreamConfig.CommonProperties.getValues(); - - assertEquals(expectedConfigs, actualConfigs, "Should return expected set of configs"); - } - - @Test - void givenCommonPropertiesEnum_whenValueOfWithUnknownProperty_thenThrowsIllegalArgumentException() { - assertThrows(IllegalArgumentException.class, () -> StreamConfig.CommonProperties.valueOf("unknown.property"), - "Should throw IllegalArgumentException"); - } - - @Test - void givenDecodingPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { - Set<String> expectedConfigs = Set.of("sink.topic"); - Set<String> actualConfigs = StreamConfig.DecodingProperties.getValues(); - - assertEquals(expectedConfigs, actualConfigs, "Should return expected set of configs"); - } - - @Test - void givenDecodingPropertiesEnum_whenValueOfWithUnknownProperty_thenThrowsIllegalArgumentException() { - assertThrows(IllegalArgumentException.class, () -> StreamConfig.DecodingProperties.valueOf("unknown.property"), - "Should throw IllegalArgumentException"); - } - - @Test - void givenRoutingPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { - Set<String> expectedConfigs = Set.of("routing.config.path", "dlq.topic"); - Set<String> actualConfigs = StreamConfig.RoutingProperties.getValues(); - - assertEquals(expectedConfigs, actualConfigs, "Should return expected set of configs"); - } - - @Test - void givenRoutingPropertiesEnum_whenValueOfWithUnknownProperty_thenThrowsIllegalArgumentException() { - assertThrows(IllegalArgumentException.class, () -> StreamConfig.RoutingProperties.valueOf("unknown.property"), - "Should throw IllegalArgumentException"); - } - - @Test - void givenEnrichmentPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { - Set<String> expectedConfigs = Set.of("enrichment.config.path", "sink.topic"); - Set<String> actualConfigs = StreamConfig.EnrichmentProperties.getValues(); - - assertEquals(expectedConfigs, actualConfigs, "Should return expected set of configs"); - } - - @Test - void givenEnrichmentPropertiesEnum_whenValueOfWithUnknownProperty_thenThrowsIllegalArgumentException() { - 
assertThrows(IllegalArgumentException.class, - () -> StreamConfig.EnrichmentProperties.valueOf("unknown.property"), - "Should throw IllegalArgumentException"); - } - -} diff --git a/src/test/java/ch/cern/nile/common/configs/PropertiesCheckTest.java b/src/test/java/ch/cern/nile/common/configuration/PropertiesCheckTest.java similarity index 64% rename from src/test/java/ch/cern/nile/common/configs/PropertiesCheckTest.java rename to src/test/java/ch/cern/nile/common/configuration/PropertiesCheckTest.java index 2c50497..58aa58c 100644 --- a/src/test/java/ch/cern/nile/common/configs/PropertiesCheckTest.java +++ b/src/test/java/ch/cern/nile/common/configuration/PropertiesCheckTest.java @@ -1,4 +1,4 @@ -package ch.cern.nile.common.configs; +package ch.cern.nile.common.configuration; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -7,6 +7,11 @@ import java.util.Properties; import org.junit.jupiter.api.Test; +import ch.cern.nile.common.configuration.properties.ClientProperties; +import ch.cern.nile.common.configuration.properties.CommonProperties; +import ch.cern.nile.common.configuration.properties.DecodingProperties; +import ch.cern.nile.common.configuration.properties.EnrichmentProperties; +import ch.cern.nile.common.configuration.properties.RoutingProperties; import ch.cern.nile.common.exceptions.MissingPropertyException; class PropertiesCheckTest { @@ -29,7 +34,7 @@ class PropertiesCheckTest { void givenValidDecodingProperties_whenValidateProperties_thenPassesValidation() { final Properties properties = new Properties(); initClientAndCommonProperties(properties); - properties.put(StreamConfig.DecodingProperties.SINK_TOPIC.getValue(), ""); + properties.put(DecodingProperties.SINK_TOPIC.getValue(), ""); assertDoesNotThrow(() -> PropertiesCheck.validateProperties(properties, StreamType.DECODING), "Should not throw exception"); @@ -39,8 +44,8 @@ class PropertiesCheckTest { void givenValidRoutingProperties_whenValidateProperties_thenPassesValidation() { final Properties properties = new Properties(); initClientAndCommonProperties(properties); - properties.put(StreamConfig.RoutingProperties.ROUTING_CONFIG_PATH.getValue(), ""); - properties.put(StreamConfig.RoutingProperties.DLQ_TOPIC.getValue(), ""); + properties.put(RoutingProperties.ROUTING_CONFIG_PATH.getValue(), ""); + properties.put(RoutingProperties.DLQ_TOPIC.getValue(), ""); assertDoesNotThrow(() -> PropertiesCheck.validateProperties(properties, StreamType.ROUTING), "Should not throw exception"); @@ -50,8 +55,8 @@ class PropertiesCheckTest { void givenValidEnrichmentProperties_whenValidateProperties_thenPassesValidation() { final Properties properties = new Properties(); initClientAndCommonProperties(properties); - properties.put(StreamConfig.EnrichmentProperties.ENRICHMENT_CONFIG_PATH.getValue(), ""); - properties.put(StreamConfig.EnrichmentProperties.SINK_TOPIC.getValue(), ""); + properties.put(EnrichmentProperties.ENRICHMENT_CONFIG_PATH.getValue(), ""); + properties.put(EnrichmentProperties.SINK_TOPIC.getValue(), ""); assertDoesNotThrow(() -> PropertiesCheck.validateProperties(properties, StreamType.ENRICHMENT), "Should not throw exception"); @@ -62,20 +67,20 @@ class PropertiesCheckTest { final Properties properties = new Properties(); initClientAndCommonProperties(properties); // Remove a required property for routing, for example - properties.remove(StreamConfig.RoutingProperties.ROUTING_CONFIG_PATH.getValue()); + 
properties.remove(RoutingProperties.ROUTING_CONFIG_PATH.getValue()); assertThrows(MissingPropertyException.class, () -> PropertiesCheck.validateProperties(properties, StreamType.ROUTING), "Properties file is missing: routing.config.path property."); } - private void initClientAndCommonProperties(Properties properties) { - properties.put(StreamConfig.ClientProperties.CLIENT_ID.getValue(), ""); - properties.put(StreamConfig.ClientProperties.KAFKA_CLUSTER.getValue(), ""); - properties.put(StreamConfig.ClientProperties.SOURCE_TOPIC.getValue(), ""); - properties.put(StreamConfig.ClientProperties.TRUSTSTORE_LOCATION.getValue(), ""); - properties.put(StreamConfig.CommonProperties.STREAM_CLASS.getValue(), ""); - properties.put(StreamConfig.CommonProperties.STREAM_TYPE.getValue(), ""); + private void initClientAndCommonProperties(final Properties properties) { + properties.put(ClientProperties.CLIENT_ID.getValue(), ""); + properties.put(ClientProperties.KAFKA_CLUSTER.getValue(), ""); + properties.put(ClientProperties.SOURCE_TOPIC.getValue(), ""); + properties.put(ClientProperties.TRUSTSTORE_LOCATION.getValue(), ""); + properties.put(CommonProperties.STREAM_CLASS.getValue(), ""); + properties.put(CommonProperties.STREAM_TYPE.getValue(), ""); } } diff --git a/src/test/java/ch/cern/nile/common/configs/StreamTypeTest.java b/src/test/java/ch/cern/nile/common/configuration/StreamTypeTest.java similarity index 79% rename from src/test/java/ch/cern/nile/common/configs/StreamTypeTest.java rename to src/test/java/ch/cern/nile/common/configuration/StreamTypeTest.java index 974c7d2..daecfbc 100644 --- a/src/test/java/ch/cern/nile/common/configs/StreamTypeTest.java +++ b/src/test/java/ch/cern/nile/common/configuration/StreamTypeTest.java @@ -1,4 +1,4 @@ -package ch.cern.nile.common.configs; +package ch.cern.nile.common.configuration; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -9,21 +9,21 @@ class StreamTypeTest { @Test void givenKnownStreamTypeRouting_whenFindByValue_thenMapsToRouting() { - StreamType result = StreamType.valueOf("ROUTING"); + final StreamType result = StreamType.valueOf("ROUTING"); assertEquals(StreamType.ROUTING, result, "Should return expected stream type"); } @Test void givenKnownStreamTypeDecoding_whenFindByValue_thenMapsToDecoding() { - StreamType result = StreamType.valueOf("DECODING"); + final StreamType result = StreamType.valueOf("DECODING"); assertEquals(StreamType.DECODING, result, "Should return expected stream type"); } @Test void givenKnownStreamTypeEnrichment_whenFindByValue_thenMapsToEnrichment() { - StreamType result = StreamType.valueOf("ENRICHMENT"); + final StreamType result = StreamType.valueOf("ENRICHMENT"); assertEquals(StreamType.ENRICHMENT, result, "Should return expected stream type"); } diff --git a/src/test/java/ch/cern/nile/common/configuration/properties/StreamConfigTest.java b/src/test/java/ch/cern/nile/common/configuration/properties/StreamConfigTest.java new file mode 100644 index 0000000..f400731 --- /dev/null +++ b/src/test/java/ch/cern/nile/common/configuration/properties/StreamConfigTest.java @@ -0,0 +1,87 @@ +package ch.cern.nile.common.configuration.properties; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import java.util.Set; + +import org.junit.jupiter.api.Test; + +class StreamConfigTest { + + private static final String UNKNOWN_PROPERTY = "unknown.property"; + private static final String 
SHOULD_THROW_ILLEGAL_ARGUMENT_EXCEPTION = "Should throw IllegalArgumentException"; + private static final String SHOULD_RETURN_EXPECTED_SET_OF_CONFIGS = "Should return expected set of configs"; + + @Test + void givenClientPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { + final Set<String> expectedConfigs = Set.of("source.topic", "kafka.cluster", "client.id", "truststore.location"); + final Set<String> actualConfigs = ClientProperties.getValues(); + + assertEquals(expectedConfigs, actualConfigs, SHOULD_RETURN_EXPECTED_SET_OF_CONFIGS); + } + + @Test + void givenClientPropertiesEnum_whenValueOfWithUnknownProperty_thenThrowsIllegalArgumentException() { + assertThrows(IllegalArgumentException.class, () -> ClientProperties.valueOf(UNKNOWN_PROPERTY), + SHOULD_THROW_ILLEGAL_ARGUMENT_EXCEPTION); + } + + @Test + void givenCommonPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { + final Set<String> expectedConfigs = Set.of("stream.type", "stream.class"); + final Set<String> actualConfigs = CommonProperties.getValues(); + + assertEquals(expectedConfigs, actualConfigs, SHOULD_RETURN_EXPECTED_SET_OF_CONFIGS); + } + + @Test + void givenCommonPropertiesEnum_whenValueOfWithUnknownProperty_thenThrowsIllegalArgumentException() { + assertThrows(IllegalArgumentException.class, () -> CommonProperties.valueOf(UNKNOWN_PROPERTY), + SHOULD_THROW_ILLEGAL_ARGUMENT_EXCEPTION); + } + + @Test + void givenDecodingPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { + final Set<String> expectedConfigs = Set.of("sink.topic"); + final Set<String> actualConfigs = DecodingProperties.getValues(); + + assertEquals(expectedConfigs, actualConfigs, SHOULD_RETURN_EXPECTED_SET_OF_CONFIGS); + } + + @Test + void givenDecodingPropertiesEnum_whenValueOfWithUnknownProperty_thenThrowsIllegalArgumentException() { + assertThrows(IllegalArgumentException.class, () -> DecodingProperties.valueOf(UNKNOWN_PROPERTY), + SHOULD_THROW_ILLEGAL_ARGUMENT_EXCEPTION); + } + + @Test + void givenRoutingPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { + final Set<String> expectedConfigs = Set.of("routing.config.path", "dlq.topic"); + final Set<String> actualConfigs = RoutingProperties.getValues(); + + assertEquals(expectedConfigs, actualConfigs, SHOULD_RETURN_EXPECTED_SET_OF_CONFIGS); + } + + @Test + void givenRoutingPropertiesEnum_whenValueOfWithUnknownProperty_thenThrowsIllegalArgumentException() { + assertThrows(IllegalArgumentException.class, () -> RoutingProperties.valueOf(UNKNOWN_PROPERTY), + SHOULD_THROW_ILLEGAL_ARGUMENT_EXCEPTION); + } + + @Test + void givenEnrichmentPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { + final Set<String> expectedConfigs = Set.of("enrichment.config.path", "sink.topic"); + final Set<String> actualConfigs = EnrichmentProperties.getValues(); + + assertEquals(expectedConfigs, actualConfigs, SHOULD_RETURN_EXPECTED_SET_OF_CONFIGS); + } + + @Test + void givenEnrichmentPropertiesEnum_whenValueOfWithUnknownProperty_thenThrowsIllegalArgumentException() { + assertThrows(IllegalArgumentException.class, + () -> EnrichmentProperties.valueOf(UNKNOWN_PROPERTY), + SHOULD_THROW_ILLEGAL_ARGUMENT_EXCEPTION); + } + +} diff --git a/src/test/java/ch/cern/nile/common/json/JsonPojoDeserializerTest.java b/src/test/java/ch/cern/nile/common/json/JsonPojoDeserializerTest.java index 6b455a5..aeddeff 100644 --- a/src/test/java/ch/cern/nile/common/json/JsonPojoDeserializerTest.java +++ b/src/test/java/ch/cern/nile/common/json/JsonPojoDeserializerTest.java @@ -10,42 +10,42 @@ import ch.cern.nile.common.models.Topic; class 
JsonPojoDeserializerTest { + private static final String TEST_TOPIC = "test-topic"; private final JsonPojoDeserializer<Application> applicationDeserializer = new JsonPojoDeserializer<>(Application.class); private final JsonPojoDeserializer<Topic> topicDeserializer = new JsonPojoDeserializer<>(Topic.class); @Test void givenJsonWithApplication_whenDeserialize_thenReturnsApplication() { - String json = "{\"name\":\"my-app\",\"topic\":{\"name\":\"my-topic\"}}"; - - Application expected = new Application(); + final String json = "{\"name\":\"my-app\",\"topic\":{\"name\":\"my-topic\"}}"; + final Application expected = new Application(); expected.setName("my-app"); expected.setTopic(new Topic()); expected.getTopic().setName("my-topic"); - Application actual = applicationDeserializer.deserialize("test-topic", json.getBytes()); + final Application actual = applicationDeserializer.deserialize(TEST_TOPIC, json.getBytes()); assertEquals(expected.toString(), actual.toString(), "Application deserialized incorrectly"); } @Test void givenJsonWithTopic_whenDeserialize_thenReturnsTopic() { - String json = "{\"name\":\"my-topic\"}"; + final String json = "{\"name\":\"my-topic\"}"; - Topic expected = new Topic(); + final Topic expected = new Topic(); expected.setName("my-topic"); - Topic actual = topicDeserializer.deserialize("test-topic", json.getBytes()); + final Topic actual = topicDeserializer.deserialize(TEST_TOPIC, json.getBytes()); assertEquals(expected.toString(), actual.toString(), "Topic deserialized incorrectly"); } @Test void givenNullBytes_whenDeserialize_thenReturnsNull() { - assertNull(applicationDeserializer.deserialize("test-topic", null), "Null bytes should return null"); + assertNull(applicationDeserializer.deserialize(TEST_TOPIC, null), "Null bytes should return null"); } @Test void givenNullJson_whenDeserialize_thenReturnsNull() { - assertNull(applicationDeserializer.deserialize("test-topic", "null".getBytes()), + assertNull(applicationDeserializer.deserialize(TEST_TOPIC, "null".getBytes()), "Null json should return null"); } diff --git a/src/test/java/ch/cern/nile/common/json/JsonPojoSerializerTest.java b/src/test/java/ch/cern/nile/common/json/JsonPojoSerializerTest.java index d995c9b..cb40372 100644 --- a/src/test/java/ch/cern/nile/common/json/JsonPojoSerializerTest.java +++ b/src/test/java/ch/cern/nile/common/json/JsonPojoSerializerTest.java @@ -1,6 +1,7 @@ package ch.cern.nile.common.json; import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertNull; import java.util.Collections; @@ -14,34 +15,38 @@ class JsonPojoSerializerTest { @Test void givenEmptyConfig_whenConfigure_thenDoesNotThrowException() { try (JsonPojoSerializer<Object> serializer = new JsonPojoSerializer<>()) { - serializer.configure(Collections.emptyMap(), true); + assertDoesNotThrow(() -> serializer.configure(Collections.emptyMap(), true), + "Should not throw exception"); } } @Test void givenNullData_whenSerialize_thenReturnsNull() { try (JsonPojoSerializer<Object> serializer = new JsonPojoSerializer<>()) { - assertNull(serializer.serialize("topic", null)); + assertNull(serializer.serialize("topic", null), "Should return null"); } } @Test void givenNonNullData_whenSerialize_thenReturnsJsonBytes() { - Map<String, String> data = new HashMap<>(); + final Map<String, String> data = new HashMap<>(); data.put("key", "value"); - byte[] expectedBytes = "{\"key\":\"value\"}".getBytes(); + final 
byte[] expectedBytes = "{\"key\":\"value\"}".getBytes(); try (JsonPojoSerializer<Map<String, String>> serializer = new JsonPojoSerializer<>()) { - byte[] actualBytes = serializer.serialize("topic", data); + final byte[] actualBytes = serializer.serialize("topic", data); - assertArrayEquals(expectedBytes, actualBytes); + assertArrayEquals(expectedBytes, actualBytes, "Should return expected bytes"); } } @Test + @SuppressWarnings("EmptyTryBlock") void givenSerializer_whenClosed_thenDoesNotThrowException() { - JsonPojoSerializer<Object> serializer = new JsonPojoSerializer<>(); - serializer.close(); + assertDoesNotThrow(() -> { + try (JsonPojoSerializer<Object> ignored = new JsonPojoSerializer<>()) { + } + }, "Should not throw exception"); } } diff --git a/src/test/java/ch/cern/nile/common/json/JsonSerdeTest.java b/src/test/java/ch/cern/nile/common/json/JsonSerdeTest.java index 5f8658d..2e40e65 100644 --- a/src/test/java/ch/cern/nile/common/json/JsonSerdeTest.java +++ b/src/test/java/ch/cern/nile/common/json/JsonSerdeTest.java @@ -13,12 +13,12 @@ class JsonSerdeTest { void givenEmptyConfigs_whenConfigure_thenSerializerAndDeserializerNotNull() { try (JsonSerde jsonSerde = new JsonSerde()) { - Map<String, Object> configs = new HashMap<>(); + final Map<String, Object> configs = new HashMap<>(); configs.put("config-key", "config-value"); jsonSerde.configure(configs, true); - assertNotNull(jsonSerde.serializer()); - assertNotNull(jsonSerde.deserializer()); + assertNotNull(jsonSerde.serializer(), "Should not be null"); + assertNotNull(jsonSerde.deserializer(), "Should not be null"); } } } diff --git a/src/test/java/ch/cern/nile/common/probes/HealthTest.java b/src/test/java/ch/cern/nile/common/probes/HealthTest.java index 793fbc4..f0fcc1d 100644 --- a/src/test/java/ch/cern/nile/common/probes/HealthTest.java +++ b/src/test/java/ch/cern/nile/common/probes/HealthTest.java @@ -20,11 +20,13 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + class HealthTest { private static final int PORT = 8899; - private static final int OK = 200; - private static final int ERROR = 500; + private static final int OK_RESPONSE = 200; + private static final int ERROR_RESPONSE = 500; private KafkaStreams mockStreams; private HttpServer mockServer; @@ -60,30 +62,34 @@ class HealthTest { } @Test + @SuppressWarnings("PMD.CloseResource") + @SuppressFBWarnings(value = "CloseResource", justification = "Mocked HttpServer") void givenKafkaStreamsRunning_whenHealthCheck_thenResponseStatus200() throws IOException { when(mockStreams.state()).thenReturn(KafkaStreams.State.RUNNING); health.start(); - ArgumentCaptor<HttpHandler> handlerCaptor = ArgumentCaptor.forClass(HttpHandler.class); + final ArgumentCaptor<HttpHandler> handlerCaptor = ArgumentCaptor.forClass(HttpHandler.class); verify(mockServer).createContext(eq("/health"), handlerCaptor.capture()); - HttpExchange mockExchange = mock(HttpExchange.class); + final HttpExchange mockExchange = mock(HttpExchange.class); handlerCaptor.getValue().handle(mockExchange); - verify(mockExchange).sendResponseHeaders(OK, 0); + verify(mockExchange).sendResponseHeaders(OK_RESPONSE, 0); verify(mockExchange).close(); } @Test + @SuppressWarnings("PMD.CloseResource") + @SuppressFBWarnings(value = "CloseResource", justification = "Mocked HttpServer") void givenKafkaStreamsNotRunning_whenHealthCheck_thenResponseStatus500() throws IOException { 
when(mockStreams.state()).thenReturn(KafkaStreams.State.NOT_RUNNING); health.start(); - ArgumentCaptor<HttpHandler> handlerCaptor = ArgumentCaptor.forClass(HttpHandler.class); + final ArgumentCaptor<HttpHandler> handlerCaptor = ArgumentCaptor.forClass(HttpHandler.class); verify(mockServer).createContext(eq("/health"), handlerCaptor.capture()); - HttpExchange mockExchange = mock(HttpExchange.class); + final HttpExchange mockExchange = mock(HttpExchange.class); handlerCaptor.getValue().handle(mockExchange); - verify(mockExchange).sendResponseHeaders(ERROR, 0); + verify(mockExchange).sendResponseHeaders(ERROR_RESPONSE, 0); verify(mockExchange).close(); } diff --git a/src/test/java/ch/cern/nile/common/schema/SchemaInjectorTest.java b/src/test/java/ch/cern/nile/common/schema/SchemaInjectorTest.java index c8d83fa..5137a65 100644 --- a/src/test/java/ch/cern/nile/common/schema/SchemaInjectorTest.java +++ b/src/test/java/ch/cern/nile/common/schema/SchemaInjectorTest.java @@ -6,62 +6,87 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; +import java.util.Date; +import java.util.HashMap; import java.util.List; import java.util.Map; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -class SchemaInjectorTest extends SchemaTestBase { +class SchemaInjectorTest { + + private static final String TIMESTAMP_COL = "timestamp_col"; + private static final String TIMESTAMP_TYPE = "org.apache.kafka.connect.data.Timestamp"; + private static final Map<String, Object> DATA = new HashMap<>(); + private static final String DATE_COL = "date_col"; + + @BeforeAll + public static void before() { + DATA.put("byte_col", (byte) 1); + DATA.put("short_col", (short) 2); + DATA.put("int_col", 3); + DATA.put("long_col", (long) 4); + DATA.put("float_col", 5.0f); + DATA.put("double_col", 6.0); + DATA.put("boolean_col", true); + DATA.put("string_col", "test"); + DATA.put("timestamp_col", 1_501_834_166_000L); + DATA.put(DATE_COL, new Date()); + DATA.put("bytes_col", new byte[]{1, 2, 3}); + } @Test void givenValidInputData_whenInject_thenReturnsCorrectSchemaAndPayload() { - final Map<String, Object> result = SchemaInjector.inject(data); + final Map<String, Object> result = SchemaInjector.inject(DATA); - assertNotNull(result); + assertNotNull(result, "Should not be null"); - assertTrue(result.containsKey("schema")); - assertTrue(result.containsKey("payload")); + assertTrue(result.containsKey("schema"), "Should contain schema"); + assertTrue(result.containsKey("payload"), "Should contain payload"); final Map<String, Object> schema = (Map<String, Object>) result.get("schema"); - assertEquals("struct", schema.get("type")); + assertEquals("struct", schema.get("type"), "Should be struct"); final List<Map<String, Object>> fields = (List<Map<String, Object>>) schema.get("fields"); - assertEquals(data.size(), fields.size()); + assertEquals(DATA.size(), fields.size(), "Should contain all fields"); - for (Map<String, Object> field : fields) { + for (final Map<String, Object> field : fields) { final String fieldName = (String) field.get("field"); - assertTrue(data.containsKey(fieldName)); - assertNotNull(field.get("type")); + assertTrue(DATA.containsKey(fieldName), String.format("Should contain field %s", fieldName)); + assertNotNull(field.get("type"), String.format("Should contain type for field %s", fieldName)); - if (fieldName.equals("timestamp_col")) { - 
assertFalse(Boolean.parseBoolean(field.get("optional").toString())); + if (TIMESTAMP_COL.equals(fieldName)) { + assertFalse(Boolean.parseBoolean(field.get("optional").toString()), "Should not be optional"); } else { - assertTrue(Boolean.parseBoolean(field.get("optional").toString())); + assertTrue(Boolean.parseBoolean(field.get("optional").toString()), "Should be optional"); } - if (fieldName.equals("timestamp_col")) { - assertEquals("org.apache.kafka.connect.data.Timestamp", field.get("name")); - assertEquals(1, field.get("version")); - } else if (fieldName.equals("date_col")) { - assertEquals("org.apache.kafka.connect.data.Date", field.get("name")); - assertEquals(1, field.get("version")); + if (TIMESTAMP_COL.equals(fieldName)) { + assertEquals(TIMESTAMP_TYPE, field.get("name"), "Should be timestamp"); + assertEquals(1, field.get("version"), "Should be version 1"); + } else if (DATE_COL.equals(fieldName)) { + assertEquals("org.apache.kafka.connect.data.Date", field.get("name"), "Should be date"); + assertEquals(1, field.get("version"), "Should be version 1"); } } final Map<String, Object> payload = (Map<String, Object>) result.get("payload"); - assertEquals(data, payload); + assertEquals(DATA, payload, "Should contain all fields"); } @Test void givenDataWithNullValue_whenInject_thenThrowsIllegalArgumentException() { + final Map<String, Object> data = new HashMap<>(DATA); data.put("nullValue", null); - assertThrows(IllegalArgumentException.class, () -> SchemaInjector.inject(data)); + assertThrows(IllegalArgumentException.class, () -> SchemaInjector.inject(data), "Should throw exception"); } @Test void givenDataWithUnsupportedType_whenInject_thenThrowsIllegalArgumentException() { + final Map<String, Object> data = new HashMap<>(DATA); data.put("unsupportedType", new Object()); - assertThrows(IllegalArgumentException.class, () -> SchemaInjector.inject(data)); + assertThrows(IllegalArgumentException.class, () -> SchemaInjector.inject(data), "Should throw exception"); } } diff --git a/src/test/java/ch/cern/nile/common/schema/SchemaTestBase.java b/src/test/java/ch/cern/nile/common/schema/SchemaTestBase.java deleted file mode 100644 index e9e358f..0000000 --- a/src/test/java/ch/cern/nile/common/schema/SchemaTestBase.java +++ /dev/null @@ -1,27 +0,0 @@ -package ch.cern.nile.common.schema; - -import java.util.Date; -import java.util.HashMap; -import java.util.Map; - -import org.junit.jupiter.api.BeforeEach; - -public class SchemaTestBase { - public Map<String, Object> data; - - @BeforeEach - void setUp() { - data = new HashMap<>(); - data.put("byte_col", (byte) 1); - data.put("short_col", (short) 2); - data.put("int_col", 3); - data.put("long_col", (long) 4); - data.put("float_col", 5.0f); - data.put("double_col", 6.0); - data.put("boolean_col", true); - data.put("string_col", "test"); - data.put("timestamp_col", 1501834166000L); - data.put("date_col", new Date()); - data.put("bytes_col", new byte[]{1, 2, 3}); - } -} diff --git a/src/test/java/ch/cern/nile/common/streams/StreamUtilsTest.java b/src/test/java/ch/cern/nile/common/streams/StreamUtilsTest.java new file mode 100644 index 0000000..a3fe4b7 --- /dev/null +++ b/src/test/java/ch/cern/nile/common/streams/StreamUtilsTest.java @@ -0,0 +1,95 @@ +package ch.cern.nile.common.streams; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.time.Instant; +import java.util.HashMap; +import java.util.List; 
+import java.util.Map;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonObject;
+
+import org.junit.jupiter.api.Test;
+
+import ch.cern.nile.common.exceptions.DecodingException;
+
+class StreamUtilsTest {
+
+    private static final String KEY = "key";
+
+    @Test
+    void givenValidGatewayInfo_whenAddingTimestamp_thenTimestampIsAdded() {
+        final JsonArray gatewayInfo = new JsonArray();
+        final JsonObject jsonObject = new JsonObject();
+        jsonObject.addProperty("time", Instant.now().toString());
+        gatewayInfo.add(jsonObject);
+
+        final Map<String, Object> map = new HashMap<>();
+        StreamUtils.addTimestamp(gatewayInfo, map);
+
+        assertTrue(map.containsKey("timestamp"), "Timestamp not added to map.");
+    }
+
+    @Test
+    void givenInvalidGatewayInfo_whenAddingTimestamp_thenDecodingExceptionThrown() {
+        final JsonArray gatewayInfo = new JsonArray();
+        final Map<String, Object> map = new HashMap<>();
+
+        assertThrows(DecodingException.class, () -> StreamUtils.addTimestamp(gatewayInfo, map), "No exception thrown.");
+    }
+
+    @Test
+    void givenNonNullValue_whenFilteringNull_thenReturnsTrue() {
+        assertTrue(StreamUtils.filterNull(KEY, new Object()), "Non-null value should return true.");
+    }
+
+    @Test
+    void givenNullValue_whenFilteringNull_thenReturnsFalse() {
+        assertFalse(StreamUtils.filterNull(KEY, null), "Null value should return false.");
+    }
+
+    @Test
+    void givenNonEmptyList_whenFilteringEmpty_thenReturnsTrue() {
+        assertTrue(StreamUtils.filterEmpty(KEY, List.of(1, 2, 3)), "Non-empty list should return true.");
+    }
+
+    @Test
+    void givenEmptyList_whenFilteringEmpty_thenReturnsFalse() {
+        assertFalse(StreamUtils.filterEmpty(KEY, List.of()), "Empty list should return false.");
+    }
+
+    @Test
+    void givenNonEmptyMap_whenFilteringEmpty_thenReturnsTrue() {
+        final Map<String, String> nonEmptyMap = new HashMap<>();
+        nonEmptyMap.put(KEY, "value");
+        assertTrue(StreamUtils.filterEmpty(KEY, nonEmptyMap), "Non-empty map should return true.");
+    }
+
+    @Test
+    void givenEmptyMap_whenFilteringEmpty_thenReturnsFalse() {
+        final Map<String, String> emptyMap = new HashMap<>();
+        assertFalse(StreamUtils.filterEmpty(KEY, emptyMap), "Empty map should return false.");
+    }
+
+    @Test
+    void givenValidJsonObject_whenFilteringRecord_thenReturnsTrue() {
+        final JsonObject jsonObject = new JsonObject();
+        jsonObject.addProperty("applicationID", "appId");
+        jsonObject.addProperty("applicationName", "appName");
+        jsonObject.addProperty("deviceName", "deviceName");
+        jsonObject.addProperty("devEUI", "devEUI");
+        jsonObject.addProperty("data", "data");
+
+        assertTrue(StreamUtils.filterRecord(KEY, jsonObject), "Valid record should return true.");
+    }
+
+    @Test
+    void givenInvalidJsonObject_whenFilteringRecord_thenReturnsFalse() {
+        final JsonObject jsonObject = new JsonObject();
+
+        assertFalse(StreamUtils.filterRecord(KEY, jsonObject), "Invalid record should return false.");
+    }
+}
-- 
GitLab

From 6126d666e49be35b600af45a1cf0e5346ffa11ac Mon Sep 17 00:00:00 2001
From: Dean Dalianis <dean.dalianis@cern.ch>
Date: Mon, 29 Jan 2024 18:13:32 +0100
Subject: [PATCH 05/15] New PropertyEnum interface for property enums. Split
 all old property enums. Updated javadoc for all public methods.
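
The PropertyEnum contract is introduced so that the per-category property enums
(ClientProperties, CommonProperties, DecodingProperties, RoutingProperties,
EnrichmentProperties) share a single way of exposing their keys. As a rough
illustration only (not the committed source; imports elided), the interface
amounts to:

    public interface PropertyEnum {
        String getValue();

        static <E extends Enum<E> & PropertyEnum> Set<String> getValues(final Class<E> enumClass) {
            return Arrays.stream(enumClass.getEnumConstants())
                    .map(PropertyEnum::getValue)
                    .collect(Collectors.toSet());
        }
    }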
--- .../nile/common/StreamingApplication.java | 14 ++++--- .../common/clients/KafkaStreamsClient.java | 14 ++++--- .../nile/common/configuration/Configure.java | 2 +- .../common/configuration/PropertiesCheck.java | 27 +++++++------ .../properties/ClientProperties.java | 16 +------- .../properties/CommonProperties.java | 16 +------- .../properties/DecodingProperties.java | 16 +------- .../properties/EnrichmentProperties.java | 17 ++------- .../properties/PropertyEnum.java | 38 +++++++++++++++++++ .../properties/RoutingProperties.java | 17 ++------- .../common/json/JsonPojoDeserializer.java | 29 +++++++------- .../nile/common/json/JsonPojoSerializer.java | 24 +++++++----- .../ch/cern/nile/common/json/JsonSerde.java | 22 ++++++----- .../cern/nile/common/models/Application.java | 3 +- .../ch/cern/nile/common/models/Topic.java | 3 +- .../ch/cern/nile/common/probes/Health.java | 17 ++++++--- .../nile/common/probes/HttpServerFactory.java | 8 ++-- .../ch/cern/nile/common/schema/JsonType.java | 18 +++++---- .../nile/common/schema/SchemaInjector.java | 10 +++-- .../nile/common/streams/AbstractStream.java | 7 ++-- .../cern/nile/common/streams/StreamUtils.java | 4 +- .../cern/nile/common/streams/Streaming.java | 6 +-- .../properties/StreamConfigTest.java | 10 ++--- 23 files changed, 177 insertions(+), 161 deletions(-) create mode 100644 src/main/java/ch/cern/nile/common/configuration/properties/PropertyEnum.java diff --git a/src/main/java/ch/cern/nile/common/StreamingApplication.java b/src/main/java/ch/cern/nile/common/StreamingApplication.java index ff57de6..cd104d2 100644 --- a/src/main/java/ch/cern/nile/common/StreamingApplication.java +++ b/src/main/java/ch/cern/nile/common/StreamingApplication.java @@ -19,6 +19,8 @@ import ch.cern.nile.common.streams.Streaming; /** * {@link StreamingApplication} is the entry point for initializing and starting a Kafka Streams application. + * This class provides the main method to load configuration properties, perform necessary validations, + * and bootstrap the streaming process using a specified streaming implementation. */ public final class StreamingApplication { @@ -29,12 +31,14 @@ public final class StreamingApplication { } /** - * The main method for the StreamingApplication. It is the entry point of the application. + * The main method for the StreamingApplication, serving as the entry point of the application. + * It loads configuration properties from a provided file path, validates these properties, + * and initializes the streaming process using a dynamically loaded Streaming implementation. * - * @param args Command-line arguments, expecting the path to the properties file as the first argument. - * @throws IllegalArgumentException If the properties file path is not provided. - * @throws StreamingException If there are issues loading the properties file, validating properties, - * or starting the streaming process. 
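+     * <p>
+     * Typical invocation (the jar name is illustrative):
+     * <pre>{@code
+     * java -jar nile-common.jar /path/to/application.properties
+     * }</pre>
+     *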
+     * @param args command-line arguments, expecting the path to the properties file as the first argument
+     * @throws IllegalArgumentException if the properties file path is not provided
+     * @throws StreamingException if there are issues with loading the properties file,
+     *                            validating properties, or starting the streaming process
      */
     public static void main(final String[] args) {
         if (args.length < MIN_ARGS_LENGTH) {
diff --git a/src/main/java/ch/cern/nile/common/clients/KafkaStreamsClient.java b/src/main/java/ch/cern/nile/common/clients/KafkaStreamsClient.java
index 099b34d..5ec6f05 100644
--- a/src/main/java/ch/cern/nile/common/clients/KafkaStreamsClient.java
+++ b/src/main/java/ch/cern/nile/common/clients/KafkaStreamsClient.java
@@ -19,7 +19,7 @@
 import ch.cern.nile.common.exceptions.ReverseDnsLookupException;
 import ch.cern.nile.common.json.JsonSerde;
 
 /**
- * A client for creating KafkaStreams instances.
+ * A client for creating and configuring KafkaStreams instances.
  */
 public class KafkaStreamsClient implements Configure {
 
@@ -28,9 +28,12 @@
     private Properties properties;
 
     /**
-     * Configures the KafkaStreams instance using the provided properties.
+     * Configures the KafkaStreams instance using the provided properties. This method sets up various
+     * configuration options such as application ID, client ID, bootstrap servers, security protocols,
+     * and serialization/deserialization settings based on the properties provided.
      *
-     * @param props the properties to be used for the configuration
+     * @param props the properties to be used for the configuration. Expected properties include
+     *              client ID, Kafka cluster information, and security settings.
      */
     @Override
     public void configure(final Properties props) {
@@ -87,11 +90,12 @@
     }
 
     /**
-     * Performs the actual DNS lookup.
+     * Resolves the provided Kafka cluster domain to a comma-separated list of hostnames with port 9093.
+     * This method performs a reverse DNS lookup and is used internally for setting up Kafka connections.
      *
     * @param kafkaCluster the domain of the Kafka cluster
     * @return a comma-separated list of hostnames with port 9093
-     * @throws UnknownHostException if the hostname resolution fails
+     * @throws UnknownHostException if the hostname resolution fails (wrapped in a
+     *                              {@link ReverseDnsLookupException} by the caller)
     */
     protected String performDnsLookup(final String kafkaCluster) throws UnknownHostException {
         final StringBuilder stringBuilder = new StringBuilder();
diff --git a/src/main/java/ch/cern/nile/common/configuration/Configure.java b/src/main/java/ch/cern/nile/common/configuration/Configure.java
index 72c2784..fa9e8ee 100644
--- a/src/main/java/ch/cern/nile/common/configuration/Configure.java
+++ b/src/main/java/ch/cern/nile/common/configuration/Configure.java
@@ -10,7 +10,7 @@ public interface Configure {
 
     /**
      * Configure this class.
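+     * <p>
+     * A minimal illustrative implementation (the class name is hypothetical):
+     * <pre>{@code
+     * public class MyStream implements Configure {
+     *     private Properties properties;
+     *
+     *     @Override
+     *     public void configure(final Properties properties) {
+     *         this.properties = properties;
+     *     }
+     * }
+     * }</pre>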
* - * @param properties Configuration properties + * @param properties the properties to use for configuration */ void configure(Properties properties); } diff --git a/src/main/java/ch/cern/nile/common/configuration/PropertiesCheck.java b/src/main/java/ch/cern/nile/common/configuration/PropertiesCheck.java index 0504f99..6563b5d 100644 --- a/src/main/java/ch/cern/nile/common/configuration/PropertiesCheck.java +++ b/src/main/java/ch/cern/nile/common/configuration/PropertiesCheck.java @@ -8,6 +8,7 @@ import ch.cern.nile.common.configuration.properties.ClientProperties; import ch.cern.nile.common.configuration.properties.CommonProperties; import ch.cern.nile.common.configuration.properties.DecodingProperties; import ch.cern.nile.common.configuration.properties.EnrichmentProperties; +import ch.cern.nile.common.configuration.properties.PropertyEnum; import ch.cern.nile.common.configuration.properties.RoutingProperties; import ch.cern.nile.common.exceptions.MissingPropertyException; import ch.cern.nile.common.exceptions.UnknownStreamTypeException; @@ -17,22 +18,24 @@ import ch.cern.nile.common.exceptions.UnknownStreamTypeException; */ public final class PropertiesCheck { - private static final Set<String> CLIENT_PROPERTIES = ClientProperties.getValues(); - private static final Set<String> COMMON_PROPERTIES = CommonProperties.getValues(); - private static final Set<String> DECODING_PROPERTIES = DecodingProperties.getValues(); - private static final Set<String> ROUTING_PROPERTIES = RoutingProperties.getValues(); - private static final Set<String> ENRICHMENT_PROPERTIES = EnrichmentProperties.getValues(); + private static final Set<String> CLIENT_PROPERTIES = PropertyEnum.getValues(ClientProperties.class); + private static final Set<String> COMMON_PROPERTIES = PropertyEnum.getValues(CommonProperties.class); + private static final Set<String> DECODING_PROPERTIES = PropertyEnum.getValues(DecodingProperties.class); + private static final Set<String> ROUTING_PROPERTIES = PropertyEnum.getValues(RoutingProperties.class); + private static final Set<String> ENRICHMENT_PROPERTIES = PropertyEnum.getValues(EnrichmentProperties.class); private PropertiesCheck() { } /** - * Validates the properties file based on the type of stream. + * Validates the properties file based on the type of stream (DECODING, ROUTING, or ENRICHMENT). + * This method checks if all required properties for the specified stream type are present in the + * properties object, throwing exceptions if any are missing. * - * @param properties - properties already loaded from file into java.util.Properties object. - * @param streamType - type of stream defined in the properties file. - * @throws MissingPropertyException if a required property is missing from the properties object. - * @throws UnknownStreamTypeException if the stream type is unknown. + * @param properties the properties already loaded from file into java.util.Properties object + * @param streamType the type of stream defined in the properties file + * @throws MissingPropertyException if a required property is missing from the properties object + * @throws UnknownStreamTypeException if the stream type is unknown */ public static void validateProperties(final Properties properties, final StreamType streamType) { Objects.requireNonNull(properties, "Properties object cannot be null"); @@ -60,8 +63,8 @@ public final class PropertiesCheck { /** * Validates the required properties within the given properties object. 
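The required-property validation described above reduces to a containment check per key. A minimal sketch of the idea (illustrative only; the library's own method throws `MissingPropertyException` from `ch.cern.nile.common.exceptions`, while this standalone version uses `IllegalStateException` to stay self-contained):

```java
import java.util.Properties;
import java.util.Set;

final class RequiredPropertiesSketch {

    private RequiredPropertiesSketch() {
    }

    // Throws if any required key is absent from the loaded properties.
    static void validateRequiredProperties(final Properties props, final Set<String> propsToCheck) {
        for (final String key : propsToCheck) {
            if (!props.containsKey(key)) {
                throw new IllegalStateException("Missing required property: " + key);
            }
        }
    }
}
```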
* - * @param props - properties object to check for required properties. - * @param propsToCheck - set of required property keys. + * @param props the properties object to check for required properties. + * @param propsToCheck the set of required property keys. * @throws MissingPropertyException if a required property is missing from the properties object. */ private static void validateRequiredProperties(final Properties props, final Set<String> propsToCheck) { diff --git a/src/main/java/ch/cern/nile/common/configuration/properties/ClientProperties.java b/src/main/java/ch/cern/nile/common/configuration/properties/ClientProperties.java index 80b12cd..99efbaf 100644 --- a/src/main/java/ch/cern/nile/common/configuration/properties/ClientProperties.java +++ b/src/main/java/ch/cern/nile/common/configuration/properties/ClientProperties.java @@ -1,16 +1,12 @@ package ch.cern.nile.common.configuration.properties; -import java.util.Arrays; -import java.util.Set; -import java.util.stream.Collectors; - import lombok.Getter; /** - * Enum representing Client properties. + * Enum representing various properties specific to clients in the application. */ @Getter -public enum ClientProperties { +public enum ClientProperties implements PropertyEnum { SOURCE_TOPIC("source.topic"), KAFKA_CLUSTER("kafka.cluster"), CLIENT_ID("client.id"), @@ -22,12 +18,4 @@ public enum ClientProperties { this.value = value; } - /** - * Get the values of all the enum constants. - * - * @return a set of the values of all the enum constants - */ - public static Set<String> getValues() { - return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet()); - } } diff --git a/src/main/java/ch/cern/nile/common/configuration/properties/CommonProperties.java b/src/main/java/ch/cern/nile/common/configuration/properties/CommonProperties.java index 700bd18..bacde5e 100644 --- a/src/main/java/ch/cern/nile/common/configuration/properties/CommonProperties.java +++ b/src/main/java/ch/cern/nile/common/configuration/properties/CommonProperties.java @@ -1,16 +1,12 @@ package ch.cern.nile.common.configuration.properties; -import java.util.Arrays; -import java.util.Set; -import java.util.stream.Collectors; - import lombok.Getter; /** - * Enum representing Common properties. + * Enum representing common properties used throughout the application. */ @Getter -public enum CommonProperties { +public enum CommonProperties implements PropertyEnum { STREAM_TYPE("stream.type"), STREAM_CLASS("stream.class"); @@ -20,12 +16,4 @@ public enum CommonProperties { this.value = value; } - /** - * Get the values of all the enum constants. - * - * @return a set of the values of all the enum constants - */ - public static Set<String> getValues() { - return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet()); - } } diff --git a/src/main/java/ch/cern/nile/common/configuration/properties/DecodingProperties.java b/src/main/java/ch/cern/nile/common/configuration/properties/DecodingProperties.java index 17817c7..5f79c32 100644 --- a/src/main/java/ch/cern/nile/common/configuration/properties/DecodingProperties.java +++ b/src/main/java/ch/cern/nile/common/configuration/properties/DecodingProperties.java @@ -1,16 +1,12 @@ package ch.cern.nile.common.configuration.properties; -import java.util.Arrays; -import java.util.Set; -import java.util.stream.Collectors; - import lombok.Getter; /** - * Enum representing Decoding properties. + * Enum representing properties related to the decoding process in the application. 
*/ @Getter -public enum DecodingProperties { +public enum DecodingProperties implements PropertyEnum { SINK_TOPIC("sink.topic"); private final String value; @@ -19,12 +15,4 @@ public enum DecodingProperties { this.value = value; } - /** - * Get the values of all the enum constants. - * - * @return a set of the values of all the enum constants - */ - public static Set<String> getValues() { - return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet()); - } } diff --git a/src/main/java/ch/cern/nile/common/configuration/properties/EnrichmentProperties.java b/src/main/java/ch/cern/nile/common/configuration/properties/EnrichmentProperties.java index 489da1b..61d3094 100644 --- a/src/main/java/ch/cern/nile/common/configuration/properties/EnrichmentProperties.java +++ b/src/main/java/ch/cern/nile/common/configuration/properties/EnrichmentProperties.java @@ -1,16 +1,13 @@ package ch.cern.nile.common.configuration.properties; -import java.util.Arrays; -import java.util.Set; -import java.util.stream.Collectors; - import lombok.Getter; + /** - * Enum representing the Enrichment properties. + * Enum representing properties related to data enrichment in the application. */ @Getter -public enum EnrichmentProperties { +public enum EnrichmentProperties implements PropertyEnum { ENRICHMENT_CONFIG_PATH("enrichment.config.path"), SINK_TOPIC("sink.topic"); @@ -20,12 +17,4 @@ public enum EnrichmentProperties { this.value = value; } - /** - * Get the values of all the enum constants. - * - * @return a set of the values of all the enum constants - */ - public static Set<String> getValues() { - return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet()); - } } diff --git a/src/main/java/ch/cern/nile/common/configuration/properties/PropertyEnum.java b/src/main/java/ch/cern/nile/common/configuration/properties/PropertyEnum.java new file mode 100644 index 0000000..8b411b0 --- /dev/null +++ b/src/main/java/ch/cern/nile/common/configuration/properties/PropertyEnum.java @@ -0,0 +1,38 @@ +package ch.cern.nile.common.configuration.properties; + +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * Interface representing an enumeration of property keys. Enums implementing this interface + * can be used to define sets of configuration properties. + * Each enum constant in an implementing class represents a specific property key. + * <p> + * This interface provides a method to retrieve the string values associated with each enum constant. + * <p> + * Implementing enums should define a private final field to store the property key and use the + * {@link lombok.Getter} annotation to automatically generate the required getter method. + */ +public interface PropertyEnum { + + /** + * Retrieves the string value associated with this enum constant. + * It's suggested to use {@link lombok.Getter} to generate this method. + * + * @return the string value associated with this enum constant + */ + String getValue(); + + /** + * Retrieves the string values associated with each enum constant of a given enum type that + * implements PropertyEnum. + * + * @param enumClass the class object of the enum type. 
+ * @param <E> the type of the enum class that implements PropertyEnum + * @return a set containing the string values of all the enum constants in the specified enum + */ + static <E extends Enum<E> & PropertyEnum> Set<String> getValues(final Class<E> enumClass) { + return Arrays.stream(enumClass.getEnumConstants()).map(PropertyEnum::getValue).collect(Collectors.toSet()); + } +} diff --git a/src/main/java/ch/cern/nile/common/configuration/properties/RoutingProperties.java b/src/main/java/ch/cern/nile/common/configuration/properties/RoutingProperties.java index e6511e6..e97016c 100644 --- a/src/main/java/ch/cern/nile/common/configuration/properties/RoutingProperties.java +++ b/src/main/java/ch/cern/nile/common/configuration/properties/RoutingProperties.java @@ -1,16 +1,13 @@ package ch.cern.nile.common.configuration.properties; -import java.util.Arrays; -import java.util.Set; -import java.util.stream.Collectors; - import lombok.Getter; + /** - * Enum representing the Routing properties. + * Enum representing properties related to message routing within the application. */ @Getter -public enum RoutingProperties { +public enum RoutingProperties implements PropertyEnum { ROUTING_CONFIG_PATH("routing.config.path"), DLQ_TOPIC("dlq.topic"); @@ -20,12 +17,4 @@ public enum RoutingProperties { this.value = value; } - /** - * Get the values of all the enum constants. - * - * @return a set of the values of all the enum constants - */ - public static Set<String> getValues() { - return Arrays.stream(values()).map(o -> o.value).collect(Collectors.toSet()); - } } diff --git a/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java b/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java index 3d35381..36b6638 100644 --- a/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java +++ b/src/main/java/ch/cern/nile/common/json/JsonPojoDeserializer.java @@ -8,33 +8,35 @@ import com.google.gson.Gson; import org.apache.kafka.common.serialization.Deserializer; /** - * Deserializer for JSON POJOs. + * A deserializer for JSON POJOs using Gson. This class implements the Deserializer interface + * from Apache Kafka and provides a mechanism to convert JSON byte data back into Java objects (POJOs) + * of a specified type. * - * @param <T> Type of the POJO to be deserialized + * @param <T> The type of the POJO to be deserialized. */ public class JsonPojoDeserializer<T> implements Deserializer<T> { private static final Gson GSON = new Gson(); /** - * Class type for the deserialization. + * The class type for the deserialization. */ private Class<T> tClass; /** - * Constructor for the deserializer. + * Constructs a new JsonPojoDeserializer with the given class type for deserialization. * - * @param clazz Class type for the deserialization + * @param clazz the class type for the deserialization */ JsonPojoDeserializer(final Class<T> clazz) { this.tClass = clazz; } /** - * Configure this class. + * Configures this class with the given properties. * - * @param props Properties from the consumer configuration - * @param isKey Ignored + * @param props the properties from the consumer configuration + * @param isKey is ignored in this implementation */ @Override @SuppressWarnings("unchecked") @@ -45,11 +47,11 @@ public class JsonPojoDeserializer<T> implements Deserializer<T> { } /** - * Deserialize the provided byte array into an object of type T. + * Deserializes the provided byte array into an object of type T. 
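To make the new `PropertyEnum` contract concrete, here is a hypothetical enum implementing it; the enum name and keys are invented for illustration, but the pattern mirrors the library's own property enums:

```java
import lombok.Getter;

import ch.cern.nile.common.configuration.properties.PropertyEnum;

@Getter
enum ExampleProperties implements PropertyEnum {
    RETRY_COUNT("retry.count"),
    TIMEOUT_MS("timeout.ms");

    private final String value;

    ExampleProperties(final String value) {
        this.value = value;
    }
}

// Usage: PropertyEnum.getValues(ExampleProperties.class)
// yields the set {"retry.count", "timeout.ms"}.
```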
     *
-     * @param topic The topic associated with the data
-     * @param bytes The byte array to be deserialized
-     * @return The deserialized object of type T or null if the byte array is null
+     * @param topic the topic associated with the data
+     * @param bytes the byte array to be deserialized
+     * @return the deserialized object of type T or null if the byte array is null
      */
     @Override
     public T deserialize(final String topic, final byte[] bytes) {
@@ -61,7 +63,8 @@ public class JsonPojoDeserializer<T> implements Deserializer<T> {
     }
 
     /**
-     * Needed due to the implementation of the Serializer interface.
+     * Closes this deserializer.
+     * This method is required by the Deserializer interface but does nothing in this implementation.
      */
     @Override
     public void close() {
diff --git a/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java b/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java
index 075f77b..384a57d 100644
--- a/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java
+++ b/src/main/java/ch/cern/nile/common/json/JsonPojoSerializer.java
@@ -8,19 +8,22 @@ import com.google.gson.Gson;
 import org.apache.kafka.common.serialization.Serializer;
 
 /**
- * Serializer for JSON POJOs.
+ * A serializer for JSON POJOs using Gson. This class implements the Serializer interface
+ * from Apache Kafka and provides a mechanism to convert Java objects (POJOs) of a specified type
+ * into JSON byte data.
  *
- * @param <T> Type of the POJO to be serialized
+ * @param <T> The type of the POJO to be serialized.
  */
 public class JsonPojoSerializer<T> implements Serializer<T> {
 
     private static final Gson GSON = new Gson();
 
     /**
-     * Needed due to the implementation of the Serializer interface.
+     * Configures this serializer. This method is part of the Serializer interface but is not used
+     * in this implementation.
      *
-     * @param props Ignored
-     * @param isKey Ignored
+     * @param props is ignored in this implementation
+     * @param isKey is ignored in this implementation
      */
     @Override
     public void configure(final Map<String, ?> props, final boolean isKey) {
@@ -28,11 +31,11 @@ public class JsonPojoSerializer<T> implements Serializer<T> {
     }
 
     /**
-     * Serialize the provided data as a JSON string and convert it to bytes.
+     * Serializes the provided data into a JSON string using Gson and converts it to a byte array.
      *
-     * @param topic The topic associated with the data.
-     * @param data The data to be serialized.
-     * @return The serialized data as bytes or null if the data is null
+     * @param topic the topic associated with the data. This is not used in the serialization process
+     * @param data the POJO to be serialized
+     * @return the serialized data as bytes, or null if the data is null
      */
     @Override
     public byte[] serialize(final String topic, final T data) {
@@ -44,7 +47,8 @@ public class JsonPojoSerializer<T> implements Serializer<T> {
     }
 
     /**
-     * Needed due to the implementation of the Serializer interface.
+     * Closes this serializer.
+     * This method is required by the Serializer interface but does nothing in this implementation.
      */
     @Override
     public void close() {
diff --git a/src/main/java/ch/cern/nile/common/json/JsonSerde.java b/src/main/java/ch/cern/nile/common/json/JsonSerde.java
index 10d82f0..a5b518c 100644
--- a/src/main/java/ch/cern/nile/common/json/JsonSerde.java
+++ b/src/main/java/ch/cern/nile/common/json/JsonSerde.java
@@ -9,7 +9,9 @@ import org.apache.kafka.common.serialization.Serde;
 import org.apache.kafka.common.serialization.Serializer;
 
 /**
- * A Serde for JSON objects.
+ * A Serde (Serializer/Deserializer) implementation for JSON objects using Gson. This class + * provides both serialization and deserialization capabilities for Kafka streams to handle + * JSON objects represented by the {@link JsonObject} class. */ public class JsonSerde implements Serde<JsonObject> { @@ -17,10 +19,12 @@ public class JsonSerde implements Serde<JsonObject> { private final JsonPojoDeserializer<JsonObject> jsonDeserializer = new JsonPojoDeserializer<>(JsonObject.class); /** - * Configure this class. + * Configures this Serde with the given properties. This method configures both the internal + * serializer and deserializer with the provided configuration settings. * - * @param configs Properties from the consumer configuration - * @param isKey Ignored + * @param configs the properties from the consumer or producer configuration + * @param isKey indicates whether this Serde is being used for key serialization/deserialization + * This parameter is ignored in this implementation */ @Override public void configure(final Map<String, ?> configs, final boolean isKey) { @@ -29,7 +33,7 @@ public class JsonSerde implements Serde<JsonObject> { } /** - * Close this class. + * Closes this Serde. This method closes both the internal serializer and deserializer. */ @Override public void close() { @@ -38,9 +42,9 @@ public class JsonSerde implements Serde<JsonObject> { } /** - * Get the serializer. + * Returns the serializer component of this Serde. * - * @return The serializer + * @return The {@link JsonPojoSerializer} for serializing JSON objects */ @Override public Serializer<JsonObject> serializer() { @@ -48,9 +52,9 @@ public class JsonSerde implements Serde<JsonObject> { } /** - * Get the deserializer. + * Returns the deserializer component of this Serde. * - * @return The deserializer + * @return The {@link JsonPojoDeserializer} for deserializing JSON objects */ @Override public Deserializer<JsonObject> deserializer() { diff --git a/src/main/java/ch/cern/nile/common/models/Application.java b/src/main/java/ch/cern/nile/common/models/Application.java index fcede13..828342f 100644 --- a/src/main/java/ch/cern/nile/common/models/Application.java +++ b/src/main/java/ch/cern/nile/common/models/Application.java @@ -6,7 +6,8 @@ import lombok.Setter; import lombok.ToString; /** - * Application model. + * Model representing an application with its name and associated topic. + * Primarily used in serialization and deserialization processes. */ @NoArgsConstructor @Getter diff --git a/src/main/java/ch/cern/nile/common/models/Topic.java b/src/main/java/ch/cern/nile/common/models/Topic.java index 2807e7b..d19b104 100644 --- a/src/main/java/ch/cern/nile/common/models/Topic.java +++ b/src/main/java/ch/cern/nile/common/models/Topic.java @@ -6,7 +6,8 @@ import lombok.Setter; import lombok.ToString; /** - * Topic model. + * Model representing a topic, identified by its name. + * Used in serialization and deserialization processes. */ @NoArgsConstructor @Getter diff --git a/src/main/java/ch/cern/nile/common/probes/Health.java b/src/main/java/ch/cern/nile/common/probes/Health.java index 3d60006..96edb38 100644 --- a/src/main/java/ch/cern/nile/common/probes/Health.java +++ b/src/main/java/ch/cern/nile/common/probes/Health.java @@ -10,8 +10,9 @@ import org.apache.kafka.streams.KafkaStreams; import ch.cern.nile.common.exceptions.HealthProbeException; /** - * A simple HTTP server that responds to health checks with a 200 if the KafkaStreams instance is running, - * or a 500 if it is not running. 
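Setting the diff aside briefly: the serializer and deserializer above combine in `JsonSerde`, and a round trip looks like this. A small sketch (topic name and field are hypothetical):

```java
import com.google.gson.JsonObject;

import ch.cern.nile.common.json.JsonSerde;

final class JsonSerdeRoundTrip {

    private JsonSerdeRoundTrip() {
    }

    static JsonObject roundTrip() {
        final JsonSerde serde = new JsonSerde();

        final JsonObject payload = new JsonObject();
        payload.addProperty("deviceName", "sensor-42"); // hypothetical field

        // Serialize to bytes and back; the topic argument is unused on both sides.
        final byte[] bytes = serde.serializer().serialize("example-topic", payload);
        return serde.deserializer().deserialize("example-topic", bytes);
    }
}
```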
+ * A simple HTTP server that responds to health checks at the "/health" endpoint. + * It returns a 200 OK response if the KafkaStreams instance is running, or a 500 Internal Server Error + * if it is not running. By default, the server listens on port 8899. */ public class Health { @@ -25,6 +26,7 @@ public class Health { /** * Creates a new Health instance that will respond to health checks on port 8899. + * Health checks determine the running state of the provided KafkaStreams instance. * * @param streams the KafkaStreams instance to check the state of */ @@ -33,7 +35,9 @@ public class Health { } /** - * Creates a new Health instance that will respond to health checks on port 8899. To be used for testing. + * Creates a new Health instance that will respond to health checks on port 8899, using the provided + * HttpServerFactory. This constructor is useful for testing. Health checks determine the running state + * of the provided KafkaStreams instance. * * @param streams the KafkaStreams instance to check the state of * @param httpServerFactory the factory to use to create the HttpServer instance @@ -44,7 +48,8 @@ public class Health { } /** - * Start the Health http server. + * Starts the Health HTTP server. The server listens for health check requests and responds + * based on the state of the KafkaStreams instance. */ public void start() { try { @@ -61,14 +66,14 @@ public class Health { } /** - * Stops the Health HTTP server. + * Stops the Health HTTP server, terminating the health check responses. */ public void stop() { server.stop(0); } /** - * The default HttpServerFactory implementation. + * The default HttpServerFactory implementation used to create HttpServer instances. */ private static final class DefaultHttpServerFactory implements HttpServerFactory { @Override diff --git a/src/main/java/ch/cern/nile/common/probes/HttpServerFactory.java b/src/main/java/ch/cern/nile/common/probes/HttpServerFactory.java index 65fbc42..51969de 100644 --- a/src/main/java/ch/cern/nile/common/probes/HttpServerFactory.java +++ b/src/main/java/ch/cern/nile/common/probes/HttpServerFactory.java @@ -6,16 +6,18 @@ import java.net.InetSocketAddress; import com.sun.net.httpserver.HttpServer; /** - * Factory for creating HttpServer instances. Used to allow mocking of HttpServer in tests. + * Factory for creating HttpServer instances. This interface is used to allow mocking of HttpServer + * in tests and to provide flexibility in the instantiation of HttpServer, facilitating dependency + * injection and customization. */ public interface HttpServerFactory { /** - * Creates a new HttpServer instance. + * Creates a new HttpServer instance bound to the specified address with the given backlog. * * @param address the address to bind the server to * @param backlog the maximum number of pending connections - * @return the HttpServer instance + * @return the created HttpServer instance * @throws IOException if an I/O error occurs when creating the HttpServer */ HttpServer createHttpServer(InetSocketAddress address, int backlog) throws IOException; diff --git a/src/main/java/ch/cern/nile/common/schema/JsonType.java b/src/main/java/ch/cern/nile/common/schema/JsonType.java index 7c7f36e..784a9af 100644 --- a/src/main/java/ch/cern/nile/common/schema/JsonType.java +++ b/src/main/java/ch/cern/nile/common/schema/JsonType.java @@ -5,7 +5,9 @@ import java.util.Date; import lombok.Getter; /** - * Enum for JSON types for Connect schema(s). + * Enum mapping Java classes to their corresponding JSON types for Connect schema(s). 
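The `Health` probe above is typically wired around a running streams instance. A usage sketch under the defaults stated in the Javadoc (port 8899, `/health` endpoint); lifecycle error handling is omitted:

```java
import org.apache.kafka.streams.KafkaStreams;

import ch.cern.nile.common.probes.Health;

final class HealthProbeUsage {

    private HealthProbeUsage() {
    }

    static Health startProbe(final KafkaStreams streams) {
        final Health health = new Health(streams);
        health.start(); // GET http://localhost:8899/health -> 200 while the streams instance is running
        return health;  // call stop() on shutdown
    }
}
```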
+ * This enum provides a convenient way to determine the JSON type representation + * of various Java data types. */ @Getter enum JsonType { @@ -24,10 +26,10 @@ enum JsonType { private final String type; /** - * Constructor. + * Constructs a new JsonType enum constant. * - * @param clazz Class for the {@link JsonType} - * @param type Type for the {@link JsonType} + * @param clazz the Java class associated with this JSON type + * @param type the string representation of the JSON type */ JsonType(final Class<?> clazz, final String type) { this.clazz = clazz; @@ -35,10 +37,12 @@ enum JsonType { } /** - * Returns the {@link JsonType} for the given class. + * Returns the JsonType corresponding to the given Java class. + * Throws an IllegalArgumentException if the class is not supported. * - * @param clazz Class to get the {@link JsonType} for - * @return {@link JsonType} for the given class + * @param clazz the Java class to find the corresponding JsonType for + * @return the JsonType corresponding to the given class + * @throws IllegalArgumentException if the class is not supported */ static JsonType fromClass(final Class<?> clazz) { for (final JsonType jsonType : values()) { diff --git a/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java b/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java index 724d120..b4c3b4b 100644 --- a/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java +++ b/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java @@ -7,7 +7,8 @@ import java.util.Map; import java.util.stream.Collectors; /** - * Injects a Connect schema into the given data. + * Utility class for injecting Connect schemas into given data. The class provides static methods + * to generate a schema based on the data types present in a map and inject this schema into the data. */ public final class SchemaInjector { @@ -15,10 +16,11 @@ public final class SchemaInjector { } /** - * Injects a Connect schema into the given data. + * Injects a Connect schema into the given data. The method generates a schema based on the data types + * in the input map and returns a new map containing both the original data and the generated schema. * - * @param data Data to inject the schema into - * @return Data with the schema injected + * @param data the data to inject the schema into + * @return a new map containing the original data and the injected schema */ public static Map<String, Object> inject(final Map<String, Object> data) { final Map<String, Object> dataCopy = new HashMap<>(data); diff --git a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java index 094054e..bd4c593 100644 --- a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java +++ b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java @@ -62,10 +62,9 @@ public abstract class AbstractStream implements Streaming { } /** - * Starts the Kafka Streams application using the provided KafkaStreamsClient. - * <p> - * Initializes and manages the Kafka Streams application lifecycle, including graceful shutdown. - * Note: This method terminates the JVM upon completion. + * Starts the Kafka Streams application using the provided KafkaStreamsClient and initializes + * its lifecycle management, including graceful shutdown. This method also adds a shutdown hook + * to the JVM and terminates the JVM upon completion of the stream. 
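Before moving on, `SchemaInjector.inject` above is easiest to grasp with a tiny input. The field names below are hypothetical, and since this patch does not show the exact layout of the returned envelope, the comment stays deliberately vague:

```java
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import ch.cern.nile.common.schema.SchemaInjector;

final class SchemaInjectorUsage {

    private SchemaInjectorUsage() {
    }

    static Map<String, Object> example() {
        final Map<String, Object> data = new HashMap<>();
        data.put("temperature", 21.5); // maps to a floating-point Connect type
        data.put("readAt", new Date()); // java.util.Date is special-cased by JsonType
        // Returns a new map carrying the original entries plus the generated Connect schema.
        return SchemaInjector.inject(data);
    }
}
```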
* * @param kafkaStreamsClient the client used to create and manage the Kafka Streams instance * @throws StreamingException if an error occurs during streaming diff --git a/src/main/java/ch/cern/nile/common/streams/StreamUtils.java b/src/main/java/ch/cern/nile/common/streams/StreamUtils.java index 09a7dbe..37091ed 100644 --- a/src/main/java/ch/cern/nile/common/streams/StreamUtils.java +++ b/src/main/java/ch/cern/nile/common/streams/StreamUtils.java @@ -90,8 +90,8 @@ public final class StreamUtils { /** * Filters records based on the presence of required fields in a JsonObject. - * Returns true if all required fields ("applicationID", "applicationName", "deviceName", - * "devEUI", and "data") are present, otherwise false. + * Returns true if the JsonObject contains all required fields ("applicationID", "applicationName", + * "deviceName", "devEUI", and "data"), otherwise false. * * @param ignored ignored parameter (unused in current implementation) * @param value the JsonObject to be checked for required fields diff --git a/src/main/java/ch/cern/nile/common/streams/Streaming.java b/src/main/java/ch/cern/nile/common/streams/Streaming.java index aefd2c2..cc7987a 100644 --- a/src/main/java/ch/cern/nile/common/streams/Streaming.java +++ b/src/main/java/ch/cern/nile/common/streams/Streaming.java @@ -12,9 +12,9 @@ import ch.cern.nile.common.configuration.Configure; public interface Streaming extends Configure { /** - * Initializes and starts the streaming process. This method should define the setup and - * execution of the stream, utilizing the provided KafkaStreamsClient to create and manage - * Kafka Streams. + * Initializes and starts the streaming process using the provided KafkaStreamsClient. + * Implementations should define the setup and execution of the stream, including the + * creation and management of Kafka Streams instances. * * @param kafkaStreamsClient the KafkaStreamsClient used to create and manage the stream. 
*/ diff --git a/src/test/java/ch/cern/nile/common/configuration/properties/StreamConfigTest.java b/src/test/java/ch/cern/nile/common/configuration/properties/StreamConfigTest.java index f400731..8d94322 100644 --- a/src/test/java/ch/cern/nile/common/configuration/properties/StreamConfigTest.java +++ b/src/test/java/ch/cern/nile/common/configuration/properties/StreamConfigTest.java @@ -16,7 +16,7 @@ class StreamConfigTest { @Test void givenClientPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { final Set<String> expectedConfigs = Set.of("source.topic", "kafka.cluster", "client.id", "truststore.location"); - final Set<String> actualConfigs = ClientProperties.getValues(); + final Set<String> actualConfigs = PropertyEnum.getValues(ClientProperties.class); assertEquals(expectedConfigs, actualConfigs, SHOULD_RETURN_EXPECTED_SET_OF_CONFIGS); } @@ -30,7 +30,7 @@ class StreamConfigTest { @Test void givenCommonPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { final Set<String> expectedConfigs = Set.of("stream.type", "stream.class"); - final Set<String> actualConfigs = CommonProperties.getValues(); + final Set<String> actualConfigs = PropertyEnum.getValues(CommonProperties.class); assertEquals(expectedConfigs, actualConfigs, SHOULD_RETURN_EXPECTED_SET_OF_CONFIGS); } @@ -44,7 +44,7 @@ class StreamConfigTest { @Test void givenDecodingPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { final Set<String> expectedConfigs = Set.of("sink.topic"); - final Set<String> actualConfigs = DecodingProperties.getValues(); + final Set<String> actualConfigs = PropertyEnum.getValues(DecodingProperties.class); assertEquals(expectedConfigs, actualConfigs, SHOULD_RETURN_EXPECTED_SET_OF_CONFIGS); } @@ -58,7 +58,7 @@ class StreamConfigTest { @Test void givenRoutingPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { final Set<String> expectedConfigs = Set.of("routing.config.path", "dlq.topic"); - final Set<String> actualConfigs = RoutingProperties.getValues(); + final Set<String> actualConfigs = PropertyEnum.getValues(RoutingProperties.class); assertEquals(expectedConfigs, actualConfigs, SHOULD_RETURN_EXPECTED_SET_OF_CONFIGS); } @@ -72,7 +72,7 @@ class StreamConfigTest { @Test void givenEnrichmentPropertiesEnum_whenGetValues_thenReturnsExpectedSet() { final Set<String> expectedConfigs = Set.of("enrichment.config.path", "sink.topic"); - final Set<String> actualConfigs = EnrichmentProperties.getValues(); + final Set<String> actualConfigs = PropertyEnum.getValues(EnrichmentProperties.class); assertEquals(expectedConfigs, actualConfigs, SHOULD_RETURN_EXPECTED_SET_OF_CONFIGS); } -- GitLab From 39e5d1ec1665d1c1c31f8a9fe4791bda1b9b71f6 Mon Sep 17 00:00:00 2001 From: Dean Dalianis <dean.dalianis@cern.ch> Date: Mon, 29 Jan 2024 18:26:10 +0100 Subject: [PATCH 06/15] Spotbugs fixes --- pom.xml | 1 - src/main/java/ch/cern/nile/common/StreamingApplication.java | 3 +++ src/main/java/ch/cern/nile/common/models/Application.java | 3 +++ src/main/java/ch/cern/nile/common/models/Topic.java | 3 +++ src/main/java/ch/cern/nile/common/probes/Health.java | 4 ++++ src/main/java/ch/cern/nile/common/streams/AbstractStream.java | 4 ++++ 6 files changed, 17 insertions(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index ff61604..8a5ffbb 100644 --- a/pom.xml +++ b/pom.xml @@ -74,7 +74,6 @@ <groupId>com.github.spotbugs</groupId> <artifactId>spotbugs-annotations</artifactId> <version>4.8.1</version> - <scope>test</scope> </dependency> </dependencies> diff --git a/src/main/java/ch/cern/nile/common/StreamingApplication.java 
b/src/main/java/ch/cern/nile/common/StreamingApplication.java index cd104d2..b18289e 100644 --- a/src/main/java/ch/cern/nile/common/StreamingApplication.java +++ b/src/main/java/ch/cern/nile/common/StreamingApplication.java @@ -17,6 +17,8 @@ import ch.cern.nile.common.configuration.properties.CommonProperties; import ch.cern.nile.common.exceptions.StreamingException; import ch.cern.nile.common.streams.Streaming; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + /** * {@link StreamingApplication} is the entry point for initializing and starting a Kafka Streams application. * This class provides the main method to load configuration properties, perform necessary validations, @@ -40,6 +42,7 @@ public final class StreamingApplication { * @throws StreamingException if there are issues with loading the properties file, * validating properties, or starting the streaming process */ + @SuppressFBWarnings(value = "PATH_TRAVERSAL_IN", justification = "This method is only used internally") public static void main(final String[] args) { if (args.length < MIN_ARGS_LENGTH) { throw new IllegalArgumentException("Properties file not passed"); diff --git a/src/main/java/ch/cern/nile/common/models/Application.java b/src/main/java/ch/cern/nile/common/models/Application.java index 828342f..8f01369 100644 --- a/src/main/java/ch/cern/nile/common/models/Application.java +++ b/src/main/java/ch/cern/nile/common/models/Application.java @@ -1,5 +1,6 @@ package ch.cern.nile.common.models; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; @@ -13,6 +14,8 @@ import lombok.ToString; @Getter @Setter @ToString +@SuppressFBWarnings(value = "EI_EXPOSE_REP", + justification = "This is a model class used for serialization and deserialization") public class Application { private String name; diff --git a/src/main/java/ch/cern/nile/common/models/Topic.java b/src/main/java/ch/cern/nile/common/models/Topic.java index d19b104..686ee8a 100644 --- a/src/main/java/ch/cern/nile/common/models/Topic.java +++ b/src/main/java/ch/cern/nile/common/models/Topic.java @@ -1,5 +1,6 @@ package ch.cern.nile.common.models; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; @@ -13,6 +14,8 @@ import lombok.ToString; @Getter @Setter @ToString +@SuppressFBWarnings(value = "EI_EXPOSE_REP", + justification = "This is a model class used for serialization and deserialization") public class Topic { private String name; diff --git a/src/main/java/ch/cern/nile/common/probes/Health.java b/src/main/java/ch/cern/nile/common/probes/Health.java index 96edb38..1ca381e 100644 --- a/src/main/java/ch/cern/nile/common/probes/Health.java +++ b/src/main/java/ch/cern/nile/common/probes/Health.java @@ -9,6 +9,8 @@ import org.apache.kafka.streams.KafkaStreams; import ch.cern.nile.common.exceptions.HealthProbeException; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; + /** * A simple HTTP server that responds to health checks at the "/health" endpoint. 
* It returns a 200 OK response if the KafkaStreams instance is running, or a 500 Internal Server Error @@ -42,6 +44,8 @@ public class Health { * @param streams the KafkaStreams instance to check the state of * @param httpServerFactory the factory to use to create the HttpServer instance */ + @SuppressFBWarnings(value = "EI_EXPOSE_REP2", + justification = "This is an internal class and the HttpServerFactory is not exposed to the outside") public Health(final KafkaStreams streams, final HttpServerFactory httpServerFactory) { this.streams = streams; this.httpServerFactory = httpServerFactory; diff --git a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java index bd4c593..1e8ff07 100644 --- a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java +++ b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java @@ -15,6 +15,7 @@ import ch.cern.nile.common.configuration.properties.DecodingProperties; import ch.cern.nile.common.exceptions.StreamingException; import ch.cern.nile.common.probes.Health; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import lombok.Getter; import lombok.Setter; @@ -57,6 +58,8 @@ public abstract class AbstractStream implements Streaming { * @param configs the configuration settings for the Kafka Streams application */ @Override + @SuppressFBWarnings(value = "EI_EXPOSE_REP2", + justification = "This is an internal class and the properties are not exposed to the outside") public void configure(final Properties configs) { this.properties = configs; } @@ -71,6 +74,7 @@ public abstract class AbstractStream implements Streaming { */ @Override @SuppressWarnings("PMD.DoNotTerminateVM") + @SuppressFBWarnings(value = "DM_EXIT", justification = "This is a Kafka Streams application") public void stream(final KafkaStreamsClient kafkaStreamsClient) { init(kafkaStreamsClient); Runtime.getRuntime().addShutdownHook(new Thread(this::shutDown, "streams-shutdown-hook")); -- GitLab From 3f0d5693b848099d17309dac5b2dd06971e4708c Mon Sep 17 00:00:00 2001 From: Konstantinos Dalianis <dean.dalianis@cern.ch> Date: Tue, 30 Jan 2024 11:43:00 +0100 Subject: [PATCH 07/15] Updated README.md --- README.md | 94 +++++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 92 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index ba2fe53..fdbb7ba 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,94 @@ # Nile Common -Nile Common is the common core library for the Nile streaming ecosystem. -It contains all the common classes and utilities that are used by the other Nile streaming applications. +**Nile Common** is a Java library designed for building Kafka streaming applications. This library +encapsulates a range of functionalities including stream processing, schema management, serialization & deserialization, +and Health checks, thus providing a robust foundation for developing data streaming solutions. + +## Getting Started + +### Prerequisites + +- Java 11 or higher + +### Adding Dependency + +Currently, the library is available as a maven artifact. 
To add the dependency to your project, add the following to
+your `pom.xml` file:
+
+```xml
+<dependency>
+    <groupId>ch.cern.nile</groupId>
+    <artifactId>nile-common</artifactId>
+    <version>1.0.0</version>
+</dependency>
+```
+
+Since this library is not yet available on Maven Central, you will also need to configure the GitLab Maven registry in
+your `pom.xml` file:
+
+```xml
+<repositories>
+    <repository>
+        <id>gitlab-maven</id>
+        <url>https://gitlab.cern.ch/api/v4/projects/170995/packages/maven</url>
+    </repository>
+</repositories>
+
+<distributionManagement>
+    <repository>
+        <id>gitlab-maven</id>
+        <url>https://gitlab.cern.ch/api/v4/projects/170995/packages/maven</url>
+    </repository>
+
+    <snapshotRepository>
+        <id>gitlab-maven</id>
+        <url>https://gitlab.cern.ch/api/v4/projects/170995/packages/maven</url>
+    </snapshotRepository>
+</distributionManagement>
+```
+
+## Basic Usage
+
+### Extending AbstractStream
+
+Extend the AbstractStream class to implement your custom streaming logic. This involves defining the stream processing
+steps within the createTopology method.
+
+```java
+package com.example.streams;
+
+import java.util.Map;
+
+import com.google.gson.JsonObject;
+
+import org.apache.kafka.common.serialization.Serdes;
+import org.apache.kafka.streams.StreamsBuilder;
+import org.apache.kafka.streams.kstream.Consumed;
+
+import ch.cern.nile.common.json.JsonSerde;
+import ch.cern.nile.common.schema.SchemaInjector;
+import ch.cern.nile.common.streams.AbstractStream;
+import ch.cern.nile.common.streams.StreamUtils;
+
+// Also import your InjectOffsetTransformer implementation from wherever it lives in your project.
+public class MyCustomStream extends AbstractStream {
+
+    public MyCustomStream(String sourceTopic, String sinkTopic) {
+        super(sourceTopic, sinkTopic);
+    }
+
+    @Override
+    public void createTopology(StreamsBuilder builder) {
+        // Define your custom stream processing logic
+        builder.stream(getSourceTopic(), Consumed.with(Serdes.String(), new JsonSerde()))
+                .filter(StreamUtils::filterRecord)
+                .transformValues(InjectOffsetTransformer::new)
+                .mapValues(value -> value) // replace with your transformation logic
+                .filter(StreamUtils::filterNull)
+                .to(getSinkTopic());
+    }
+
+    // Optional: custom enrichment logic; this example injects a Connect schema.
+    public Map<String, Object> enrichCustomFunction(Map<String, Object> map, JsonObject value) {
+        return SchemaInjector.inject(map);
+    }
+}
+```
+
+## Support & Contact
+
+For support, questions, or feedback, please contact [Nile Support](mailto:nile-support@cern.ch).
\ No newline at end of file
-- GitLab


From 8fca432c57fda58f8501096f48a1be49945ab9cc Mon Sep 17 00:00:00 2001
From: Konstantinos Dalianis <dean.dalianis@cern.ch>
Date: Tue, 30 Jan 2024 11:51:21 +0100
Subject: [PATCH 08/15] AbstractStream: createTopology is now public

---
 src/main/java/ch/cern/nile/common/streams/AbstractStream.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java
index 1e8ff07..2b3d005 100644
--- a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java
+++ b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java
@@ -87,7 +87,7 @@ public abstract class AbstractStream implements Streaming {
      *
      * @param builder the {@link StreamsBuilder} to use to create the topology
      */
-    protected abstract void createTopology(StreamsBuilder builder);
+    public abstract void createTopology(StreamsBuilder builder);
 
     /**
      * Use this method to log any exceptions that occur while streaming.
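Tying the README back to `StreamingApplication`: an application built this way is launched with the path to a properties file as its single argument (for example `java -jar my-streams.jar /etc/nile/app.properties`). The file below is a sketch; every value is hypothetical, and the keys are the ones defined by the property enums earlier in this series for a DECODING stream:

```properties
# Common properties
stream.type=DECODING
stream.class=com.example.streams.MyCustomStream

# Client properties
client.id=my-custom-decoder
kafka.cluster=my-kafka-cluster.example.ch
source.topic=raw-sensor-data
truststore.location=/etc/pki/truststore.jks

# Decoding properties
sink.topic=decoded-sensor-data
```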
-- GitLab From 7b003259a6f23dcccb4e30d2e387ac00c272c0d7 Mon Sep 17 00:00:00 2001 From: Dean Dalianis <dean.dalianis@cern.ch> Date: Tue, 30 Jan 2024 15:13:55 +0100 Subject: [PATCH 09/15] AbstractStream: Changed get/set visibility to PROTECTED --- .../ch/cern/nile/common/streams/AbstractStream.java | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java index 2b3d005..4fb492c 100644 --- a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java +++ b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java @@ -16,6 +16,7 @@ import ch.cern.nile.common.exceptions.StreamingException; import ch.cern.nile.common.probes.Health; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import lombok.AccessLevel; import lombok.Getter; import lombok.Setter; @@ -36,15 +37,15 @@ public abstract class AbstractStream implements Streaming { private static final Logger LOGGER = LoggerFactory.getLogger(AbstractStream.class); - @Getter - @Setter + @Getter(AccessLevel.PROTECTED) + @Setter(AccessLevel.PROTECTED) private String sourceTopic; - @Getter - @Setter + @Getter(AccessLevel.PROTECTED) + @Setter(AccessLevel.PROTECTED) private String sinkTopic; - @Setter + @Setter(AccessLevel.PROTECTED) private long lastReadOffset = -2; private Properties properties; -- GitLab From 0c0d5f2b3afb651c1b8302de13b010fc98f973c7 Mon Sep 17 00:00:00 2001 From: Dean Dalianis <dean.dalianis@cern.ch> Date: Tue, 30 Jan 2024 15:55:31 +0100 Subject: [PATCH 10/15] AbstractStream: New constructor(s) --- .../nile/common/streams/AbstractStream.java | 21 +++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java index 4fb492c..8fe5e02 100644 --- a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java +++ b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java @@ -38,11 +38,9 @@ public abstract class AbstractStream implements Streaming { private static final Logger LOGGER = LoggerFactory.getLogger(AbstractStream.class); @Getter(AccessLevel.PROTECTED) - @Setter(AccessLevel.PROTECTED) private String sourceTopic; @Getter(AccessLevel.PROTECTED) - @Setter(AccessLevel.PROTECTED) private String sinkTopic; @Setter(AccessLevel.PROTECTED) @@ -53,6 +51,25 @@ public abstract class AbstractStream implements Streaming { private Health health; private CountDownLatch latch; + /** + * Default constructor for AbstractStream. + */ + protected AbstractStream() { + // Default constructor + } + + /** + * Creates a new AbstractStream with the provided topics. + * Used for testing. + * + * @param sourceTopic the topic to read from + * @param sinkTopic the topic to write to + */ + protected AbstractStream(final String sourceTopic, final String sinkTopic) { + this.sourceTopic = sourceTopic; + this.sinkTopic = sinkTopic; + } + /** * Configures the Kafka Streams application with provided settings. 
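The two-argument constructor above is intended for tests. A hypothetical fixture subclass (assuming `createTopology` is the only abstract member a subclass must supply, as it is the only one shown in this series):

```java
import org.apache.kafka.streams.StreamsBuilder;

import ch.cern.nile.common.streams.AbstractStream;

// Test-only subclass: topics are injected directly instead of being read from properties.
class FixtureStream extends AbstractStream {

    FixtureStream() {
        super("source-topic-for-test", "sink-topic-for-test");
    }

    @Override
    public void createTopology(final StreamsBuilder builder) {
        // no-op topology for the test
    }
}
```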
* -- GitLab From 3acb8115faae72fe51d310063252d93448f5e5c6 Mon Sep 17 00:00:00 2001 From: Konstantinos Dalianis <dean.dalianis@cern.ch> Date: Wed, 31 Jan 2024 11:58:42 +0100 Subject: [PATCH 11/15] SchemaInjector bug fix: removed forced toString for non JsonPrimitive values --- .../java/ch/cern/nile/common/schema/SchemaInjector.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java b/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java index b4c3b4b..7de580b 100644 --- a/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java +++ b/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java @@ -1,5 +1,6 @@ package ch.cern.nile.common.schema; +import com.google.gson.JsonPrimitive; import java.util.Date; import java.util.HashMap; import java.util.Locale; @@ -48,9 +49,13 @@ public final class SchemaInjector { private static Map<String, Object> generateFieldMap(final Map.Entry<String, Object> entry) { final Map<String, Object> fieldMap = new HashMap<>(); final String key = entry.getKey(); - final Object value = entry.getValue(); + Object value = entry.getValue(); validateValue(value); + if (value instanceof JsonPrimitive) { + // TODO: test this quick bugfix further + value = ((JsonPrimitive) value).getAsString(); + } final JsonType type = JsonType.fromClass(value.getClass()); -- GitLab From 0d088eba8ae8551118a79728c71d015290be0159 Mon Sep 17 00:00:00 2001 From: Konstantinos Dalianis <dean.dalianis@cern.ch> Date: Thu, 1 Feb 2024 10:57:52 +0100 Subject: [PATCH 12/15] Timestamp: when not present in the message's gateways, now() is used --- .../cern/nile/common/streams/StreamUtils.java | 176 +++++++++--------- 1 file changed, 89 insertions(+), 87 deletions(-) diff --git a/src/main/java/ch/cern/nile/common/streams/StreamUtils.java b/src/main/java/ch/cern/nile/common/streams/StreamUtils.java index 37091ed..5aeac35 100644 --- a/src/main/java/ch/cern/nile/common/streams/StreamUtils.java +++ b/src/main/java/ch/cern/nile/common/streams/StreamUtils.java @@ -1,106 +1,108 @@ package ch.cern.nile.common.streams; -import java.time.Instant; -import java.util.List; -import java.util.Map; - +import ch.cern.nile.common.exceptions.DecodingException; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; - -import ch.cern.nile.common.exceptions.DecodingException; +import java.time.Instant; +import java.util.List; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * {@link StreamUtils} is a utility class providing static methods to assist in stream processing. */ public final class StreamUtils { - private StreamUtils() { + private static final Logger LOGGER = LoggerFactory.getLogger(StreamUtils.class); + + private StreamUtils() { + } + + /** + * Adds the most recent timestamp found in the gatewayInfo JsonArray to the provided map. + * The timestamp is added as an epoch millisecond value under the key "timestamp". 
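The behavior change in this patch is easiest to see with concrete input. In the sketch below the gateway JSON is invented; with the patched code the later of the two timestamps wins, and an array with no parseable `time` field now falls back to `Instant.now()` instead of failing:

```java
import java.util.HashMap;
import java.util.Map;

import com.google.gson.JsonArray;
import com.google.gson.JsonParser;

import ch.cern.nile.common.streams.StreamUtils;

final class AddTimestampUsage {

    private AddTimestampUsage() {
    }

    static Map<String, Object> example() {
        final JsonArray gateways = JsonParser.parseString(
                        "[{\"time\":\"2024-01-31T10:15:30Z\"},{\"time\":\"2024-01-31T10:15:31Z\"}]")
                .getAsJsonArray();
        final Map<String, Object> out = new HashMap<>();
        StreamUtils.addTimestamp(gateways, out);
        // out.get("timestamp") now holds the epoch-millisecond value of the later entry.
        return out;
    }
}
```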
+ * + * @param gatewayInfo the JsonArray containing gateway information, each entry expected to + * have a "time" field with an ISO-8601 formatted timestamp + * @param map the map to which the most recent timestamp will be added + * @throws DecodingException if no valid timestamp is found in the gatewayInfo + */ + public static void addTimestamp(final JsonArray gatewayInfo, final Map<String, Object> map) { + final String timeKey = "time"; + Instant timestamp = null; + for (final JsonElement element : gatewayInfo) { + if (!element.isJsonObject()) { + continue; + } + + final JsonObject entry = element.getAsJsonObject(); + if (!entry.has(timeKey)) { + continue; + } + + final Instant currentTimestamp = Instant.parse(entry.get(timeKey).getAsString()); + if (timestamp == null || currentTimestamp.isAfter(timestamp)) { + timestamp = currentTimestamp; + } } - /** - * Adds the most recent timestamp found in the gatewayInfo JsonArray to the provided map. - * The timestamp is added as an epoch millisecond value under the key "timestamp". - * - * @param gatewayInfo the JsonArray containing gateway information, each entry expected to - * have a "time" field with an ISO-8601 formatted timestamp - * @param map the map to which the most recent timestamp will be added - * @throws DecodingException if no valid timestamp is found in the gatewayInfo - */ - public static void addTimestamp(final JsonArray gatewayInfo, final Map<String, Object> map) { - final String timeKey = "time"; - - Instant mostRecentTimestamp = null; - for (final JsonElement element : gatewayInfo) { - if (!element.isJsonObject()) { - continue; - } - - final JsonObject entry = element.getAsJsonObject(); - if (!entry.has(timeKey)) { - continue; - } - - final Instant currentTimestamp = Instant.parse(entry.get(timeKey).getAsString()); - if (mostRecentTimestamp == null || currentTimestamp.isAfter(mostRecentTimestamp)) { - mostRecentTimestamp = currentTimestamp; - } - } - - if (mostRecentTimestamp == null) { - throw new DecodingException("No timestamp found in gateway info."); - } - - map.put("timestamp", mostRecentTimestamp.toEpochMilli()); + if (timestamp == null) { + LOGGER.warn(String.format("No '%s' field found in gateway info, adding current timestamp.", timeKey)); + timestamp = Instant.now(); } - /** - * Filters out null values. - * - * @param ignored ignored parameter (unused in current implementation) - * @param value the value to be checked for null - * @return true if the value is not null, false otherwise - */ - public static boolean filterNull(final String ignored, final Object value) { - return value != null; + map.put("timestamp", timestamp.toEpochMilli()); + } + + /** + * Filters out null values. + * + * @param ignored ignored parameter (unused in current implementation) + * @param value the value to be checked for null + * @return true if the value is not null, false otherwise + */ + public static boolean filterNull(final String ignored, final Object value) { + return value != null; + } + + /** + * Filters out empty lists and maps. + * Returns true if the value is neither an empty list nor an empty map, otherwise false. + * <p> + * This method is useful in stream processing scenarios where empty collections (lists or maps) are considered + * irrelevant or need to be filtered out. 
+ * + * @param ignored ignored parameter (unused in current implementation) + * @param value the value to be checked, expected to be a List or Map + * @return true if the value is not an empty list or map, false otherwise + */ + public static boolean filterEmpty(final String ignored, final Object value) { + boolean isNotEmpty = true; + + if (value instanceof List) { + isNotEmpty = !((List<?>) value).isEmpty(); + } else if (value instanceof Map) { + isNotEmpty = !((Map<?, ?>) value).isEmpty(); } - /** - * Filters out empty lists and maps. - * Returns true if the value is neither an empty list nor an empty map, otherwise false. - * <p> - * This method is useful in stream processing scenarios where empty collections (lists or maps) are considered - * irrelevant or need to be filtered out. - * - * @param ignored ignored parameter (unused in current implementation) - * @param value the value to be checked, expected to be a List or Map - * @return true if the value is not an empty list or map, false otherwise - */ - public static boolean filterEmpty(final String ignored, final Object value) { - boolean isNotEmpty = true; - - if (value instanceof List) { - isNotEmpty = !((List<?>) value).isEmpty(); - } else if (value instanceof Map) { - isNotEmpty = !((Map<?, ?>) value).isEmpty(); - } - - return isNotEmpty; - } - - /** - * Filters records based on the presence of required fields in a JsonObject. - * Returns true if the JsonObject contains all required fields ("applicationID", "applicationName", - * "deviceName", "devEUI", and "data"), otherwise false. - * - * @param ignored ignored parameter (unused in current implementation) - * @param value the JsonObject to be checked for required fields - * @return true if all required fields are present, false otherwise - */ - public static boolean filterRecord(final String ignored, final JsonObject value) { - return value != null && value.get("applicationID") != null && value.get("applicationName") != null - && value.get("deviceName") != null && value.get("devEUI") != null - && value.get("data") != null; - } + return isNotEmpty; + } + + /** + * Filters records based on the presence of required fields in a JsonObject. + * Returns true if the JsonObject contains all required fields ("applicationID", "applicationName", + * "deviceName", "devEUI", and "data"), otherwise false. 
+ * + * @param ignored ignored parameter (unused in current implementation) + * @param value the JsonObject to be checked for required fields + * @return true if all required fields are present, false otherwise + */ + public static boolean filterRecord(final String ignored, final JsonObject value) { + return value != null && value.get("applicationID") != null && value.get("applicationName") != null + && value.get("deviceName") != null && value.get("devEUI") != null + && value.get("data") != null; + } } -- GitLab From 851f2e292a289593a6125e0e212fc060c1e2ea9a Mon Sep 17 00:00:00 2001 From: Konstantinos Dalianis <dean.dalianis@cern.ch> Date: Thu, 1 Feb 2024 11:18:26 +0100 Subject: [PATCH 13/15] Checkstyle fixes - Gateway missing timestamp test fix --- .../nile/common/schema/SchemaInjector.java | 5 +- .../cern/nile/common/streams/StreamUtils.java | 179 +++++++++--------- .../nile/common/streams/StreamUtilsTest.java | 8 +- 3 files changed, 98 insertions(+), 94 deletions(-) diff --git a/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java b/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java index 7de580b..f6e1d07 100644 --- a/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java +++ b/src/main/java/ch/cern/nile/common/schema/SchemaInjector.java @@ -1,12 +1,13 @@ package ch.cern.nile.common.schema; -import com.google.gson.JsonPrimitive; import java.util.Date; import java.util.HashMap; import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; +import com.google.gson.JsonPrimitive; + /** * Utility class for injecting Connect schemas into given data. The class provides static methods * to generate a schema based on the data types present in a map and inject this schema into the data. @@ -53,7 +54,7 @@ public final class SchemaInjector { validateValue(value); if (value instanceof JsonPrimitive) { - // TODO: test this quick bugfix further + // TODO(#): test this quick bugfix further value = ((JsonPrimitive) value).getAsString(); } diff --git a/src/main/java/ch/cern/nile/common/streams/StreamUtils.java b/src/main/java/ch/cern/nile/common/streams/StreamUtils.java index 5aeac35..d53e80c 100644 --- a/src/main/java/ch/cern/nile/common/streams/StreamUtils.java +++ b/src/main/java/ch/cern/nile/common/streams/StreamUtils.java @@ -1,108 +1,113 @@ package ch.cern.nile.common.streams; -import ch.cern.nile.common.exceptions.DecodingException; -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; import java.time.Instant; import java.util.List; import java.util.Map; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import ch.cern.nile.common.exceptions.DecodingException; + /** * {@link StreamUtils} is a utility class providing static methods to assist in stream processing. */ public final class StreamUtils { - private static final Logger LOGGER = LoggerFactory.getLogger(StreamUtils.class); - - private StreamUtils() { - } - - /** - * Adds the most recent timestamp found in the gatewayInfo JsonArray to the provided map. - * The timestamp is added as an epoch millisecond value under the key "timestamp". 
-     *
-     * @param gatewayInfo the JsonArray containing gateway information, each entry expected to
-     *                    have a "time" field with an ISO-8601 formatted timestamp
-     * @param map the map to which the most recent timestamp will be added
-     * @throws DecodingException if no valid timestamp is found in the gatewayInfo
-     */
-    public static void addTimestamp(final JsonArray gatewayInfo, final Map<String, Object> map) {
-        final String timeKey = "time";
-        Instant timestamp = null;
-        for (final JsonElement element : gatewayInfo) {
-            if (!element.isJsonObject()) {
-                continue;
-            }
-
-            final JsonObject entry = element.getAsJsonObject();
-            if (!entry.has(timeKey)) {
-                continue;
-            }
-
-            final Instant currentTimestamp = Instant.parse(entry.get(timeKey).getAsString());
-            if (timestamp == null || currentTimestamp.isAfter(timestamp)) {
-                timestamp = currentTimestamp;
-            }
+    private static final Logger LOGGER = LoggerFactory.getLogger(StreamUtils.class);
+
+    private StreamUtils() {
+    }
+
+    /**
+     * Adds the most recent timestamp found in the gatewayInfo JsonArray to the provided map.
+     * The timestamp is added as an epoch millisecond value under the key "timestamp".
+     * If no valid timestamp is found, a warning is logged and the current time is used instead.
+     *
+     * @param gatewayInfo the JsonArray containing gateway information, each entry expected to
+     *                    have a "time" field with an ISO-8601 formatted timestamp
+     * @param map         the map to which the most recent timestamp will be added
+     */
+    public static void addTimestamp(final JsonArray gatewayInfo, final Map<String, Object> map) {
+        final String timeKey = "time";
+        Instant timestamp = null;
+        for (final JsonElement element : gatewayInfo) {
+            if (!element.isJsonObject()) {
+                continue;
+            }
+
+            final JsonObject entry = element.getAsJsonObject();
+            if (!entry.has(timeKey)) {
+                continue;
+            }
+
+            final Instant currentTimestamp = Instant.parse(entry.get(timeKey).getAsString());
+            if (timestamp == null || currentTimestamp.isAfter(timestamp)) {
+                timestamp = currentTimestamp;
+            }
+        }
+
+        if (timestamp == null) {
+            if (LOGGER.isWarnEnabled()) {
+                LOGGER.warn("No valid {} found in gatewayInfo: {}", timeKey, gatewayInfo);
+            }
+            timestamp = Instant.now();
+        }
+
+        map.put("timestamp", timestamp.toEpochMilli());
     }
 
-        if (timestamp == null) {
-            LOGGER.warn(String.format("No '%s' field found in gateway info, adding current timestamp.", timeKey));
-            timestamp = Instant.now();
+    /**
+     * Filters out null values.
+     *
+     * @param ignored ignored parameter (unused in current implementation)
+     * @param value the value to be checked for null
+     * @return true if the value is not null, false otherwise
+     */
+    public static boolean filterNull(final String ignored, final Object value) {
+        return value != null;
     }
 
-        map.put("timestamp", timestamp.toEpochMilli());
-    }
-
-    /**
-     * Filters out null values.
-     *
-     * @param ignored ignored parameter (unused in current implementation)
-     * @param value the value to be checked for null
-     * @return true if the value is not null, false otherwise
-     */
-    public static boolean filterNull(final String ignored, final Object value) {
-        return value != null;
-    }
-
-    /**
-     * Filters out empty lists and maps.
-     * Returns true if the value is neither an empty list nor an empty map, otherwise false.
-     * <p>
-     * This method is useful in stream processing scenarios where empty collections (lists or maps) are considered
-     * irrelevant or need to be filtered out.
-     *
-     * @param ignored ignored parameter (unused in current implementation)
-     * @param value the value to be checked, expected to be a List or Map
-     * @return true if the value is not an empty list or map, false otherwise
-     */
-    public static boolean filterEmpty(final String ignored, final Object value) {
-        boolean isNotEmpty = true;
-
-        if (value instanceof List) {
-            isNotEmpty = !((List<?>) value).isEmpty();
-        } else if (value instanceof Map) {
-            isNotEmpty = !((Map<?, ?>) value).isEmpty();
+    /**
+     * Filters out empty lists and maps.
+     * Returns true if the value is neither an empty list nor an empty map, otherwise false.
+     * <p>
+     * This method is useful in stream processing scenarios where empty collections (lists or maps) are considered
+     * irrelevant or need to be filtered out.
+     *
+     * @param ignored ignored parameter (unused in current implementation)
+     * @param value the value to be checked, expected to be a List or Map
+     * @return true if the value is not an empty list or map, false otherwise
+     */
+    public static boolean filterEmpty(final String ignored, final Object value) {
+        boolean isNotEmpty = true;
+
+        if (value instanceof List) {
+            isNotEmpty = !((List<?>) value).isEmpty();
+        } else if (value instanceof Map) {
+            isNotEmpty = !((Map<?, ?>) value).isEmpty();
+        }
+
+        return isNotEmpty;
     }
 
-        return isNotEmpty;
-    }
-
-    /**
-     * Filters records based on the presence of required fields in a JsonObject.
-     * Returns true if the JsonObject contains all required fields ("applicationID", "applicationName",
-     * "deviceName", "devEUI", and "data"), otherwise false.
-     *
-     * @param ignored ignored parameter (unused in current implementation)
-     * @param value the JsonObject to be checked for required fields
-     * @return true if all required fields are present, false otherwise
-     */
-    public static boolean filterRecord(final String ignored, final JsonObject value) {
-        return value != null && value.get("applicationID") != null && value.get("applicationName") != null
-                && value.get("deviceName") != null && value.get("devEUI") != null
-                && value.get("data") != null;
-    }
+    /**
+     * Filters records based on the presence of required fields in a JsonObject.
+     * Returns true if the JsonObject contains all required fields ("applicationID", "applicationName",
+     * "deviceName", "devEUI", and "data"), otherwise false.
+     *
+     * @param ignored ignored parameter (unused in current implementation)
+     * @param value the JsonObject to be checked for required fields
+     * @return true if all required fields are present, false otherwise
+     */
+    public static boolean filterRecord(final String ignored, final JsonObject value) {
+        return value != null && value.get("applicationID") != null && value.get("applicationName") != null
+                && value.get("deviceName") != null && value.get("devEUI") != null
+                && value.get("data") != null;
+    }
 }
diff --git a/src/test/java/ch/cern/nile/common/streams/StreamUtilsTest.java b/src/test/java/ch/cern/nile/common/streams/StreamUtilsTest.java
index a3fe4b7..cc94c90 100644
--- a/src/test/java/ch/cern/nile/common/streams/StreamUtilsTest.java
+++ b/src/test/java/ch/cern/nile/common/streams/StreamUtilsTest.java
@@ -1,7 +1,6 @@
 package ch.cern.nile.common.streams;
 
 import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.time.Instant;
@@ -14,8 +13,6 @@ import com.google.gson.JsonObject;
 
 import org.junit.jupiter.api.Test;
 
-import ch.cern.nile.common.exceptions.DecodingException;
-
 class StreamUtilsTest {
 
     private static final String KEY = "key";
@@ -34,11 +31,12 @@ class StreamUtilsTest {
     }
 
     @Test
-    void givenInvalidGatewayInfo_whenAddingTimestamp_thenDateTimeExceptionThrown() {
+    void givenMissingAllGatewayTimestamps_whenAddingTimestamp_thenCurrentTimestampIsAdded() {
        final JsonArray gatewayInfo = new JsonArray();
        final Map<String, Object> map = new HashMap<>();
 
-        assertThrows(DecodingException.class, () -> StreamUtils.addTimestamp(gatewayInfo, map), "No exception thrown.");
+        StreamUtils.addTimestamp(gatewayInfo, map);
+        assertTrue(map.containsKey("timestamp"), "Timestamp not added to map.");
     }
 
     @Test
-- GitLab

From 6df5bb1e9a11cc468ff719d53280130aa4bf1216 Mon Sep 17 00:00:00 2001
From: Dean Dalianis <dean.dalianis@cern.ch>
Date: Mon, 5 Feb 2024 18:05:49 +0200
Subject: [PATCH 14/15] AbstractStream: Removed redundant constructors and
 fields

---
 .../nile/common/streams/AbstractStream.java | 19 -------------------
 1 file changed, 19 deletions(-)

diff --git a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java
index 8fe5e02..27ca7de 100644
--- a/src/main/java/ch/cern/nile/common/streams/AbstractStream.java
+++ b/src/main/java/ch/cern/nile/common/streams/AbstractStream.java
@@ -51,25 +51,6 @@ public abstract class AbstractStream implements Streaming {
     private Health health;
     private CountDownLatch latch;
 
-    /**
-     * Default constructor for AbstractStream.
-     */
-    protected AbstractStream() {
-        // Default constructor
-    }
-
-    /**
-     * Creates a new AbstractStream with the provided topics.
-     * Used for testing.
-     *
-     * @param sourceTopic the topic to read from
-     * @param sinkTopic the topic to write to
-     */
-    protected AbstractStream(final String sourceTopic, final String sinkTopic) {
-        this.sourceTopic = sourceTopic;
-        this.sinkTopic = sinkTopic;
-    }
-
     /**
      * Configures the Kafka Streams application with provided settings.
* -- GitLab From 2b1cf8b0841d7bd7c6188d1c4a0c0de09f4ce39a Mon Sep 17 00:00:00 2001 From: Konstantinos Dalianis <dean.dalianis@cern.ch> Date: Fri, 16 Feb 2024 15:21:41 +0100 Subject: [PATCH 15/15] MissingPropertyException: new constructor --- .../cern/nile/common/exceptions/MissingPropertyException.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/java/ch/cern/nile/common/exceptions/MissingPropertyException.java b/src/main/java/ch/cern/nile/common/exceptions/MissingPropertyException.java index 380bfda..2c1a2cc 100644 --- a/src/main/java/ch/cern/nile/common/exceptions/MissingPropertyException.java +++ b/src/main/java/ch/cern/nile/common/exceptions/MissingPropertyException.java @@ -8,4 +8,8 @@ public class MissingPropertyException extends RuntimeException { super(message); } + public MissingPropertyException(final String message, final Throwable cause) { + super(message, cause); + } + } -- GitLab
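
Usage note (illustrative, not part of the patch series): the StreamUtils helpers touched above are written to plug straight into the Kafka Streams DSL as predicates and value mappers. The sketch below shows one way a downstream topology might chain them. The topic names, the "rxInfo" gateway-info field, and the decode step are assumptions made for this example and are not part of nile-common; serde configuration is omitted for brevity.

    package ch.cern.nile.example;

    import java.util.HashMap;
    import java.util.Map;

    import com.google.gson.JsonObject;

    import org.apache.kafka.streams.StreamsBuilder;
    import org.apache.kafka.streams.kstream.KStream;

    import ch.cern.nile.common.streams.StreamUtils;

    public final class ExampleTopology {

        private ExampleTopology() {
        }

        public static void build(final StreamsBuilder builder) {
            // Hypothetical topic names; real deployments would read them from configuration.
            final KStream<String, JsonObject> frames = builder.stream("example-raw-topic");

            frames
                    // Keep only frames that carry applicationID, applicationName,
                    // deviceName, devEUI and data.
                    .filter(StreamUtils::filterRecord)
                    .mapValues(ExampleTopology::decode)
                    // Drop decodings that produced no usable fields.
                    .filter(StreamUtils::filterNull)
                    .filter(StreamUtils::filterEmpty)
                    .to("example-decoded-topic");
        }

        private static Map<String, Object> decode(final JsonObject frame) {
            final Map<String, Object> decoded = new HashMap<>();
            // Device-specific payload decoding would go here.
            // "rxInfo" is an assumed field name; as of PATCH 13, addTimestamp
            // logs a warning and falls back to the current time when no
            // gateway "time" entry is present, instead of failing the record.
            StreamUtils.addTimestamp(frame.getAsJsonArray("rxInfo"), decoded);
            return decoded;
        }
    }

Running filterRecord before decoding keeps malformed frames out of the decoder, while filterNull and filterEmpty discard records whose decoding yielded nothing worth forwarding.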
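Along the same lines, the cause-chaining constructor added in PATCH 15 is intended for translating low-level failures without losing the original stack trace. A minimal sketch, assuming a hypothetical ConfigLoader that is not part of the library:

    package ch.cern.nile.example;

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.Properties;

    import ch.cern.nile.common.exceptions.MissingPropertyException;

    public final class ConfigLoader {

        private ConfigLoader() {
        }

        public static Properties load(final Path path) {
            final Properties props = new Properties();
            try (InputStream in = Files.newInputStream(path)) {
                props.load(in);
            } catch (final IOException e) {
                // The new (message, cause) constructor keeps the underlying
                // IOException visible in the stack trace.
                throw new MissingPropertyException("Could not load properties from " + path, e);
            }
            return props;
        }
    }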