diff --git a/.gitignore b/.gitignore index 9c07d4a..ede0f27 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,6 @@ *.class *.log +.bsp +.idea +.metals +target diff --git a/CONTRIBUTING.adoc b/CONTRIBUTING.adoc new file mode 100644 index 0000000..5c81c7b --- /dev/null +++ b/CONTRIBUTING.adoc @@ -0,0 +1,49 @@ += Contributing +
+== Introduction +
+You can contribute to https://github.com/lectra-tech/kapoeira[Kapoeira] via
+https://help.github.com/articles/about-pull-requests/[pull requests]. +
+== Signing off each Commit +
+As part of filing a pull request, we ask you to sign off the
+https://developercertificate.org/[Developer Certificate of Origin] (DCO) in each commit.
+Any pull request with commits that are not signed off will be rejected by the
+https://probot.github.io/apps/dco/[DCO check]. +
+A DCO is a lightweight way for a contributor to confirm that they wrote or otherwise have the right
+to submit code or documentation to a project. Simply add `Signed-off-by` as shown in the example below
to indicate that you agree with the DCO. +
+Example of a commit message with a sign-off: +
+---- + doc(readme.adoc): Align sample code + + Signed-off-by: John Doe +---- +
+Git has a `-s` option (lower case) for `commit` that signs off a commit for you; see the example below: +
+`$ git commit -s -m 'doc(readme.adoc): Align sample code'` + +
+== Git History +
+TODO +
+- https://www.robertcooper.me/git-commit-messages[Ensure that your commit messages will make your mom proud]
+- https://github.com/oss-review-toolkit/.github/blob/main/CONTRIBUTING.md#git-history[See what is done on ORT] + +
+== Coding Conventions +
+TODO +- code organization +- format + + + + +Thank you for reading and happy contributing! diff --git a/README.adoc b/README.adoc new file mode 100644 index 0000000..38579b7 --- /dev/null +++ b/README.adoc @@ -0,0 +1,112 @@ += Kapoeira +:toc: +:sectnums: +
+Dockerized integration test tool for Kafka environments + +
+image::src/main/docs/images/kapoeira.png[] +
+image::src/main/docs/images/archi.png[] +
+== Context +* Define Gherkin features following the Kapoeira DSL, illustrated by these link:src/test/resources/features[examples] +* Kafka records can be defined inline in Gherkin files or through links to external files (JSON, Avro-JSON, plain text...) +* External commands can be called for assertions (for example `.sh` scripts, which may call Docker tools if needed) +
+== How to build? +=== Locally
[source, bash] +---- +./run-local.sh build +---- +
+== How to test? +
+=== In your IDE +Run/Debug this Scala class: link:src/test/scala/com/lectra/kapoeira/FeaturesTestRunner.scala[FeaturesTestRunner] +
+=== With Docker Locally +Coming soon +
+== How to use? +
+=== Manual Docker command +.Draft +[source, bash] +---- +docker run --rm -ti \ + -v :/features \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -e KAFKA_BOOTSTRAP_SERVER= \ + -e KAFKA_SCHEMA_REGISTRY_URL= \ + -e KAFKA_USER= \ + -e KAFKA_PASSWORD=<****> \ + -e JAAS_AUTHENT= \ + -e LOGGING_LEVEL= \ + -e THREADS=<8 (default) | ... 
> \ + /kapoeira: +---- + +[NOTE] +==== +* Volume mount on docker.sock only for calling docker command +* KAFKA_SCHEMA_REGISTRY_URL only for AVRO content +==== + +=== Produce & consume a record + +[source, language=gherkin] +---- +include::src/test/resources/features/producer-key-value.feature[] +---- + +=== Produce from a file + +[source, language=gherkin] +---- +include::src/test/resources/features/producer-file-value.feature[] +---- + +=== Specify keys & headers for a record + +[source, language=gherkin] +---- +include::src/test/resources/features/producer-file-key-value.feature[] +---- + +=== Assertions + +[source, language=gherkin] +---- +include::src/test/resources/features/assertions.feature[] +---- + +=== Call functions + +[source, language=gherkin] +---- +include::src/test/resources/features/call-function.feature[] +---- + +=== Call scripts + +[source, language=gherkin] +---- +include::src/test/resources/features/call-external-script.feature[] +---- + +=== Produce & Consume Avro records + +[source, language=gherkin] +---- +include::src/test/resources/features/producer-avro-file-key-value.feature[] +---- + +=== Produce & Consume with batches + +[source, language=gherkin] +---- +include::src/test/resources/features/batch-produce-consume.feature[] +---- + diff --git a/README.md b/README.md deleted file mode 100644 index 6a5b5d0..0000000 --- a/README.md +++ /dev/null @@ -1 +0,0 @@ -# kapoera \ No newline at end of file diff --git a/build.sbt b/build.sbt new file mode 100644 index 0000000..f66e333 --- /dev/null +++ b/build.sbt @@ -0,0 +1,72 @@ +import scala.io.Source + +val projectVersion = { + val versionFile = Source.fromFile("./version.txt") + val version = versionFile.getLines.mkString + versionFile.close() + version +} + +ThisBuild / version := projectVersion +ThisBuild / scalaVersion := "2.13.10" +ThisBuild / organization := "com.lectra.kafka" +ThisBuild / organizationName := "lectra" +ThisBuild / publishConfiguration := publishConfiguration.value.withOverwrite(true) +ThisBuild / publishLocalConfiguration := publishLocalConfiguration.value.withOverwrite(true) + +val zioVersion = "1.0.9" + +lazy val root = (project in file(".")) + .settings( + name := "kapoeira", + // confluent + libraryDependencies += "io.confluent" % "kafka-avro-serializer" % "7.2.2" exclude("javax.ws.rs", "javax.ws.rs-api"), + libraryDependencies += "io.confluent" % "kafka-json-schema-serializer" % "7.2.2" exclude("javax.ws.rs", "javax.ws.rs-api"), + // more libs to include + // https://github.com/confluentinc/schema-registry/blob/master/pom.xml + libraryDependencies += "org.apache.kafka" %% "kafka" % "3.2.3", + libraryDependencies += "io.cucumber" %% "cucumber-scala" % "6.10.2", + libraryDependencies += "io.cucumber" % "cucumber-junit" % "6.10.2", + libraryDependencies += "org.scalatest" %% "scalatest" % "3.2.15", + libraryDependencies += "com.typesafe" % "config" % "1.4.2", + libraryDependencies += "io.gatling" % "gatling-jsonpath" % "3.5.1", + libraryDependencies += "com.lihaoyi" %% "requests" % "0.6.9", + libraryDependencies += "com.lihaoyi" %% "ammonite-ops" % "2.3.8", + libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.2.11" % Runtime, + libraryDependencies += "dev.zio" %% "zio" % zioVersion, + libraryDependencies += "dev.zio" %% "zio-streams" % zioVersion, + // only tests + libraryDependencies += "org.scalamock" %% "scalamock" % "5.1.0" % Test, + libraryDependencies += "org.scalacheck" %% "scalacheck" % "1.14.1" % Test, + libraryDependencies ++= Seq( + "dev.zio" %% "zio-test" % zioVersion % 
"test", + "dev.zio" %% "zio-test-sbt" % zioVersion % "test" + ), + testFrameworks += new TestFramework("zio.test.sbt.ZTestFramework") + ) + +// assembly config +assembly / assemblyJarName := "kapoeira.jar" +assembly / assemblyMergeStrategy := { + case "module-info.class" => MergeStrategy.discard + case x if x.endsWith("/module-info.class") => MergeStrategy.discard + case "META-INF/io.netty.versions.properties" => MergeStrategy.first + case "kafka/kafka-version.properties" => MergeStrategy.first + case "application.conf" => + new sbtassembly.MergeStrategy { + val name = "reverseConcat" + + def apply( + tempDir: File, + path: String, + files: Seq[File] + ): Either[String, Seq[(File, String)]] = + MergeStrategy.concat(tempDir, path, files.reverse) + } + case "logback.xml" => MergeStrategy.first + case x => + val oldStrategy = (assembly / assemblyMergeStrategy).value + oldStrategy(x) + +} +assembly / mainClass := Some("io.cucumber.core.cli.Main") diff --git a/cc-by-sa-4.0.LICENSE b/cc-by-sa-4.0.LICENSE new file mode 100644 index 0000000..b3f2239 --- /dev/null +++ b/cc-by-sa-4.0.LICENSE @@ -0,0 +1,155 @@ +Creative Commons Attribution-ShareAlike 4.0 International Public License + +By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution-ShareAlike 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions. + +Section 1 – Definitions. + +a. Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image. + +b. Adapter's License means the license You apply to Your Copyright and Similar Rights in Your contributions to Adapted Material in accordance with the terms and conditions of this Public License. + +c. BY-SA Compatible License means a license listed at creativecommons.org/compatiblelicenses, approved by Creative Commons as essentially the equivalent of this Public License. + +d. Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights. + +e. Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements. + +f. 
Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material. + +g. License Elements means the license attributes listed in the name of a Creative Commons Public License. The License Elements of this Public License are Attribution and ShareAlike. + +h. Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License. + +i. Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license. + +j. Licensor means the individual(s) or entity(ies) granting rights under this Public License. + +k. Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them. + +l. Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world. + +m. You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning. + +Section 2 – Scope. + +a. License grant. + +1. Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to: + +A. reproduce and Share the Licensed Material, in whole or in part; and + +B. produce, reproduce, and Share Adapted Material. + +2. Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions. + +3. Term. The term of this Public License is specified in Section 6(a). + +4. Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material. + +5. Downstream recipients. + +A. Offer from the Licensor – Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License. + +B. Additional offer from the Licensor – Adapted Material. 
Every recipient of Adapted Material from You automatically receives an offer from the Licensor to exercise the Licensed Rights in the Adapted Material under the conditions of the Adapter’s License You apply. + +C. No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material. + +6. No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i). + +b. Other rights. + +1. Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise. + +2. Patent and trademark rights are not licensed under this Public License. + +3. To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties. + +Section 3 – License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the following conditions. + +a. Attribution. + +1. If You Share the Licensed Material (including in modified form), You must: + +A. retain the following if it is supplied by the Licensor with the Licensed Material: + +i. identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated); + +ii. a copyright notice; + +iii. a notice that refers to this Public License; + +iv. a notice that refers to the disclaimer of warranties; + +v. a URI or hyperlink to the Licensed Material to the extent reasonably practicable; + +B. indicate if You modified the Licensed Material and retain an indication of any previous modifications; and + +C. indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License. + +2. You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information. + +3. If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable. + +b. ShareAlike. + +In addition to the conditions in Section 3(a), if You Share Adapted Material You produce, the following conditions also apply. + +1. The Adapter’s License You apply must be a Creative Commons license with the same License Elements, this version or later, or a BY-SA Compatible License. + +2. You must include the text of, or the URI or hyperlink to, the Adapter's License You apply. 
You may satisfy this condition in any reasonable manner based on the medium, means, and context in which You Share Adapted Material. + +3. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, Adapted Material that restrict exercise of the rights granted under the Adapter's License You apply. + +Section 4 – Sui Generis Database Rights. + +Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material: + +a. for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database; + +b. if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material, including for purposes of Section 3(b); and + +c. You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database. + +For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights. + +Section 5 – Disclaimer of Warranties and Limitation of Liability. + +a. Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You. + +b. To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You. + +c. The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability. + +Section 6 – Term and Termination. + +a. This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically. + +b. Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates: + +1. automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or + +2. upon express reinstatement by the Licensor. + +For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License. + +c. 
For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License. + +d. Sections 1, 5, 6, 7, and 8 survive termination of this Public License. + +Section 7 – Other Terms and Conditions. + +a. The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed. + +b. Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License. + +Section 8 – Interpretation. + +a. For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License. + +b. To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions. + +c. No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor. + +d. Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority. \ No newline at end of file diff --git a/env.local b/env.local new file mode 100644 index 0000000..e3ecdc1 --- /dev/null +++ b/env.local @@ -0,0 +1,9 @@ +KAFKA_BOOTSTRAP_SERVER=localhost:9092 +KAFKA_SCHEMA_REGISTRY_URL=localhost:8081 +KAFKA_USER=xxx +KAFKA_PASSWORD=xxx +CONSUMER_TIMOUT_MS=10000 +CONSUMER_MAX_MESSAGES=100 +JAAS_AUTHENT=true +LOGGING_LEVEL=INFO +THREADS=16 diff --git a/project/build.properties b/project/build.properties new file mode 100644 index 0000000..f344c14 --- /dev/null +++ b/project/build.properties @@ -0,0 +1 @@ +sbt.version = 1.8.2 diff --git a/project/plugins.sbt b/project/plugins.sbt new file mode 100644 index 0000000..e68dbe9 --- /dev/null +++ b/project/plugins.sbt @@ -0,0 +1,3 @@ +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.5") +addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.6.4") +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.1.0") \ No newline at end of file diff --git a/src/main/docs/images/archi.png b/src/main/docs/images/archi.png new file mode 100644 index 0000000..e4302c5 Binary files /dev/null and b/src/main/docs/images/archi.png differ diff --git a/src/main/docs/images/art-martial.jpg b/src/main/docs/images/art-martial.jpg new file mode 100644 index 0000000..f42de97 Binary files /dev/null and b/src/main/docs/images/art-martial.jpg differ diff --git a/src/main/docs/images/kapoeira.png b/src/main/docs/images/kapoeira.png new file mode 100644 index 0000000..3f628aa Binary files /dev/null and b/src/main/docs/images/kapoeira.png differ diff --git a/src/main/docs/images/kapoeira.puml b/src/main/docs/images/kapoeira.puml new file mode 100644 index 0000000..32407a7 --- /dev/null +++ b/src/main/docs/images/kapoeira.puml @@ -0,0 +1,33 @@ +@startuml + +skinparam defaultFontSize 20 +skinparam monochrome true + +file Feature <> +note bottom of Feature + GIVEN ... + WHEN ... + THEN ... 
+end note + +rectangle kapoeira <> #lightgrey { + component StepDefinitions <> + component Runner + + Runner -up-> StepDefinitions : USE +} +note top of kapoeira + Fixed DSL + **NO CODE TO WRITE!** +end note + + +component Execution +note bottom of Execution + **KAFKA calls** +end note + +Feature <- Runner : READ +StepDefinitions -> Execution : CALL + +@enduml \ No newline at end of file diff --git a/src/main/resources/application.conf b/src/main/resources/application.conf new file mode 100644 index 0000000..c71fca3 --- /dev/null +++ b/src/main/resources/application.conf @@ -0,0 +1,11 @@ +kafka { + bootstrap.server = ${KAFKA_BOOTSTRAP_SERVER} + schema.registry.url = ${KAFKA_SCHEMA_REGISTRY_URL} + authent.isjaas = ${JAAS_AUTHENT} + user = ${KAFKA_USER} + password = ${KAFKA_PASSWORD} +} +consumer { + group = ${?CONSUMER_GROUP_ID_PREFIX} + group = "kapoeira" +} \ No newline at end of file diff --git a/src/main/resources/entrypoint.sh b/src/main/resources/entrypoint.sh new file mode 100755 index 0000000..66ca201 --- /dev/null +++ b/src/main/resources/entrypoint.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +java -cp kapoeira.jar io.cucumber.core.cli.Main \ + --threads ${THREADS} \ + --glue com.lectra.kapoeira.glue \ + -p pretty \ + -p json:/reports/kapoeira-report.json \ + -p junit:/reports/kapoeira-report.xml \ + -p html:/reports/kapoeira-report.html "$1" diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml new file mode 100644 index 0000000..7e59ee8 --- /dev/null +++ b/src/main/resources/logback.xml @@ -0,0 +1,20 @@ + + + + + + %msg%n + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/main/scala/com/lectra/kapoeira/Config.scala b/src/main/scala/com/lectra/kapoeira/Config.scala new file mode 100644 index 0000000..e73fe09 --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/Config.scala @@ -0,0 +1,19 @@ +package com.lectra.kapoeira + +import java.util.UUID +import com.typesafe.config.ConfigFactory + +import java.net.InetAddress +import scala.util.Try + +object Config { + private val config = ConfigFactory.load() + def uuidTest: String = UUID.randomUUID().toString + val KAFKA_BROKER_LIST: String = config.getString("kafka.bootstrap.server") + val KAFKA_USER: String = config.getString("kafka.user") + val KAFKA_PASSWORD: String = config.getString("kafka.password") + val hostname = Try{InetAddress.getLocalHost.getHostName}.getOrElse("Unknown") + def CONSUMER_GROUP: String = s"${config.getString("consumer.group")}-$hostname-$uuidTest" + val JAAS_AUTHENT: Boolean = config.getBoolean("kafka.authent.isjaas") + val KAFKA_SCHEMA_REGISTRY_URL = config.getString("kafka.schema.registry.url") +} diff --git a/src/main/scala/com/lectra/kapoeira/domain/AssertionContext.scala b/src/main/scala/com/lectra/kapoeira/domain/AssertionContext.scala new file mode 100644 index 0000000..ca560c4 --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/domain/AssertionContext.scala @@ -0,0 +1,118 @@ +package com.lectra.kapoeira.domain + +import com.lectra.kapoeira.domain.AssertionContext.{HeadersValue, RecordExtraction, RecordValue} +import com.lectra.kapoeira.exception.AssertException +import com.typesafe.scalalogging.LazyLogging +import org.apache.kafka.clients.consumer.ConsumerRecord +import org.apache.kafka.common.header.Headers +import zio.Runtime + +final class AssertionContext( + val whenStepsLive: WhenSteps +) extends LazyLogging { + + private var expectedRecords: List[KeyValueWithAliasesRecord] = _ + private[domain] var expectedRecordByValueAlias: Map[String, 
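    // Per-scenario mutable state: expected records are indexed by value alias, by headers
    // alias and by (topic alias, key), while consumed records are grouped by (topic alias, key),
    // so assertions can pair each expected record with the record actually consumed.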
KeyValueWithAliasesRecord] = _ + private[domain] var expectedRecordByHeadersAlias: Map[String, KeyValueWithAliasesRecord] = _ + private[domain] var expectedRecordsByTopicByKey: Map[String, Map[String, Seq[KeyValueWithAliasesRecord]]] = _ + private[domain] var consumedRecordsByTopicByKey: Map[String, Map[String, Seq[ConsumerRecord[String, Any]]]] = Map.empty + + private var whenSteps: WhenStep = WhenStep.empty + + def registerWhen( + recordsToSend: List[(Int, List[RecordRead])] + ): Unit = whenSteps = whenStepsLive + .registerWhen(whenSteps, recordsToSend) + + def launchConsumption( + expectedRecords: List[KeyValueWithAliasesRecord] + ) = { + // 1. context + this.expectedRecords = expectedRecords + expectedRecordByValueAlias = expectedRecords + .map(r => (r.valueAlias -> r)) + .toMap + expectedRecordByHeadersAlias = expectedRecords + .flatMap(r => + r.headersAlias.map { h => + if (expectedRecordByValueAlias.contains(h)) + logger.warn( + s"A value alias was already defined for $h ! Expect strange behaviors." + ) + h -> r + } + ) + .toMap + + // 2. transform List[KeyValueRecord] into Map1[topic, Map[key, Seq[KeyValueRecord]]] + expectedRecordsByTopicByKey = expectedRecords + .groupBy(_.topicAlias) + .map { case (k, keyValueRecordList) => + (k, keyValueRecordList.groupBy(_.key)) + } + + // 3. consume by Topic and group by key => Map2[topic, Map[key, Seq[ConsumerRecord]]] + consumedRecordsByTopicByKey = Runtime.default + .unsafeRunSync(whenStepsLive.run(whenSteps, expectedRecords)) + .fold(err => throw err.squash, identity) + } + + def extractConsumedRecordWithAlias( + alias: String + ): Option[RecordExtraction] = { + val extractedValue = expectedRecordByValueAlias + .get(alias) + .toRight(s"Alias $alias not found in value alias context") + .flatMap(r => extract(r, cr => RecordValue(cr.value()))) + val extractedAlias = expectedRecordByHeadersAlias + .get(alias) + .toRight(s"Alias $alias not found in header alias context") + .flatMap(r => extract(r, cr => HeadersValue.make(cr.headers()))) + ((extractedAlias, extractedValue) match { + case (Left(errAlias), Left(errValue)) => Left(s"$errAlias\n$errValue") + case _ => extractedValue.orElse(extractedAlias) + }) + .fold(err => throw new AssertException(err), Some(_)) + } + + private def extract( + record: KeyValueWithAliasesRecord, + f: ConsumerRecord[String, Any] => RecordExtraction + ): Either[String, RecordExtraction] = { + ( + expectedRecordsByTopicByKey.get(record.topicAlias), + consumedRecordsByTopicByKey + .get(record.topicAlias) + ) match { + case (Some(expectedForTopic), Some(consumedForTopic)) => + ( + expectedForTopic.get(record.key), + consumedForTopic.get(record.key) + ) match { + case (Some(expectedForKey), Some(consumedForKey)) => + Right(f(consumedForKey(expectedForKey.indexOf(record)))) + case (Some(_), _) => Left(s"Expected key ${record.key} not found in ${consumedForTopic.keys}") + case (_, _) => + Left(s"Aliases ${record} was not declared in dataTable.") + } + case (_, _) => + Left( + s"Topic alias ${record.topicAlias} was not declared in background." 
+ ) + } + } + + def showConsumedRecords: String = + s"Consumed records : ${consumedRecordsByTopicByKey.toString()}" + +} +object AssertionContext { + sealed trait RecordExtraction + final case class RecordValue(value: Any) extends RecordExtraction + final case class HeadersValue(value: Map[String, Array[Byte]]) extends RecordExtraction + object HeadersValue { + def make(headers: Headers): HeadersValue = HeadersValue( + headers.toArray.map(h => (h.key(), h.value())).toMap + ) + } +} diff --git a/src/main/scala/com/lectra/kapoeira/domain/BackgroundContext.scala b/src/main/scala/com/lectra/kapoeira/domain/BackgroundContext.scala new file mode 100644 index 0000000..67a0e5e --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/domain/BackgroundContext.scala @@ -0,0 +1,54 @@ +package com.lectra.kapoeira.domain + +import com.lectra.kapoeira.domain.Services.{CloseConsumer, OutputConfigFactory, RecordConsumer} +import com.typesafe.scalalogging.LazyLogging +import org.apache.kafka.clients.admin.AdminClient +import org.apache.kafka.clients.consumer.ConsumerRecord + +class BackgroundContext extends LazyLogging { + + var inputTopicConfigs: Map[String, InputTopicConfig] = Map.empty + var outputConfigs: Map[String, OutputConfig] = Map.empty + var variables: Map[String, String] = Map.empty + var subjectConfigs: Map[String, SubjectConfig] = Map.empty + + def addInput(inputTopicConfig: InputTopicConfig) = + this.inputTopicConfigs = this.inputTopicConfigs ++ Map(inputTopicConfig.alias -> inputTopicConfig) + + def addOutput(outputTopicConfig: OutputTopicConfig)(implicit outputConfigFactory: OutputConfigFactory) = + this.outputConfigs = this.outputConfigs ++ Map( + outputTopicConfig.alias -> outputConfigFactory(outputTopicConfig, this.subjectConfigs) + ) + + def addSubject(subjectConfig: SubjectConfig) = + this.subjectConfigs = this.subjectConfigs ++ Map(subjectConfig.alias -> subjectConfig) + + def close()(implicit closeConsumer: CloseConsumer, adminClient: AdminClient) = + try { + this.outputConfigs.values.foreach(outputConfig => closeConsumer(outputConfig, adminClient)) + } catch { + case e: Exception => logger.error(e.getMessage) + } // FIXME + + def consumeTopic(topicAlias: String, keys: Map[String, Int])(implicit + recordConsumer: RecordConsumer + ): Map[String, Seq[ConsumerRecord[String, Any]]] = { + this.outputConfigs.get(topicAlias) match { + case Some(outputConfig) => recordConsumer(outputConfig, keys) + case _ => throw new IllegalArgumentException(s"missing $topicAlias in background") + } + } + + def addVariable(key: String, value: String): Unit = + this.variables = this.variables.updated(key, value) + + def getVariable(key: String): Option[String] = this.variables.get(key) + + def substituteVariablesIn(string: String): String = string match { + case null | "" => string + case _ => + variables.keys.foldLeft(string) { case (acc, key) => + acc.replaceAll(s"\\$$\\{$key\\}", variables.getOrElse(key, "$${key}")) + } + } +} diff --git a/src/main/scala/com/lectra/kapoeira/domain/MergeMaps.scala b/src/main/scala/com/lectra/kapoeira/domain/MergeMaps.scala new file mode 100644 index 0000000..8aaf49d --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/domain/MergeMaps.scala @@ -0,0 +1,29 @@ +package com.lectra.kapoeira.domain + +object MergeMaps { + + trait Associative[T] { + def combine(m1: T, m2: T): T + } + + implicit def associativeSeq[T]: Associative[Seq[T]] = new Associative[Seq[T]] { + override def combine(m1: Seq[T], m2: Seq[T]) = m1.concat(m2) + } + + implicit def associativeList[T]: 
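  // Worked example (illustrative only, not part of the original sources): merging two
  // Map[String, Seq[Int]] values combines entries key by key and concatenates the
  // sequences for keys present on both sides:
  //   Map("a" -> Seq(1), "b" -> Seq(2)).merge(Map("a" -> Seq(3)))
  //     == Map("a" -> Seq(1, 3), "b" -> Seq(2))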
Associative[List[T]] = new Associative[List[T]] { + override def combine(m1: List[T], m2: List[T]): List[T] = m1.concat(m2) + } + + implicit def associativeMap[K, V: Associative]: Associative[Map[K, V]] = new Associative[Map[K, V]] { + val associativeV = implicitly[Associative[V]] + override def combine(m1: Map[K, V], m2: Map[K, V]) = + m2.foldLeft(m1) { case (acc, (k, v)) => + acc.updated(k, acc.get(k).map(o=>associativeV.combine(o, v)).getOrElse(v)) + } + } + + implicit class AssociativeMergeMapsOps[T: Associative](associative: T) { + def merge(other: T): T = + implicitly[Associative[T]].combine(associative, other) + } +} diff --git a/src/main/scala/com/lectra/kapoeira/domain/Record.scala b/src/main/scala/com/lectra/kapoeira/domain/Record.scala new file mode 100644 index 0000000..77e4a56 --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/domain/Record.scala @@ -0,0 +1,43 @@ +package com.lectra.kapoeira.domain + +sealed trait Record { + val topicAlias: String + val separator: String = "#" +} + +final case class KeyValueRecord( + topicAlias: String, + key: String, + value: String, + headers: Map[String, Any] = Map.empty, + batch: Int = 0 +) extends Record + +final case class KeyValueWithAliasesRecord( + topicAlias: String, + key: String, + valueAlias: String, + headersAlias: Option[String] = None, + batch: Int = 0 +) extends Record + +final case class FileKeyValueRecord( + topicAlias: String, + override val separator: String, + file: String, + batch: Int = 0 +) extends Record + +final case class FileValueRecord( + topicAlias: String, + key: String, + file: String, + batch: Int = 0 +) extends Record + +final case class FileFormattedValueRecord( + topicAlias: String, + key: String, + file: String, + batch: Int = 0 +) extends Record diff --git a/src/main/scala/com/lectra/kapoeira/domain/RecordRead.scala b/src/main/scala/com/lectra/kapoeira/domain/RecordRead.scala new file mode 100644 index 0000000..ed0b0d0 --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/domain/RecordRead.scala @@ -0,0 +1,173 @@ +package com.lectra.kapoeira.domain + +import com.lectra.kapoeira.domain +import com.lectra.kapoeira.domain.Services.ReadHeaders + +import java.nio.charset.StandardCharsets +import scala.util.Try + +/** The record read from the cucumber Datatable directly, or a File. + */ +final case class RecordRead( + topicAlias: String, + key: String, + value: Array[Byte], + headers: Map[String, Any] +) + +/** Interpolate variables of T with the backgroundContext. + */ +trait Interpolate[T] { + def interpolate(t: T, backgroundContext: BackgroundContext): T +} + +/** Map a T to a list of RecordRead. Used to map domain.Record ADT related to a file. + */ +trait RecordDataFromFile[T] { + def readFromFile(t: T, f: String => List[String]): List[RecordRead] +} + +/** Map a T to a list of RecordRead. Used to map domain.Record ADT related to a values in Datatable. + */ +trait RecordData[T] { + def read(t: T): RecordRead +} + +/** Facilitate import of all implicits. 
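  * Illustrative usage (hypothetical values, assuming these implicits are in scope and a
  * populated BackgroundContext named ctx):
  * {{{
  *   val read: RecordRead =
  *     KeyValueRecord("topic_in", "key1", """{"id":1}""").read.interpolate(ctx)
  * }}}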
+ */ +trait RecordReadImplicits + extends InterpotaleImplicits + with RecordDataImplicits + with RecordDataFromFileImplicits + +trait InterpotaleImplicits { + + implicit val interpolateString: Interpolate[String] = + (t: String, ctx: BackgroundContext) => ctx.substituteVariablesIn(t) + implicit val interpolateRecordRead: Interpolate[RecordRead] = + (t: RecordRead, ctx: BackgroundContext) => { + RecordRead( + ctx + .substituteVariablesIn(t.topicAlias), + ctx + .substituteVariablesIn(t.key), + ctx + .substituteVariablesIn(new String(t.value)) + .getBytes(StandardCharsets.UTF_8), + interpolateMap(t.headers, ctx) + ) + } + + //runtime inspection of iterable to interpolate ... + private def interpolateIterable( + xs: Iterable[Any], + ctx: BackgroundContext + ): Iterable[Any] = + xs.map { + case s: String => ctx.substituteVariablesIn(s) + case m: Map[String, Any] => interpolateMap(m, ctx) + case x: Iterable[Any] => interpolateIterable(x, ctx) + case _ => xs + } + + //runtime inspection of Map to interpolate ... + private def interpolateMap( + map: Map[String, Any], + ctx: BackgroundContext + ): Map[String, Any] = map.map { + case (k, v: String) => + (ctx.substituteVariablesIn(k), ctx.substituteVariablesIn(v)) + case (k, v: Map[String, Any]) => + (ctx.substituteVariablesIn(k), interpolateMap(v, ctx)) + case (k, v: List[Any]) => + (ctx.substituteVariablesIn(k), interpolateIterable(v, ctx)) + case (k, v) => (ctx.substituteVariablesIn(k), v) + } + + implicit class InterpolateOps[T: Interpolate](val t: T) { + def interpolate(backgroundContext: BackgroundContext): T = + implicitly[Interpolate[T]].interpolate(t, backgroundContext) + } +} + +trait RecordDataFromFileImplicits { + + implicit def fileKeyValue(implicit + readHeaders: ReadHeaders[String] + ): RecordDataFromFile[FileKeyValueRecord] = + new RecordDataFromFile[FileKeyValueRecord] { + override def readFromFile( + t: FileKeyValueRecord, + f: String => List[String] + ): List[RecordRead] = { + f(t.file).map(line => { + val columns = line.split(t.separator) + domain.RecordRead( + t.topicAlias, + columns(0), + columns(1).getBytes(StandardCharsets.UTF_8), + Try(columns(2)) + .map(headersString => readHeaders.readHeaders(headersString)) + .getOrElse(Map.empty) + ) + }) + } + } + + implicit val fileValueRecord: RecordDataFromFile[FileValueRecord] = + new RecordDataFromFile[FileValueRecord] { + override def readFromFile( + t: FileValueRecord, + f: String => List[String] + ): List[RecordRead] = f(t.file).map(line => { + RecordRead( + t.topicAlias, + t.key, + line.getBytes(StandardCharsets.UTF_8), + Map.empty + ) + }) + } + + implicit val fileFormattedValue + : RecordDataFromFile[FileFormattedValueRecord] = + new RecordDataFromFile[FileFormattedValueRecord] { + override def readFromFile( + t: FileFormattedValueRecord, + f: String => List[String] + ): List[RecordRead] = { + val line = f(t.file).map(_.trim).mkString + List( + RecordRead( + t.topicAlias, + t.key, + line.getBytes(StandardCharsets.UTF_8), + Map.empty + ) + ) + } + } + + implicit class RecordDataFromFileOps[T: RecordDataFromFile](val t: T) { + def readFromFile(f: String => List[String]): List[RecordRead] = + implicitly[RecordDataFromFile[T]].readFromFile(t, f) + } +} + +trait RecordDataImplicits { + + implicit val keyValue: RecordData[KeyValueRecord] = + new RecordData[KeyValueRecord] { + override def read(t: KeyValueRecord): RecordRead = RecordRead( + t.topicAlias, + t.key, + t.value.getBytes(StandardCharsets.UTF_8), + t.headers + ) + } + + implicit class RecordDataOps[T: RecordData](val t: T) 
{ + def read: RecordRead = implicitly[RecordData[T]].read(t) + } + +} diff --git a/src/main/scala/com/lectra/kapoeira/domain/Services.scala b/src/main/scala/com/lectra/kapoeira/domain/Services.scala new file mode 100644 index 0000000..01cb7ac --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/domain/Services.scala @@ -0,0 +1,43 @@ +package com.lectra.kapoeira.domain + +import org.apache.kafka.clients.admin.AdminClient +import org.apache.kafka.clients.consumer.ConsumerRecord +import zio.Task + +object Services { + + trait OutputConfigFactory { + def apply( + outputTopicConfig: OutputTopicConfig, + subjectConfigs: Map[String, SubjectConfig] + ): OutputConfig + } + + trait RecordConsumer { + def apply( + outputConfig: OutputConfig, + expectedKeys: Map[String, Int] + ): Map[String, Seq[ConsumerRecord[String, Any]]] + } + + trait RecordProducer { + def run( + record: RecordRead, + topicConfig: TopicConfig, + keySubjectConfig: Option[SubjectConfig], + valueSubjectConfig: Option[SubjectConfig] + ): Task[Unit] + } + + trait CloseConsumer { + def apply(outputConfig: OutputConfig, adminClient: AdminClient): Unit + } + + trait FileOpener { + def apply(filePath: String): List[String] + } + + trait ReadHeaders[T] { + def readHeaders(t: T): Map[String, Any] + } +} diff --git a/src/main/scala/com/lectra/kapoeira/domain/TopicConfig.scala b/src/main/scala/com/lectra/kapoeira/domain/TopicConfig.scala new file mode 100644 index 0000000..8aba5a8 --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/domain/TopicConfig.scala @@ -0,0 +1,84 @@ +package com.lectra.kapoeira.domain + +import com.fasterxml.jackson.databind.JsonNode +import com.lectra.kapoeira.kafka.DataType +import org.apache.avro.generic.GenericData +import org.apache.kafka.clients.consumer.KafkaConsumer + +sealed trait TopicConfig { + val topicName: String + val alias: String + val keyType: String + val valueType: String + def keyIsAvro: Boolean = keyType.toLowerCase().trim != "string" + def valueIsAvro: Boolean = valueType.toLowerCase().trim != "string" +} + +final case class InputTopicConfig(topicName: String, alias: String, keyType: String, valueType: String) extends TopicConfig + +final case class OutputTopicConfig(topicName: String, alias: String, keyType: String, valueType: String, consumerTimeout: Int = 2) + extends TopicConfig + +sealed trait OutputConfig { + type KeyType + type ValueType + val outputConfig: OutputTopicConfig + val consumer: KafkaConsumer[KeyType, ValueType] +} +final case class OutputConfigStringString(outputConfig: OutputTopicConfig, consumer: KafkaConsumer[String, String]) + extends OutputConfig { + type KeyType = String + type ValueType = String +} +final case class OutputConfigStringAvro(outputConfig: OutputTopicConfig, consumer: KafkaConsumer[String, Any]) + extends OutputConfig { + type KeyType = String + type ValueType = Any +} +final case class OutputConfigAvroString(outputConfig: OutputTopicConfig, consumer: KafkaConsumer[Any, String]) + extends OutputConfig { + type KeyType = Any + type ValueType = String +} +final case class OutputConfigAvroAvro(outputConfig: OutputTopicConfig, consumer: KafkaConsumer[Any, Any]) extends OutputConfig { + type KeyType = Any + type ValueType = Any +} + +final case class OutputConfigStringJson(outputConfig: OutputTopicConfig, consumer: KafkaConsumer[String, JsonNode]) + extends OutputConfig { + type KeyType = String + type ValueType = JsonNode +} + +final case class OutputConfigJsonJson(outputConfig: OutputTopicConfig, consumer: KafkaConsumer[JsonNode, JsonNode]) + extends 
OutputConfig { + type KeyType = JsonNode + type ValueType = JsonNode +} + +final case class OutputConfigJsonString(outputConfig: OutputTopicConfig, consumer: KafkaConsumer[JsonNode, String]) + extends OutputConfig { + type KeyType = JsonNode + type ValueType = String +} + +final case class SubjectConfig(name: String, alias: String, format: SubjectFormat) + +sealed trait SubjectFormat + +object SubjectFormat { + case object Avro extends SubjectFormat { + override def toString() = "avro" + } + case object Json extends SubjectFormat { + override def toString() = "json" + } + val values = List(Avro, Json) + def parse(format: String) = + format.toLowerCase() match { + case "avro" => Some(Avro) + case "json" => Some(Json) + case _ => None + } +} diff --git a/src/main/scala/com/lectra/kapoeira/domain/WhenSteps.scala b/src/main/scala/com/lectra/kapoeira/domain/WhenSteps.scala new file mode 100644 index 0000000..14c4211 --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/domain/WhenSteps.scala @@ -0,0 +1,126 @@ +package com.lectra.kapoeira.domain + +import com.lectra.kapoeira.domain.MergeMaps._ +import com.lectra.kapoeira.domain.Services.{RecordConsumer, RecordProducer} +import com.lectra.kapoeira.domain.WhenSteps._ +import com.lectra.kapoeira.glue.ConsoleTimer +import org.apache.kafka.clients.consumer.ConsumerRecord +import zio._ + +final case class WhenStep(toRun: Map[Int, Task[Unit]]) { + def add(other: Map[Int, Task[Unit]]): WhenStep = WhenStep( + toRun.merge(other) + ) + + def orderedBatchesToRun[V]( + f: (Int, Task[Unit]) => Task[V] + ): Task[Seq[V]] = + ZIO.foreach(toRun.toSeq.sortBy(_._1)) { case (i, u) => f(i, u) } +} +object WhenStep { + def empty: WhenStep = WhenStep(Map(0 -> ZIO.unit)) +} +trait WhenSteps { + def registerWhen( + whenStep: WhenStep, + recordsToSend: List[(Int, List[RecordRead])] + ): WhenStep + def run( + whenStep: WhenStep, + expectedRecords: List[KeyValueWithAliasesRecord] + ): Task[Map[String, Map[String, Seq[ConsumerRecord[String, Any]]]]] +} +object WhenSteps { + implicit val mergeTasks: Associative[Task[Unit]] = + new Associative[Task[Unit]] { + override def combine(m1: Task[Unit], m2: Task[Unit]): Task[Unit] = + m1 *> m2 + } +} + +final case class WhenStepsLive( + backgroundContext: BackgroundContext, + recordConsumer: RecordConsumer, + recordProducer: RecordProducer +) extends WhenSteps { + + override def registerWhen( + whenStep: WhenStep, + recordsToSend: List[(Int, List[RecordRead])] + ): WhenStep = whenStep.add(defineBatches(recordsToSend)) + + override def run( + whenStep: WhenStep, + expectedRecords: List[KeyValueWithAliasesRecord] + ): Task[Map[String, Map[String, Seq[ConsumerRecord[String, Any]]]]] = { + val allKeys = expectedRecords + .groupBy(_.batch) + .map { case (k, keys) => (k, keys.groupBy(_.topicAlias).map { case (t, vs) => (t, vs.size) }) } + + val expectedRecordByBatch: Map[Int, List[KeyValueWithAliasesRecord]] = expectedRecords.groupBy(_.batch) + whenStep + .orderedBatchesToRun { case (batchNumber, toRun) => + toRun *> + ZIO + .foreachPar( + expectedRecordByBatch + .get(batchNumber) + .toList + .flatten + .map(_.topicAlias) + .distinct + ) { topicAlias => + ZIO.effect( + topicAlias -> + allKeys + .get(batchNumber) + .map(keysForBatch => + backgroundContext + .consumeTopic(topicAlias, keysForBatch)(recordConsumer) + ) + .getOrElse(Map.empty) + ) + } + .map(r => r.toMap) + } + .map(_.reduce(_ merge _)) + } + private def defineBatches( + list: List[(Int, List[RecordRead])] + ): Map[Int, Task[Unit]] = + list + .foldLeft(Map.empty[Int, 
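      // Group the registered records by batch number, concatenating records that share a
      // batch; each resulting group is turned into a single producer Task by defineTask below.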
List[RecordRead]]) { case (acc, (batchNum, records)) => + acc.updated( + batchNum, + acc.getOrElse(batchNum, List.empty).concat(records) + ) + } + .map { case (batch, records) => (batch, defineTask(records)) } + + private def defineTask(recordsToSend: List[RecordRead]): Task[Unit] = { + ConsoleTimer.time( + "runProduce", { + ZIO + .foreach(recordsToSend) { record => + backgroundContext.inputTopicConfigs.get(record.topicAlias) match { + case Some(topicConfig) => + recordProducer.run( + record, + topicConfig, + backgroundContext.subjectConfigs.get(topicConfig.keyType), + backgroundContext.subjectConfigs.get(topicConfig.valueType) + ) + case None => + ZIO.fail( + new IllegalArgumentException( + s"missing ${record.topicAlias} in background" + ) + ) + } + } + .unit + } + ) + } + +} diff --git a/src/main/scala/com/lectra/kapoeira/domain/functions/DefaultFunctionRepository.scala b/src/main/scala/com/lectra/kapoeira/domain/functions/DefaultFunctionRepository.scala new file mode 100644 index 0000000..651dabd --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/domain/functions/DefaultFunctionRepository.scala @@ -0,0 +1,37 @@ +package com.lectra.kapoeira.domain.functions + +import java.util.{HexFormat, UUID} +import com.typesafe.scalalogging.LazyLogging + +import java.lang.Thread.sleep +import java.nio.charset.StandardCharsets +import java.security.MessageDigest +import java.time.OffsetDateTime + +object DefaultFunctionRepository extends FunctionRepository with LazyLogging { + + def functions: Map[String, Func] = Map( + "uuid" -> { _ => UUID.randomUUID().toString }, + "uppercase" -> { args => args.head.toUpperCase() }, + "lowercase" -> { args => args.head.toLowerCase() }, + "print" -> { args => logger.info(args.mkString(" ")) }, + "now" -> { _ => OffsetDateTime.now().toString }, + "sleep" -> { args => sleep(args.head.toLong) }, + "sha256" -> { sha256 _ }, + "sha1" -> { sha1 _ } + ) + + def sha256(args: Array[String]): String = { + val messageDigest = MessageDigest.getInstance("SHA-256") + val input = args.mkString(" ") + messageDigest.update(input.getBytes(StandardCharsets.UTF_8), 0, input.length) + HexFormat.of().formatHex(messageDigest.digest()) + } + + def sha1(args: Array[String]): String = { + val messageDigest = MessageDigest.getInstance("SHA-1") + val input = args.mkString(" ") + messageDigest.update(input.getBytes(StandardCharsets.UTF_8), 0, input.length) + HexFormat.of().formatHex(messageDigest.digest()) + } +} diff --git a/src/main/scala/com/lectra/kapoeira/domain/functions/Func.scala b/src/main/scala/com/lectra/kapoeira/domain/functions/Func.scala new file mode 100644 index 0000000..8e179c8 --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/domain/functions/Func.scala @@ -0,0 +1,5 @@ +package com.lectra.kapoeira.domain.functions + +trait Func { + def apply(args: Array[String] = Array()) : Any +} diff --git a/src/main/scala/com/lectra/kapoeira/domain/functions/FunctionRepository.scala b/src/main/scala/com/lectra/kapoeira/domain/functions/FunctionRepository.scala new file mode 100644 index 0000000..594c917 --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/domain/functions/FunctionRepository.scala @@ -0,0 +1,9 @@ +package com.lectra.kapoeira.domain.functions + +trait FunctionRepository { + def functions: Map[String, Func] + + def unapply(identifier: String): Option[Func] = { + functions.get(identifier) + } +} diff --git a/src/main/scala/com/lectra/kapoeira/exception/AssertException.scala b/src/main/scala/com/lectra/kapoeira/exception/AssertException.scala new file mode 100644 
index 0000000..88faf1d --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/exception/AssertException.scala @@ -0,0 +1,3 @@ +package com.lectra.kapoeira.exception + +case class AssertException(msg: String) extends Exception(msg) diff --git a/src/main/scala/com/lectra/kapoeira/glue/Asserts.scala b/src/main/scala/com/lectra/kapoeira/glue/Asserts.scala new file mode 100644 index 0000000..854fccb --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/glue/Asserts.scala @@ -0,0 +1,185 @@ +package com.lectra.kapoeira.glue + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.{JsonNodeType, TextNode} +import com.lectra.kapoeira.domain.AssertionContext +import com.lectra.kapoeira.domain.AssertionContext.{HeadersValue, RecordValue} +import com.lectra.kapoeira.exception.AssertException +import com.typesafe.scalalogging.LazyLogging +import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils +import io.gatling.jsonpath.{JPError, JsonPath} +import org.scalatest.Assertion +import org.scalatest.Assertions._ +import org.scalatest.matchers.should.Matchers + +import scala.jdk.CollectionConverters._ +import scala.util.Try + +object Asserts extends Matchers with LazyLogging { + def equal( + assertionContext: AssertionContext, + alias: String, + jsonExpression: String, + expected: String + ): Assertion = assertKafkaOutput( + assertionContext, + alias, + jsonExpression, + { actual => assert(actual == JsonExpr(expected).value) } + ) + + def matchObject( + assertionContext: AssertionContext, + alias: String, + jsonExpression: String, + expected: String + ): Assertion = + assertKafkaOutput( + assertionContext, + alias, + jsonExpression, + { actual => + val actualSet = actual.toMap.toSet + val expectedSet = JsonExpr(expected).value.toMap.toSet + assert(expectedSet.intersect(actualSet) == expectedSet,s"Actual json $actual does not match $expected") + } + ) + + def matchExactObject( + assertionContext: AssertionContext, + alias: String, + jsonExpression: String, + expected: String + ): Assertion = + assertKafkaOutput( + assertionContext, + alias, + jsonExpression, + { actual => + val actualSet = actual.toMap.toSet + val expectedSet = JsonExpr(expected).value.toMap.toSet + assert(actualSet == expectedSet, s"Actual json $actual does not match $expected") + } + ) + + def assertKafkaOutput( + assertionContext: AssertionContext, + alias: String, + jsonExpression: String, + assertion: JsonNode => Assertion + ): Assertion = { + + assertionContext.extractConsumedRecordWithAlias(alias) match { + case Some(RecordValue(consumed)) => + val eventualJson = consumed match { + case bytes: Array[Byte] => new String(bytes).trim + case aString: String => aString + case value => value.toString.trim + } + val stringValue = + if (eventualJson.startsWith("{") || eventualJson.startsWith("[")) + eventualJson + else new String(AvroSchemaUtils.toJson(consumed)) + assertJson(jsonExpression, assertion, stringValue) + case Some(HeadersValue(headers)) => + Try { headers.map { case (k, v) => (k , new String(v)) } } + .flatMap(hm => Try { objectMapper.valueToTree[JsonNode](hm.asJava) }) + .map(jsonValue => assertJson(jsonExpression, assertion, jsonValue)) + .fold( + err => + throw AssertException( + s"Error when assert on $alias : ${err.getMessage}" + ), + identity + ) + case _ => + throw AssertException(s"Alias $alias not found in assertion context.") + } + + } + + def assertJson( + jsonExpression: String, + assertion: JsonNode => Assertion, + actual: String + ): Assertion = { + val jsonActual = Try { + 
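      // Parse the consumed payload as JSON; if parsing fails, the fold below wraps the raw
      // string in a TextNode so plain (non-JSON) values can still be matched by JsonPath.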
objectMapper.readTree(actual) + } + .fold(_ => new TextNode(actual), identity) + assertJson(jsonExpression, assertion, jsonActual) + } + + def assertJson( + jsonExpression: String, + assertion: JsonNode => Assertion, + jsonActual: JsonNode + ): Assertion = { + logger.info(s"""jsonActual: [$jsonActual]""") + val jsonValue: Either[JPError, Iterator[JsonNode]] = + JsonPath.query(jsonExpression, jsonActual) + jsonValue match { + case Right(value) => + val found = + value.next() + logger.info(s"""jsonFound: [$found]""") + assertion(found) + case Left(x) => + logger.error("stringValue not found"); + throw new IllegalStateException(x.toString) + } + } + + implicit class JsonNodeOps(val jsonNode: JsonNode) { + + def fold[T]( + obj: JsonNode => T, + arr: JsonNode => T, + number: Double => T, + strng: String => T, + bln: Boolean => T, + bin: Array[Byte] => T, + nullOrUndef: JsonNode => T + ): T = jsonNode.getNodeType match { + case JsonNodeType.ARRAY => arr(jsonNode) + case JsonNodeType.NUMBER => number(jsonNode.doubleValue()) + case JsonNodeType.STRING => strng(jsonNode.asText()) + case JsonNodeType.BOOLEAN => bln(jsonNode.asBoolean()) + case JsonNodeType.BOOLEAN => bln(jsonNode.asBoolean()) + case JsonNodeType.BINARY => bin(jsonNode.binaryValue()) + case JsonNodeType.MISSING | JsonNodeType.NULL => nullOrUndef(jsonNode) + case JsonNodeType.OBJECT | JsonNodeType.POJO => obj(jsonNode) + } + + def recFold: Any = jsonNode.fold( + obj => + obj + .fields() + .asScala + .map(entry => (entry.getKey, entry.getValue.recFold)) + .toMap, + arr => arr.elements().asScala.map(_.recFold).toSeq, + identity, + identity, + identity, + identity, + identity + ) + + def toMap: Map[String, Any] = { + assert( + jsonNode.isObject, + s"Trying to convert ${jsonNode} to Map[String,Any]" + ) + recFold.asInstanceOf[Map[String, Any]] + } + + } + + final case class JsonExpr(json: String) { + def value: JsonNode = + Try { objectMapper.readTree(json) } + .getOrElse(new TextNode(json)) + } + +} diff --git a/src/main/scala/com/lectra/kapoeira/glue/DataTableParser.scala b/src/main/scala/com/lectra/kapoeira/glue/DataTableParser.scala new file mode 100644 index 0000000..781de47 --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/glue/DataTableParser.scala @@ -0,0 +1,143 @@ +package com.lectra.kapoeira.glue + +import com.lectra.kapoeira.domain.{ + FileFormattedValueRecord, + FileKeyValueRecord, + FileValueRecord, + InputTopicConfig, + KeyValueRecord, + KeyValueWithAliasesRecord, + OutputTopicConfig, + SubjectConfig, + SubjectFormat +} +import com.typesafe.scalalogging.LazyLogging +import io.cucumber.datatable.DataTable + +import scala.jdk.CollectionConverters._ +import scala.util.{Failure, Success, Try} + +object DataTableParser extends LazyLogging { + + def parseInputTopicDataTable(records: DataTable): List[InputTopicConfig] = { + records + .asMaps() + .asScala + .map(_.asScala) + .map(row => InputTopicConfig(row("topic"), row("alias"), row("key_type"), row("value_type"))) + .toList + } + + def parseOutputTopicDataTable(records: DataTable): List[OutputTopicConfig] = { + records + .asMaps() + .asScala + .map(_.asScala) + .map(row => { + Try(row("readTimeoutInSecond").toInt) match { + case Success(timeout) => OutputTopicConfig(row("topic"), row("alias"), row("key_type"), row("value_type"), timeout) + case Failure(e) => + logger.error(e.getMessage) + OutputTopicConfig(row("topic"), row("alias"), row("key_type"), row("value_type")) + } + }) + .toList + } + + def parseSubjectDataTable(records: DataTable): List[SubjectConfig] = { + records 
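      // Each row of the subject DataTable must provide name, alias and format columns;
      // an unknown format fails fast below with an IllegalArgumentException.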
+ .asMaps() + .asScala + .map(_.asScala) + .map(row => { + val rowFormat = row("format") + val subjectFormat = SubjectFormat + .parse(rowFormat) + .fold { + val message = s"format is not known : $rowFormat, valid formats are ${SubjectFormat.values}" + logger.error(message) + throw new IllegalArgumentException(message) + }(identity) + SubjectConfig(row("name"), row("alias"), subjectFormat) + }) + .toList + } + + def parseKeyValueDataTable(records: DataTable): List[KeyValueRecord] = { + records + .asMaps() + .asScala + .map(_.asScala) + .map(row => + KeyValueRecord( + row("topic_alias"), + row("key"), + row("value"), + row.getOrElse("headers", "").readHeaders, + row.view.mapValues(Integer.parseInt).getOrElse("batch", 0) + ) + ) + .toList + } + + def parseKeyValueAliasesDataTable(records: DataTable): List[KeyValueWithAliasesRecord] = { + records + .asMaps() + .asScala + .map(_.asScala) + .map(row => + KeyValueWithAliasesRecord( + row("topic_alias"), + row("key"), + row("value"), + row.get("headers"), + row.view.mapValues(Integer.parseInt).getOrElse("batch", 0) + ) + ) + .toList + } + + def parseFileKeyValueDataTable(records: DataTable): List[FileKeyValueRecord] = { + records + .asMaps() + .asScala + .map(_.asScala) + .map(row => + FileKeyValueRecord( + row("topic_alias"), + row("separator"), + row("file"), + row.view.mapValues(Integer.parseInt).getOrElse("batch", 0) + ) + ) + .toList + } + + def parseFileValueDataTable(records: DataTable): List[FileValueRecord] = { + records + .asMaps() + .asScala + .map(_.asScala) + .map(row => + FileValueRecord(row("topic_alias"), row("key"), row("file"), row.view.mapValues(Integer.parseInt).getOrElse("batch", 0)) + ) + .toList + } + + def parseFileFormattedValueDataTable(records: DataTable): List[FileFormattedValueRecord] = { + records + .asMaps() + .asScala + .map(_.asScala) + .map(row => + FileFormattedValueRecord( + row("topic_alias"), + row("key"), + row("file"), + row.view.mapValues(Integer.parseInt).getOrElse("batch", 0) + ) + ) + .toList + } + +} diff --git a/src/main/scala/com/lectra/kapoeira/glue/FeaturesStepDefinitions.scala b/src/main/scala/com/lectra/kapoeira/glue/FeaturesStepDefinitions.scala new file mode 100644 index 0000000..81ba5a8 --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/glue/FeaturesStepDefinitions.scala @@ -0,0 +1,302 @@ +package com.lectra.kapoeira.glue + +import ammonite.ops.ImplicitWd._ +import ammonite.ops._ +import com.lectra.kapoeira.domain.functions.DefaultFunctionRepository +import com.lectra.kapoeira.domain.{AssertionContext, BackgroundContext, WhenStepsLive} +import com.lectra.kapoeira.glue.Asserts.{JsonExpr, JsonNodeOps} +import com.lectra.kapoeira.glue.DataTableParser._ +import com.lectra.kapoeira.kafka.KapoeiraProducer +import com.typesafe.scalalogging.LazyLogging +import io.cucumber.datatable.DataTable +import io.cucumber.scala.{EN, ScalaDsl} +import org.scalatest.matchers.should.Matchers +import org.scalatest.Assertions._ + +import scala.util.{Failure, Success, Try} + +class FeaturesStepDefinitions + extends ScalaDsl + with EN + with Matchers + with LazyLogging { + + private implicit val backgroundContext: BackgroundContext = + new BackgroundContext() + + private val assertionContext = + new AssertionContext(WhenStepsLive(backgroundContext,kafkaConsume,KapoeiraProducer.run _)) + private val defaultFuncRepository = DefaultFunctionRepository + private val functionManger = new FunctionManager(defaultFuncRepository) + + // Background + Given("^input\\s+topic$") { data: DataTable => + ConsoleTimer.time( + 
"inputTopic", { + parseInputTopicDataTable(data).foreach(backgroundContext.addInput) + } + ) + } + + Given("^output\\s+topic$") { data: DataTable => + ConsoleTimer.time( + "outputTopic", { + parseOutputTopicDataTable(data).foreach(backgroundContext.addOutput) + } + ) + } + + // STATIC VAR + Given( + "^var\\s+(.*)\\s+=\\s+(?!call\\s+function\\s*:|call\\s+script\\s*:)(.*)$" + ) { (variableName: String, postEqualDefinition: String) => + backgroundContext.addVariable(variableName, postEqualDefinition) + } + + // FUNCTION + Given("^var\\s+(.*)\\s+=\\s+call\\s+function\\s*:\\s*(\\S+)\\s*(.*)$") { + (variableName: String, functionDefinition: String, params: String) => + { + functionManger(variableName, functionDefinition, backgroundContext.substituteVariablesIn(params)) + .fold(fail(_), identity) + } + } + + // SUBJECT + Given("^subject$") { data: DataTable => + parseSubjectDataTable(data).foreach(backgroundContext.addSubject) + } + + // Scenario + + // PRODUCE + + When( + "^records\\s+from\\s+file\\s+with\\s+key\\s+and\\s+value\\s+are\\s+sent\\s*$" + ) { records: DataTable => + val batches = parseFileKeyValueDataTable(records) + .map(r => + ( + r.batch, + r.readFromFile(openFile(_)).map(_.interpolate(backgroundContext)) + ) + ) + assertionContext.registerWhen(batches) + } + + When("^records\\s+from\\s+file\\s+with\\s+value\\s+are\\s+sent\\s*$") { + records: DataTable => + val batches = parseFileValueDataTable(records).map(r => + ( + r.batch, + r.readFromFile(openFile(_)).map(_.interpolate(backgroundContext)) + ) + ) + assertionContext.registerWhen(batches) + } + + When( + "^records\\s+from\\s+file\\s+with\\s+formatted\\s+value\\s+are\\s+sent\\s*$" + ) { records: DataTable => + val batches = parseFileFormattedValueDataTable(records) + .map(r => + ( + r.batch, + r.readFromFile(openFile(_)).map(_.interpolate(backgroundContext)) + ) + ) + assertionContext.registerWhen(batches) + } + + When("^records\\s+with\\s+key\\s+and\\s+value\\s+are\\s+sent\\s*$") { + records: DataTable => + val batches = parseKeyValueDataTable(records) + .map(_.read) + .map(_.interpolate(backgroundContext)) + assertionContext.registerWhen(List((0, batches))) + } + + // CONSUME + Then("^expected\\s+records\\s*$") { messages: DataTable => + ConsoleTimer.time( + "runConsume", { + try { + logger.debug("Expected records step") + assertionContext.launchConsumption( + parseKeyValueAliasesDataTable(messages).map(kv => + kv.copy(key = backgroundContext.substituteVariablesIn(kv.key)) + ) + ) + logger.debug(assertionContext.showConsumedRecords) + } finally { + backgroundContext.close() + } + } + ) + } + + // CALL EXTERNAL TOOLING + private def callScript(script: String) = { + Try( + %%(script.split(" ").map(backgroundContext.substituteVariablesIn).toList) + ) match { + case Success(commandResult) => + val result = ScriptResult.from(commandResult); + logger.debug(s"$result"); + result + case Failure(e: ShelloutException) => + val result = ScriptResult.from(e.result); + logger.error(s"$result"); + result + } + } + + private def scriptFullPath(script: String) = { + Try(System.getProperty("user.dir")) + .map { + case null => "" + case s => s.trim + } + .fold( + _ => script, + { baseDir => + if (script.startsWith("/")) script else s"$baseDir/$script" + } + ) + } + + final case class ScriptResult(exitCode: Int, stdOut: String, stdErr: String) + + object ScriptResult { + def from(commandResult: CommandResult) = ScriptResult( + commandResult.exitCode, + commandResult.out.string.trim, + commandResult.err.string.trim + ) + } + + 
And("^call\\s+script\\s+:\\s+(.+)") { script: String => + val result = callScript(scriptFullPath(script)) + result.exitCode shouldBe 0 + } + + And("^var\\s+(.*)\\s+=\\s+call\\s+script\\s*:\\s+(.+)$") { + (variableName: String, script: String) => + val result = callScript(scriptFullPath(script)) + result.exitCode shouldBe 0 + backgroundContext.addVariable(variableName, result.stdOut) + } + + //DOCSTRING version + And("^call\\s+script\\s+:") { script: String => + val result = callScript(script) + result.exitCode shouldBe 0 + } + + And("^var\\s+(.*)\\s+=\\s+call\\s+script\\s*:$") { + (variableName: String, script: String) => + val result = callScript(script) + result.exitCode shouldBe 0 + backgroundContext.addVariable(variableName, result.stdOut) + } + + // ASSERT + And("^assert\\s+(\\S+)\\s+(\\S+)\\s*==\\s*(.*)\\s*$") { + (alias: String, jsonExpression: String, expected: String) => + logger.debug( + s"Assert Step : (alias,jsonExpression,expected) ($alias,$jsonExpression,$expected)" + ) + logger.debug(assertionContext.showConsumedRecords) + // TODO capture operator vs several And expression + val interpolated = backgroundContext.substituteVariablesIn(expected) + logger.debug( + s"""alias: $alias, jsonExpression: $jsonExpression, expected: $interpolated""" + ) + Asserts.equal(assertionContext, alias, jsonExpression, interpolated) + } + + And("^assert\\s+(\\S+)\\s+(\\S+)\\s+match\\s+object\\s+(\\{.*\\})\\s*$") { + (alias: String, jsonExpression: String, expectedJsonObject: String) => + val interpolated = + backgroundContext.substituteVariablesIn(expectedJsonObject) + Asserts.matchObject(assertionContext, alias, jsonExpression, interpolated) + } + + And("^assert\\s+(\\S+)\\s+(\\S+)\\s+match\\s+exact\\s+object\\s+(\\{.*\\})\\s*$") { + (alias: String, jsonExpression: String, expectedJsonObject: String) => + val interpolated = + backgroundContext.substituteVariablesIn(expectedJsonObject) + Asserts.matchExactObject(assertionContext, alias, jsonExpression, interpolated) + } + + And("^assert\\s+(\\S+)\\s+(\\$\\S*)\\s+has size\\s+(\\d*)$") { + (alias: String, jsonExpression: String, expectedSize: Long) => + Asserts.assertKafkaOutput( + assertionContext, + alias, + jsonExpression, + { actual => actual should have size expectedSize } + ) + } + + And("^assert\\s+var\\s+(\\S+)\\s+(\\$\\S*)\\s*==\\s+(.*)$") { + (variableName: String, jsonExpression: String, expectedJson: String) => + val interpolatedExpectedJson = + backgroundContext.substituteVariablesIn(expectedJson) + val variable = backgroundContext.getVariable(variableName) + Asserts.assertJson( + jsonExpression, + { actual => assert(actual == JsonExpr(interpolatedExpectedJson).value) }, + variable.get + ) + } + + And("^assert\\s+var\\s+(\\S+)\\s+(\\$\\S*)\\s+match\\s+object\\s+(.*)$") { + (variableName: String, jsonExpression: String, expectedJson: String) => + val interpolatedExpectedJson = + backgroundContext.substituteVariablesIn(expectedJson) + val variable = backgroundContext.getVariable(variableName) + Asserts.assertJson( + jsonExpression, + { actual => + val actualSet = actual.toMap.toSet + val expectedSet = JsonExpr(interpolatedExpectedJson).value.toMap.toSet + assert(expectedSet.intersect(actualSet) == expectedSet,s"Actual json $actual does not match $expectedJson") + }, + variable.get + ) + } + + And("^assert\\s+var\\s+(\\S+)\\s+(\\$\\S*)\\s+match\\s+exact\\s+object\\s+(.*)$") { + (variableName: String, jsonExpression: String, expectedJson: String) => + val interpolatedExpectedJson = + backgroundContext.substituteVariablesIn(expectedJson) + 
val variable = backgroundContext.getVariable(variableName) + Asserts.assertJson( + jsonExpression, + { actual => + val actualSet = actual.toMap.toSet + val expectedSet = JsonExpr(interpolatedExpectedJson).value.toMap.toSet + assert(actualSet == expectedSet, s"Actual json $actual does not match $expectedJson") + }, + variable.get + ) + } + + And("^assert\\s+var\\s+(\\S+)\\s+(\\$\\S*)\\s+has size\\s+(\\d*)$") { + (variableName: String, jsonExpression: String, expectedSize: Long) => + val variable = backgroundContext.getVariable(variableName) + Asserts.assertJson( + jsonExpression, + { actual => actual should have size expectedSize }, + variable.get + ) + } + + And("^match\\s+(\\S+)\\s*==\\s*(\\{.*\\}|\\[.*\\])$") { + (output: String, expected: String) => + logger.info(output) + logger.info(expected) + } + +} diff --git a/src/main/scala/com/lectra/kapoeira/glue/FunctionManager.scala b/src/main/scala/com/lectra/kapoeira/glue/FunctionManager.scala new file mode 100644 index 0000000..1f1a424 --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/glue/FunctionManager.scala @@ -0,0 +1,42 @@ +package com.lectra.kapoeira.glue + +import com.lectra.kapoeira.domain.BackgroundContext +import com.lectra.kapoeira.domain.functions.FunctionRepository + +object FunctionManager { + case class ParamsResolutionError(unresolvedVariable : String) { + override def toString = s"Unable to resolve $unresolvedVariable variable." + } +} + +class FunctionManager(functionRepository: FunctionRepository) { + + import FunctionManager._ + + def resolveVariables(params: String)(implicit backgroundContext: BackgroundContext): Either[ParamsResolutionError, Seq[String]] = { + val Variable = """\$\{(.*)\}""".r + val resolvedParams = params.split(" ") + + resolvedParams.foldLeft[Either[ParamsResolutionError, Seq[String]]](Right(Seq.empty[String])) { + case (Right(values), Variable(varId)) => + backgroundContext.getVariable(varId) + .map(v => values :+ v) + .toRight(ParamsResolutionError(varId)) + case (Right(values), value) => Right(values :+ value) + case (left, _) => left + } + } + + def apply(targetVariable: String, functionDefinition: String, params: String)(implicit backgroundContext: BackgroundContext): Either[String, Unit] = { + functionDefinition match { + case functionRepository(function)=> + val resolvedParams = if(params.isEmpty) Right(Seq.empty) else resolveVariables(params) + resolvedParams.map { p => + backgroundContext.addVariable(targetVariable, function(p.toArray).toString) + }.left.map(_.toString) + + case unknownIdentifier => Left(s"Function $unknownIdentifier isn't supported.") + } + } + +} diff --git a/src/main/scala/com/lectra/kapoeira/glue/package.scala b/src/main/scala/com/lectra/kapoeira/glue/package.scala new file mode 100644 index 0000000..08cb21a --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/glue/package.scala @@ -0,0 +1,139 @@ +package com.lectra.kapoeira + +import com.fasterxml.jackson.databind.{JsonNode, ObjectMapper} +import com.lectra.kapoeira.domain.Services._ +import com.lectra.kapoeira.domain.SubjectFormat.{Avro, Json} +import com.lectra.kapoeira.domain._ +import com.lectra.kapoeira.kafka.KapoeiraConsumer._ +import com.lectra.kapoeira.kafka.{KapoeiraAdmin, KapoeiraConsumer} +import com.typesafe.scalalogging.LazyLogging +import org.apache.kafka.clients.admin.AdminClient +import org.apache.kafka.clients.consumer.{ConsumerRecord, KafkaConsumer} +import org.apache.kafka.common.TopicPartition + +import scala.io.Source +import scala.jdk.CollectionConverters._ +import scala.util.Try + +package 
object glue extends LazyLogging with RecordReadImplicits { + + implicit val createConsumer: OutputConfigFactory = + (outputTopicConfig: OutputTopicConfig, subjectConfigs: Map[String, SubjectConfig]) => { + // String or Avro Consumer? + val valueFormat = subjectConfigs.get(outputTopicConfig.valueType).map(_.format) + val keyFormat = subjectConfigs.get(outputTopicConfig.keyType).map(_.format) + val config: OutputConfig = (keyFormat, valueFormat) match { + case (Some(Avro), Some(Avro)) => + OutputConfigAvroAvro(outputTopicConfig, KapoeiraConsumer.createConsumer[Any, Any]) + case (None, Some(Avro)) => + OutputConfigStringAvro(outputTopicConfig, KapoeiraConsumer.createConsumer[String, Any]) + case (Some(Avro), None) => + OutputConfigAvroString(outputTopicConfig, KapoeiraConsumer.createConsumer[Any, String]) + case (Some(Json), Some(Json)) => + OutputConfigJsonJson(outputTopicConfig, KapoeiraConsumer.createConsumer[JsonNode, JsonNode]) + case (None, Some(Json)) => + OutputConfigStringJson(outputTopicConfig, KapoeiraConsumer.createConsumer[String, JsonNode]) + case (Some(Json), None) => + OutputConfigJsonString(outputTopicConfig, KapoeiraConsumer.createConsumer[JsonNode, String]) + case (None, None) => + OutputConfigStringString(outputTopicConfig, KapoeiraConsumer.createConsumer[String, String]) + } + + val consumer = config.consumer + logger.debug( + s"ASSIGNMENT - Consumer assignment on ${outputTopicConfig.topicName}" + ) + val partitionInfo = + consumer.partitionsFor(outputTopicConfig.topicName).asScala + consumer.assign( + partitionInfo + .map(p => new TopicPartition(p.topic(), p.partition())) + .asJava + ) + logger.debug( + s"ASSIGNMENT - Consumer assignment ${!consumer.assignment().isEmpty}" + ) + val consumerPosition = consumer + .assignment() + .asScala + .map(tp => s"topic=$tp, partition=${tp.partition()}, position=${consumer.position(tp)}") + .mkString("\n") + logger.debug(consumerPosition) + consumer.commitSync() + + config + } + + implicit val kafkaConsume: RecordConsumer = new RecordConsumer { + override def apply( + outputConfig: OutputConfig, + expectedKeys: Map[String, Int] + ): Map[String, Seq[ConsumerRecord[String, Any]]] = + consume(outputConfig, expectedKeys) + } + + implicit val closeConsumer: CloseConsumer = + (outputConfig: OutputConfig, adminClient: AdminClient) => { + outputConfig.consumer.unsubscribe() + outputConfig.consumer.close() + adminClient.deleteConsumerGroups(java.util.Arrays.asList(outputConfig.consumer.groupMetadata().groupId())) + } + + implicit val adminClient: AdminClient = KapoeiraAdmin.createClient + + implicit val openFile: FileOpener = (filePath: String) => { + logger.info(s"openFile($filePath)") + val source = Option( + Thread.currentThread.getContextClassLoader.getResource(filePath) + ) match { + case None => Source.fromFile(s"/$filePath") + case Some(value) => Source.fromFile(value.getPath) + } + val result = source.getLines().toList + source.close() + result + } + + object ConsoleTimer { + def time[R](label: String, block: => R): R = { + logger.info(s"$label...") + val t0 = System.currentTimeMillis() + val result = block // call-by-name + val duration = (System.currentTimeMillis() - t0).toDouble / 1000d + logger.info(s"$label... 
DONE, elapsed time=%.3fs".format(duration)) + result + } + } + + private[glue] val objectMapper = new ObjectMapper() + + implicit class RecordReadOps(recordRead: RecordRead) { + def jsonHeaders: Try[Map[String, Array[Byte]]] = Try( + recordRead.headers.map { + case (k, v: String) => (k, v.getBytes) + case (k, v) => (k, objectMapper.writeValueAsBytes(v)) + } + ) + } + + implicit val readHeadersString: ReadHeaders[String] = (string: String) => + Try { + recursiveConversion( + objectMapper + .readValue(string, classOf[java.util.HashMap[String, Object]]) + ) + }.getOrElse(Map.empty) + + implicit class ReadHeadersOps[T: ReadHeaders](val headers: T) { + def readHeaders: Map[String, Any] = implicitly[ReadHeaders[T]].readHeaders(headers) + } + + private def recursiveConversion( + map: java.util.Map[String, Object] + ): Map[String, Any] = { + map.asScala.toMap.map { + case (k, v: java.util.Map[String, Object]) => (k, recursiveConversion(v)) + case (k, v) => (k, v) + } + } +} diff --git a/src/main/scala/com/lectra/kapoeira/kafka/DataType.scala b/src/main/scala/com/lectra/kapoeira/kafka/DataType.scala new file mode 100644 index 0000000..b79e470 --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/kafka/DataType.scala @@ -0,0 +1,37 @@ +package com.lectra.kapoeira.kafka +import com.fasterxml.jackson.databind.JsonNode +import io.confluent.kafka.serializers.{KafkaAvroDeserializer, KafkaAvroSerializer} +import io.confluent.kafka.serializers.json.{KafkaJsonSchemaDeserializer, KafkaJsonSchemaSerializer} +import org.apache.avro.generic.GenericData +import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer} + +sealed trait DataType[A] { + type DeserializerT + type SerializerT + val classDeserializer: Class[DeserializerT] + val classSerializer: Class[SerializerT] +} +trait AvroType[T] extends DataType[T] { + type DeserializerT = KafkaAvroDeserializer + type SerializerT = KafkaAvroSerializer + val classDeserializer: Class[KafkaAvroDeserializer] = classOf[KafkaAvroDeserializer] + val classSerializer: Class[KafkaAvroSerializer] = classOf[KafkaAvroSerializer] +} +case object StringType extends DataType[String] { + type DeserializerT = StringDeserializer + type SerializerT = StringSerializer + val classDeserializer: Class[StringDeserializer] = classOf[StringDeserializer] + val classSerializer: Class[StringSerializer] = classOf[StringSerializer] +} +case object JsonType extends DataType[JsonNode] { + type DeserializerT = KafkaJsonSchemaDeserializer[JsonNode] + type SerializerT = KafkaJsonSchemaSerializer[JsonNode] + val classDeserializer: Class[KafkaJsonSchemaDeserializer[JsonNode]] = classOf[KafkaJsonSchemaDeserializer[JsonNode]] + val classSerializer: Class[KafkaJsonSchemaSerializer[JsonNode]] = classOf[KafkaJsonSchemaSerializer[JsonNode]] +} +object DataType { + implicit val avroType: DataType[Any] = new AvroType[Any] {} + implicit val avroTypeGeneric: DataType[GenericData.Record] = new AvroType[GenericData.Record] {} + implicit val stringType: DataType[String] = StringType + implicit val jsonType: DataType[JsonNode] = JsonType +} diff --git a/src/main/scala/com/lectra/kapoeira/kafka/KapoeiraAdmin.scala b/src/main/scala/com/lectra/kapoeira/kafka/KapoeiraAdmin.scala new file mode 100644 index 0000000..0b3edde --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/kafka/KapoeiraAdmin.scala @@ -0,0 +1,27 @@ +package com.lectra.kapoeira.kafka + +import com.lectra.kapoeira.Config._ +import org.apache.kafka.clients.CommonClientConfigs +import org.apache.kafka.clients.admin.{AdminClient, 
AdminClientConfig} +import org.apache.kafka.common.config.SaslConfigs +import org.apache.kafka.common.security.auth.SecurityProtocol + +import java.util +import java.util.Properties + +object KapoeiraAdmin { + def createClient = { + val kafkaParams = new Properties() + kafkaParams.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_BROKER_LIST) + if (JAAS_AUTHENT) { + kafkaParams.put(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-512") + kafkaParams.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SecurityProtocol.SASL_SSL.name) + kafkaParams.put( + SaslConfigs.SASL_JAAS_CONFIG, + s"org.apache.kafka.common.security.scram.ScramLoginModule required username='$KAFKA_USER' password='$KAFKA_PASSWORD';" + ) + } + AdminClient.create(kafkaParams) + } + +} diff --git a/src/main/scala/com/lectra/kapoeira/kafka/KapoeiraConsumer.scala b/src/main/scala/com/lectra/kapoeira/kafka/KapoeiraConsumer.scala new file mode 100644 index 0000000..6684e68 --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/kafka/KapoeiraConsumer.scala @@ -0,0 +1,121 @@ +package com.lectra.kapoeira.kafka + +import com.fasterxml.jackson.databind.JsonNode +import com.lectra.kapoeira.Config +import com.lectra.kapoeira.Config._ +import com.lectra.kapoeira.domain._ +import com.typesafe.scalalogging.LazyLogging +import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils +import io.confluent.kafka.schemaregistry.json.JsonSchemaUtils +import org.apache.kafka.clients.CommonClientConfigs +import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord, KafkaConsumer} +import org.apache.kafka.common.config.SaslConfigs +import org.apache.kafka.common.security.auth.SecurityProtocol +import io.confluent.kafka.serializers.KafkaJsonDeserializerConfig + +import java.time.Duration +import java.util.Properties +import scala.collection.mutable +import scala.jdk.CollectionConverters._ + +object KapoeiraConsumer extends LazyLogging { + + implicit class JsonOutputConfig(outputConfig: OutputConfig) { + def toJson[A](a: A): String = outputConfig match { + case _: OutputConfigStringString => a.toString + case _: OutputConfigStringAvro => a.toString + case _: OutputConfigAvroString => new String(AvroSchemaUtils.toJson(a)) + case _: OutputConfigAvroAvro => new String(AvroSchemaUtils.toJson(a)) + case _: OutputConfigStringJson => a.toString + case _: OutputConfigJsonString => new String(JsonSchemaUtils.toJson(a)) + case _: OutputConfigJsonJson => new String(JsonSchemaUtils.toJson(a)) + } + } + + def createConsumer[K: DataType, V: DataType]: KafkaConsumer[K, V] = { + val kafkaParams = new Properties() + kafkaParams.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_BROKER_LIST) + kafkaParams.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, implicitly[DataType[K]].classDeserializer) + kafkaParams.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, implicitly[DataType[V]].classDeserializer) + kafkaParams.put(ConsumerConfig.GROUP_ID_CONFIG, CONSUMER_GROUP) + kafkaParams.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest") + kafkaParams.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true: java.lang.Boolean) + kafkaParams.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed") + kafkaParams.put("schema.registry.url", Config.KAFKA_SCHEMA_REGISTRY_URL) + kafkaParams.put(KafkaJsonDeserializerConfig.JSON_VALUE_TYPE, classOf[JsonNode].getName) + + logger.info(s"KAFKA_BROKER_LIST=$KAFKA_BROKER_LIST") + logger.info(s"JAAS_AUTHENT=$JAAS_AUTHENT") + if (JAAS_AUTHENT) { + kafkaParams.put(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-512") + 
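+      // security.protocol and sasl.jaas.config below mirror KapoeiraAdmin.createClient and the producer's jaasConfig; + // the SCRAM credentials come from KAFKA_USER / KAFKA_PASSWORD in Config.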
kafkaParams.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SecurityProtocol.SASL_SSL.name) + kafkaParams.put( + SaslConfigs.SASL_JAAS_CONFIG, + s"org.apache.kafka.common.security.scram.ScramLoginModule required username='$KAFKA_USER' password='$KAFKA_PASSWORD';" + ) + } + logger.info(s"""Create consumer with : \n + |group.id = $CONSUMER_GROUP \n + |""".stripMargin) + new KafkaConsumer[K, V](kafkaParams) + } + + def consume( + outputConfig: OutputConfig, + expectedKeys: Map[String, Int] + ): Map[String, Seq[ConsumerRecord[String, Any]]] = { + val consumer = outputConfig.consumer + val topic = outputConfig.outputConfig.topicName + val waitDuration = outputConfig.outputConfig.consumerTimeout * 1000 + logger.info(s"Consuming AVRO $topic during $waitDuration ms...") + + consumer.assignment().forEach(p => logger.debug(s"BEFORE CONSUME - partition=$p, position=${consumer.position(p)}")) + consumer + .endOffsets(consumer.assignment()) + .forEach((k, v) => logger.debug(s"END OFFSETS BEFORE CONSUME - partition=${k.partition()}, position=$v")) + + var kafkaRecords = mutable.Seq[ConsumerRecord[String, Any]]() + val timer = System.currentTimeMillis() + val expectedSeq = expectedKeys.toSeq + while ( + kafkaRecords + .groupBy(k => k.key()) + .map { case (k, records) => (k, records.size) } + .toSeq + .intersect(expectedSeq) != expectedSeq && + (timer + waitDuration) > System.currentTimeMillis() + ) { + kafkaRecords ++= consumer + .poll(Duration.ofMillis(waitDuration)) + .records(topic) + .asScala + .toSeq + .map(record => + new ConsumerRecord( + record.topic(), + record.partition(), + record.offset(), + record.timestamp(), + record.timestampType(), + record.serializedKeySize(), + record.serializedValueSize(), + outputConfig.toJson(record.key()), + record.value(), + record.headers(), + record.leaderEpoch() + ) + ) + } + + consumer.assignment().forEach(p => logger.debug(s"AFTER CONSUME - partition=$p, position=${consumer.position(p)}")) + consumer + .endOffsets(consumer.assignment()) + .forEach((k, v) => logger.debug(s"END OFFSETS AFTER CONSUME - partition=${k.partition()}, position=$v")) + logger.debug(s"""AFTER CONSUME - Raw records retrieved : \n + |${kafkaRecords + .map(r => s"(${r.partition()},${r.offset()},${r.key()}:${r.value()})") + .mkString("[", ",", "]")}""".stripMargin) + + kafkaRecords.toSeq.groupBy(r => r.key()) + } +} diff --git a/src/main/scala/com/lectra/kapoeira/kafka/KapoeiraProducer.scala b/src/main/scala/com/lectra/kapoeira/kafka/KapoeiraProducer.scala new file mode 100644 index 0000000..40c2f68 --- /dev/null +++ b/src/main/scala/com/lectra/kapoeira/kafka/KapoeiraProducer.scala @@ -0,0 +1,221 @@ +package com.lectra.kapoeira.kafka + +import com.fasterxml.jackson.databind.{JsonNode, ObjectMapper} +import com.lectra.kapoeira.Config +import com.lectra.kapoeira.Config._ +import com.lectra.kapoeira.domain.SubjectFormat.{Avro, Json} +import com.lectra.kapoeira.domain._ +import com.lectra.kapoeira.glue.RecordReadOps +import com.typesafe.scalalogging.LazyLogging +import io.confluent.kafka.schemaregistry.avro.{AvroSchema, AvroSchemaUtils} +import io.confluent.kafka.schemaregistry.json.JsonSchemaUtils +import kafka.tools.ConsoleProducer +import kafka.tools.ConsoleProducer.producerProps +import org.apache.avro.Schema +import org.apache.avro.generic.GenericData +import org.apache.kafka.clients.producer.{Callback, KafkaProducer, ProducerRecord, RecordMetadata} +import zio.{Task, ZIO, ZManaged} + +import scala.util.{Failure, Try} + +object KapoeiraProducer extends LazyLogging { + + private def 
serializeJson(subject: SubjectConfig, bytes: Array[Byte]): JsonNode = { + val schemaString = + requests + .get( + s"$KAFKA_SCHEMA_REGISTRY_URL/subjects/${subject.name}/versions/latest/schema" + ) + .text() + val value = new String(bytes) + val mapper = new ObjectMapper() + val schemaJson = mapper.readTree(schemaString) + val valueJson: JsonNode = mapper.readTree(value) + JsonSchemaUtils.envelope(schemaJson, valueJson) + } + + private def serializeAvro(subject: SubjectConfig, bytes: Array[Byte]): GenericData.Record = { + + val schemaVersions = + requests + .get( + s"$KAFKA_SCHEMA_REGISTRY_URL/subjects/${subject.name}/versions" + ) + .text() + val versions: Array[String] = schemaVersions.replace("[", "").replace("]", "").split(",") + + val init: Try[GenericData.Record] = Failure[GenericData.Record](new Exception(s"No schema version found for subject ${subject.name}")) + + versions.foldRight(init) { (version, acc) => + if (acc.isFailure) { + val schemaString = + requests + .get( + s"$KAFKA_SCHEMA_REGISTRY_URL/subjects/${subject.name}/versions/$version/schema" + ) + .text() + val parser = new Schema.Parser() + val schema = parser.parse(schemaString) + Try(AvroSchemaUtils + .toObject(new String(bytes), new AvroSchema(schema)) + .asInstanceOf[GenericData.Record]) + } + else { + acc + } + }.get + + } + + private def producer[K: DataType, V: DataType](topicConfig: TopicConfig): ZManaged[ + Any, + Throwable, + KafkaProducer[Any, Any] + ] = { + ZManaged + .make(ZIO.effect { + val params = commonConfig(topicConfig) ::: (if (JAAS_AUTHENT) jaasConfig else Nil) + import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig} + val props = producerProps(new ConsoleProducer.ProducerConfig(params.toArray)) + props.put( + ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, + Config.KAFKA_BROKER_LIST + ) + props.put( + ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, + implicitly[DataType[K]].classSerializer + ) + props.put( + ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + implicitly[DataType[V]].classSerializer + ) + props.put("schema.registry.url", Config.KAFKA_SCHEMA_REGISTRY_URL) + new KafkaProducer[Any, Any](props) + }) { producer => + ZIO + .effect { + producer.flush() + producer.close + } + .catchAll(err => ZIO.effectTotal(err.printStackTrace())) + } + } + + private def produce[K, V]( + producer: KafkaProducer[K, V], + topic: String, + key: K, + headers: Map[String, Array[Byte]], + recordValue: V + ): Task[Unit] = ZIO.effectAsync[Any, Throwable, Unit] { case callback => + val record = new ProducerRecord[K, V](topic, key, recordValue) + headers.foreach { case (k, v) => + record.headers().add(k, v) + } + producer.send( + record, + new Callback { + override def onCompletion( + metadata: RecordMetadata, + exception: Exception + ): Unit = callback(ZIO.unit) + } + ) + } + + def run( + record: RecordRead, + topicConfig: TopicConfig, + keySubjectConfig: Option[SubjectConfig], + valueSubjectConfig: Option[SubjectConfig] + ): Task[Unit] = { + for { + headers <- ZIO.fromTry(record.jsonHeaders) + _ <- ((keySubjectConfig, valueSubjectConfig) match { + case (Some(keySubConf), Some(valueSubConf)) => + (keySubConf.format, valueSubConf.format) match { + case (Avro, Avro) => producer[GenericData.Record, GenericData.Record] _ + case (Avro, Json) => producer[GenericData.Record, JsonNode] _ + case (Json, Avro) => producer[JsonNode, GenericData.Record] _ + case (Json, Json) => producer[JsonNode, JsonNode] _ + } + case (None, Some(valueSubConf)) => + valueSubConf.format match { + case Avro => producer[String, GenericData.Record] 
_ + case Json => producer[String, JsonNode] _ + } + case (Some(keySubConf), None) => + keySubConf.format match { + case Avro => producer[GenericData.Record, String] _ + case Json => producer[JsonNode, String] _ + } + case _ => producer[String, String] _ + })(topicConfig).use { producer => + val keyParsed = keySubjectConfig + .map(subject => + subject.format match { + case SubjectFormat.Avro => serializeAvro(subject, record.key.getBytes()) + case SubjectFormat.Json => serializeJson(subject, record.key.getBytes()) + } + ) + .getOrElse(record.key) + val valueParsed = valueSubjectConfig + .map(subject => + subject.format match { + case SubjectFormat.Avro => serializeAvro(subject, record.value) + case SubjectFormat.Json => serializeJson(subject, record.value) + } + ) + .getOrElse(new String(record.value)) + produce(producer, topicConfig.topicName, keyParsed, headers, valueParsed) + } + } yield () + } + + private def commonConfig( + topicConfig: TopicConfig + ): List[String] = + List( + "--topic", + s"${topicConfig.topicName}", + "--broker-list", + s"$KAFKA_BROKER_LIST", + "--producer-property", + "max.in.flight.requests.per.connection=1", + "--producer-property", + "acks=1", + "--producer-property", + "retries=0", + "--property", + "parse.key=true" + ) + + private def jaasConfig(): List[String] = + List( + "--producer-property", + "sasl.mechanism=SCRAM-SHA-512", + "--producer-property", + "security.protocol=SASL_SSL", + "--property", + "security.protocol=SASL_SSL", // FIXME duplicate info? + "--property", + "sasl.mechanism=SCRAM-SHA-512", + "--producer-property", + s"""sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required username="$KAFKA_USER" password="$KAFKA_PASSWORD";""" + ) + + object CustomCallback extends Callback { + override def onCompletion( + metadata: RecordMetadata, + exception: Exception + ): Unit = + if (exception == null) { + logger.debug( + s"PRODUCER (async) - partition=${metadata.partition()} - offset=${metadata.offset()}" + ) + } else { + logger.error(exception.getMessage) + } + } + +} diff --git a/src/test/resources/application.conf b/src/test/resources/application.conf new file mode 100644 index 0000000..6d7a092 --- /dev/null +++ b/src/test/resources/application.conf @@ -0,0 +1,10 @@ +kafka { + bootstrap.server = "localhost:9092" + schema.registry.url = "localhost:8081" + user = "xxx" + password = "xxx" + authent.isjaas = true +} +consumer { + group="kapoeira-test" +} \ No newline at end of file diff --git a/src/test/resources/avro/kapoeira/kapoeira.avrokey.avdl b/src/test/resources/avro/kapoeira/kapoeira.avrokey.avdl new file mode 100644 index 0000000..08bf4e7 --- /dev/null +++ b/src/test/resources/avro/kapoeira/kapoeira.avrokey.avdl @@ -0,0 +1,6 @@ +@namespace( "com.lectra.kapoeira" ) +protocol AvrokeyProtocol { + record Avrokeyv1 { + string aKey; + } +} diff --git a/src/test/resources/avro/kapoeira/kapoeira.avrovalue.avdl b/src/test/resources/avro/kapoeira/kapoeira.avrovalue.avdl new file mode 100644 index 0000000..df80cb2 --- /dev/null +++ b/src/test/resources/avro/kapoeira/kapoeira.avrovalue.avdl @@ -0,0 +1,8 @@ +@namespace( "com.lectra.kapoeira" ) +protocol AvrovalueProtocol { + record Avrovaluev1 { + int anInt; + string aString; + union { null, string } anOptionalString = null; + } +} diff --git a/src/test/resources/avro/version.txt b/src/test/resources/avro/version.txt new file mode 100644 index 0000000..56a6051 --- /dev/null +++ b/src/test/resources/avro/version.txt @@ -0,0 +1 @@ +1 \ No newline at end of file diff --git 
a/src/test/resources/cucumber.properties b/src/test/resources/cucumber.properties new file mode 100644 index 0000000..b48dd63 --- /dev/null +++ b/src/test/resources/cucumber.properties @@ -0,0 +1 @@ +cucumber.publish.quiet=true diff --git a/src/test/resources/features/assertions.feature b/src/test/resources/features/assertions.feature new file mode 100644 index 0000000..679ae4c --- /dev/null +++ b/src/test/resources/features/assertions.feature @@ -0,0 +1,43 @@ +Feature: assertions + Background: + Given input topic + | topic | alias | key_type | value_type | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_in | string | string | + + And output topic + | topic | alias | key_type | value_type | readTimeoutInSecond | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_out | string | string | 5 | + And var uuid = call function: uuid + + Scenario: Produce a record with headers + When records from file with key and value are sent + | topic_alias | separator | file | + | topic_in | # | features/records/keyheadersvalue.dat | + Then expected records + | topic_alias | key | headers | value | + | topic_out | key1_${uuid} | aliasHeaders2.1 | value2.1 | + | topic_out | key2_${uuid} | aliasHeaders2.2 | value2.2 | + | topic_out | key3_${uuid} | aliasHeaders2.3 | value2.3 | + And assert value2.1 $.qux == 42 + And assert value2.2 $ has size 2 + And assert value2.2 $ == [3,4] + And assert value2.3 $ == "value2.3" + And assert aliasHeaders2.1 $ == {"foo":"bar","baz":"42"} + And assert aliasHeaders2.1 $.foo == "bar" + + Scenario: Produce a complex record + When records from file with key and value are sent + | topic_alias | separator | file | + | topic_in | # | features/records/keyvalueobjectNarrays.dat | + Then expected records + | topic_alias | key | value | + | topic_out | key1_${uuid} | aliasValue1 | + | topic_out | key2_${uuid} | aliasValue2 | + And assert aliasValue1 $ match object {"foos":["item1","item2","item3"],"bar":{"baz":["item1","item2","item3"]}} + And assert aliasValue1 $ match object {"foos":["item1","item2","item3"]} + And assert aliasValue1 $ match object {"bar":{"baz":["item1","item2","item3"]}} + And assert aliasValue1 $.bar match object {"baz":["item1","item2","item3"]} + And assert aliasValue1 $.bar.baz[0] == "item1" + And assert aliasValue1 $ match exact object {"foos":["item1","item2","item3"],"bar":{"baz":["item1","item2","item3"]}} + And assert aliasValue1 $.bar match exact object {"baz":["item1","item2","item3"]} + And assert aliasValue2 $.qux[?(@.key1!=null)] match object {"key1":"toto"} diff --git a/src/test/resources/features/batch-produce-consume.feature b/src/test/resources/features/batch-produce-consume.feature new file mode 100644 index 0000000..31d79d5 --- /dev/null +++ b/src/test/resources/features/batch-produce-consume.feature @@ -0,0 +1,33 @@ +Feature: producer-file-key-value-mode-batch + + Background: + Given subject + | name | alias | format | + | kapoeira.avrovaluev1 | kapoAlias | avro | + Given input topic + | topic | alias | key_type | value_type | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_in1 | string | string | + | private.euw.kapoeira-dsl-it.avrovalue.tracking.raw | topic_in2 | string | kapoAlias | + + And output topic + | topic | alias | key_type | value_type | readTimeoutInSecond | + | private.euw.kapoeira-dsl-it.mergedstringvalue.tracking.raw | topic_out | string | string | 5 | + And var uuid = call function: uuid + + Scenario: Produce records in multiple topics, using batch mode to keep order between 
consumption and production + When records from file with key and value are sent + | topic_alias | separator | file | batch | + | topic_in1 | # | features/records/batch1.1.dat | 1 | + | topic_in1 | # | features/records/batch1.2.dat | 1 | + | topic_in2 | # | features/records/batch2.1.dat | 2 | + | topic_in2 | # | features/records/batch2.2.dat | 2 | + Then expected records + | topic_alias | key | value | batch | + | topic_out | samekey_${uuid} | value1.1 | 1 | + | topic_out | samekey_${uuid} | value1.2 | 1 | + | topic_out | samekey_${uuid} | value2.1 | 2 | + | topic_out | samekey_${uuid} | value2.2 | 2 | + And assert value1.1 $.FOO == 1_${uuid} + And assert value1.2 $.FOO == 2_${uuid} + And assert value1.1 $.ANINT == 3 + And assert value2.2 $.ANINT == 4 diff --git a/src/test/resources/features/call-external-script.feature b/src/test/resources/features/call-external-script.feature new file mode 100644 index 0000000..4bbeee7 --- /dev/null +++ b/src/test/resources/features/call-external-script.feature @@ -0,0 +1,38 @@ +Feature: call-external-script + + Background: + Given input topic + | topic | alias | key_type | value_type | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_in | string | string | + And output topic + | topic | alias | key_type | value_type | readTimeoutInSecond | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_out | string | string | 5 | + + Scenario: call-scripts + Given var foo = bar + Given var foo2 = "bar" + When call script : /features/scripts/runExternalTool.sh 42 + And call script : + """ + echo 42 + """ + And var myValue = call script : /features/scripts/runExternalTool.sh 43 + And var myKey = call script : + """ + echo 44 + """ + And var myObject = {"foo":{"bar":"baz"},"qux":42} + And var myArray = [true,false,null,0,{"foo":"bar"},[1,2,3]] + And assert var myKey $ == 44 + And assert var myValue $ == "Hello World 43" + And assert var myObject $.qux == 42 + And assert var myArray $ has size 6 + And assert var myArray $[0] == true + And assert var myArray $[4].foo == "bar" + And assert var myArray $[4].foo == "${foo}" + And assert var myArray $[4].foo == ${foo2} + And assert var myArray $[5][0] == 1 + And assert var myObject $.foo match object {"bar":"baz"} + And assert var myObject $ match object {"foo":{"bar":"baz"}} + And assert var myObject $.foo match exact object {"bar":"baz"} + And assert var myObject $ match exact object {"foo":{"bar":"baz"},"qux":42} diff --git a/src/test/resources/features/call-function.feature b/src/test/resources/features/call-function.feature new file mode 100644 index 0000000..ddc7aa1 --- /dev/null +++ b/src/test/resources/features/call-function.feature @@ -0,0 +1,87 @@ +Feature: call-functions + + Background: + Given input topic + | topic | alias | key_type | value_type | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_in | string | string | + And output topic + | topic | alias | key_type | value_type | readTimeoutInSecond | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_out | string | string | 5 | + + Scenario: uuid + Given var foo = bar + And var uuid = call function : uuid + And var isUUID = call script : /features/scripts/isUUID.sh ${uuid} + And var isNotUUID = call script : /features/scripts/isUUID.sh ${foo} + When records with key and value are sent + | topic_alias | key | value | + | topic_in | isUUID | ${isUUID} | + | topic_in | isNotUUID | ${isNotUUID} | + Then expected records + | topic_alias | key | value | + | topic_out | isUUID | aliasValue1 | + | topic_out | 
isNotUUID | aliasValue2 | + And assert aliasValue1 $ == "true" + And assert aliasValue2 $ == "false" + + Scenario: sleep_now + Given var date_before = call function : now + And var print_first = call function : print sleep before ${date_before} + When var sleep = call function : sleep 5000 + And var date_after = call function : now + And var print_second = call function : print sleep after ${date_after} + + Scenario: uppercase + Given var foo = bar + And var uppercaseFoo = call function : uppercase ${foo} + When records with key and value are sent + | topic_alias | key | value | + | topic_in | UPPERCASE | ${uppercaseFoo} | + Then expected records + | topic_alias | key | value | + | topic_out | UPPERCASE | aliasValue1 | + And assert aliasValue1 $ == "BAR" + + Scenario: lowercase + Given var foo = BAR + And var uppercaseFoo = call function : lowercase ${foo} + When records with key and value are sent + | topic_alias | key | value | + | topic_in | lowercase | ${uppercaseFoo} | + Then expected records + | topic_alias | key | value | + | topic_out | lowercase | aliasValue1 | + And assert aliasValue1 $ == "bar" + + Scenario: sha256 + Given var foo = some description to hash + And var sha256Foo = call function : sha256 ${foo} + When records with key and value are sent + | topic_alias | key | value | + | topic_in | sha256 | ${sha256Foo} | + Then expected records + | topic_alias | key | value | + | topic_out | sha256 | aliasValue1 | + And assert aliasValue1 $ == "c4503e8f44c69fea01bca0a28acd5ca9f82d31dd287c9200729d7b11f5658be5" + + Scenario: sha1 + Given var foo = some description to hash + And var sha1Foo = call function : sha1 ${foo} + When records with key and value are sent + | topic_alias | key | value | + | topic_in | sha1 | ${sha1Foo} | + Then expected records + | topic_alias | key | value | + | topic_out | sha1 | aliasValue1 | + And assert aliasValue1 $ == "398ccd6616fb1ec2086eddfccea671823b58f466" + + Scenario: sha1_Interpolation + Given var foo = some description to hash + And var sha1Foo = call function : sha1 ${foo}bar + When records with key and value are sent + | topic_alias | key | value | + | topic_in | sha1Inter | ${sha1Foo} | + Then expected records + | topic_alias | key | value | + | topic_out | sha1Inter | aliasValue1 | + And assert aliasValue1 $ == "aee512dd2d5d0f3ec26df2b2e0d583fe81a88ab4" diff --git a/src/test/resources/features/consumer-avro-full-key-value.feature b/src/test/resources/features/consumer-avro-full-key-value.feature new file mode 100644 index 0000000..cbad5e0 --- /dev/null +++ b/src/test/resources/features/consumer-avro-full-key-value.feature @@ -0,0 +1,30 @@ +Feature: consumer-avro-key-value + + Background: + Given subject + | name | alias | format | + | kapoeira.avrokeyv1 | avro_key | avro | + | kapoeira.avrovaluev1 | avro_value | avro | + And input topic + | topic | alias | key_type | value_type | + | private.euw.kapoeira-dsl-it.fullavro.tracking.raw | topic_in | avro_key | avro_value | + And output topic + | topic | alias | key_type | value_type | readTimeoutInSecond | + | private.euw.kapoeira-dsl-it.fullavro.tracking.raw | topic_out | avro_key | avro_value | 10 | + And var uuid = call function : uuid + + Scenario: Produce a record + When records with key and value are sent + | topic_alias | key | value | + | topic_in | {"aKey":"aTestKey_${uuid}"} | {"anInt": 1, "aString": "myString1", "anOptionalString": { "string": "test"} } | + | topic_in | {"aKey":"aTestKey_${uuid}"} | {"anInt": 2, "aString": "myString2", "anOptionalString": null } | + Then expected 
records + | topic_alias | key | value | + | topic_out | {"aKey":"aTestKey_${uuid}"} | aliasValue1 | + | topic_out | {"aKey":"aTestKey_${uuid}"} | aliasValue2 | + And assert aliasValue1 $.anInt == 1 + And assert aliasValue1 $.aString == "myString1" + And assert aliasValue1 $.anOptionalString == "test" + And assert aliasValue2 $.anInt == 2 + And assert aliasValue2 $.aString == "myString2" + And assert aliasValue2 $.anOptionalString == null diff --git a/src/test/resources/features/consumer-avro-key-value.feature b/src/test/resources/features/consumer-avro-key-value.feature new file mode 100644 index 0000000..a55e425 --- /dev/null +++ b/src/test/resources/features/consumer-avro-key-value.feature @@ -0,0 +1,29 @@ +Feature: consumer-avro-key-value + + Background: + Given subject + | name | alias | format | + | kapoeira.avrovaluev1 | avro_value | avro | + And input topic + | topic | alias | key_type | value_type | + | private.euw.kapoeira-dsl-it.avrovalue.tracking.raw | topic_in | string | avro_value | + And output topic + | topic | alias | key_type | value_type | readTimeoutInSecond | + | private.euw.kapoeira-dsl-it.avrovalue.tracking.raw | topic_out | string | avro_value | 10 | + And var uuid = call function : uuid + + Scenario: Produce a record + When records with key and value are sent + | topic_alias | key | value | + | topic_in | aTestKey_${uuid} | {"anInt": 1, "aString": "myString1", "anOptionalString": { "string": "test"} } | + | topic_in | aTestKey_${uuid} | {"anInt": 2, "aString": "myString2", "anOptionalString": null } | + Then expected records + | topic_alias | key | value | + | topic_out | aTestKey_${uuid} | aliasValue1 | + | topic_out | aTestKey_${uuid} | aliasValue2 | + And assert aliasValue1 $.anInt == 1 + And assert aliasValue1 $.aString == "myString1" + And assert aliasValue1 $.anOptionalString == "test" + And assert aliasValue2 $.anInt == 2 + And assert aliasValue2 $.aString == "myString2" + And assert aliasValue2 $.anOptionalString == null diff --git a/src/test/resources/features/consumer-file-key-value.feature b/src/test/resources/features/consumer-file-key-value.feature new file mode 100644 index 0000000..cef8e33 --- /dev/null +++ b/src/test/resources/features/consumer-file-key-value.feature @@ -0,0 +1,24 @@ +Feature: consumer-file-key-value + + Background: + Given input topic + | topic | alias | key_type | value_type | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_in | string | string | + And output topic + | topic | alias | key_type | value_type | readTimeoutInSecond | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_out | string | string | 8 | + And var uuid = call function : uuid + + Scenario: Produce a record + When records from file with key and value are sent + | topic_alias | separator | file | + | topic_in | # | features/records/keyvalue.dat | + Then expected records + | topic_alias | key | value | + | topic_out | key1_${uuid} | aliasValue1 | + | topic_out | key1_${uuid} | aliasValue2 | + | topic_out | key2_${uuid} | aliasValue3 | + | topic_out | key5_${uuid} | aliasValue4 | + And assert aliasValue1 $ == "value1.1" + And assert aliasValue2 $ == "value1.2" + And assert aliasValue3 $ == "value2" diff --git a/src/test/resources/features/consumer-json-key-value.feature b/src/test/resources/features/consumer-json-key-value.feature new file mode 100644 index 0000000..40c7104 --- /dev/null +++ b/src/test/resources/features/consumer-json-key-value.feature @@ -0,0 +1,29 @@ +Feature: consumer-json-key-value + + Background: + Given subject + 
| name | alias | format | + | kapoeira.jsonvaluev1 | json_value | json | + And input topic + | topic | alias | key_type | value_type | + | private.euw.kapoeira-dsl-it.jsonvalue.tracking.raw | topic_in | string | json_value | + And output topic + | topic | alias | key_type | value_type | readTimeoutInSecond | + | private.euw.kapoeira-dsl-it.jsonvalue.tracking.raw | topic_out | string | json_value | 10 | + And var uuid = call function : uuid + + Scenario: Produce a record + When records with key and value are sent + | topic_alias | key | value | + | topic_in | aTestKey_${uuid} | {"anInt": 1, "aString": "myString1", "anOptionalString": "test"} | + | topic_in | aTestKey_${uuid} | {"anInt": 2, "aString": "myString2", "anOptionalString": null } | + Then expected records + | topic_alias | key | value | + | topic_out | aTestKey_${uuid} | aliasValue1 | + | topic_out | aTestKey_${uuid} | aliasValue2 | + And assert aliasValue1 $.anInt == 1 + And assert aliasValue1 $.aString == "myString1" + And assert aliasValue1 $.anOptionalString == "test" + And assert aliasValue2 $.anInt == 2 + And assert aliasValue2 $.aString == "myString2" + And assert aliasValue2 $.anOptionalString == null diff --git a/src/test/resources/features/consumer-key-value.feature b/src/test/resources/features/consumer-key-value.feature new file mode 100644 index 0000000..8baf8d7 --- /dev/null +++ b/src/test/resources/features/consumer-key-value.feature @@ -0,0 +1,48 @@ +Feature: consumer-key-value + + Background: + Given input topic + | topic | alias | key_type | value_type | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_in | string | string | + And var foo = 42 + And var bar = 33 + And var key = aTestKey + And var toto = call function: uuid + And var uuid = call function: uuid + And var testJson = {"obj": "hello world" } + And output topic + | topic | alias | key_type | value_type | readTimeoutInSecond | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_out | string | string | 8 | + + Scenario: Produce a record String/String + When records with key and value are sent + | topic_alias | key | value | + | topic_in | ${key}_${uuid} | aValue1 | + | topic_in | aTestKey_${uuid} | x ${foo} y ${bar} | + | topic_in | aTestKey_${uuid} | ${toto} | + | topic_in | aTestKey_${uuid} | ${testJson} | + Then expected records + | topic_alias | key | value | + | topic_out | aTestKey_${uuid} | aliasValue1 | + | topic_out | ${key}_${uuid} | aliasValue2 | + | topic_out | aTestKey_${uuid} | aliasValue3 | + | topic_out | aTestKey_${uuid} | aliasValue4 | + And assert aliasValue1 $ == "aValue1" + And assert aliasValue2 $ == "x ${foo} y ${bar}" + And assert aliasValue3 $ == "${toto}" + And assert aliasValue4 $.obj == "hello world" + + Scenario: Produce a record String/JsonString + When records with key and value are sent + | topic_alias | key | value | + | topic_in | aTestKey_${uuid} | "aValue" | + | topic_in | aTestKey2_${uuid} | {"a": "aValue2"} | + | topic_in | aTestKey2_${uuid} | [{"a": "aValue3"}] | + Then expected records + | topic_alias | key | value | + | topic_out | aTestKey_${uuid} | jsonString | + | topic_out | aTestKey2_${uuid} | jsonObject | + | topic_out | aTestKey2_${uuid} | jsonArray | + And assert jsonString $ == "\"aValue\"" + And assert jsonObject $.a == "aValue2" + And assert jsonArray $[0].a == "aValue3" diff --git a/src/test/resources/features/perf-records-scenarios.feature b/src/test/resources/features/perf-records-scenarios.feature new file mode 100644 index 0000000..f63a242 --- /dev/null +++ 
b/src/test/resources/features/perf-records-scenarios.feature @@ -0,0 +1,75 @@ +Feature: consumer-file-key-value + + Background: + Given input topic + | topic | alias | key_type | value_type | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_in | string | string | + + And output topic + | topic | alias | key_type | value_type | readTimeoutInSecond | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_out | string | string | 5 | + + Scenario Outline: Produce several records + Given var uuid = call function : uuid + And var suffix = <scenario> + + When records from file with key and value are sent + | topic_alias | separator | file | + | topic_in | # | features/records/perf-keyvalue.dat | + + Then expected records + | topic_alias | key | value | + | topic_out | key1_${uuid} | aliasValue10 | + | topic_out | key1_${uuid} | aliasValue11 | + | topic_out | key1_${uuid} | aliasValue12 | + | topic_out | key1_${uuid} | aliasValue13 | + | topic_out | key1_${uuid} | aliasValue14 | + | topic_out | key1_${uuid} | aliasValue15 | + | topic_out | key1_${uuid} | aliasValue16 | + | topic_out | key1_${uuid} | aliasValue17 | + | topic_out | key1_${uuid} | aliasValue18 | + | topic_out | key1_${uuid} | aliasValue19 | + | topic_out | key2_${uuid} | aliasValue20 | + | topic_out | key2_${uuid} | aliasValue21 | + | topic_out | key2_${uuid} | aliasValue22 | + | topic_out | key2_${uuid} | aliasValue23 | + | topic_out | key2_${uuid} | aliasValue24 | + | topic_out | key2_${uuid} | aliasValue25 | + | topic_out | key2_${uuid} | aliasValue26 | + | topic_out | key2_${uuid} | aliasValue27 | + | topic_out | key2_${uuid} | aliasValue28 | + | topic_out | key2_${uuid} | aliasValue29 | + And assert aliasValue10 $ == "value10_${suffix}" + And assert aliasValue11 $ == "value11_${suffix}" + And assert aliasValue12 $ == "value12_${suffix}" + And assert aliasValue13 $ == "value13_${suffix}" + And assert aliasValue14 $ == "value14_${suffix}" + And assert aliasValue15 $ == "value15_${suffix}" + And assert aliasValue16 $ == "value16_${suffix}" + And assert aliasValue17 $ == "value17_${suffix}" + And assert aliasValue18 $ == "value18_${suffix}" + And assert aliasValue19 $ == "value19_${suffix}" + And assert aliasValue20 $ == "value20_${suffix}" + And assert aliasValue21 $ == "value21_${suffix}" + And assert aliasValue22 $ == "value22_${suffix}" + And assert aliasValue23 $ == "value23_${suffix}" + And assert aliasValue24 $ == "value24_${suffix}" + And assert aliasValue25 $ == "value25_${suffix}" + And assert aliasValue26 $ == "value26_${suffix}" + And assert aliasValue27 $ == "value27_${suffix}" + And assert aliasValue28 $ == "value28_${suffix}" + And assert aliasValue29 $ == "value29_${suffix}" + + Examples: + | scenario | + | 0 | + | 1 | + | 2 | + | 3 | + | 4 | + | 5 | + | 6 | + | 7 | + | 8 | + | 9 | + diff --git a/src/test/resources/features/producer-avro-file-key-value.feature b/src/test/resources/features/producer-avro-file-key-value.feature new file mode 100644 index 0000000..78da982 --- /dev/null +++ b/src/test/resources/features/producer-avro-file-key-value.feature @@ -0,0 +1,29 @@ +Feature: producer-avro-file-key-value + + Background: + Given subject + | name | alias | format | + | kapoeira.avrovaluev1 | kapoAlias | avro | + Given input topic + | topic | alias | key_type | value_type | + | private.euw.kapoeira-dsl-it.avrovalue.tracking.raw | topic_in | string | kapoAlias | + And output topic + | topic | alias | key_type | value_type | readTimeoutInSecond | + | private.euw.kapoeira-dsl-it.avrovalue.tracking.raw |
topic_out | string | kapoAlias | 10 | + And var uuid = call function: uuid + + Scenario: Produce a record + When records from file with key and value are sent + | topic_alias | separator | file | + | topic_in | # | features/records/avrokeyvalue.dat | + Then expected records + | topic_alias | key | value | + | topic_out | key1_${uuid} | aliasValue1 | + | topic_out | key2_${uuid} | aliasValue2 | + And assert aliasValue1 $.anInt == 1 + And assert aliasValue1 $.aString == "myString1" + And assert aliasValue1 $.anOptionalString == "test" + And assert aliasValue2 $.anInt == 2 + And assert aliasValue2 $.aString == "myString2" + And assert aliasValue2 $.anOptionalString == null + diff --git a/src/test/resources/features/producer-file-formatted-value.feature b/src/test/resources/features/producer-file-formatted-value.feature new file mode 100644 index 0000000..bb716f1 --- /dev/null +++ b/src/test/resources/features/producer-file-formatted-value.feature @@ -0,0 +1,18 @@ +Feature: producer-file-formatted-value + + Background: + Given input topic + | topic | alias | key_type | value_type | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_in | string | string | + And output topic + | topic | alias | key_type | value_type | readTimeoutInSecond | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_out | string | string | 5 | + And var uuid = call function: uuid + + Scenario: Produce a record + When records from file with formatted value are sent + | topic_alias | key | file | + | topic_in | keyY_${uuid} | features/records/formattedvalue.json | + Then expected records + | topic_alias | key | value | + | topic_out | keyY_${uuid} | ? | diff --git a/src/test/resources/features/producer-file-key-value.feature b/src/test/resources/features/producer-file-key-value.feature new file mode 100644 index 0000000..0d4b7b8 --- /dev/null +++ b/src/test/resources/features/producer-file-key-value.feature @@ -0,0 +1,28 @@ +Feature: producer-file-key-value + + Background: + Given input topic + | topic | alias | key_type | value_type | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_in | string | string | + And output topic + | topic | alias | key_type | value_type | readTimeoutInSecond | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_out | string | string | 5 | + And var uuid = call function: uuid + + + Scenario: Produce a record with headers + When records from file with key and value are sent + | topic_alias | separator | file | + | topic_in | # | features/records/keyheadersvalue.dat | + Then expected records + | topic_alias | key | headers | value | + | topic_out | key1_${uuid} | aliasHeaders2.1 | value2.1 | + | topic_out | key2_${uuid} | aliasHeaders2.2 | value2.2 | + | topic_out | key3_${uuid} | aliasHeaders2.3 | value2.3 | + And assert value2.1 $.qux == 42 + And assert value2.2 $ has size 2 + And assert value2.2 $ == [3,4] + And assert value2.3 $ == "value2.3" + And assert aliasHeaders2.1 $ == {"foo":"bar","baz":"42"} + And assert aliasHeaders2.1 $.foo == "bar" + diff --git a/src/test/resources/features/producer-file-value.feature b/src/test/resources/features/producer-file-value.feature new file mode 100644 index 0000000..c21f992 --- /dev/null +++ b/src/test/resources/features/producer-file-value.feature @@ -0,0 +1,19 @@ +Feature: producer-file-value + + Background: + Given input topic + | topic | alias | key_type | value_type | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_in | string | string | + And output topic + | topic | alias | 
key_type | value_type | readTimeoutInSecond | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_out | string | string | 5 | + And var uuid = call function: uuid + + Scenario: Produce a record + When records from file with value are sent + | topic_alias | key | file | + | topic_in | keyX_${uuid} | features/records/value.dat | + Then expected records + | topic_alias | key | value | + | topic_out | keyX_${uuid} | valueA | + diff --git a/src/test/resources/features/producer-key-value.feature b/src/test/resources/features/producer-key-value.feature new file mode 100644 index 0000000..622a6ff --- /dev/null +++ b/src/test/resources/features/producer-key-value.feature @@ -0,0 +1,18 @@ +Feature: producer-key-value + + Background: + Given input topic + | topic | alias | key_type | value_type | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_in | string | string | + And output topic + | topic | alias | key_type | value_type | readTimeoutInSecond | + | private.euw.kapoeira-dsl-it.stringvalue.tracking.raw | topic_out | string | string | 5 | + And var uuid = call function: uuid + + Scenario: Produce a record + When records with key and value are sent + | topic_alias | key | value | + | topic_in | aTestKey_${uuid} | aValue | + Then expected records + | topic_alias | key | value | + | topic_out | aTestKey_${uuid} | aValue | diff --git a/src/test/resources/features/records/avrokeyvalue.dat b/src/test/resources/features/records/avrokeyvalue.dat new file mode 100644 index 0000000..7d5d548 --- /dev/null +++ b/src/test/resources/features/records/avrokeyvalue.dat @@ -0,0 +1,2 @@ +key1_${uuid}#{"anInt": 1, "aString": "myString1", "anOptionalString": { "string": "test"} } +key2_${uuid}#{"anInt": 2, "aString": "myString2", "anOptionalString": null } \ No newline at end of file diff --git a/src/test/resources/features/records/batch1.1.dat b/src/test/resources/features/records/batch1.1.dat new file mode 100644 index 0000000..0668216 --- /dev/null +++ b/src/test/resources/features/records/batch1.1.dat @@ -0,0 +1 @@ +samekey_${uuid}#{"foo":"1_${uuid}"} \ No newline at end of file diff --git a/src/test/resources/features/records/batch1.2.dat b/src/test/resources/features/records/batch1.2.dat new file mode 100644 index 0000000..64bde89 --- /dev/null +++ b/src/test/resources/features/records/batch1.2.dat @@ -0,0 +1 @@ +samekey_${uuid}#{"foo":"2_${uuid}"} \ No newline at end of file diff --git a/src/test/resources/features/records/batch2.1.dat b/src/test/resources/features/records/batch2.1.dat new file mode 100644 index 0000000..a83cba5 --- /dev/null +++ b/src/test/resources/features/records/batch2.1.dat @@ -0,0 +1 @@ +samekey_${uuid}#{"anInt": 3, "aString": "${uuid}", "anOptionalString": null } \ No newline at end of file diff --git a/src/test/resources/features/records/batch2.2.dat b/src/test/resources/features/records/batch2.2.dat new file mode 100644 index 0000000..03c23c3 --- /dev/null +++ b/src/test/resources/features/records/batch2.2.dat @@ -0,0 +1 @@ +samekey_${uuid}#{"anInt": 4, "aString": "${uuid}", "anOptionalString": null } diff --git a/src/test/resources/features/records/formattedvalue.json b/src/test/resources/features/records/formattedvalue.json new file mode 100644 index 0000000..311632e --- /dev/null +++ b/src/test/resources/features/records/formattedvalue.json @@ -0,0 +1,4 @@ +{ + "field1": "value1", + "field2": 2 +} \ No newline at end of file diff --git a/src/test/resources/features/records/keyheadersvalue.dat b/src/test/resources/features/records/keyheadersvalue.dat 
new file mode 100644 index 0000000..ec2fdc5 --- /dev/null +++ b/src/test/resources/features/records/keyheadersvalue.dat @@ -0,0 +1,3 @@ +key1_${uuid}#{"qux":42}#{"foo":"bar","baz":42} +key2_${uuid}#[3,4]#{"foo":"bar","baz":[1,2]} +key3_${uuid}#value2.3#{"foo":"bar"} diff --git a/src/test/resources/features/records/keyvalue.dat b/src/test/resources/features/records/keyvalue.dat new file mode 100644 index 0000000..3e236f3 --- /dev/null +++ b/src/test/resources/features/records/keyvalue.dat @@ -0,0 +1,3 @@ +key1_${uuid}#value1.1 +key1_${uuid}#value1.2 +key2_${uuid}#value2 \ No newline at end of file diff --git a/src/test/resources/features/records/keyvalueobject.dat b/src/test/resources/features/records/keyvalueobject.dat new file mode 100644 index 0000000..bd49191 --- /dev/null +++ b/src/test/resources/features/records/keyvalueobject.dat @@ -0,0 +1,2 @@ +key1_${uuid}#{"foo":"fooString","bar":{"optionalVal":42}} +key2_${uuid}#{"foo":"fooString","bar":{}} \ No newline at end of file diff --git a/src/test/resources/features/records/keyvalueobjectNarrays.dat b/src/test/resources/features/records/keyvalueobjectNarrays.dat new file mode 100644 index 0000000..df09050 --- /dev/null +++ b/src/test/resources/features/records/keyvalueobjectNarrays.dat @@ -0,0 +1,2 @@ +key1_${uuid}#{"foos":["item1","item2","item3"],"bar":{"baz":["item1","item2","item3"]}} +key2_${uuid}#{"qux":[{"key1":"toto"},{"key2":"titi"}]} \ No newline at end of file diff --git a/src/test/resources/features/records/perf-keyvalue.dat b/src/test/resources/features/records/perf-keyvalue.dat new file mode 100644 index 0000000..f28e027 --- /dev/null +++ b/src/test/resources/features/records/perf-keyvalue.dat @@ -0,0 +1,20 @@ +key1_${uuid}#value10_${suffix} +key1_${uuid}#value11_${suffix} +key1_${uuid}#value12_${suffix} +key1_${uuid}#value13_${suffix} +key1_${uuid}#value14_${suffix} +key1_${uuid}#value15_${suffix} +key1_${uuid}#value16_${suffix} +key1_${uuid}#value17_${suffix} +key1_${uuid}#value18_${suffix} +key1_${uuid}#value19_${suffix} +key2_${uuid}#value20_${suffix} +key2_${uuid}#value21_${suffix} +key2_${uuid}#value22_${suffix} +key2_${uuid}#value23_${suffix} +key2_${uuid}#value24_${suffix} +key2_${uuid}#value25_${suffix} +key2_${uuid}#value26_${suffix} +key2_${uuid}#value27_${suffix} +key2_${uuid}#value28_${suffix} +key2_${uuid}#value29_${suffix} \ No newline at end of file diff --git a/src/test/resources/features/records/value.dat b/src/test/resources/features/records/value.dat new file mode 100644 index 0000000..35c10e1 --- /dev/null +++ b/src/test/resources/features/records/value.dat @@ -0,0 +1,3 @@ +valueA +valueB +valueC \ No newline at end of file diff --git a/src/test/resources/features/scripts/isUUID.sh b/src/test/resources/features/scripts/isUUID.sh new file mode 100755 index 0000000..ed76c1a --- /dev/null +++ b/src/test/resources/features/scripts/isUUID.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +uuid=$1 + +if [[ $uuid =~ ^\{?[A-F0-9a-f]{8}-[A-F0-9a-f]{4}-[A-F0-9a-f]{4}-[A-F0-9a-f]{4}-[A-F0-9a-f]{12}\}?$ ]]; then + echo "true" +else + echo "false" +fi diff --git a/src/test/resources/features/scripts/runExternalTool.sh b/src/test/resources/features/scripts/runExternalTool.sh new file mode 100755 index 0000000..be7f2ef --- /dev/null +++ b/src/test/resources/features/scripts/runExternalTool.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +docker run --rm busybox echo "Hello World $1" diff --git a/src/test/resources/json/kapoeira.jsonvaluev1.json b/src/test/resources/json/kapoeira.jsonvaluev1.json new file mode 100644 index 0000000..0f1e03f --- 
/dev/null +++ b/src/test/resources/json/kapoeira.jsonvaluev1.json @@ -0,0 +1,4 @@ +{ + "schemaType": "JSON", + "schema": "{ \"$schema\": \"http://json-schema.org/draft-04/schema#\", \"title\": \"com.lectra.kapoeira.jsonvaluev1\", \"description\": \"jsonvaluev1\", \"type\": \"object\", \"properties\": { \"anInt\": { \"type\": \"integer\" }, \"aString\": { \"type\": \"string\" }, \"anOptionalString\": {\"type\":[\"string\",\"null\"]} } }" +} diff --git a/src/test/scala/com/lectra/kapoeira/FeaturesTestRunner.scala b/src/test/scala/com/lectra/kapoeira/FeaturesTestRunner.scala new file mode 100644 index 0000000..a3fdce7 --- /dev/null +++ b/src/test/scala/com/lectra/kapoeira/FeaturesTestRunner.scala @@ -0,0 +1,16 @@ +package com.lectra.kapoeira + +import io.cucumber.junit.{Cucumber, CucumberOptions} +import org.junit.runner.RunWith + +@RunWith(classOf[Cucumber]) +@CucumberOptions( + features = Array("classpath:features"), + glue = Array("classpath:com.lectra.kapoeira.glue"), + plugin = Array( + "pretty", + "json:target/reports/kapoeira-report.json", + "junit:target/reports/kapoeira-report.xml", + "html:target/reports/kapoeira-report.html") +) +class FeaturesTestRunner {} diff --git a/src/test/scala/com/lectra/kapoeira/domain/AssertionContextTest.scala b/src/test/scala/com/lectra/kapoeira/domain/AssertionContextTest.scala new file mode 100644 index 0000000..6ae1cc3 --- /dev/null +++ b/src/test/scala/com/lectra/kapoeira/domain/AssertionContextTest.scala @@ -0,0 +1,254 @@ +package com.lectra.kapoeira.domain + +import com.lectra.kapoeira.domain.Services.RecordConsumer +import com.lectra.kapoeira.kafka.KapoeiraProducer +import org.apache.kafka.clients.consumer.ConsumerRecord +import org.scalamock.scalatest.MockFactory +import org.scalatest.GivenWhenThen +import org.scalatest.featurespec.AnyFeatureSpec +import org.scalatest.matchers.should.Matchers + +import java.nio.charset.StandardCharsets + +class AssertionContextTest + extends AnyFeatureSpec + with Matchers + with GivenWhenThen + with MockFactory { + + implicit val recordConsume: RecordConsumer = (_, _) => Map.empty + + Feature("init") { + Scenario("IAE because of bad background") { + Given("context") + val backgroundContext = new BackgroundContext + val assertionContext = new AssertionContext(WhenStepsLive(backgroundContext,recordConsume,KapoeiraProducer.run _)) + + When("init AssertionContext") + Then("IAE") + assertThrows[IllegalArgumentException] { + assertionContext.launchConsumption( + List(KeyValueWithAliasesRecord("topic", "key", "valueAlias")) + ) + } + } + + Scenario("minimum data") { + Given("minimal background") + val backgroundContext = mock[BackgroundContext] + val assertionContext = new AssertionContext(WhenStepsLive(backgroundContext,recordConsume,KapoeiraProducer.run _)) + val consumerRecord = new ConsumerRecord( + "topic", + 0, + 0, + "key", + "value".getBytes.asInstanceOf[Any] + ) + val keyValueRecord = + KeyValueWithAliasesRecord("topic", "key", "valueAlias") + (backgroundContext + .consumeTopic(_: String, _: Map[String, Int])(_: RecordConsumer)) + .expects(*, *, *) + .returning(Map("key" -> Seq(consumerRecord))) + val expectedConsumedRecords = List(keyValueRecord) + + When("init AssertionContext") + assertionContext.launchConsumption( + expectedConsumedRecords + ) + + Then("assertionContext maps") + assertionContext.expectedRecordByValueAlias shouldBe Map( + "valueAlias" -> keyValueRecord + ) + assertionContext.expectedRecordsByTopicByKey shouldBe Map( + "topic" -> Map("key" -> Seq(keyValueRecord)) + ) + 
assertionContext.consumedRecordsByTopicByKey shouldBe + Map( + "topic" -> Map("key" -> Seq(consumerRecord)) + ) + } + + Scenario("two topics, 1 key per topic") { + Given("background with 2 topics") + val backgroundContext = mock[BackgroundContext] + val assertionContext = new AssertionContext(WhenStepsLive(backgroundContext,recordConsume,KapoeiraProducer.run _)) + val recordWithHeaders = new ConsumerRecord( + "topic1", + 0, + 0, + "key1", + "value1.1".getBytes.asInstanceOf[Any] + ) + recordWithHeaders + .headers() + .add("foo", """"bar"""".getBytes(StandardCharsets.UTF_8)) + val consumerRecords1 = Seq( + recordWithHeaders, + new ConsumerRecord( + "topic1", + 0, + 1, + "key1", + "value1.2".getBytes.asInstanceOf[Any] + ) + ) + val consumerRecords2 = Seq( + new ConsumerRecord( + "topic2", + 0, + 0, + "key2", + "value2".getBytes.asInstanceOf[Any] + ) + ) + (backgroundContext + .consumeTopic(_: String, _: Map[String, Int])(_: RecordConsumer)) + .expects("topic1", *, *) + .returning(Map("key1" -> consumerRecords1)) + (backgroundContext + .consumeTopic(_: String, _: Map[String, Int])(_: RecordConsumer)) + .expects("topic2", *, *) + .returning(Map("key2" -> consumerRecords2)) + val expectedConsumedRecords = List( + KeyValueWithAliasesRecord( + "topic1", + "key1", + "alias_value1.1", + Some("aliasHeaders1.1") + ), + KeyValueWithAliasesRecord( + "topic2", + "key2", + "alias_value2", + Some("aliasHeaders2") + ), + KeyValueWithAliasesRecord( + "topic1", + "key1", + "alias_value1.2", + Some("aliasHeaders1.2") + ) + ) + + When("init AssertionContext") + assertionContext.launchConsumption( + expectedConsumedRecords + ) + + Then("assertionContext maps") + assertionContext.expectedRecordByValueAlias shouldBe Map( + "alias_value1.1" -> expectedConsumedRecords.head, + "alias_value2" -> expectedConsumedRecords(1), + "alias_value1.2" -> expectedConsumedRecords(2) + ) + assertionContext.expectedRecordByHeadersAlias shouldBe Map( + "aliasHeaders1.1" -> expectedConsumedRecords.head, + "aliasHeaders2" -> expectedConsumedRecords(1), + "aliasHeaders1.2" -> expectedConsumedRecords(2) + ) + assertionContext.expectedRecordsByTopicByKey shouldBe Map( + "" + + "topic1" -> Map( + "key1" -> Seq( + expectedConsumedRecords.head, + expectedConsumedRecords(2) + ) + ), + "topic2" -> Map("key2" -> Seq(expectedConsumedRecords(1))) + ) + assertionContext.consumedRecordsByTopicByKey shouldBe Map( + "topic1" -> Map("key1" -> consumerRecords1), + "topic2" -> Map("key2" -> consumerRecords2) + ) + + And("extracting consumed record by headers alias") + assertionContext.extractConsumedRecordWithAlias( + "alias_value1.1" + ) shouldBe Some( + AssertionContext.RecordValue(consumerRecords1.head.value()) + ) + val Some(AssertionContext.HeadersValue(headers)) = + assertionContext.extractConsumedRecordWithAlias( + "aliasHeaders1.1" + ) + headers.map { case (k, v) => (k, new String(v)) } shouldBe Map( + "foo" -> """"bar"""" + ) + } + + Scenario("1 topic, 2 keys") { + Given("background with 1 topic") + val backgroundContext = mock[BackgroundContext] + val assertionContext = new AssertionContext(WhenStepsLive(backgroundContext,recordConsume,KapoeiraProducer.run _)) + val consumerRecordsKey1 = Seq( + new ConsumerRecord( + "topic1", + 0, + 0, + "key1", + "value1.1".getBytes.asInstanceOf[Any] + ), + new ConsumerRecord( + "topic1", + 0, + 1, + "key1", + "value1.2".getBytes.asInstanceOf[Any] + ) + ) + val consumerRecordsKey2 = Seq( + new ConsumerRecord( + "topic2", + 1, + 0, + "key2", + "value2".getBytes.asInstanceOf[Any] + ) + ) + (backgroundContext + 
.consumeTopic(_: String, _: Map[String, Int])(_: RecordConsumer)) + .expects("topic1", *, *) + .returning( + Map( + "key1" -> consumerRecordsKey1, + "key2" -> consumerRecordsKey2 + ) + ) + val expectedConsumedRecords = List( + KeyValueWithAliasesRecord("topic1", "key1", "alias_value1.1"), + KeyValueWithAliasesRecord("topic1", "key2", "alias_value2"), + KeyValueWithAliasesRecord("topic1", "key1", "alias_value1.2") + ) + + When("init AssertionContext") + assertionContext.launchConsumption( + expectedConsumedRecords + ) + + Then("assertionContext maps") + assertionContext.expectedRecordByValueAlias shouldBe Map( + "alias_value1.1" -> expectedConsumedRecords.head, + "alias_value2" -> expectedConsumedRecords(1), + "alias_value1.2" -> expectedConsumedRecords(2) + ) + assertionContext.expectedRecordsByTopicByKey shouldBe Map( + "topic1" -> Map( + "key1" -> Seq( + expectedConsumedRecords.head, + expectedConsumedRecords(2) + ), + "key2" -> Seq(expectedConsumedRecords(1)) + ) + ) + assertionContext.consumedRecordsByTopicByKey shouldBe Map( + "topic1" -> Map( + "key1" -> consumerRecordsKey1, + "key2" -> consumerRecordsKey2 + ) + ) + } + } + +} diff --git a/src/test/scala/com/lectra/kapoeira/domain/BackgroundContextTest.scala b/src/test/scala/com/lectra/kapoeira/domain/BackgroundContextTest.scala new file mode 100644 index 0000000..786ea2a --- /dev/null +++ b/src/test/scala/com/lectra/kapoeira/domain/BackgroundContextTest.scala @@ -0,0 +1,26 @@ +package com.lectra.kapoeira.domain + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +class BackgroundContextTest extends AnyFlatSpec with Matchers { + "substitute variables" should "replace in a string all by its value" in { + val template = "some ${foo} with ${bar}" + val context = new BackgroundContext + context.addVariable("foo","a value") + context.addVariable("bar","42") + val actual = context.substituteVariablesIn(template) + + actual shouldEqual "some a value with 42" + } + + it should "not replace in a string not corresponding key" in { + val template = "some ${} with ${x} finally" + val context = new BackgroundContext + context.addVariable("foo","a value") + context.addVariable("bar","42") + val actual = context.substituteVariablesIn(template) + + actual shouldEqual template + } +} diff --git a/src/test/scala/com/lectra/kapoeira/domain/MergeMapsSpec.scala b/src/test/scala/com/lectra/kapoeira/domain/MergeMapsSpec.scala new file mode 100644 index 0000000..12de7f8 --- /dev/null +++ b/src/test/scala/com/lectra/kapoeira/domain/MergeMapsSpec.scala @@ -0,0 +1,75 @@ +package com.lectra.kapoeira.domain + +import com.lectra.kapoeira.domain.MergeMaps._ +import zio.test.Assertion.{equalTo, hasSameElements} +import zio.test.{Assertion, DefaultRunnableSpec, Gen, assert, check} + +object MergeMapsSpec extends DefaultRunnableSpec { + val spec = suite("merge Maps with Associative")( + suite("sequences")( + testM("Merge sequences") { + check( + Gen.listOfBounded(1, 10)(Gen.anyInt), + Gen.listOfBounded(1, 10)(Gen.anyInt) + ) { case (xs, ys) => + assert(xs.merge(ys))(Assertion.hasSameElements(xs ++ ys)) + } + }, + testM("associativity") { + check( + Gen.listOfBounded(1, 10)(Gen.anyInt), + Gen.listOfBounded(1, 10)(Gen.anyInt), + Gen.listOfBounded(1, 10)(Gen.anyInt) + ) { case (xs, ys, zs) => + //(xs + (ys + zs)) === ((xs + ys) + zs) + assert(xs.merge(ys.merge(zs)))(equalTo((xs.merge(ys)).merge(zs))) + } + } + ), + suite("maps")( + testM("Merge maps of sequences") { + check( + Gen.mapOfBounded(1, 10)( + Gen.elements("key1", 
"key2"), + Gen.listOfBounded(1, 10)(Gen.anyInt) + ), + Gen.mapOfBounded(1, 10)( + Gen.elements("key2", "key3"), + Gen.listOfBounded(1, 10)(Gen.anyInt) + ) + ) { case (xs, ys) => + assert(xs.merge(ys))( + hasSameElements( + (xs.get("key1").map(l => "key1" -> l).toList ++ + ys.get("key3").map(l => "key3" -> l).toList ++ + (xs + .getOrElse("key2", List.empty) + .merge(ys.getOrElse("key2", List.empty)) match { + case Nil => Option.empty + case ls => Some("key2" -> ls) + }).toList).toMap + ) + ) + } + }, + testM("associativity") { + check( + Gen.mapOfBounded(1, 10)( + Gen.elements("key1", "key2", "key3"), + Gen.listOfBounded(1, 10)(Gen.anyInt) + ), + Gen.mapOfBounded(1, 10)( + Gen.elements("key1", "key2", "key3"), + Gen.listOfBounded(1, 10)(Gen.anyInt) + ), + Gen.mapOfBounded(1, 10)( + Gen.elements("key1", "key2", "key3"), + Gen.listOfBounded(1, 10)(Gen.anyInt) + ) + ) { case (xs, ys, zs) => + assert(xs.merge(ys.merge(zs)))(equalTo((xs.merge(ys)).merge(zs))) + } + } + ) + ) +} diff --git a/src/test/scala/com/lectra/kapoeira/domain/WhenStepsSpec.scala b/src/test/scala/com/lectra/kapoeira/domain/WhenStepsSpec.scala new file mode 100644 index 0000000..669ff62 --- /dev/null +++ b/src/test/scala/com/lectra/kapoeira/domain/WhenStepsSpec.scala @@ -0,0 +1,190 @@ +package com.lectra.kapoeira.domain + +import com.lectra.kapoeira.domain.Services.{RecordConsumer, RecordProducer} +import org.apache.kafka.clients.consumer.ConsumerRecord +import zio.ZIO +import zio.test.Assertion._ +import zio.test.{DefaultRunnableSpec, _} + +object WhenStepsSpec extends DefaultRunnableSpec { + + val aTopic = "aTopic" + val aTopic2 = "aTopic2" + val aTopicAlias = "aTopicAlias" + val aKey = "aKey" + + val spec = suite("Handle run of when steps")( + suite("sending record, and consume")( + testM("one record") { + //prepare + val aValue = "aValue" + + val backgroundContext: BackgroundContext = buildBackgroundContext + val kafkaStubb = new KafkaStubb + val whenStepsService = + WhenStepsLive( + backgroundContext, + kafkaStubb.consumer(), + kafkaStubb.producer() + ) + val steps = whenStepsService.registerWhen( + WhenStep.empty, + List( + 0 -> List( + RecordRead(aTopicAlias, aKey, aValue.getBytes, Map.empty) + ) + ) + ) + + //run + for { + res <- whenStepsService + .run( + steps, + List(KeyValueWithAliasesRecord(aTopicAlias, aKey, "valueAlias")) + ) + .map(_.map { case (t, vs) => + (t, vs.map { case (k, v) => (k, v.map(_.value())) }) + }) + } yield ( + //assert + assert(res(aTopicAlias)(aKey))(hasSameElements(Seq(aValue))) + ) + }, + testM("one batch of many records, received in order") { + //prepare + val backgroundContext: BackgroundContext = buildBackgroundContext + val kafkaStubb = new KafkaStubb + val whenStepsService = + WhenStepsLive( + backgroundContext, + kafkaStubb.consumer(), + kafkaStubb.producer() + ) + val steps = whenStepsService.registerWhen( + WhenStep.empty, + List( + 0 -> List( + RecordRead(aTopicAlias, aKey, "aValue1".getBytes, Map.empty), + RecordRead(aTopicAlias, aKey, "aValue2".getBytes, Map.empty), + RecordRead(aTopicAlias, aKey, "aValue3".getBytes, Map.empty), + RecordRead(aTopicAlias, aKey, "aValue4".getBytes, Map.empty) + ) + ) + ) + + //run + for { + res <- whenStepsService + .run( + steps, + List( + KeyValueWithAliasesRecord(aTopicAlias, aKey, "valueAlias1"), + KeyValueWithAliasesRecord(aTopicAlias, aKey, "valueAlias2"), + KeyValueWithAliasesRecord(aTopicAlias, aKey, "valueAlias3"), + KeyValueWithAliasesRecord(aTopicAlias, aKey, "valueAlias4") + ) + ) + .map(_.map { case (t, vs) => + (t, vs.map { case (k, 
v) => (k, v.map(_.value())) }) + }) + } yield ( + //assert + assert(res(aTopicAlias)(aKey))( + hasSameElements(Seq("aValue1", "aValue2", "aValue3", "aValue4")) + ) + ) + }, + testM("two batches of many records, received in order") { + //prepare + val backgroundContext: BackgroundContext = buildBackgroundContext + val kafkaStubb = new KafkaStubb + val whenStepsService = + WhenStepsLive( + backgroundContext, + kafkaStubb.consumer(), + kafkaStubb.producer() + ) + val steps = whenStepsService.registerWhen( + WhenStep.empty, + List( + 1 -> List( + RecordRead(aTopicAlias, aKey, "aValue1".getBytes, Map.empty), + RecordRead(aTopicAlias, aKey, "aValue2".getBytes, Map.empty) + ), + 2 -> List( + RecordRead(aTopicAlias, aKey, "aValue3".getBytes, Map.empty), + RecordRead(aTopicAlias, aKey, "aValue4".getBytes, Map.empty) + ) + ) + ) + + //run + for { + res <- whenStepsService + .run( + steps, + List( + KeyValueWithAliasesRecord(aTopicAlias, aKey, "valueAlias1", None, 1), + KeyValueWithAliasesRecord(aTopicAlias, aKey, "valueAlias2", None, 1), + KeyValueWithAliasesRecord(aTopicAlias, aKey, "valueAlias3", None, 2), + KeyValueWithAliasesRecord(aTopicAlias, aKey, "valueAlias4", None, 2) + ) + ) + .map(_.map { case (t, vs) => + (t, vs.map { case (k, v) => (k, v.map(_.value())) }) + }) + } yield ( + //assert + assert(res.get(aTopicAlias))(isSome) && + assert(res(aTopicAlias).get(aKey))( + isSome( + hasSameElements(Seq("aValue1", "aValue2", "aValue3", "aValue4")) + ) + ) + ) + } + ) + ) + + final class KafkaStubb { + var records: Map[String, Seq[ConsumerRecord[String, Any]]] = Map.empty + + def consumer(): RecordConsumer = (_, _) => { + val res = collection.immutable.Map.from(records) + records = Map.empty //simulate an offset commit + res + } + + def producer(): RecordProducer = (record, topicConfig, _, _) => + ZIO.effectTotal { + records = records.updated( + record.key, + records + .getOrElse(record.key, Seq.empty) :+ new ConsumerRecord[String, Any]( + topicConfig.topicName, + 0, + 0, + record.key, + new String(record.value) + ) + ) + } + } + + private def buildBackgroundContext = { + val backgroundContext = new BackgroundContext + backgroundContext.addInput( + InputTopicConfig(aTopic, aTopicAlias, "string", "string") + ) + backgroundContext.addOutput( + OutputTopicConfig(aTopic, aTopicAlias, "string", "string") + )( + ( + outputTopicConfig: OutputTopicConfig, + subjectConfigs: Map[String, SubjectConfig] + ) => OutputConfigStringString(outputTopicConfig, null) + ) + backgroundContext + } +} diff --git a/src/test/scala/com/lectra/kapoeira/glue/AssertsTest.scala b/src/test/scala/com/lectra/kapoeira/glue/AssertsTest.scala new file mode 100644 index 0000000..4ddff34 --- /dev/null +++ b/src/test/scala/com/lectra/kapoeira/glue/AssertsTest.scala @@ -0,0 +1,124 @@ +package com.lectra.kapoeira.glue + +import com.lectra.kapoeira.domain.Services.RecordConsumer +import com.lectra.kapoeira.domain.{AssertionContext, BackgroundContext, KeyValueRecord, KeyValueWithAliasesRecord, WhenStepsLive} +import com.lectra.kapoeira.kafka.KapoeiraProducer +import org.apache.kafka.clients.consumer.ConsumerRecord +import org.scalamock.scalatest.MockFactory +import org.scalatest.exceptions.TestFailedException +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +class AssertsTest extends AnyFlatSpec with Matchers with MockFactory { + + val recordConsume: RecordConsumer = (_, _) => Map.empty + + behavior of "Asserts" + + it should "assert equality on literals" in { + val backgroundContext = 
mock[BackgroundContext] + val assertionContext = new AssertionContext(WhenStepsLive(backgroundContext, recordConsume, KapoeiraProducer.run _)) + val consumerRecord = + new ConsumerRecord("topic", 0, 0, "key", """{"foo":"bar"}""".getBytes.asInstanceOf[Any]) + val valueAlias = "valueAlias" + val keyValueRecord = KeyValueWithAliasesRecord("topic", "key", valueAlias) + (backgroundContext + .consumeTopic(_: String, _: Map[String, Int])(_: RecordConsumer)) + .expects(*, *, *) + .returning(Map("key" -> Seq(consumerRecord))) + val expectedConsumedRecords = List(keyValueRecord) + assertionContext.launchConsumption(expectedConsumedRecords) + + Asserts.equal(assertionContext, valueAlias, "$.foo", "\"bar\"") + } + + it should "assert matching objects" in { + val backgroundContext = mock[BackgroundContext] + val assertionContext = new AssertionContext(WhenStepsLive(backgroundContext, recordConsume, KapoeiraProducer.run _)) + val consumerRecord = new ConsumerRecord( + "topic", + 0, + 0, + "key", + """{"foo":"bar","baz":{"qux":42,"quux":"corge"}}""".getBytes.asInstanceOf[Any] + ) + val valueAlias = "valueAlias" + val keyValueRecord = KeyValueWithAliasesRecord("topic", "key", valueAlias) + (backgroundContext + .consumeTopic(_: String, _: Map[String, Int])(_: RecordConsumer)) + .expects(*, *, *) + .returning(Map("key" -> Seq(consumerRecord))) + val expectedConsumedRecords = List(keyValueRecord) + assertionContext.launchConsumption(expectedConsumedRecords) + + Asserts.matchObject( + assertionContext, + valueAlias, + "$", + """{"foo":"bar","baz":{"qux":42,"quux":"corge"}}""" + ) + Asserts.matchObject( + assertionContext, + valueAlias, + "$", + """{"foo":"bar"}""" + ) + assertThrows[TestFailedException]( + Asserts.matchObject( + assertionContext, + valueAlias, + "$", + """{"foo":"bar","baz":{"qux":42,"quux":"corge","gralpy":false},"grault":true}""" + ) + ) + } + + it should "assert that two JSON objects are exactly equal" in { + val backgroundContext = mock[BackgroundContext] + val assertionContext = new AssertionContext(WhenStepsLive(backgroundContext, recordConsume, KapoeiraProducer.run _)) + val consumerRecord = new ConsumerRecord( + "topic", + 0, + 0, + "key", + """{"foo":"bar","baz":{"qux":42,"quux":"corge"}}""".getBytes.asInstanceOf[Any] + ) + val valueAlias = "valueAlias" + val keyValueRecord = KeyValueWithAliasesRecord("topic", "key", valueAlias) + (backgroundContext + .consumeTopic(_: String, _: Map[String, Int])(_: RecordConsumer)) + .expects(*, *, *) + .returning(Map("key" -> Seq(consumerRecord))) + val expectedConsumedRecords = List(keyValueRecord) + assertionContext.launchConsumption(expectedConsumedRecords) + + Asserts.matchExactObject( + assertionContext, + valueAlias, + "$", + """{"foo":"bar","baz":{"qux":42,"quux":"corge"}}""" + ) + Asserts.matchExactObject( + assertionContext, + valueAlias, + "$", + """{"baz":{"qux":42,"quux":"corge"},"foo":"bar"}""" + ) + assertThrows[TestFailedException]( + Asserts.matchExactObject( + assertionContext, + valueAlias, + "$", + """{"foo":"bar"}""" + ) + ) + assertThrows[TestFailedException]( + Asserts.matchExactObject( + assertionContext, + valueAlias, + "$", + """{"foo":"bar","baz":{"qux":42,"quux":"corge","gralpy":false},"grault":true}""" + ) + ) + } +} diff --git a/src/test/scala/com/lectra/kapoeira/glue/FunctionManagerTest.scala b/src/test/scala/com/lectra/kapoeira/glue/FunctionManagerTest.scala new file mode 100644 index 0000000..1955fbe --- /dev/null +++ b/src/test/scala/com/lectra/kapoeira/glue/FunctionManagerTest.scala @@ -0,0 +1,78 @@ +package 
com.lectra.kapoeira.glue + +import com.lectra.kapoeira.domain.BackgroundContext +import com.lectra.kapoeira.domain.functions.{Func, FunctionRepository, DefaultFunctionRepository} +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +class FunctionManagerTest extends AnyFlatSpec with Matchers { + + private val testRepo = new FunctionRepository { + override def functions: Map[String, Func] = Map( + "max" -> { case Array(fst, snd) => + val (intFst, intSnd) = (fst.toInt, snd.toInt) + if(intFst > intSnd) intFst else intSnd + } + ) + } + + it should "parse parameters correctly and resolve variables" in { + implicit val context: BackgroundContext = new BackgroundContext() + val functionManager = new FunctionManager(DefaultFunctionRepository) + context.addVariable("foo", "hello world") + val result = functionManager.resolveVariables("42 coucou ${foo}") + + assert(result.isRight) + result.fold(err => fail(err.toString), variable => assert(variable == List("42", "coucou", "hello world"))) + } + + it should "apply non variable parameters to existing function and successfully updates the context" in { + implicit val context: BackgroundContext = new BackgroundContext() + + val functionDef = "max" + val paramDef = "33 42" + val functionManager = new FunctionManager(testRepo) + val result = functionManager("maxValue", functionDef, paramDef) + result match { + case Right(_) => + context.getVariable("maxValue") + .foreach(value => assert(value == "42")) + case Left(reason) => + fail(reason) + + } + } + + it should "successfully resolve variables and apply function, and updates the context" in { + implicit val context: BackgroundContext = new BackgroundContext() + context.addVariable("foo", "33") + context.addVariable("bar", "43") + + val functionDef = "max" + val paramDef = "${foo} ${bar}" + val functionManager = new FunctionManager(testRepo) + val result = functionManager("maxValue", functionDef, paramDef) + result match { + case Right(_) => + context.getVariable("maxValue") + .foreach(value => assert(value == "43")) + case Left(reason) => + fail(reason) + } + } + + it should "return an error explaining that a params hasn't been resolved" in { + implicit val context: BackgroundContext = new BackgroundContext() + context.addVariable("foo", "33") + val functionDef = "max" + val paramDef = "${foo} ${bar}" + val functionManager = new FunctionManager(testRepo) + val result = functionManager("maxValue", functionDef, paramDef) + result match { + case Right(_) => + fail("result should be left") + case Left(reason) => + succeed + } + } +} diff --git a/src/test/scala/com/lectra/kapoeira/glue/InterpotaleTest.scala b/src/test/scala/com/lectra/kapoeira/glue/InterpotaleTest.scala new file mode 100644 index 0000000..ec1f735 --- /dev/null +++ b/src/test/scala/com/lectra/kapoeira/glue/InterpotaleTest.scala @@ -0,0 +1,67 @@ +package com.lectra.kapoeira.glue + +import com.lectra.kapoeira.domain.{BackgroundContext, RecordRead} +import org.scalatest.flatspec.AnyFlatSpec +import org.scalacheck.Prop.forAll +import org.scalacheck.Properties +import org.scalatest.matchers.should.Matchers + +import java.nio.charset.StandardCharsets + +object InterpotaleSpec extends Properties("String") { + val ctx = new BackgroundContext + + property("identity when context is empty") = forAll { (a: String) => + a.interpolate(ctx) == a + } +} + +class InterpotaleTest extends AnyFlatSpec with Matchers { + behavior of "interpolate" + it should "interpolate variables of a string" in { + val ctx = new 
BackgroundContext + ctx.addVariable("key1", "foo") + ctx.addVariable("key2", "bar") + val template = """${key1} is a ${key2}""" + + template.interpolate(ctx) shouldBe "foo is a bar" + } + + it should "interpolate variables of a recursive Map structure" in { + val ctx = new BackgroundContext + ctx.addVariable("key1", "foo") + ctx.addVariable("key2", "bar") + + val templateHeaders: Map[String, Any] = Map( + "${key1}" -> "${key2}", + "baz" -> Map( + "${key1}" -> 42, + "qux" -> List("quz", "${key2}") + ) + ) + val recordRead = RecordRead( + "${key1}Topic", + "${key2}Key", + "${key1}Value".getBytes(StandardCharsets.UTF_8), + templateHeaders + ) + + val RecordRead( + topic, + key, + value, + headers + ) = recordRead.interpolate(ctx) + + topic shouldBe "fooTopic" + key shouldBe "barKey" + new String(value) shouldBe "fooValue" + headers shouldBe Map( + "foo" -> "bar", + "baz" -> Map( + "foo" -> 42, + "qux" -> List("quz", "bar") + ) + ) + } +} diff --git a/src/test/scala/com/lectra/kapoeira/glue/JsonNodeOpsTest.scala b/src/test/scala/com/lectra/kapoeira/glue/JsonNodeOpsTest.scala new file mode 100644 index 0000000..b755619 --- /dev/null +++ b/src/test/scala/com/lectra/kapoeira/glue/JsonNodeOpsTest.scala @@ -0,0 +1,24 @@ +package com.lectra.kapoeira.glue + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import Asserts.JsonNodeOps + +class JsonNodeOpsTest extends AnyFlatSpec with Matchers { + behavior of "JsonNodeOps" + it should "convert to Map[String,Any] for any json object" in { + val actualSimpleObject = objectMapper.readTree("""{"aString":"bar","aNumber":0.42,"aBoolean":true,"aNull":null}""") + val expectedSimpleObject = Map("aString" -> "bar", "aNumber" -> 0.42, "aBoolean" -> true ).toSeq + + val actualNestedObject = objectMapper.readTree("""{"foo":{"bar":"baz"}}""") + val expectedNestedObject = Map("foo" -> Map("bar"-> "baz")).toSeq + + val actualObjectWithArrays = objectMapper.readTree("""{"foo":["item1","item2"]}""") + val expectedObjectWithArrays = Map("foo" -> Seq("item1","item2")).toSeq + + new JsonNodeOps(actualSimpleObject).toMap.toSeq.intersect(expectedSimpleObject) shouldEqual expectedSimpleObject + new JsonNodeOps(actualNestedObject).toMap.toSeq.intersect(expectedNestedObject) shouldEqual expectedNestedObject + new JsonNodeOps(actualObjectWithArrays).toMap.toSeq.intersect(expectedObjectWithArrays) shouldEqual expectedObjectWithArrays + + } +} diff --git a/src/test/scala/com/lectra/kapoeira/glue/ReadHeadersTest.scala b/src/test/scala/com/lectra/kapoeira/glue/ReadHeadersTest.scala new file mode 100644 index 0000000..c38d971 --- /dev/null +++ b/src/test/scala/com/lectra/kapoeira/glue/ReadHeadersTest.scala @@ -0,0 +1,25 @@ +package com.lectra.kapoeira.glue + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +class ReadHeadersTest extends AnyFlatSpec with Matchers { + behavior of "read headers from string" + + it should "parse a string to a scala Map[String,Any]" in { + val aJson = + """ + |{"foo":"bar","baz":42,"qux":false,"buzz":{"toto":"titi"}} + |""".stripMargin + + val actual = aJson.readHeaders + actual should contain theSameElementsAs Map( + "foo" -> "bar", + "baz" -> 42, + "qux" -> false, + "buzz" -> Map( + "toto" -> "titi" + ) + ) + } +} diff --git a/src/test/scala/com/lectra/kapoeira/glue/RecordReadTest.scala b/src/test/scala/com/lectra/kapoeira/glue/RecordReadTest.scala new file mode 100644 index 0000000..aa218b7 --- /dev/null +++ b/src/test/scala/com/lectra/kapoeira/glue/RecordReadTest.scala 
@@ -0,0 +1,10 @@ +package com.lectra.kapoeira.glue + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +class RecordReadTest extends AnyFlatSpec with Matchers { + behavior of "RecordRead" + + // TODO +} diff --git a/version.txt b/version.txt new file mode 100644 index 0000000..4adba7e --- /dev/null +++ b/version.txt @@ -0,0 +1 @@ +0.0.1-SNAPSHOT \ No newline at end of file