diff --git a/README.md b/README.md index 835149cfc83..100ee5bb9bd 100644 --- a/README.md +++ b/README.md @@ -31,8 +31,8 @@ We believe that only an **open-source** solution to data movement can cover the **long tail of data sources** while empowering data engineers to **customize existing connectors**. Our ultimate vision is to help you move data from any source to any destination. Airbyte already provides [300+ connectors](https://docs.airbyte.com/integrations/) for popular APIs, databases, data warehouses and data lakes. Airbyte connectors can be implemented in any language and take the form of a Docker image that follows the [Airbyte specification](https://docs.airbyte.com/understanding-airbyte/airbyte-protocol/). You can create new connectors very fast with: - - The [low-code Connector Development Kit](https://docs.airbyte.com/connector-development/config-based/low-code-cdk-overview) (CDK) for API connectors ([demo](https://www.youtube.com/watch?v=i7VSL2bDvmw)) - - The [Python CDK](https://docs.airbyte.com/connector-development/cdk-python/) ([tutorial](https://docs.airbyte.com/connector-development/tutorials/cdk-speedrun)) +- The [low-code Connector Development Kit](https://docs.airbyte.com/connector-development/config-based/low-code-cdk-overview) (CDK) for API connectors ([demo](https://www.youtube.com/watch?v=i7VSL2bDvmw)) +- The [Python CDK](https://docs.airbyte.com/connector-development/cdk-python/) ([tutorial](https://docs.airbyte.com/connector-development/tutorials/cdk-speedrun)) Airbyte has a built-in scheduler and uses [Temporal](https://airbyte.com/blog/scale-workflow-orchestration-with-temporal) to orchestrate jobs and ensure reliability at scale. Airbyte leverages [dbt](https://www.youtube.com/watch?v=saXwh6SpeHA) to normalize extracted data and can trigger custom transformations in SQL and dbt. You can also orchestrate Airbyte syncs with [Airflow](https://docs.airbyte.com/operator-guides/using-the-airflow-airbyte-operator), [Prefect](https://docs.airbyte.com/operator-guides/using-prefect-task), [Dagster](https://docs.airbyte.com/operator-guides/using-dagster-integration), or [Kestra](https://docs.airbyte.com/operator-guides/using-kestra-plugin/). @@ -44,25 +44,35 @@ Explore our [demo app](https://demo.airbyte.io/). ### Run Airbyte locally -You can run Airbyte locally with Docker. +You can run Airbyte locally with `abctl`. + +#### Set up and launch Airbyte + +- Install `Docker Desktop` (see [instructions](https://docs.docker.com/desktop/install/mac-install/)). +- After `Docker Desktop` is installed, you must enable `Kubernetes` (see [instructions](https://docs.docker.com/desktop/kubernetes/)). +- Download the latest version of `abctl` from the [releases page](https://github.com/airbytehq/abctl/releases) and run the following command: ```bash -git clone --depth 1 https://github.com/airbytehq/airbyte.git -cd airbyte -./run-ab-platform.sh +abctl local install ``` -Login to the web app at [http://localhost:8000](http://localhost:8000) by entering the default credentials found in your .env file. +- Your browser should open to the Airbyte application. If it does not, visit [http://localhost](http://localhost). +- You will be asked for a username and password. By default, that's username `airbyte` and password `password`. You can set these values through command-line flags or environment variables. 
For example, to set the username and password to `foo` and `bar` respectively, you can run the following command: -``` -BASIC_AUTH_USERNAME=airbyte -BASIC_AUTH_PASSWORD=password +```bash +abctl local install --username foo --password bar + +# Or as environment variables +ABCTL_LOCAL_INSTALL_USERNAME=foo +ABCTL_LOCAL_INSTALL_PASSWORD=bar ``` Follow web app UI instructions to set up a source, destination and connection to replicate data. Connections support the most popular sync modes: full refresh, incremental and change data capture for databases. Read the [Airbyte docs](https://docs.airbyte.com). +The previous Docker Compose instructions are [here](https://docs.airbyte.com/deploying-airbyte/docker-compose). + ### Manage Airbyte configurations with code You can also programmatically manage sources, destinations, and connections with YAML files, [Octavia CLI](https://github.com/airbytehq/airbyte/tree/master/octavia-cli), and API. diff --git a/airbyte-analytics/build.gradle.kts b/airbyte-analytics/build.gradle.kts index e35e89dd2e1..dddec1041a0 100644 --- a/airbyte-analytics/build.gradle.kts +++ b/airbyte-analytics/build.gradle.kts @@ -1,40 +1,40 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - id("org.jetbrains.kotlin.jvm") - id("org.jetbrains.kotlin.kapt") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + id("org.jetbrains.kotlin.jvm") + id("org.jetbrains.kotlin.kapt") } dependencies { - kapt(platform(libs.micronaut.platform)) - kapt(libs.bundles.micronaut.annotation.processor) + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) - api(libs.segment.java.analytics) - api(libs.micronaut.http) - api(libs.micronaut.cache.caffeine) - api(libs.bundles.micronaut.annotation) - api(libs.bundles.micronaut.kotlin) - api(libs.kotlin.logging) - api(project(":airbyte-commons")) - api(project(":airbyte-config:config-models")) - api(project(":airbyte-api")) + api(libs.segment.java.analytics) + api(libs.micronaut.http) + api(libs.micronaut.cache.caffeine) + api(libs.bundles.micronaut.annotation) + api(libs.bundles.micronaut.kotlin) + api(libs.kotlin.logging) + api(project(":airbyte-commons")) + api(project(":airbyte-config:config-models")) + api(project(":airbyte-api")) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) - testImplementation(libs.mockk) - testImplementation(libs.kotlin.test.runner.junit5) - testRuntimeOnly(libs.junit.jupiter.engine) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) + testImplementation(libs.mockk) + testImplementation(libs.kotlin.test.runner.junit5) + testRuntimeOnly(libs.junit.jupiter.engine) } // This is a workaround related to kaptBuild errors. 
// TODO: this should be removed when we move to kotlin 1.9.20 // TODO: we should write tests afterEvaluate { - tasks.named("kaptGenerateStubsTestKotlin") { - enabled = false - } + tasks.named("kaptGenerateStubsTestKotlin") { + enabled = false + } } \ No newline at end of file diff --git a/airbyte-api-server/Dockerfile b/airbyte-api-server/Dockerfile index 732938151a7..3fb85849db1 100644 --- a/airbyte-api-server/Dockerfile +++ b/airbyte-api-server/Dockerfile @@ -1,15 +1,15 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.1.0 -FROM ${JDK_IMAGE} AS server +ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.2.1 + +FROM scratch as builder +WORKDIR /app +ADD airbyte-app.tar /app + +FROM ${JDK_IMAGE} EXPOSE 8006 5005 ENV APPLICATION airbyte-api-server ENV VERSION ${VERSION} - WORKDIR /app - -# This is automatically unzipped by Docker -USER root -ADD airbyte-app.tar /app -RUN chown -R airbyte:airbyte /app +COPY --chown=airbyte:airbyte --from=builder /app /app USER airbyte:airbyte # wait for upstream dependencies to become available before starting server diff --git a/airbyte-api-server/build.gradle.kts b/airbyte-api-server/build.gradle.kts index 6e5d5fae649..94e87a6eb06 100644 --- a/airbyte-api-server/build.gradle.kts +++ b/airbyte-api-server/build.gradle.kts @@ -1,101 +1,106 @@ import java.util.Properties plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - id("io.airbyte.gradle.docker") - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.docker") + kotlin("jvm") + kotlin("kapt") } dependencies { - kapt(platform(libs.micronaut.platform)) - kapt(libs.bundles.micronaut.annotation.processor) - kapt(libs.micronaut.jaxrs.processor) + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) + kapt(libs.micronaut.jaxrs.processor) - kaptTest(platform(libs.micronaut.platform)) - kaptTest(libs.bundles.micronaut.test.annotation.processor) - kaptTest(libs.micronaut.jaxrs.processor) + kaptTest(platform(libs.micronaut.platform)) + kaptTest(libs.bundles.micronaut.test.annotation.processor) + kaptTest(libs.micronaut.jaxrs.processor) - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) - annotationProcessor(libs.micronaut.jaxrs.processor) + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) + annotationProcessor(libs.micronaut.jaxrs.processor) - implementation(project(":airbyte-analytics")) - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-config:config-models")) - implementation(platform(libs.micronaut.platform)) - implementation(libs.cron.utils) - implementation(libs.log4j.slf4j2.impl) - implementation(libs.bundles.jackson) - implementation(libs.bundles.micronaut) - implementation(libs.bundles.micronaut.cache) - implementation(libs.bundles.micronaut.data.jdbc) - implementation(libs.bundles.micronaut.metrics) - implementation(libs.micronaut.jaxrs.server) - implementation(libs.micronaut.problem.json) - implementation(libs.micronaut.security) - implementation(libs.sentry.java) - implementation(libs.swagger.annotations) - implementation(libs.jakarta.ws.rs.api) - implementation(libs.airbyte.protocol) + implementation(project(":airbyte-analytics")) + implementation(project(":airbyte-api")) + 
implementation(project(":airbyte-commons")) + implementation(project(":airbyte-config:config-models")) + implementation(platform(libs.micronaut.platform)) + implementation(libs.cron.utils) + implementation(libs.log4j.slf4j2.impl) + implementation(libs.bundles.jackson) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.micronaut.cache) + implementation(libs.bundles.micronaut.data.jdbc) + implementation(libs.bundles.micronaut.metrics) + implementation(libs.micronaut.jaxrs.server) + implementation(libs.micronaut.problem.json) + implementation(libs.micronaut.security) + implementation(libs.sentry.java) + implementation(libs.swagger.annotations) + implementation(libs.jakarta.ws.rs.api) + implementation(libs.airbyte.protocol) - runtimeOnly(libs.javax.databind) - runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.javax.databind) + runtimeOnly(libs.snakeyaml) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testAnnotationProcessor(libs.micronaut.jaxrs.processor) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(libs.micronaut.jaxrs.processor) - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.postgresql) - testImplementation(libs.platform.testcontainers.postgresql) - testImplementation(libs.mockwebserver) - testImplementation(libs.mockito.inline) + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.postgresql) + testImplementation(libs.platform.testcontainers.postgresql) + testImplementation(libs.mockwebserver) + testImplementation(libs.mockito.inline) + testImplementation(libs.mockk) } kapt { - correctErrorTypes = true + correctErrorTypes = true } val env = Properties().apply { - load(rootProject.file(".env.dev").inputStream()) + load(rootProject.file(".env.dev").inputStream()) } airbyte { - application { - mainClass = "io.airbyte.api.server.ApplicationKt" - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + application { + mainClass = "io.airbyte.api.server.ApplicationKt" + defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - @Suppress("UNCHECKED_CAST") - localEnvVars.putAll(env.toMutableMap() as Map) - localEnvVars.putAll(mapOf( - "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: "undefined"), - "AIRBYTE_VERSION" to env["VERSION"].toString(), - "MICRONAUT_ENVIRONMENTS" to "control-plane", - "SERVICE_NAME" to project.name, - "TRACKING_STRATEGY" to env["TRACKING_STRATEGY"].toString(), - )) - } - docker { - imageName = "airbyte-api-server" - } + @Suppress("UNCHECKED_CAST") + localEnvVars.putAll(env.toMutableMap() as Map) + localEnvVars.putAll( + mapOf( + "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: "undefined"), + "AIRBYTE_VERSION" to env["VERSION"].toString(), + "MICRONAUT_ENVIRONMENTS" to "control-plane", + "SERVICE_NAME" to project.name, + "TRACKING_STRATEGY" to env["TRACKING_STRATEGY"].toString(), + ) + ) + } + docker { + imageName = "airbyte-api-server" + } } tasks.named("test") { - environment(mapOf( - "AIRBYTE_VERSION" to env["VERSION"], - "MICRONAUT_ENVIRONMENTS" to "test", - "SERVICE_NAME" to project.name, - )) + environment( + mapOf( + "AIRBYTE_VERSION" to env["VERSION"], + "MICRONAUT_ENVIRONMENTS" to "test", + "SERVICE_NAME" to project.name, + ) 
+ ) } // Even though Kotlin is excluded on Spotbugs, this project // still runs into spotbug issues. Working theory is that // generated code is being picked up. Disable as a short-term fix. tasks.named("spotbugsMain") { - enabled = false + enabled = false } diff --git a/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/controllers/ConnectionsController.kt b/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/controllers/ConnectionsController.kt index 2ef65f9dd66..9f6d7e70ea1 100644 --- a/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/controllers/ConnectionsController.kt +++ b/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/controllers/ConnectionsController.kt @@ -103,7 +103,14 @@ open class ConnectionsController( for (streamConfiguration in connectionCreateRequest.configurations.streams) { val validStreamAndConfig = validStreams[streamConfiguration.name] val schemaStream = validStreamAndConfig!!.stream - val schemaConfig = validStreamAndConfig.config + val updatedValidStreamAndConfig = AirbyteStreamAndConfiguration() + updatedValidStreamAndConfig.stream = schemaStream + updatedValidStreamAndConfig.config = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + validStreamAndConfig.config, + schemaStream, + streamConfiguration, + ) val validDestinationSyncModes = trackingHelper.callWithTracker( @@ -116,23 +123,21 @@ open class ConnectionsController( // set user configs trackingHelper.callWithTracker( { - AirbyteCatalogHelper.setAndValidateStreamConfig( - streamConfiguration, - validDestinationSyncModes, - schemaStream!!, - schemaConfig!!, + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = validDestinationSyncModes, + airbyteStream = schemaStream!!, ) }, CONNECTIONS_PATH, POST, userId, ) - configuredCatalog!!.addStreamsItem(validStreamAndConfig) + configuredCatalog!!.addStreamsItem(updatedValidStreamAndConfig) } } else { // no user supplied stream configs, return all streams with full refresh overwrite - configuredCatalog = airbyteCatalogFromDiscoverSchema - AirbyteCatalogHelper.setAllStreamsFullRefreshOverwrite(configuredCatalog!!) 
+ configuredCatalog = AirbyteCatalogHelper.updateAllStreamsFullRefreshOverwrite(airbyteCatalogFromDiscoverSchema) } val finalConfiguredCatalog = configuredCatalog @@ -328,7 +333,14 @@ open class ConnectionsController( for (streamConfiguration in connectionPatchRequest.configurations.streams) { val validStreamAndConfig = validStreams[streamConfiguration.name] val schemaStream = validStreamAndConfig!!.stream - val schemaConfig = validStreamAndConfig.config + val updatedValidStreamAndConfig = AirbyteStreamAndConfiguration() + updatedValidStreamAndConfig.stream = schemaStream + updatedValidStreamAndConfig.config = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + validStreamAndConfig.config, + schemaStream, + streamConfiguration, + ) val validDestinationSyncModes = trackingHelper.callWithTracker( @@ -341,18 +353,17 @@ open class ConnectionsController( // set user configs trackingHelper.callWithTracker( { - AirbyteCatalogHelper.setAndValidateStreamConfig( - streamConfiguration, - validDestinationSyncModes, - schemaStream!!, - schemaConfig!!, + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = validDestinationSyncModes, + airbyteStream = schemaStream!!, ) }, CONNECTIONS_PATH, POST, userId, ) - configuredCatalog!!.addStreamsItem(validStreamAndConfig) + configuredCatalog!!.addStreamsItem(updatedValidStreamAndConfig) } } else { // no user supplied stream configs, return all existing streams diff --git a/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/helpers/AirbyteCatalogHelper.kt b/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/helpers/AirbyteCatalogHelper.kt index cd65f0cb471..4ab234a1d20 100644 --- a/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/helpers/AirbyteCatalogHelper.kt +++ b/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/helpers/AirbyteCatalogHelper.kt @@ -56,9 +56,19 @@ object AirbyteCatalogHelper { * * @param config config to be set */ - fun setConfigDefaultFullRefreshOverwrite(config: AirbyteStreamConfiguration?) { - config!!.syncMode = SyncMode.FULL_REFRESH - config.destinationSyncMode = DestinationSyncMode.OVERWRITE + fun updateConfigDefaultFullRefreshOverwrite(config: AirbyteStreamConfiguration?): AirbyteStreamConfiguration { + val updatedStreamConfiguration = AirbyteStreamConfiguration() + config?.let { + updatedStreamConfiguration.aliasName = config.aliasName + updatedStreamConfiguration.cursorField = config.cursorField + updatedStreamConfiguration.fieldSelectionEnabled = config.fieldSelectionEnabled + updatedStreamConfiguration.selected = config.selected + updatedStreamConfiguration.selectedFields = config.selectedFields + updatedStreamConfiguration.suggested = config.suggested + } + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.OVERWRITE + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + return updatedStreamConfiguration } /** @@ -66,11 +76,20 @@ object AirbyteCatalogHelper { * * @param airbyteCatalog The catalog to be modified */ - fun setAllStreamsFullRefreshOverwrite(airbyteCatalog: AirbyteCatalog) { - for (schemaStreams in airbyteCatalog.streams) { - val config = schemaStreams.config!! 
- setConfigDefaultFullRefreshOverwrite(config) + fun updateAllStreamsFullRefreshOverwrite(airbyteCatalog: AirbyteCatalog?): AirbyteCatalog { + val updatedAirbyteCatalog = AirbyteCatalog() + airbyteCatalog?.let { + updatedAirbyteCatalog.streams = + it.streams.stream().map { stream: AirbyteStreamAndConfiguration -> + val updatedAirbyteStreamAndConfiguration = + AirbyteStreamAndConfiguration() + updatedAirbyteStreamAndConfiguration.config = updateConfigDefaultFullRefreshOverwrite(stream.config) + updatedAirbyteStreamAndConfiguration.stream = stream.stream + updatedAirbyteStreamAndConfiguration + }.toList() } + + return updatedAirbyteCatalog } /** @@ -154,6 +173,85 @@ object AirbyteCatalogHelper { // check that the first seconds and hour values are not * } + fun updateAirbyteStreamConfiguration( + config: AirbyteStreamConfiguration?, + airbyteStream: AirbyteStream?, + streamConfiguration: StreamConfiguration, + ): AirbyteStreamConfiguration { + val updatedStreamConfiguration = AirbyteStreamConfiguration() + // Set stream config as selected + updatedStreamConfiguration.selected = true + updatedStreamConfiguration.aliasName = config?.aliasName + updatedStreamConfiguration.fieldSelectionEnabled = config?.fieldSelectionEnabled + updatedStreamConfiguration.suggested = config?.suggested + + if (streamConfiguration.syncMode == null) { + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.OVERWRITE + updatedStreamConfiguration.cursorField = config?.cursorField + updatedStreamConfiguration.primaryKey = config?.primaryKey + } else { + when (streamConfiguration.syncMode) { + ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> { + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.APPEND + updatedStreamConfiguration.cursorField = config?.cursorField + updatedStreamConfiguration.primaryKey = config?.primaryKey + } + + ConnectionSyncModeEnum.INCREMENTAL_APPEND -> { + updatedStreamConfiguration.syncMode(SyncMode.INCREMENTAL) + updatedStreamConfiguration.destinationSyncMode(DestinationSyncMode.APPEND) + updatedStreamConfiguration.cursorField(selectCursorField(airbyteStream, streamConfiguration)) + updatedStreamConfiguration.primaryKey(selectPrimaryKey(airbyteStream, streamConfiguration)) + } + + ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> { + updatedStreamConfiguration.syncMode = SyncMode.INCREMENTAL + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.APPEND_DEDUP + updatedStreamConfiguration.cursorField = selectCursorField(airbyteStream, streamConfiguration) + updatedStreamConfiguration.primaryKey = selectPrimaryKey(airbyteStream, streamConfiguration) + } + + else -> { + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.OVERWRITE + updatedStreamConfiguration.cursorField = config?.cursorField + updatedStreamConfiguration.primaryKey = config?.primaryKey + } + } + } + + return updatedStreamConfiguration + } + + private fun selectCursorField( + airbyteStream: AirbyteStream?, + streamConfiguration: StreamConfiguration, + ): List<String>? { + return if (airbyteStream?.sourceDefinedCursor != null && airbyteStream.sourceDefinedCursor!!) 
{ + airbyteStream.defaultCursorField + } else if (streamConfiguration.cursorField != null && streamConfiguration.cursorField.isNotEmpty()) { + streamConfiguration.cursorField + } else { + airbyteStream?.defaultCursorField + } + } + + private fun selectPrimaryKey( + airbyteStream: AirbyteStream?, + streamConfiguration: StreamConfiguration, + ): List<List<String>>? { + // if no source defined primary key + return if (airbyteStream?.sourceDefinedPrimaryKey == null || airbyteStream.sourceDefinedPrimaryKey!!.isEmpty()) { + streamConfiguration.primaryKey + } else if (streamConfiguration.primaryKey == null || streamConfiguration.primaryKey.isEmpty()) { + airbyteStream.sourceDefinedPrimaryKey + } else { + listOf() + } + } + /** * Validates a stream's configurations and sets those configurations in the * `AirbyteStreamConfiguration` object. Logic comes from * @param streamConfiguration The configuration input of a specific stream provided by the caller. * @param validDestinationSyncModes All the valid destination sync modes for a destination * @param airbyteStream The immutable schema defined by the source - * @param config The configuration of a stream consumed by the config-api * @return True if no exceptions. Needed so it can be used inside TrackingHelper.callWithTracker */ - fun setAndValidateStreamConfig( + fun validateStreamConfig( streamConfiguration: StreamConfiguration, - validDestinationSyncModes: List<DestinationSyncMode>, + validDestinationSyncModes: List<DestinationSyncMode>, airbyteStream: AirbyteStream, - config: AirbyteStreamConfiguration, ): Boolean { - // Set stream config as selected - config.selected = true if (streamConfiguration.syncMode == null) { - setConfigDefaultFullRefreshOverwrite(config) return true } @@ -187,46 +280,33 @@ object AirbyteCatalogHelper { validCombinedSyncModes, ) } - when (streamConfiguration.syncMode) { - ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> { - config.syncMode = SyncMode.FULL_REFRESH - config.destinationSyncMode = DestinationSyncMode.APPEND - } + when (streamConfiguration.syncMode) { ConnectionSyncModeEnum.INCREMENTAL_APPEND -> { - config.syncMode = SyncMode.INCREMENTAL - config.destinationSyncMode = DestinationSyncMode.APPEND - setAndValidateCursorField(streamConfiguration.cursorField, airbyteStream, config) + validateCursorField(streamConfiguration.cursorField, airbyteStream) } ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> { - config.syncMode = SyncMode.INCREMENTAL - config.destinationSyncMode = DestinationSyncMode.APPEND_DEDUP - setAndValidateCursorField(streamConfiguration.cursorField, airbyteStream, config) - setAndValidatePrimaryKey(streamConfiguration.primaryKey, airbyteStream, config) + validateCursorField(streamConfiguration.cursorField, airbyteStream) + validatePrimaryKey(streamConfiguration.primaryKey, airbyteStream) } - else -> { - // always valid - setConfigDefaultFullRefreshOverwrite(config) - } + else -> {} } return true } - private fun setAndValidateCursorField( + private fun validateCursorField( cursorField: List<String>?, airbyteStream: AirbyteStream, - config: AirbyteStreamConfiguration, ) { if (airbyteStream.sourceDefinedCursor != null && airbyteStream.sourceDefinedCursor!!) 
{ if (!cursorField.isNullOrEmpty()) { // if cursor given is not empty and is NOT the same as the default, throw error - if (java.util.Set.copyOf(cursorField) != java.util.Set.copyOf(airbyteStream.defaultCursorField)) { + if (cursorField != airbyteStream.defaultCursorField) { throw ConnectionConfigurationProblem.sourceDefinedCursorFieldProblem(airbyteStream.name, airbyteStream.defaultCursorField!!) } } - config.cursorField = airbyteStream.defaultCursorField // this probably isn't necessary and should be already set } else { if (!cursorField.isNullOrEmpty()) { // validate cursor field @@ -234,26 +314,24 @@ object AirbyteCatalogHelper { if (!validCursorFields.contains(cursorField)) { throw ConnectionConfigurationProblem.invalidCursorField(airbyteStream.name, validCursorFields) } - config.cursorField = cursorField } else { // no default or given cursor field if (airbyteStream.defaultCursorField == null || airbyteStream.defaultCursorField!!.isEmpty()) { throw ConnectionConfigurationProblem.missingCursorField(airbyteStream.name) } - config.cursorField = airbyteStream.defaultCursorField // this probably isn't necessary and should be already set } } } - private fun setAndValidatePrimaryKey( + private fun validatePrimaryKey( primaryKey: List<List<String>>?, airbyteStream: AirbyteStream, - config: AirbyteStreamConfiguration, ) { // if no source defined primary key if (airbyteStream.sourceDefinedPrimaryKey == null || airbyteStream.sourceDefinedPrimaryKey!!.isEmpty()) { if (!primaryKey.isNullOrEmpty()) { // validate primary key + val validPrimaryKey: List<List<String>> = getStreamFields(airbyteStream.jsonSchema!!) // todo maybe check that they don't provide the same primary key twice? @@ -262,7 +340,6 @@ object AirbyteCatalogHelper { throw ConnectionConfigurationProblem.invalidPrimaryKey(airbyteStream.name, validPrimaryKey) } } - config.primaryKey = primaryKey } else { throw ConnectionConfigurationProblem.missingPrimaryKey(airbyteStream.name) } @@ -270,8 +347,6 @@ object AirbyteCatalogHelper { // source defined primary key exists if (!primaryKey.isNullOrEmpty()) { throw ConnectionConfigurationProblem.primaryKeyAlreadyDefined(airbyteStream.name) - } else { - config.primaryKey = airbyteStream.sourceDefinedPrimaryKey // this probably isn't necessary and should be already set } } } @@ -287,7 +362,7 @@ object AirbyteCatalogHelper { validSourceSyncModes: List<SyncMode>?, validDestinationSyncModes: List<DestinationSyncMode>, ): Set<ConnectionSyncModeEnum> { - val validCombinedSyncModes: MutableSet<ConnectionSyncModeEnum> = HashSet() + val validCombinedSyncModes: MutableSet<ConnectionSyncModeEnum> = mutableSetOf() for (sourceSyncMode in validSourceSyncModes!!) { for (destinationSyncMode in validDestinationSyncModes) { val combinedSyncMode: ConnectionSyncModeEnum? 
= @@ -313,8 +388,7 @@ object AirbyteCatalogHelper { fun getStreamFields(connectorSchema: JsonNode): List<List<String>> { val yamlMapper = ObjectMapper(YAMLFactory()) val streamFields: MutableList<List<String>> = ArrayList() - val spec: JsonNode - spec = + val spec: JsonNode = try { yamlMapper.readTree(connectorSchema.traverse()) } catch (e: IOException) { @@ -328,14 +402,14 @@ object AirbyteCatalogHelper { val propertyFields = paths.fields() while (propertyFields.hasNext()) { val (propertyName, nestedProperties) = propertyFields.next() - streamFields.add(java.util.List.of(propertyName)) + streamFields.add(listOf(propertyName)) // retrieve nested paths for (entry in getStreamFields(nestedProperties)) { if (entry.isEmpty()) { continue } - val streamFieldPath: MutableList<String> = ArrayList(java.util.List.of(propertyName)) + val streamFieldPath: MutableList<String> = mutableListOf(propertyName) streamFieldPath.addAll(entry) streamFields.add(streamFieldPath) } diff --git a/airbyte-api-server/src/test/kotlin/io/airbyte/api/server/helpers/AirbyteCatalogHelperTest.kt b/airbyte-api-server/src/test/kotlin/io/airbyte/api/server/helpers/AirbyteCatalogHelperTest.kt new file mode 100644 index 00000000000..a4a70d8406d --- /dev/null +++ b/airbyte-api-server/src/test/kotlin/io/airbyte/api/server/helpers/AirbyteCatalogHelperTest.kt @@ -0,0 +1,603 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.api.server.helpers + +import io.airbyte.airbyte_api.model.generated.ConnectionSchedule +import io.airbyte.airbyte_api.model.generated.ConnectionSyncModeEnum +import io.airbyte.airbyte_api.model.generated.ScheduleTypeEnum +import io.airbyte.airbyte_api.model.generated.StreamConfiguration +import io.airbyte.airbyte_api.model.generated.StreamConfigurations +import io.airbyte.api.client.model.generated.AirbyteCatalog +import io.airbyte.api.client.model.generated.AirbyteStream +import io.airbyte.api.client.model.generated.AirbyteStreamAndConfiguration +import io.airbyte.api.client.model.generated.AirbyteStreamConfiguration +import io.airbyte.api.client.model.generated.DestinationSyncMode +import io.airbyte.api.client.model.generated.SelectedFieldInfo +import io.airbyte.api.client.model.generated.SyncMode +import io.airbyte.api.server.problems.ConnectionConfigurationProblem +import io.airbyte.commons.json.Jsons +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertFalse +import org.junit.jupiter.api.Assertions.assertThrows +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.Test +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.EnumSource + +internal class AirbyteCatalogHelperTest { + @Test + internal fun `test that a stream configuration is not empty`() { + val streamConfigurations: StreamConfigurations = mockk() + + every { streamConfigurations.streams } returns listOf(mockk()) + + assertTrue(AirbyteCatalogHelper.hasStreamConfigurations(streamConfigurations)) + } + + @Test + internal fun `test that a stream configuration is empty`() { + val streamConfigurations: StreamConfigurations = mockk() + + every { streamConfigurations.streams } returns listOf() + + assertFalse(AirbyteCatalogHelper.hasStreamConfigurations(streamConfigurations)) + + every { streamConfigurations.streams } returns null + + assertFalse(AirbyteCatalogHelper.hasStreamConfigurations(streamConfigurations)) + + assertFalse(AirbyteCatalogHelper.hasStreamConfigurations(null)) + } + + 
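// Editor's illustrative sketch, not part of the original change: the helper diff above shows that updateConfigDefaultFullRefreshOverwrite is null-safe (it applies the FULL_REFRESH/OVERWRITE defaults even when config is null), so a test along these lines, reusing only the imports already in this file, could pin that behavior down. + @Test + internal fun `test that a null stream configuration receives the full refresh overwrite defaults`() { + val updatedStreamConfiguration = AirbyteCatalogHelper.updateConfigDefaultFullRefreshOverwrite(config = null) + assertEquals(SyncMode.FULL_REFRESH, updatedStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, updatedStreamConfiguration.destinationSyncMode) + } + + 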
@Test + internal fun `test that a copy of the AirbyteStreamConfiguration is returned when it is updated to full refresh overwrite mode`() { + val originalStreamConfiguration = createAirbyteStreamConfiguration() + + val updatedStreamConfiguration = AirbyteCatalogHelper.updateConfigDefaultFullRefreshOverwrite(config = originalStreamConfiguration) + assertFalse(originalStreamConfiguration === updatedStreamConfiguration) + assertEquals(SyncMode.FULL_REFRESH, updatedStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, updatedStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a copy of the AirbyteCatalog is returned when all of its streams are updated to full refresh overwrite mode`() { + val originalAirbyteCatalog = createAirbyteCatalog() + val updatedAirbyteCatalog = AirbyteCatalogHelper.updateAllStreamsFullRefreshOverwrite(airbyteCatalog = originalAirbyteCatalog) + assertFalse(originalAirbyteCatalog === updatedAirbyteCatalog) + updatedAirbyteCatalog.streams.stream().forEach { stream -> + assertEquals(SyncMode.FULL_REFRESH, stream.config?.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, stream.config?.destinationSyncMode) + } + } + + @Test + internal fun `test that streams can be validated`() { + val referenceCatalog = createAirbyteCatalog() + val streamConfiguration = StreamConfiguration() + streamConfiguration.name = "name1" + val streamConfigurations = StreamConfigurations() + streamConfigurations.streams = listOf(streamConfiguration) + + assertTrue(AirbyteCatalogHelper.validateStreams(referenceCatalog = referenceCatalog, streamConfigurations = streamConfigurations)) + } + + @Test + internal fun `test that a stream with an invalid name is considered to be invalid`() { + val referenceCatalog = createAirbyteCatalog() + val streamConfiguration = StreamConfiguration() + streamConfiguration.name = "unknown" + val streamConfigurations = StreamConfigurations() + streamConfigurations.streams = listOf(streamConfiguration) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreams(referenceCatalog = referenceCatalog, streamConfigurations = streamConfigurations) + } + assertEquals(true, throwable.message?.contains("Invalid stream found")) + } + + @Test + internal fun `test that streams with duplicate streams is considered to be invalid`() { + val referenceCatalog = createAirbyteCatalog() + val streamConfiguration1 = StreamConfiguration() + streamConfiguration1.name = "name1" + val streamConfiguration2 = StreamConfiguration() + streamConfiguration2.name = "name1" + val streamConfigurations = StreamConfigurations() + streamConfigurations.streams = listOf(streamConfiguration1, streamConfiguration2) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreams(referenceCatalog = referenceCatalog, streamConfigurations = streamConfigurations) + } + assertEquals(true, throwable.message?.contains("Duplicate stream found in configuration")) + } + + @Test + internal fun `test that valid streams can be retrieved from the AirbyteCatalog`() { + val airbyteCatalog = createAirbyteCatalog() + val validStreamNames = AirbyteCatalogHelper.getValidStreams(airbyteCatalog = airbyteCatalog) + assertEquals(airbyteCatalog.streams.map { it.stream?.name }.toSet(), validStreamNames.keys) + } + + @Test + internal fun `test that the cron configuration can be validated`() { + val connectionSchedule = ConnectionSchedule() + connectionSchedule.scheduleType = 
ScheduleTypeEnum.CRON + connectionSchedule.cronExpression = "0 15 10 * * ? * UTC" + assertTrue(AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule)) + assertFalse(connectionSchedule.cronExpression.contains("UTC")) + + connectionSchedule.scheduleType = ScheduleTypeEnum.MANUAL + assertTrue(AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule)) + + assertTrue(AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = null)) + } + + @Test + internal fun `test that the cron configuration with a missing cron expression is invalid`() { + val connectionSchedule = ConnectionSchedule() + connectionSchedule.scheduleType = ScheduleTypeEnum.CRON + connectionSchedule.cronExpression = null + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule) + } + assertEquals(true, throwable.message?.contains("Missing cron expression in the schedule.")) + } + + @Test + internal fun `test that the cron configuration with an invalid cron expression length is invalid`() { + val connectionSchedule = ConnectionSchedule() + connectionSchedule.scheduleType = ScheduleTypeEnum.CRON + connectionSchedule.cronExpression = "0 15 10 * * ? * * * *" + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule) + } + assertEquals(true, throwable.message?.contains("Cron expression contains 10 parts but we expect one of [6, 7]")) + } + + @Test + internal fun `test that the cron configuration with an invalid cron expression is invalid`() { + val connectionSchedule = ConnectionSchedule() + connectionSchedule.scheduleType = ScheduleTypeEnum.CRON + connectionSchedule.cronExpression = "not a valid cron expression string" + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule) + } + assertEquals(true, throwable.message?.contains("Failed to parse cron expression. 
Invalid chars in expression!")) + } + + @ParameterizedTest + @EnumSource(ConnectionSyncModeEnum::class) + internal fun `test that when a stream configuration is updated, the corret sync modes are set based on the stream configuration`( + connectionSyncMode: ConnectionSyncModeEnum, + ) { + val cursorField = "cursor" + val primayKeyColumn = "primary" + val airbyteStream = AirbyteStream() + val airbyteStreamConfiguration = createAirbyteStreamConfiguration() + val streamConfiguration = StreamConfiguration() + streamConfiguration.syncMode = connectionSyncMode + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.primaryKey = listOf(listOf(primayKeyColumn)) + + val updatedAirbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = airbyteStreamConfiguration, + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + assertEquals(true, updatedAirbyteStreamConfiguration.selected) + assertEquals(getSyncMode(connectionSyncMode), updatedAirbyteStreamConfiguration.syncMode) + assertEquals(getDestinationSyncMode(connectionSyncMode), updatedAirbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that when a stream configuration does not have a configured sync mode, the updated configuration uses full refresh overwrite`() { + val cursorField = "cursor" + val primayKeyColumn = "primary" + val airbyteStream = AirbyteStream() + val airbyteStreamConfiguration = createAirbyteStreamConfiguration() + val streamConfiguration = StreamConfiguration() + streamConfiguration.syncMode = null + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.primaryKey = listOf(listOf(primayKeyColumn)) + + val updatedAirbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = airbyteStreamConfiguration, + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertEquals(true, updatedAirbyteStreamConfiguration.selected) + assertEquals(SyncMode.FULL_REFRESH, updatedAirbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, updatedAirbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that when validating a stream without a sync mode, the sync mode is set to full refresh and the stream is considered valid`() { + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + streamConfiguration.syncMode = null + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue(AirbyteCatalogHelper.validateStreamConfig(streamConfiguration, listOf(), airbyteStream)) + assertEquals(SyncMode.FULL_REFRESH, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(true, airbyteStreamConfiguration.selected) + } + + @Test + internal fun `test that if the stream configuration contains an invalid sync mode, the stream is considered invalid`() { + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.syncMode = ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE + streamConfiguration.name = "stream-name" + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + 
AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.OVERWRITE), + airbyteStream = airbyteStream, + ) + } + assertEquals(true, throwable.message?.contains("Cannot set sync mode to ${streamConfiguration.syncMode} for stream")) + } + + @Test + internal fun `test that a stream configuration with FULL_REFRESH_APPEND is always considered to be valid`() { + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.supportedSyncModes = listOf(SyncMode.FULL_REFRESH) + streamConfiguration.syncMode = ConnectionSyncModeEnum.FULL_REFRESH_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + createAirbyteStreamConfiguration(), + airbyteStream, + streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.FULL_REFRESH, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a stream configuration with FULL_REFRESH_OVERWRITE is always considered to be valid`() { + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.supportedSyncModes = listOf(SyncMode.FULL_REFRESH) + streamConfiguration.syncMode = ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.OVERWRITE), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.FULL_REFRESH, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is only valid if the source defined cursor field is also valid`() { + val cursorField = "cursor" + val airbyteStream = AirbyteStream() + val airbyteStreamConfiguration = createAirbyteStreamConfiguration() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.sourceDefinedCursor = true + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is invalid if the source defined cursor field is invalid`() { + val cursorField = "cursor" + val streamName = "stream-name" + val airbyteStream = AirbyteStream() + val 
streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.name = streamName + airbyteStream.sourceDefinedCursor = true + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf("other") + streamConfiguration.name = airbyteStream.name + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ) + } + assertEquals(true, throwable.message?.contains("Do not include a cursor field configuration for this stream")) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is only valid if the source cursor field is also valid`() { + val cursorField = "cursor" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}}}") + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is invalid if the source cursor field is invalid`() { + val cursorField = "cursor" + val otherCursorField = "other" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(otherCursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$otherCursorField\": {}}}") + airbyteStream.name = "name" + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + 
AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ) + } + assertEquals( + true, + throwable.message?.contains( + "Invalid cursor field for stream: ${airbyteStream.name}. The list of valid cursor fields include: [[$otherCursorField]]", + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is invalid if there is no cursor field`() { + val cursorField = "cursor" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf() + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}}}") + airbyteStream.name = "name" + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf() + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ) + } + assertEquals( + true, + throwable.message?.contains( + "No default cursor field for stream: ${airbyteStream.name}. 
Please include a cursor field configuration for this stream.", + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that an INCREMENTAL_DEDUPED_HISTORY stream is only valid if the source defined cursor and primary key field are also valid`() { + val cursorField = "cursor" + val primaryKey = "primary" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}, \"$primaryKey\": {}}}") + airbyteStream.sourceDefinedCursor = true + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.primaryKey = listOf(listOf(primaryKey)) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND_DEDUP), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND_DEDUP, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + assertEquals(listOf(listOf(primaryKey)), airbyteStreamConfiguration.primaryKey) + } + + @Test + internal fun `test that an INCREMENTAL_DEDUPED_HISTORY stream is only valid if the source cursor field and primary key field are also valid`() { + val cursorField = "cursor" + val primaryKey = "primary" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}, \"$primaryKey\": {}}}") + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.primaryKey = listOf(listOf(primaryKey)) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND_DEDUP), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND_DEDUP, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + assertEquals(listOf(listOf(primaryKey)), airbyteStreamConfiguration.primaryKey) + } + + @Test + internal fun `test that the combined sync modes are valid`() { + val validSourceSyncModes = listOf(SyncMode.FULL_REFRESH) + val validDestinationSyncModes = 
listOf(DestinationSyncMode.OVERWRITE) + + val combinedSyncModes = + AirbyteCatalogHelper.validCombinedSyncModes( + validSourceSyncModes = validSourceSyncModes, + validDestinationSyncModes = validDestinationSyncModes, + ) + assertEquals(1, combinedSyncModes.size) + assertEquals(listOf(ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE).first(), combinedSyncModes.first()) + } + + private fun createAirbyteCatalog(): AirbyteCatalog { + val airbyteCatalog = AirbyteCatalog() + val streams = mutableListOf<AirbyteStreamAndConfiguration>() + for (i in 1..5) { + val streamAndConfiguration = AirbyteStreamAndConfiguration() + val stream = AirbyteStream() + stream.name = "name$i" + stream.namespace = "namespace" + streamAndConfiguration.stream = stream + streamAndConfiguration.config = createAirbyteStreamConfiguration() + streams += streamAndConfiguration + } + airbyteCatalog.streams(streams) + return airbyteCatalog + } + + private fun createAirbyteStreamConfiguration(): AirbyteStreamConfiguration { + val airbyteStreamConfiguration = AirbyteStreamConfiguration() + airbyteStreamConfiguration.aliasName = "alias" + airbyteStreamConfiguration.cursorField = listOf("cursor") + airbyteStreamConfiguration.destinationSyncMode = DestinationSyncMode.APPEND + airbyteStreamConfiguration.fieldSelectionEnabled = true + airbyteStreamConfiguration.primaryKey = listOf(listOf("primary")) + airbyteStreamConfiguration.selected = false + airbyteStreamConfiguration.selectedFields = listOf(SelectedFieldInfo()) + airbyteStreamConfiguration.suggested = false + airbyteStreamConfiguration.syncMode = SyncMode.INCREMENTAL + return airbyteStreamConfiguration + } + + private fun getSyncMode(connectionSyncMode: ConnectionSyncModeEnum): SyncMode { + return when (connectionSyncMode) { + ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE -> SyncMode.FULL_REFRESH + ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> SyncMode.FULL_REFRESH + ConnectionSyncModeEnum.INCREMENTAL_APPEND -> SyncMode.INCREMENTAL + ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> SyncMode.INCREMENTAL + } + } + + private fun getDestinationSyncMode(connectionSyncMode: ConnectionSyncModeEnum): DestinationSyncMode { + return when (connectionSyncMode) { + ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE -> DestinationSyncMode.OVERWRITE + ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> DestinationSyncMode.APPEND + ConnectionSyncModeEnum.INCREMENTAL_APPEND -> DestinationSyncMode.APPEND + ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> DestinationSyncMode.APPEND_DEDUP + } + } +} diff --git a/airbyte-api/build.gradle b/airbyte-api/build.gradle index 311af90d7e5..070a35aacc0 100644 --- a/airbyte-api/build.gradle +++ b/airbyte-api/build.gradle @@ -16,7 +16,7 @@ def workloadSpecFile = "$projectDir/src/main/openapi/workload-openapi.yaml" def connectorBuilderServerSpecFile = project(":airbyte-connector-builder-server").file("src/main/openapi/openapi.yaml").getPath() def genApiServer = tasks.register("generateApiServer", GenerateTask) { - def serverOutputDir = "$buildDir/generated/api/server" + def serverOutputDir = "${getLayout().buildDirectory.get()}/generated/api/server" inputs.file specFile outputs.dir serverOutputDir @@ -69,7 +69,7 @@ def genApiServer = tasks.register("generateApiServer", GenerateTask) { } def genApiClient = tasks.register("generateApiClient", GenerateTask) { - def clientOutputDir = "$buildDir/generated/api/client" + def clientOutputDir = "${getLayout().buildDirectory.get()}/generated/api/client" inputs.file specFile outputs.dir clientOutputDir @@ -107,7 +107,7 @@ def genApiClient = 
tasks.register("generateApiClient", GenerateTask) { } def genApiClient2 = tasks.register("genApiClient2", GenerateTask) { - def clientOutputDir = "$buildDir/generated/api/client2" + def clientOutputDir = "${getLayout().buildDirectory.get()}/generated/api/client2" inputs.file specFile outputs.dir clientOutputDir @@ -136,22 +136,23 @@ def genApiClient2 = tasks.register("genApiClient2", GenerateTask) { generateApiDocumentation = false configOptions = [ + enumPropertyNaming : "UPPERCASE", generatePom : "false", interfaceOnly : "true" ] doLast { - def apiClientPath = 'build/generated/api/client2/src/main/kotlin/org/openapitools/client/infrastructure/ApiClient.kt' + def apiClientPath = "${clientOutputDir}/src/main/kotlin/org/openapitools/client/infrastructure/ApiClient.kt" updateApiClientWithFailsafe(apiClientPath) - updateDomainClientsWithFailsafe('build/generated/api/client2/src/main/kotlin/io/airbyte/api/client2/generated') + updateDomainClientsWithFailsafe("${clientOutputDir}/src/main/kotlin/io/airbyte/api/client2/generated") // a JsonNode adapter needs to be added to the kotlin client's serializer to handle JsonNode fields in requests - updateApiClientSerializerWithJsonNodeAdapter('build/generated/api/client2/src/main/kotlin/org/openapitools/client/infrastructure/Serializer.kt') + updateApiClientSerializerWithJsonNodeAdapter("${clientOutputDir}/src/main/kotlin/org/openapitools/client/infrastructure/Serializer.kt") } } def genApiDocs = tasks.register("generateApiDocs", GenerateTask) { - def docsOutputDir = "$buildDir/generated/api/docs" + def docsOutputDir = "${getLayout().buildDirectory.get()}/generated/api/docs" generatorName = "html" inputSpec = specFile @@ -183,7 +184,7 @@ def genApiDocs = tasks.register("generateApiDocs", GenerateTask) { } def genPublicApiServer = tasks.register('generatePublicApiServer', GenerateTask) { - def serverOutputDir = "$buildDir/generated/public_api/server" + def serverOutputDir = "${getLayout().buildDirectory.get()}/generated/public_api/server" inputs.file specFile outputs.dir serverOutputDir @@ -227,7 +228,7 @@ def genPublicApiServer = tasks.register('generatePublicApiServer', GenerateTask) } def genAirbyteApiServer = tasks.register('generateAirbyteApiServer', GenerateTask) { - def serverOutputDir = "$buildDir/generated/airbyte_api/server" + def serverOutputDir = "${getLayout().buildDirectory.get()}/generated/airbyte_api/server" inputs.file airbyteApiSpecFile outputs.dir serverOutputDir @@ -270,7 +271,7 @@ def genAirbyteApiServer = tasks.register('generateAirbyteApiServer', GenerateTas // TODO: Linked to document okhhtp def genWorkloadApiClient = tasks.register("genWorkloadApiClient", GenerateTask) { - def clientOutputDir = "$buildDir/generated/workloadapi/client" + def clientOutputDir = "${getLayout().buildDirectory.get()}/generated/workloadapi/client" inputs.file workloadSpecFile outputs.dir clientOutputDir @@ -304,9 +305,9 @@ def genWorkloadApiClient = tasks.register("genWorkloadApiClient", GenerateTask) ] doLast { - def apiClientPath = 'build/generated/workloadapi/client/src/main/kotlin/io/airbyte/workload/api/client/generated/infrastructure/ApiClient.kt' + def apiClientPath = "${clientOutputDir}/src/main/kotlin/io/airbyte/workload/api/client/generated/infrastructure/ApiClient.kt" updateApiClientWithFailsafe(apiClientPath) - def generatedDomainClientsPath = 'build/generated/workloadapi/client/src/main/kotlin/io/airbyte/workload/api/client/generated' + def generatedDomainClientsPath = 
"${clientOutputDir}/src/main/kotlin/io/airbyte/workload/api/client/generated" updateDomainClientsWithFailsafe(generatedDomainClientsPath) // the kotlin client (as opposed to the java client) doesn't include the response body in the exception message. updateDomainClientsToIncludeHttpResponseBodyOnClientException(generatedDomainClientsPath) @@ -316,7 +317,7 @@ def genWorkloadApiClient = tasks.register("genWorkloadApiClient", GenerateTask) } def genConnectorBuilderServerApiClient = tasks.register("genConnectorBuilderServerApiClient", GenerateTask) { - def clientOutputDir = "$buildDir/generated/connectorbuilderserverapi/client" + def clientOutputDir = "${getLayout().buildDirectory.get()}/generated/connectorbuilderserverapi/client" inputs.file connectorBuilderServerSpecFile outputs.dir clientOutputDir @@ -345,9 +346,9 @@ def genConnectorBuilderServerApiClient = tasks.register("genConnectorBuilderServ doLast { // Delete file generated by the client2 task - def dir = file('build/generated/connectorbuilderserverapi/client/src/main/kotlin/org').deleteDir() + def dir = file("${clientOutputDir}/src/main/kotlin/org").deleteDir() - def generatedDomainClientsPath = 'build/generated/connectorbuilderserverapi/client/src/main/kotlin/io/airbyte/connectorbuilderserver/api/client/generated' + def generatedDomainClientsPath = "${clientOutputDir}/src/main/kotlin/io/airbyte/connectorbuilderserver/api/client/generated" updateDomainClientsWithFailsafe(generatedDomainClientsPath) // the kotlin client (as opposed to the java client) doesn't include the response body in the exception message. updateDomainClientsToIncludeHttpResponseBodyOnClientException(generatedDomainClientsPath) @@ -385,20 +386,24 @@ dependencies { kapt libs.v3.swagger.annotations api libs.bundles.micronaut.annotation - api(libs.kotlin.logging) + api libs.micronaut.http + api libs.bundles.micronaut.metrics + api libs.failsafe.okhttp + api libs.okhttp + api libs.guava + api libs.java.jwt + api libs.google.auth.library.oauth2.http + api libs.kotlin.logging implementation platform(libs.micronaut.platform) implementation libs.bundles.micronaut implementation libs.commons.io - implementation libs.failsafe.okhttp - implementation libs.guava implementation libs.jakarta.annotation.api implementation libs.jakarta.ws.rs.api implementation libs.jakarta.validation.api implementation libs.jackson.datatype implementation libs.jackson.databind implementation libs.moshi.kotlin - implementation libs.okhttp implementation libs.openapi.jackson.databind.nullable implementation libs.reactor.core implementation libs.slf4j.api @@ -470,7 +475,12 @@ private def updateApiClientWithFailsafe(def clientPath) { 'val response = client.newCall(request).execute()', '''val call = client.newCall(request) val failsafeCall = FailsafeCall.with(policy).compose(call) - val response: Response = failsafeCall.execute()''') + val response: Response = failsafeCall.execute() + + return response.use { processResponse(response) } + } + + protected inline fun processResponse(response: Response): ApiResponse {''') // add imports if not exist if (!apiClientFileText.contains("import dev.failsafe.RetryPolicy")) { diff --git a/airbyte-api/src/main/kotlin/AirbyteApiClient2.kt b/airbyte-api/src/main/kotlin/AirbyteApiClient2.kt index daa46d0b2d1..8af917ca932 100644 --- a/airbyte-api/src/main/kotlin/AirbyteApiClient2.kt +++ b/airbyte-api/src/main/kotlin/AirbyteApiClient2.kt @@ -16,6 +16,7 @@ import io.airbyte.api.client2.generated.HealthApi import io.airbyte.api.client2.generated.JobRetryStatesApi import 
io.airbyte.api.client2.generated.JobsApi import io.airbyte.api.client2.generated.OperationApi +import io.airbyte.api.client2.generated.OrganizationApi import io.airbyte.api.client2.generated.PermissionApi import io.airbyte.api.client2.generated.SecretsPersistenceConfigApi import io.airbyte.api.client2.generated.SourceApi @@ -23,9 +24,15 @@ import io.airbyte.api.client2.generated.SourceDefinitionApi import io.airbyte.api.client2.generated.SourceDefinitionSpecificationApi import io.airbyte.api.client2.generated.StateApi import io.airbyte.api.client2.generated.StreamStatusesApi +import io.airbyte.api.client2.generated.UserApi +import io.airbyte.api.client2.generated.WebBackendApi import io.airbyte.api.client2.generated.WorkspaceApi +import io.micronaut.context.annotation.Requires +import io.micronaut.context.annotation.Value +import jakarta.inject.Named +import jakarta.inject.Singleton import okhttp3.OkHttpClient -import java.io.IOException +import okhttp3.Response /** * This class wraps all the generated API clients and provides a single entry point. This class is meant @@ -46,53 +53,39 @@ import java.io.IOException *
 * 3. Integrate failsafe (https://failsafe.dev/) for circuit breaking / retry
 * policies. * - *

    - * todo (cgardens): The LogsApi is intentionally not included because in the java client we had to do some - * work to set the correct headers in the generated code. At some point we will need to test that that - * functionality works in the new client (and if necessary, patch it). Context: https://github.com/airbytehq/airbyte/pull/1799 */ @Suppress("MemberVisibilityCanBePrivate") +@Singleton +@Requires(property = "airbyte.internal-api.base-path") class AirbyteApiClient2 @JvmOverloads constructor( - basePath: String, - policy: RetryPolicy = RetryPolicy.ofDefaults(), - var httpClient: OkHttpClient = OkHttpClient(), - throwOn5xx: Boolean = true, + @Value("\${airbyte.internal-api.base-path}") basePath: String, + @Named("airbyteApiClientRetryPolicy") policy: RetryPolicy, + @Named("airbyteApiOkHttpClient") httpClient: OkHttpClient, ) { - init { - if (throwOn5xx) { - httpClient = httpClient.newBuilder().addInterceptor(ThrowOn5xxInterceptor()).build() - } - } - + val attemptApi = AttemptApi(basePath = basePath, client = httpClient, policy = policy) val connectionApi = ConnectionApi(basePath = basePath, client = httpClient, policy = policy) val connectorBuilderProjectApi = ConnectorBuilderProjectApi(basePath = basePath, client = httpClient, policy = policy) val deploymentMetadataApi = DeploymentMetadataApi(basePath = basePath, client = httpClient, policy = policy) - val destinationDefinitionApi = DestinationDefinitionApi(basePath = basePath, client = httpClient, policy = policy) val destinationApi = DestinationApi(basePath = basePath, client = httpClient, policy = policy) - val destinationSpecificationApi = DestinationDefinitionSpecificationApi(basePath = basePath, client = httpClient, policy = policy) + val destinationDefinitionApi = DestinationDefinitionApi(basePath = basePath, client = httpClient, policy = policy) + val destinationDefinitionSpecificationApi = + DestinationDefinitionSpecificationApi(basePath = basePath, client = httpClient, policy = policy) + val healthApi = HealthApi(basePath = basePath, client = httpClient, policy = policy) val jobsApi = JobsApi(basePath = basePath, client = httpClient, policy = policy) val jobRetryStatesApi = JobRetryStatesApi(basePath = basePath, client = httpClient, policy = policy) val operationApi = OperationApi(basePath = basePath, client = httpClient, policy = policy) - val sourceDefinitionApi = SourceDefinitionApi(basePath = basePath, client = httpClient, policy = policy) + val organizationApi = OrganizationApi(basePath = basePath, client = httpClient, policy = policy) + val permissionApi = PermissionApi(basePath = basePath, client = httpClient, policy = policy) + val secretPersistenceConfigApi = SecretsPersistenceConfigApi(basePath = basePath, client = httpClient, policy = policy) val sourceApi = SourceApi(basePath = basePath, client = httpClient, policy = policy) - val sourceDefinitionSpecificationApi = SourceDefinitionSpecificationApi(basePath = basePath, client = httpClient, policy = policy) - val workspaceApi = WorkspaceApi(basePath = basePath, client = httpClient, policy = policy) - val healthApi = HealthApi(basePath = basePath, client = httpClient, policy = policy) - val attemptApi = AttemptApi(basePath = basePath, client = httpClient, policy = policy) + val sourceDefinitionApi = SourceDefinitionApi(basePath = basePath, client = httpClient, policy = policy) + val sourceDefinitionSpecificationApi = + SourceDefinitionSpecificationApi(basePath = basePath, client = httpClient, policy = policy) val stateApi = StateApi(basePath = basePath, client 
= httpClient, policy = policy) val streamStatusesApi = StreamStatusesApi(basePath = basePath, client = httpClient, policy = policy) - val secretPersistenceConfigApi = SecretsPersistenceConfigApi(basePath = basePath, client = httpClient, policy = policy) - val permissonsApi = PermissionApi(basePath = basePath, client = httpClient, policy = policy) - } - -class ThrowOn5xxInterceptor : okhttp3.Interceptor { - override fun intercept(chain: okhttp3.Interceptor.Chain): okhttp3.Response { - val response = chain.proceed(chain.request()) - if (response.code >= 500) { - throw IOException("HTTP error: ${response.code} ${response.message}") - } - return response + val userApi = UserApi(basePath = basePath, client = httpClient, policy = policy) + val webBackendApi = WebBackendApi(basePath = basePath, client = httpClient, policy = policy) + val workspaceApi = WorkspaceApi(basePath = basePath, client = httpClient, policy = policy) } -} diff --git a/airbyte-api/src/main/kotlin/WorkloadApiClient.kt b/airbyte-api/src/main/kotlin/WorkloadApiClient.kt index 1e54abbec10..dbab77deb68 100644 --- a/airbyte-api/src/main/kotlin/WorkloadApiClient.kt +++ b/airbyte-api/src/main/kotlin/WorkloadApiClient.kt @@ -6,6 +6,10 @@ package io.airbyte.api.client import dev.failsafe.RetryPolicy import io.airbyte.workload.api.client.generated.WorkloadApi +import io.micronaut.context.annotation.Requires +import io.micronaut.context.annotation.Value +import jakarta.inject.Named +import jakarta.inject.Singleton import okhttp3.OkHttpClient import okhttp3.Response @@ -31,14 +35,16 @@ import okhttp3.Response *

    */ @SuppressWarnings("Parameter") +@Singleton +@Requires(property = "airbyte.workload-api.base-path") class WorkloadApiClient { - var workloadApi: WorkloadApi + val workloadApi: WorkloadApi @JvmOverloads constructor( - basePath: String, - policy: RetryPolicy = RetryPolicy.ofDefaults(), - httpClient: OkHttpClient = OkHttpClient(), + @Value("\${airbyte.workload-api.base-path}") basePath: String, + @Named("workloadApiClientRetryPolicy") policy: RetryPolicy, + @Named("workloadApiOkHttpClient") httpClient: OkHttpClient, ) { workloadApi = WorkloadApi(basePath = basePath, client = httpClient, policy = policy) } diff --git a/airbyte-api/src/main/kotlin/auth/AirbyteAuthHeader.kt b/airbyte-api/src/main/kotlin/auth/AirbyteAuthHeader.kt new file mode 100644 index 00000000000..b0c06239fdc --- /dev/null +++ b/airbyte-api/src/main/kotlin/auth/AirbyteAuthHeader.kt @@ -0,0 +1,14 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.api.client.auth + +/** + * Defines the custom Airbyte authentication header. + */ +interface AirbyteAuthHeader { + fun getHeaderName(): String + + fun getHeaderValue(): String +} diff --git a/airbyte-api/src/main/kotlin/auth/AirbyteAuthHeaderInterceptor.kt b/airbyte-api/src/main/kotlin/auth/AirbyteAuthHeaderInterceptor.kt new file mode 100644 index 00000000000..5fe6b59c89a --- /dev/null +++ b/airbyte-api/src/main/kotlin/auth/AirbyteAuthHeaderInterceptor.kt @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.api.client.auth + +import io.micronaut.context.annotation.Value +import io.micronaut.http.HttpHeaders +import jakarta.inject.Singleton +import okhttp3.Interceptor +import okhttp3.Request +import okhttp3.Response +import java.util.Optional + +/** + * Adds a custom Airbyte authentication header to requests made by a client. + */ +@Singleton +class AirbyteAuthHeaderInterceptor( + private val authHeaders: Optional, + @Value("\${micronaut.application.name}") private val userAgent: String, +) : Interceptor { + override fun intercept(chain: Interceptor.Chain): Response { + val originalRequest: Request = chain.request() + val builder: Request.Builder = originalRequest.newBuilder() + + if (originalRequest.header(HttpHeaders.USER_AGENT) == null) { + builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(userAgent)) + } + + authHeaders.ifPresent { h -> builder.addHeader(h.getHeaderName(), h.getHeaderValue()) } + + return chain.proceed(builder.build()) + } +} diff --git a/airbyte-api/src/main/kotlin/auth/InternalApiAuthenticationInterceptor.kt b/airbyte-api/src/main/kotlin/auth/InternalApiAuthenticationInterceptor.kt index fdc2e8f041c..2e4f744debf 100644 --- a/airbyte-api/src/main/kotlin/auth/InternalApiAuthenticationInterceptor.kt +++ b/airbyte-api/src/main/kotlin/auth/InternalApiAuthenticationInterceptor.kt @@ -1,3 +1,7 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + package io.airbyte.api.client.auth import com.google.common.base.CaseFormat @@ -9,7 +13,11 @@ import okhttp3.Interceptor import okhttp3.Request import okhttp3.Response -private val LOGGER = KotlinLogging.logger {} +private val logger = KotlinLogging.logger {} + +fun formatUserAgent(userAgent: String): String { + return CaseFormat.LOWER_HYPHEN.to(CaseFormat.UPPER_CAMEL, userAgent) +} @Singleton class InternalApiAuthenticationInterceptor( @@ -22,14 +30,14 @@ class InternalApiAuthenticationInterceptor( val builder: Request.Builder = originalRequest.newBuilder() if (originalRequest.header(HttpHeaders.USER_AGENT) == null) { - builder.addHeader(HttpHeaders.USER_AGENT, CaseFormat.LOWER_HYPHEN.to(CaseFormat.UPPER_CAMEL, userAgent)) + builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(userAgent)) } if (authHeaderName.isNotBlank() && authHeaderValue.isNotBlank()) { - LOGGER.debug { "Adding authorization header..." } + logger.debug { "Adding authorization header..." } builder.addHeader(authHeaderName, authHeaderValue) } else { - LOGGER.debug { "Bearer token not provided." } + logger.debug { "Bearer token not provided." } } return chain.proceed(builder.build()) diff --git a/airbyte-commons-auth/src/main/kotlin/io/airbyte/commons/auth/AuthenticationInterceptor.kt b/airbyte-api/src/main/kotlin/auth/WorkloadApiAuthenticationInterceptor.kt similarity index 57% rename from airbyte-commons-auth/src/main/kotlin/io/airbyte/commons/auth/AuthenticationInterceptor.kt rename to airbyte-api/src/main/kotlin/auth/WorkloadApiAuthenticationInterceptor.kt index a5ac636c988..3687a3fb886 100644 --- a/airbyte-commons-auth/src/main/kotlin/io/airbyte/commons/auth/AuthenticationInterceptor.kt +++ b/airbyte-api/src/main/kotlin/auth/WorkloadApiAuthenticationInterceptor.kt @@ -1,4 +1,8 @@ -package io.airbyte.commons.auth +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.api.client.auth import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.context.annotation.Value @@ -9,31 +13,33 @@ import okhttp3.Request import okhttp3.Response import java.util.Base64 -private val LOGGER = KotlinLogging.logger {} +private val logger = KotlinLogging.logger {} @Singleton -class AuthenticationInterceptor( +class WorkloadApiAuthenticationInterceptor( @Value("\${airbyte.workload-api.bearer-token}") private val bearerToken: String, + @Value("\${micronaut.application.name}") private val userAgent: String, ) : Interceptor { override fun intercept(chain: Interceptor.Chain): Response { val originalRequest: Request = chain.request() val builder: Request.Builder = originalRequest.newBuilder() - builder.header(HttpHeaders.USER_AGENT, USER_AGENT_VALUE) + if (originalRequest.header(HttpHeaders.USER_AGENT) == null) { + builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(userAgent)) + } if (bearerToken.isNotBlank()) { - LOGGER.debug { "Adding authorization header..." } + logger.debug { "Adding authorization header..." } val encodedBearerToken = Base64.getEncoder().encodeToString(bearerToken.toByteArray()) - builder.header(HttpHeaders.AUTHORIZATION, "$BEARER_TOKEN_PREFIX $encodedBearerToken") + builder.addHeader(HttpHeaders.AUTHORIZATION, "$BEARER_TOKEN_PREFIX $encodedBearerToken") } else { - LOGGER.debug { "Bearer token not provided." } + logger.debug { "Bearer token not provided." 
} } return chain.proceed(builder.build()) } companion object { - const val USER_AGENT_VALUE = "WorkloadLauncherApp" const val BEARER_TOKEN_PREFIX = "Bearer" } }
diff --git a/airbyte-api/src/main/kotlin/client/ThrowOn5xxInterceptor.kt b/airbyte-api/src/main/kotlin/client/ThrowOn5xxInterceptor.kt new file mode 100644 index 00000000000..52d722b9968 --- /dev/null +++ b/airbyte-api/src/main/kotlin/client/ThrowOn5xxInterceptor.kt @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.api.client + +import io.micronaut.context.annotation.Requires +import io.micronaut.http.HttpStatus +import jakarta.inject.Named +import jakarta.inject.Singleton +import okhttp3.Interceptor +import okhttp3.Response +import java.io.IOException + +@Singleton +@Named("throwOn5xxInterceptor") +@Requires(property = "airbyte.internal.throws-on-5xx", value = "true", defaultValue = "true") +class ThrowOn5xxInterceptor : Interceptor { + override fun intercept(chain: Interceptor.Chain): Response { + val response = chain.proceed(chain.request()) + if (response.code >= HttpStatus.INTERNAL_SERVER_ERROR.code) { + throw IOException("HTTP error: ${response.code} ${response.message}") + } + return response + } +}
diff --git a/airbyte-api/src/main/kotlin/config/ClientSupportFactory.kt b/airbyte-api/src/main/kotlin/config/ClientSupportFactory.kt new file mode 100644 index 00000000000..c2e8c357f05 --- /dev/null +++ b/airbyte-api/src/main/kotlin/config/ClientSupportFactory.kt @@ -0,0 +1,181 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.api.client.config + +import dev.failsafe.RetryPolicy +import io.airbyte.api.client.ThrowOn5xxInterceptor +import io.airbyte.api.client.auth.AirbyteAuthHeaderInterceptor +import io.airbyte.api.client.auth.InternalApiAuthenticationInterceptor +import io.airbyte.api.client.auth.WorkloadApiAuthenticationInterceptor +import io.github.oshai.kotlinlogging.KotlinLogging +import io.micrometer.core.instrument.MeterRegistry +import io.micronaut.context.annotation.Factory +import io.micronaut.context.annotation.Requires +import io.micronaut.context.annotation.Value +import jakarta.inject.Named +import jakarta.inject.Singleton +import okhttp3.HttpUrl +import okhttp3.OkHttpClient +import okhttp3.Response +import org.openapitools.client.infrastructure.ClientException +import org.openapitools.client.infrastructure.ServerException +import java.io.IOException +import java.time.Duration +import java.util.Optional + +private val logger = KotlinLogging.logger {} + +@Factory +class ClientSupportFactory { + @Singleton + @Named("airbyteApiClientRetryPolicy") + @Requires(property = "airbyte.internal-api.base-path") + fun defaultAirbyteApiRetryPolicy( + @Value("\${airbyte.internal-api.retries.delay-seconds:2}") retryDelaySeconds: Long, + @Value("\${airbyte.internal-api.retries.max:5}") maxRetries: Int, + meterRegistry: Optional<MeterRegistry>, + ): RetryPolicy<Response> { + return generateDefaultRetryPolicy(retryDelaySeconds, maxRetries, meterRegistry, "airbyte-api-client") + }
+ + @Singleton + @Named("airbyteApiOkHttpClient") + @Requires(property = "airbyte.internal-api.base-path") + fun defaultAirbyteApiOkHttpClient( + @Value("\${airbyte.internal-api.connect-timeout-seconds}") connectTimeoutSeconds: Long, + @Value("\${airbyte.internal-api.read-timeout-seconds}") readTimeoutSeconds: Long, + internalApiAuthenticationInterceptor: InternalApiAuthenticationInterceptor, + airbyteAuthHeaderInterceptor: AirbyteAuthHeaderInterceptor, + @Named("throwOn5xxInterceptor") throwOn5xxInterceptor: Optional<ThrowOn5xxInterceptor>, + ): OkHttpClient { + val builder: OkHttpClient.Builder = OkHttpClient.Builder() + builder.addInterceptor(internalApiAuthenticationInterceptor) + builder.addInterceptor(airbyteAuthHeaderInterceptor) + throwOn5xxInterceptor.ifPresent { builder.addInterceptor(it) } + builder.readTimeout(Duration.ofSeconds(readTimeoutSeconds)) + builder.connectTimeout(Duration.ofSeconds(connectTimeoutSeconds)) + return builder.build() + }
+ + @Singleton + @Named("workloadApiClientRetryPolicy") + @Requires(property = "airbyte.workload-api.base-path") + fun defaultWorkloadApiRetryPolicy( + @Value("\${airbyte.internal-api.retries.delay-seconds:2}") retryDelaySeconds: Long, + @Value("\${airbyte.internal-api.retries.max:5}") maxRetries: Int, + meterRegistry: Optional<MeterRegistry>, + ): RetryPolicy<Response> { + return generateDefaultRetryPolicy(retryDelaySeconds, maxRetries, meterRegistry, "workload-api-client") + }
+ + @Singleton + @Named("workloadApiOkHttpClient") + @Requires(property = "airbyte.workload-api.base-path") + fun defaultWorkloadApiOkHttpClient( + @Value("\${airbyte.workload-api.connect-timeout-seconds}") connectTimeoutSeconds: Long, + @Value("\${airbyte.workload-api.read-timeout-seconds}") readTimeoutSeconds: Long, + workloadApiAuthenticationInterceptor: WorkloadApiAuthenticationInterceptor, + airbyteAuthHeaderInterceptor: AirbyteAuthHeaderInterceptor, + ): OkHttpClient { + val builder: OkHttpClient.Builder = OkHttpClient.Builder() + builder.addInterceptor(workloadApiAuthenticationInterceptor) + builder.addInterceptor(airbyteAuthHeaderInterceptor) + builder.readTimeout(Duration.ofSeconds(readTimeoutSeconds)) + builder.connectTimeout(Duration.ofSeconds(connectTimeoutSeconds)) + return builder.build() + }
+ + private fun generateDefaultRetryPolicy( + retryDelaySeconds: Long, + maxRetries: Int, + meterRegistry: Optional<MeterRegistry>, + metricPrefix: String, + ): RetryPolicy<Response> { + val metricTags = arrayOf("max-retries", maxRetries.toString()) + return RetryPolicy.builder<Response>() + .handle( + listOf( + IllegalStateException::class.java, + IOException::class.java, + UnsupportedOperationException::class.java, + ClientException::class.java, + ServerException::class.java, + ), + ) + // TODO move these metrics into a centralized metric registry as part of the MetricClient refactor/cleanup + .onAbort { l -> + logger.warn { "Attempt aborted. Attempt count ${l.attemptCount}" } + meterRegistry.ifPresent { + r -> + r.counter( + "$metricPrefix.abort", + *metricTags, + *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), + *getUrlTags(l.result.request.url), + ).increment() + } + } + .onFailure { l -> + logger.error(l.exception) { "Failed to call ${l.result.request.url}. Last response: ${l.result}" } + meterRegistry.ifPresent { + r -> + r.counter( + "$metricPrefix.failure", + *metricTags, + *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), + *getUrlTags(l.result.request.url), + ).increment() + } + } + .onRetry { l -> + logger.warn { "Retry attempt ${l.attemptCount} of $maxRetries. Last response: ${l.lastResult}" } + meterRegistry.ifPresent { + r -> + r.counter( + "$metricPrefix.retry", + *metricTags, + *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.lastResult.request.method), + *getUrlTags(l.lastResult.request.url), + ).increment() + } + } + .onRetriesExceeded { l -> + logger.error(l.exception) { "Retry attempts exceeded."
} + meterRegistry.ifPresent { + r -> + r.counter( + "$metricPrefix.retries_exceeded", + *metricTags, + *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), + *getUrlTags(l.result.request.url), + ).increment() + } + } + .onSuccess { l -> + logger.debug { "Successfully called ${l.result.request.url}. Response: ${l.result}, isRetry: ${l.isRetry}" } + meterRegistry.ifPresent { + r -> + r.counter( + "$metricPrefix.success", + *metricTags, + *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), + *getUrlTags(l.result.request.url), + ).increment() + } + } + .withDelay(Duration.ofSeconds(retryDelaySeconds)) + .withMaxRetries(maxRetries) + .build() + } + + private fun getUrlTags(httpUrl: HttpUrl): Array { + val last = httpUrl.pathSegments.last() + if (last.contains("[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}".toRegex())) { + return arrayOf("url", httpUrl.toString().removeSuffix(last), "workload-id", last) + } else { + return arrayOf("url", httpUrl.toString()) + } + } +} diff --git a/airbyte-api/src/main/kotlin/config/InternalApiAuthenticationFactory.kt b/airbyte-api/src/main/kotlin/config/InternalApiAuthenticationFactory.kt new file mode 100644 index 00000000000..e787d2c6d01 --- /dev/null +++ b/airbyte-api/src/main/kotlin/config/InternalApiAuthenticationFactory.kt @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.api.client.config + +import com.auth0.jwt.JWT +import com.auth0.jwt.JWTCreator +import com.google.auth.oauth2.ServiceAccountCredentials +import io.github.oshai.kotlinlogging.KotlinLogging +import io.micronaut.context.annotation.Factory +import io.micronaut.context.annotation.Primary +import io.micronaut.context.annotation.Prototype +import io.micronaut.context.annotation.Requires +import io.micronaut.context.annotation.Value +import jakarta.inject.Named +import jakarta.inject.Singleton +import java.io.FileInputStream +import java.security.interfaces.RSAPrivateKey +import java.util.Date +import java.util.concurrent.TimeUnit + +private val logger = KotlinLogging.logger {} + +@Factory +class InternalApiAuthenticationFactory { + @Primary + @Singleton + @Requires(property = "airbyte.internal-api.base-path") + @Requires(property = "airbyte.acceptance.test.enabled", value = "true") + @Named(INTERNAL_API_AUTH_TOKEN_BEAN_NAME) + fun testInternalApiAuthToken( + @Value("\${airbyte.internal-api.auth-header.value}") airbyteApiAuthHeaderValue: String, + ): String { + return airbyteApiAuthHeaderValue + } + + @Singleton + @Requires(property = "airbyte.internal-api.base-path") + @Requires(property = "airbyte.acceptance.test.enabled", value = "false") + @Requires(env = [CONTROL_PLANE]) + @Named(INTERNAL_API_AUTH_TOKEN_BEAN_NAME) + fun controlPlaneInternalApiAuthToken( + @Value("\${airbyte.internal-api.auth-header.value}") airbyteApiAuthHeaderValue: String, + ): String { + return airbyteApiAuthHeaderValue + } + + /** + * Generate an auth token based on configs. This is called by the Api Client's requestInterceptor + * for each request. Using Prototype annotation here to make sure each time it's used it will + * generate a new JWT Signature if it's on data plane. 
+ * + * + * For Data Plane workers, generate a signed JWT as described here: + * https://cloud.google.com/endpoints/docs/openapi/service-account-authentication + */ + @Prototype + @Requires(property = "airbyte.internal-api.base-path") + @Requires(property = "airbyte.acceptance.test.enabled", value = "false") + @Requires(env = [DATA_PLANE]) + @Named(INTERNAL_API_AUTH_TOKEN_BEAN_NAME) + fun dataPlaneInternalApiAuthToken( + @Value("\${airbyte.control.plane.auth-endpoint}") controlPlaneAuthEndpoint: String, + @Value("\${airbyte.data.plane.service-account.email}") dataPlaneServiceAccountEmail: String, + @Value("\${airbyte.data.plane.service-account.credentials-path}") dataPlaneServiceAccountCredentialsPath: String, + ): String { + return try { + val now = Date() + val expTime = + Date(System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(JWT_TTL_MINUTES.toLong())) + // Build the JWT payload + val token: JWTCreator.Builder = + JWT.create() + .withIssuedAt(now) + .withExpiresAt(expTime) + .withIssuer(dataPlaneServiceAccountEmail) + .withAudience(controlPlaneAuthEndpoint) + .withSubject(dataPlaneServiceAccountEmail) + .withClaim(CLAIM_NAME, dataPlaneServiceAccountEmail) + + // TODO multi-cloud phase 2: check performance of on-demand token generation in load testing. might + // need to pull some of this outside of this method which is called for every API request + val stream = FileInputStream(dataPlaneServiceAccountCredentialsPath) + val cred = ServiceAccountCredentials.fromStream(stream) + val key = cred.privateKey as RSAPrivateKey + val algorithm: com.auth0.jwt.algorithms.Algorithm = com.auth0.jwt.algorithms.Algorithm.RSA256(null, key) + return "Bearer " + token.sign(algorithm) + } catch (e: Exception) { + logger.error(e) { "An issue occurred while generating a data plane auth token. Defaulting to empty string. Error Message: {}" } + return "" + } + } + + companion object { + const val CLAIM_NAME = "email" + const val CONTROL_PLANE = "control-plane" + const val DATA_PLANE = "data-plane" + const val INTERNAL_API_AUTH_TOKEN_BEAN_NAME = "internalApiAuthToken" + const val JWT_TTL_MINUTES = 5 + } +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/StatsDRegistryConfigurer.kt b/airbyte-api/src/main/kotlin/config/StatsDRegistryConfigurer.kt similarity index 80% rename from airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/StatsDRegistryConfigurer.kt rename to airbyte-api/src/main/kotlin/config/StatsDRegistryConfigurer.kt index 7bce2bc1c63..cc1ad6e147d 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/StatsDRegistryConfigurer.kt +++ b/airbyte-api/src/main/kotlin/config/StatsDRegistryConfigurer.kt @@ -1,4 +1,8 @@ -package io.airbyte.workers.config +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.api.client.config import io.github.oshai.kotlinlogging.KotlinLogging import io.micrometer.statsd.StatsdMeterRegistry @@ -12,10 +16,6 @@ import jakarta.inject.Singleton private val logger = KotlinLogging.logger {} -// TODO Temporarily copy this from the workload-launcher. Ultimately, this will move to airbyte-metrics/metrics-lib -// and would provide a mechanism to override/add additional tags and/or define the tags that will be included -// in metrics - /** * Custom Micronaut {@link MeterRegistryConfigurer} used to ensure that a common set of tags are * added to every Micrometer registry. 
Specifically, this class ensures that the tags for the @@ -28,11 +28,11 @@ private val logger = KotlinLogging.logger {} @RequiresMetrics class StatsDRegistryConfigurer : MeterRegistryConfigurer<StatsdMeterRegistry>, Ordered { override fun configure(meterRegistry: StatsdMeterRegistry?) { - /* - * Use a LinkedHashSet to maintain order as items are added to the set. This ensures that the items - * are output as key1, value1, key2, value2, etc in order to maintain the relationship between key - * value pairs. - */ + /* + * Use a LinkedHashSet to maintain order as items are added to the set. This ensures that the items + * are output as key1, value1, key2, value2, etc in order to maintain the relationship between key + * value pairs. + */ val tags: MutableSet<String> = LinkedHashSet() possiblyAddTag(DATA_DOG_SERVICE_TAG, "service", tags)
diff --git a/airbyte-api/src/main/openapi/cloud-config.yaml b/airbyte-api/src/main/openapi/cloud-config.yaml index 3a21f899d51..00a7390a6a4 100644 --- a/airbyte-api/src/main/openapi/cloud-config.yaml +++ b/airbyte-api/src/main/openapi/cloud-config.yaml @@ -882,9 +882,6 @@ components: $ref: "#/components/schemas/UserStatus" companyName: type: string - email: - type: string - format: email news: type: boolean UserStatus:
diff --git a/airbyte-api/src/main/openapi/config.yaml b/airbyte-api/src/main/openapi/config.yaml index 093caafe63c..cb4324380e3 100644 --- a/airbyte-api/src/main/openapi/config.yaml +++ b/airbyte-api/src/main/openapi/config.yaml @@ -2422,6 +2422,29 @@ paths: $ref: "#/components/responses/NotFoundResponse" "422": $ref: "#/components/responses/InvalidInputResponse" + /v1/connections/clear: + post: + tags: + - connection + summary: Clear the data for the connection. Deletes data generated by the connection in the destination. Clears any cursors back to initial state. + operationId: clearConnection + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionIdRequestBody" + required: true + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/JobInfoRead" + "404": + $ref: "#/components/responses/NotFoundResponse" + "422": + $ref: "#/components/responses/InvalidInputResponse" /v1/connections/auto_disable: post: tags: @@ -2447,6 +2470,29 @@ paths: $ref: "#/components/schemas/InternalOperationResult" "404": $ref: "#/components/responses/NotFoundResponse" + /v1/connections/clear/stream: + post: + tags: + - connection + summary: Clear the data for a specific stream in the connection. Deletes data generated by the stream in the destination. Clears any cursors back to initial state. + operationId: clearConnectionStream + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionStreamRequestBody" + required: true + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/JobInfoRead" + "404": + $ref: "#/components/responses/NotFoundResponse" + "422": + $ref: "#/components/responses/InvalidInputResponse" /v1/connections/reset/stream: post: tags: @@ -2470,6 +2516,29 @@ paths: $ref: "#/components/responses/NotFoundResponse" "422": $ref: "#/components/responses/InvalidInputResponse" + /v1/connections/refresh: + post: + tags: + - connection + summary: Refresh the data for specific streams in the connection. If no stream is specified or the list of streams is empty, all the streams will be refreshed. Resets any cursors back to initial state.
+ operationId: refreshConnectionStream + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionStreamRefreshRequestBody" + required: true + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/BooleanRead" + "404": + $ref: "#/components/responses/NotFoundResponse" + "422": + $ref: "#/components/responses/InvalidInputResponse" /v1/connections/get_task_queue_name: post: tags: @@ -3784,12 +3853,8 @@ paths: $ref: "#/components/schemas/PermissionUpdate" required: true responses: - "200": + "204": description: Successful operation - content: - application/json: - schema: - $ref: "#/components/schemas/PermissionRead" "403": $ref: "#/components/responses/ForbiddenResponse" "404": @@ -6099,6 +6164,7 @@ components: type: object required: - name + - organizationId properties: email: type: string @@ -6132,6 +6198,7 @@ components: required: - id - name + - organizationId properties: id: type: string @@ -6531,6 +6598,8 @@ components: type: string draftManifest: $ref: "#/components/schemas/DeclarativeManifest" + yamlManifest: + type: string ConnectorBuilderProjectDetailsRead: type: object required: @@ -7824,6 +7893,17 @@ components: type: array items: $ref: "#/components/schemas/ConnectionStream" + ConnectionStreamRefreshRequestBody: + type: object + required: + - connectionId + properties: + connectionId: + $ref: "#/components/schemas/ConnectionId" + streams: + type: array + items: + $ref: "#/components/schemas/ConnectionStream" DbMigrationRequestBody: type: object required: @@ -8920,9 +9000,6 @@ components: $ref: "#/components/schemas/UserStatus" companyName: type: string - email: - type: string - format: email news: type: boolean metadata: @@ -9142,6 +9219,7 @@ components: - get_spec - sync - reset_connection + - refresh JobCreate: type: object required: @@ -9563,6 +9641,7 @@ components: - refresh_schema - heartbeat_timeout - destination_timeout + - transient_error AttemptStatus: type: string enum: diff --git a/airbyte-api/src/test/kotlin/io/airbyte/api/client/AirbyteApiClientTest.kt b/airbyte-api/src/test/kotlin/io/airbyte/api/client/AirbyteApiClientTest.kt new file mode 100644 index 00000000000..4c05e62e314 --- /dev/null +++ b/airbyte-api/src/test/kotlin/io/airbyte/api/client/AirbyteApiClientTest.kt @@ -0,0 +1,112 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.api.client2 + +import dev.failsafe.RetryPolicy +import io.mockk.mockk +import okhttp3.OkHttpClient +import okhttp3.Response +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Test + +class AirbyteApiClientTest { + @Test + fun `test that the Airbyte API client creates the underlying API objects with the provided configuration`() { + val basePath = "base-path" + val client: OkHttpClient = mockk() + val policy: RetryPolicy = mockk() + + val airbyteApiClient = AirbyteApiClient2(basePath, policy, client) + assertNotNull(airbyteApiClient.attemptApi) + assertEquals(client, airbyteApiClient.attemptApi.client) + assertEquals(policy, airbyteApiClient.attemptApi.policy) + assertEquals(basePath, airbyteApiClient.attemptApi.baseUrl) + assertNotNull(airbyteApiClient.connectionApi) + assertEquals(client, airbyteApiClient.connectionApi.client) + assertEquals(policy, airbyteApiClient.connectionApi.policy) + assertEquals(basePath, airbyteApiClient.connectionApi.baseUrl) + assertNotNull(airbyteApiClient.connectorBuilderProjectApi) + assertEquals(client, airbyteApiClient.connectorBuilderProjectApi.client) + assertEquals(policy, airbyteApiClient.connectorBuilderProjectApi.policy) + assertEquals(basePath, airbyteApiClient.connectorBuilderProjectApi.baseUrl) + assertNotNull(airbyteApiClient.deploymentMetadataApi) + assertEquals(client, airbyteApiClient.deploymentMetadataApi.client) + assertEquals(policy, airbyteApiClient.deploymentMetadataApi.policy) + assertEquals(basePath, airbyteApiClient.deploymentMetadataApi.baseUrl) + assertNotNull(airbyteApiClient.destinationApi) + assertEquals(client, airbyteApiClient.destinationApi.client) + assertEquals(policy, airbyteApiClient.destinationApi.policy) + assertEquals(basePath, airbyteApiClient.destinationApi.baseUrl) + assertNotNull(airbyteApiClient.destinationDefinitionApi) + assertEquals(client, airbyteApiClient.destinationDefinitionApi.client) + assertEquals(policy, airbyteApiClient.destinationDefinitionApi.policy) + assertEquals(basePath, airbyteApiClient.destinationDefinitionApi.baseUrl) + assertNotNull(airbyteApiClient.destinationDefinitionSpecificationApi) + assertEquals(client, airbyteApiClient.destinationDefinitionSpecificationApi.client) + assertEquals(policy, airbyteApiClient.destinationDefinitionSpecificationApi.policy) + assertEquals(basePath, airbyteApiClient.destinationDefinitionSpecificationApi.baseUrl) + assertNotNull(airbyteApiClient.healthApi) + assertEquals(client, airbyteApiClient.healthApi.client) + assertEquals(policy, airbyteApiClient.healthApi.policy) + assertEquals(basePath, airbyteApiClient.healthApi.baseUrl) + assertNotNull(airbyteApiClient.jobsApi) + assertEquals(client, airbyteApiClient.jobsApi.client) + assertEquals(policy, airbyteApiClient.jobsApi.policy) + assertEquals(basePath, airbyteApiClient.jobsApi.baseUrl) + assertNotNull(airbyteApiClient.jobRetryStatesApi) + assertEquals(client, airbyteApiClient.jobRetryStatesApi.client) + assertEquals(policy, airbyteApiClient.jobRetryStatesApi.policy) + assertEquals(basePath, airbyteApiClient.jobRetryStatesApi.baseUrl) + assertNotNull(airbyteApiClient.operationApi) + assertEquals(client, airbyteApiClient.operationApi.client) + assertEquals(policy, airbyteApiClient.operationApi.policy) + assertEquals(basePath, airbyteApiClient.operationApi.baseUrl) + assertNotNull(airbyteApiClient.organizationApi) + assertEquals(client, airbyteApiClient.organizationApi.client) + assertEquals(policy, 
airbyteApiClient.organizationApi.policy) + assertEquals(basePath, airbyteApiClient.organizationApi.baseUrl) + assertNotNull(airbyteApiClient.permissionApi) + assertEquals(client, airbyteApiClient.permissionApi.client) + assertEquals(policy, airbyteApiClient.permissionApi.policy) + assertEquals(basePath, airbyteApiClient.permissionApi.baseUrl) + assertNotNull(airbyteApiClient.secretPersistenceConfigApi) + assertEquals(client, airbyteApiClient.secretPersistenceConfigApi.client) + assertEquals(policy, airbyteApiClient.secretPersistenceConfigApi.policy) + assertEquals(basePath, airbyteApiClient.secretPersistenceConfigApi.baseUrl) + assertNotNull(airbyteApiClient.sourceApi) + assertEquals(client, airbyteApiClient.sourceApi.client) + assertEquals(policy, airbyteApiClient.sourceApi.policy) + assertEquals(basePath, airbyteApiClient.sourceApi.baseUrl) + assertNotNull(airbyteApiClient.sourceDefinitionApi) + assertEquals(client, airbyteApiClient.sourceDefinitionApi.client) + assertEquals(policy, airbyteApiClient.sourceDefinitionApi.policy) + assertEquals(basePath, airbyteApiClient.sourceDefinitionApi.baseUrl) + assertNotNull(airbyteApiClient.sourceDefinitionSpecificationApi) + assertEquals(client, airbyteApiClient.sourceDefinitionSpecificationApi.client) + assertEquals(policy, airbyteApiClient.sourceDefinitionSpecificationApi.policy) + assertEquals(basePath, airbyteApiClient.sourceDefinitionSpecificationApi.baseUrl) + assertNotNull(airbyteApiClient.stateApi) + assertEquals(client, airbyteApiClient.stateApi.client) + assertEquals(policy, airbyteApiClient.stateApi.policy) + assertEquals(basePath, airbyteApiClient.stateApi.baseUrl) + assertNotNull(airbyteApiClient.streamStatusesApi) + assertEquals(client, airbyteApiClient.streamStatusesApi.client) + assertEquals(policy, airbyteApiClient.streamStatusesApi.policy) + assertEquals(basePath, airbyteApiClient.streamStatusesApi.baseUrl) + assertNotNull(airbyteApiClient.userApi) + assertEquals(client, airbyteApiClient.userApi.client) + assertEquals(policy, airbyteApiClient.userApi.policy) + assertEquals(basePath, airbyteApiClient.userApi.baseUrl) + assertNotNull(airbyteApiClient.webBackendApi) + assertEquals(client, airbyteApiClient.webBackendApi.client) + assertEquals(policy, airbyteApiClient.webBackendApi.policy) + assertEquals(basePath, airbyteApiClient.webBackendApi.baseUrl) + assertNotNull(airbyteApiClient.workspaceApi) + assertEquals(client, airbyteApiClient.workspaceApi.client) + assertEquals(policy, airbyteApiClient.workspaceApi.policy) + assertEquals(basePath, airbyteApiClient.workspaceApi.baseUrl) + } +} diff --git a/airbyte-api/src/test/kotlin/io/airbyte/api/client/ThrowOn5xxInterceptorTest.kt b/airbyte-api/src/test/kotlin/io/airbyte/api/client/ThrowOn5xxInterceptorTest.kt new file mode 100644 index 00000000000..10b0cbf2d47 --- /dev/null +++ b/airbyte-api/src/test/kotlin/io/airbyte/api/client/ThrowOn5xxInterceptorTest.kt @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.api.client + +import io.micronaut.http.HttpStatus +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import okhttp3.Interceptor +import okhttp3.Request +import okhttp3.Response +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertDoesNotThrow +import org.junit.jupiter.api.assertThrows +import java.io.IOException + +internal class ThrowOn5xxInterceptorTest { + @Test + internal fun `test that when the response has an error status code, an exception is thrown`() { + val statusCode = HttpStatus.INTERNAL_SERVER_ERROR.code + val responseMessage = "error" + val chain: Interceptor.Chain = + mockk { + every { request() } returns mockk() + every { proceed(any()) } returns + mockk { + every { code } returns statusCode + every { message } returns responseMessage + } + } + + val interceptor = ThrowOn5xxInterceptor() + + val e = + assertThrows { + interceptor.intercept(chain) + } + assertEquals("HTTP error: $statusCode $responseMessage", e.message) + verify(exactly = 1) { chain.proceed(any()) } + } + + @Test + internal fun `test that when the response is not an error, an exception is not thrown`() { + val chain: Interceptor.Chain = + mockk { + every { request() } returns mockk() + every { proceed(any()) } returns + mockk { + every { code } returns HttpStatus.OK.code + } + } + + val interceptor = ThrowOn5xxInterceptor() + + assertDoesNotThrow { + interceptor.intercept(chain) + } + verify(exactly = 1) { chain.proceed(any()) } + } +} diff --git a/airbyte-api/src/test/kotlin/io/airbyte/api/client/WorkloadApiTest.kt b/airbyte-api/src/test/kotlin/io/airbyte/api/client/WorkloadApiTest.kt index 0f2eb60c42d..41156805619 100644 --- a/airbyte-api/src/test/kotlin/io/airbyte/api/client/WorkloadApiTest.kt +++ b/airbyte-api/src/test/kotlin/io/airbyte/api/client/WorkloadApiTest.kt @@ -23,7 +23,7 @@ class WorkloadApiTest { const val MESSAGE = "message" const val BODY = "body" const val STATUS_CODE = 400 - const val BASE_PATH = "basepath" + const val BASE_PATH = "http://basepath" } @Test diff --git a/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/AirbyteAuthHeaderInterceptorTest.kt b/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/AirbyteAuthHeaderInterceptorTest.kt new file mode 100644 index 00000000000..6a1e3bced82 --- /dev/null +++ b/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/AirbyteAuthHeaderInterceptorTest.kt @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.api.client.auth + +import io.micronaut.http.HttpHeaders +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import okhttp3.Interceptor +import okhttp3.Request +import okhttp3.Response +import org.junit.jupiter.api.Test +import java.util.Optional + +internal class AirbyteAuthHeaderInterceptorTest { + @Test + internal fun `test that when the Airbyte auth header is provided, the authentication header is added`() { + val applicationName = "the-application-name" + val headerName = "header-name" + val headerValue = "header-value" + val authHeader = + object : AirbyteAuthHeader { + override fun getHeaderName(): String { + return headerName + } + + override fun getHeaderValue(): String { + return headerValue + } + } + val interceptor = AirbyteAuthHeaderInterceptor(Optional.of(authHeader), applicationName) + val chain: Interceptor.Chain = mockk() + val builder: Request.Builder = mockk() + val request: Request = mockk() + + every { builder.addHeader(any(), any()) } returns (builder) + every { builder.build() } returns (mockk()) + every { request.header(HttpHeaders.USER_AGENT) } returns null + every { request.newBuilder() } returns (builder) + every { chain.request() } returns (request) + every { chain.proceed(any()) } returns (mockk()) + + interceptor.intercept(chain) + + verify { builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(applicationName)) } + verify { builder.addHeader(headerName, headerValue) } + } + + @Test + internal fun `test that when the Airbyte auth header is not provided, the authentication header is not added`() { + val applicationName = "the-application-name" + val interceptor = AirbyteAuthHeaderInterceptor(Optional.empty(), applicationName) + val chain: Interceptor.Chain = mockk() + val builder: Request.Builder = mockk() + val request: Request = mockk() + + every { builder.addHeader(any(), any()) } returns (builder) + every { builder.build() } returns (mockk()) + every { request.header(HttpHeaders.USER_AGENT) } returns null + every { request.newBuilder() } returns (builder) + every { chain.request() } returns (request) + every { chain.proceed(any()) } returns (mockk()) + + interceptor.intercept(chain) + + verify { builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(applicationName)) } + verify(exactly = 0) { builder.addHeader(any(), not(formatUserAgent(applicationName))) } + } +} diff --git a/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/InternalApiAuthenticationInterceptorTest.kt b/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/InternalApiAuthenticationInterceptorTest.kt new file mode 100644 index 00000000000..50db82e2daa --- /dev/null +++ b/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/InternalApiAuthenticationInterceptorTest.kt @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.api.client.auth + +import io.micronaut.http.HttpHeaders +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import okhttp3.Interceptor +import okhttp3.Request +import okhttp3.Response +import org.junit.jupiter.api.Test + +internal class InternalApiAuthenticationInterceptorTest { + @Test + internal fun `test that when the internal API header name is not blank, the authentication header is added`() { + val applicationName = "the-application-name" + val internalApiHeaderName = "header-name" + val authHeaderValue = "the token" + val interceptor = InternalApiAuthenticationInterceptor(internalApiHeaderName, authHeaderValue, applicationName) + val chain: Interceptor.Chain = mockk() + val builder: Request.Builder = mockk() + val request: Request = mockk() + + every { builder.addHeader(any(), any()) } returns (builder) + every { builder.build() } returns (mockk()) + every { request.header(HttpHeaders.USER_AGENT) } returns null + every { request.newBuilder() } returns (builder) + every { chain.request() } returns (request) + every { chain.proceed(any()) } returns (mockk()) + + interceptor.intercept(chain) + + verify { builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(applicationName)) } + verify { builder.addHeader(internalApiHeaderName, authHeaderValue) } + } + + @Test + internal fun `test that when the internal API header name is not blank but the token value is missing, the authentication header is not added`() { + val applicationName = "the-application-name" + val internalApiHeaderName = "header-name" + val authHeaderValue = "the token" + val interceptor = InternalApiAuthenticationInterceptor(internalApiHeaderName, "", applicationName) + val chain: Interceptor.Chain = mockk() + val builder: Request.Builder = mockk() + val request: Request = mockk() + + every { builder.addHeader(any(), any()) } returns (builder) + every { builder.build() } returns (mockk()) + every { request.header(HttpHeaders.USER_AGENT) } returns null + every { request.newBuilder() } returns (builder) + every { chain.request() } returns (request) + every { chain.proceed(any()) } returns (mockk()) + + interceptor.intercept(chain) + + verify { builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(applicationName)) } + verify(exactly = 0) { builder.addHeader(any(), authHeaderValue) } + } + + @Test + internal fun `test that when the internal API header name is blank, the authentication header is not added`() { + val applicationName = "the-application-name" + val internalApiHeaderName = "" + val authHeaderValue = "the token" + val interceptor = InternalApiAuthenticationInterceptor(internalApiHeaderName, authHeaderValue, applicationName) + val chain: Interceptor.Chain = mockk() + val builder: Request.Builder = mockk() + val request: Request = mockk() + + every { builder.addHeader(any(), any()) } returns (builder) + every { builder.build() } returns (mockk()) + every { request.header(HttpHeaders.USER_AGENT) } returns null + every { request.newBuilder() } returns (builder) + every { chain.request() } returns (request) + every { chain.proceed(any()) } returns (mockk()) + + interceptor.intercept(chain) + + verify { builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(applicationName)) } + verify(exactly = 0) { builder.addHeader(any(), authHeaderValue) } + } +} diff --git a/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/WorkloadApiAuthenticationInterceptorTest.kt b/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/WorkloadApiAuthenticationInterceptorTest.kt new 
file mode 100644 index 00000000000..f893b2c2db0 --- /dev/null +++ b/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/WorkloadApiAuthenticationInterceptorTest.kt @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.api.client.auth + +import io.airbyte.api.client.auth.WorkloadApiAuthenticationInterceptor.Companion.BEARER_TOKEN_PREFIX +import io.micronaut.http.HttpHeaders +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import okhttp3.Interceptor +import okhttp3.Request +import okhttp3.Response +import org.junit.jupiter.api.Test +import java.util.Base64 + +internal class WorkloadApiAuthenticationInterceptorTest { + @Test + internal fun `test that when the bearer token is not blank, the authentication header is added`() { + val applicationName = "the-application-name" + val bearerToken = "a bearer token" + val expectedBearerToken = Base64.getEncoder().encodeToString(bearerToken.toByteArray()) + val interceptor = WorkloadApiAuthenticationInterceptor(bearerToken, applicationName) + val chain: Interceptor.Chain = mockk() + val builder: Request.Builder = mockk() + val request: Request = mockk() + + every { builder.addHeader(any(), any()) } returns (builder) + every { builder.build() } returns (mockk()) + every { request.header(HttpHeaders.USER_AGENT) } returns null + every { request.newBuilder() } returns (builder) + every { chain.request() } returns (request) + every { chain.proceed(any()) } returns (mockk()) + + interceptor.intercept(chain) + + verify { builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(applicationName)) } + verify { builder.addHeader(HttpHeaders.AUTHORIZATION, "$BEARER_TOKEN_PREFIX $expectedBearerToken") } + } + + @Test + internal fun `test that when the bearer token is blank, the authentication header is not added`() { + val applicationName = "the-application-name" + val bearerToken = "" + val interceptor = WorkloadApiAuthenticationInterceptor(bearerToken, applicationName) + val chain: Interceptor.Chain = mockk() + val builder: Request.Builder = mockk() + val request: Request = mockk() + + every { builder.addHeader(any(), any()) } returns (builder) + every { builder.build() } returns (mockk()) + every { request.header(HttpHeaders.USER_AGENT) } returns null + every { request.newBuilder() } returns (builder) + every { chain.request() } returns (request) + every { chain.proceed(any()) } returns (mockk()) + + interceptor.intercept(chain) + + verify { builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(applicationName)) } + verify(exactly = 0) { builder.addHeader(HttpHeaders.AUTHORIZATION, "$BEARER_TOKEN_PREFIX $bearerToken") } + } +} diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index 73531a2a4d2..661c601de76 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -1,11 +1,12 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.1.0 -FROM ${JDK_IMAGE} +ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.2.1 +FROM scratch as builder WORKDIR /app - -USER root ADD airbyte-app.tar /app -RUN chown -R airbyte:airbyte /app + +FROM ${JDK_IMAGE} +WORKDIR /app +COPY --chown=airbyte:airbyte --from=builder /app /app USER airbyte:airbyte ENTRYPOINT ["/bin/bash", "-c", "airbyte-app/bin/airbyte-bootloader"] diff --git a/airbyte-bootloader/build.gradle.kts b/airbyte-bootloader/build.gradle.kts index be0509dea66..0b5e10e82e9 100644 --- a/airbyte-bootloader/build.gradle.kts +++ b/airbyte-bootloader/build.gradle.kts @@ -1,81 +1,83 @@ import 
java.util.Properties plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") } configurations.all { - resolutionStrategy { - force(libs.flyway.core, libs.jooq) - } + resolutionStrategy { + force(libs.flyway.core, libs.jooq) + } } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.bundles.flyway) - implementation(libs.jooq) - implementation(libs.guava) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.flyway) + implementation(libs.jooq) + implementation(libs.guava) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-micronaut")) - implementation(project(":airbyte-config:init")) - implementation(project(":airbyte-config:specs")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-config:config-secrets")) - implementation(project(":airbyte-data")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-featureflag")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-persistence:job-persistence")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-micronaut")) + implementation(project(":airbyte-config:init")) + implementation(project(":airbyte-config:specs")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-config:config-secrets")) + implementation(project(":airbyte-data")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-featureflag")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-persistence:job-persistence")) - runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.snakeyaml) - testCompileOnly(libs.lombok) - testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testCompileOnly(libs.lombok) + testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.bundles.junit) - 
testImplementation(libs.junit.jupiter.system.stubs) - testImplementation(libs.platform.testcontainers.postgresql) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.bundles.junit) + testImplementation(libs.junit.jupiter.system.stubs) + testImplementation(libs.platform.testcontainers.postgresql) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) - testRuntimeOnly(libs.junit.jupiter.engine) + testRuntimeOnly(libs.junit.jupiter.engine) } val env = Properties().apply { - load(rootProject.file(".env.dev").inputStream()) + load(rootProject.file(".env.dev").inputStream()) } airbyte { - application { - mainClass = "io.airbyte.bootloader.Application" - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - @Suppress("UNCHECKED_CAST") - localEnvVars.putAll(env.toMutableMap() as Map) - localEnvVars.putAll(mapOf( - "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: "undefined"), - "AIRBYTE_VERSION" to env["VERSION"].toString(), - "DATABASE_URL" to "jdbc:postgresql://localhost:5432/airbyte", - )) - } + application { + mainClass = "io.airbyte.bootloader.Application" + defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + @Suppress("UNCHECKED_CAST") + localEnvVars.putAll(env.toMutableMap() as Map) + localEnvVars.putAll( + mapOf( + "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: "undefined"), + "AIRBYTE_VERSION" to env["VERSION"].toString(), + "DATABASE_URL" to "jdbc:postgresql://localhost:5432/airbyte", + ) + ) + } - docker { - imageName = "bootloader" - } + docker { + imageName = "bootloader" + } } diff --git a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderTest.java b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderTest.java index 6123a60588d..1da5d11698a 100644 --- a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderTest.java +++ b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderTest.java @@ -97,8 +97,8 @@ class BootloaderTest { // ⚠️ This line should change with every new migration to show that you meant to make a new // migration to the prod database - private static final String CURRENT_CONFIGS_MIGRATION_VERSION = "0.55.1.001"; - private static final String CURRENT_JOBS_MIGRATION_VERSION = "0.50.4.003"; + private static final String CURRENT_CONFIGS_MIGRATION_VERSION = "0.55.1.003"; + private static final String CURRENT_JOBS_MIGRATION_VERSION = "0.57.2.001"; private static final String CDK_VERSION = "1.2.3"; @BeforeEach diff --git a/airbyte-commons-auth/build.gradle.kts b/airbyte-commons-auth/build.gradle.kts index 60eb54ada69..55f82472a47 100644 --- a/airbyte-commons-auth/build.gradle.kts +++ b/airbyte-commons-auth/build.gradle.kts @@ -1,36 +1,36 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + kotlin("jvm") + kotlin("kapt") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + 
annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - kapt(libs.bundles.micronaut.annotation.processor) + kapt(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.keycloak.client) - implementation(libs.bundles.micronaut) - implementation(libs.failsafe.okhttp) - implementation(libs.kotlin.logging) - implementation(libs.okhttp) - implementation(project(":airbyte-commons")) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.keycloak.client) + implementation(libs.bundles.micronaut) + implementation(libs.failsafe.okhttp) + implementation(libs.kotlin.logging) + implementation(libs.okhttp) + implementation(project(":airbyte-commons")) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.mockito.inline) - testImplementation(libs.mockk) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockito.inline) + testImplementation(libs.mockk) } tasks.named("test") { - maxHeapSize = "2g" + maxHeapSize = "2g" } // The DuplicatesStrategy will be required while this module is mixture of kotlin and java _with_ lombok dependencies. @@ -39,5 +39,5 @@ tasks.named("test") { // keepJavacAnnotationProcessors enabled, which causes duplicate META-INF files to be generated. // Once lombok has been removed, this can also be removed. tasks.withType().configureEach { - duplicatesStrategy = DuplicatesStrategy.EXCLUDE + duplicatesStrategy = DuplicatesStrategy.EXCLUDE } diff --git a/airbyte-commons-auth/src/test/kotlin/io/airbyte/commons/auth/AuthenticationInterceptorTest.kt b/airbyte-commons-auth/src/test/kotlin/io/airbyte/commons/auth/AuthenticationInterceptorTest.kt deleted file mode 100644 index 14c424a0461..00000000000 --- a/airbyte-commons-auth/src/test/kotlin/io/airbyte/commons/auth/AuthenticationInterceptorTest.kt +++ /dev/null @@ -1,56 +0,0 @@ -package io.airbyte.commons.auth - -import io.airbyte.commons.auth.AuthenticationInterceptor.Companion.BEARER_TOKEN_PREFIX -import io.airbyte.commons.auth.AuthenticationInterceptor.Companion.USER_AGENT_VALUE -import io.micronaut.http.HttpHeaders -import io.mockk.every -import io.mockk.mockk -import io.mockk.verify -import okhttp3.Interceptor -import okhttp3.Request -import okhttp3.Response -import org.junit.jupiter.api.Test -import java.util.Base64 - -class AuthenticationInterceptorTest { - @Test - fun `test that when the bearer token is not blank, the authentication header is added`() { - val bearerToken = "a bearer token" - val expectedBearerToken = Base64.getEncoder().encodeToString(bearerToken.toByteArray()) - val interceptor = AuthenticationInterceptor(bearerToken) - val chain: Interceptor.Chain = mockk() - val builder: Request.Builder = mockk() - val request: Request = mockk() - - every { builder.header(any(), any()) }.returns(builder) - every { builder.build() }.returns(mockk()) - every { request.newBuilder() }.returns(builder) - every { chain.request() }.returns(request) - every { chain.proceed(any()) }.returns(mockk()) - - interceptor.intercept(chain) - - verify { builder.header(HttpHeaders.USER_AGENT, USER_AGENT_VALUE) } - verify { builder.header(HttpHeaders.AUTHORIZATION, 
"$BEARER_TOKEN_PREFIX $expectedBearerToken") } - } - - @Test - fun `test that when the bearer token is blank, the authentication header is not added`() { - val bearerToken = "" - val interceptor = AuthenticationInterceptor(bearerToken) - val chain: Interceptor.Chain = mockk() - val builder: Request.Builder = mockk() - val request: Request = mockk() - - every { builder.header(any(), any()) }.returns(builder) - every { builder.build() }.returns(mockk()) - every { request.newBuilder() }.returns(builder) - every { chain.request() }.returns(request) - every { chain.proceed(any()) }.returns(mockk()) - - interceptor.intercept(chain) - - verify { builder.header(HttpHeaders.USER_AGENT, USER_AGENT_VALUE) } - verify(exactly = 0) { builder.header(HttpHeaders.AUTHORIZATION, "$BEARER_TOKEN_PREFIX $bearerToken") } - } -} diff --git a/airbyte-commons-converters/build.gradle.kts b/airbyte-commons-converters/build.gradle.kts index 531e53ff050..fafa7eeb404 100644 --- a/airbyte-commons-converters/build.gradle.kts +++ b/airbyte-commons-converters/build.gradle.kts @@ -1,36 +1,36 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) - implementation(libs.apache.commons.text) + implementation(libs.apache.commons.text) - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-persistence:job-persistence")) - implementation(libs.airbyte.protocol) - implementation(libs.guava) - implementation(libs.slf4j.api) - implementation(libs.bundles.datadog) + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-persistence:job-persistence")) + implementation(libs.airbyte.protocol) + implementation(libs.guava) + implementation(libs.slf4j.api) + implementation(libs.bundles.datadog) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testAnnotationProcessor(libs.jmh.annotations) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(libs.jmh.annotations) - testImplementation(libs.bundles.micronaut.test) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) + testImplementation(libs.bundles.micronaut.test) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(libs.junit.pioneer) } diff --git 
a/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/ConfigReplacer.java b/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/ConfigReplacer.java index 313c58b10d1..de7757f20d6 100644 --- a/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/ConfigReplacer.java +++ b/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/ConfigReplacer.java @@ -10,6 +10,9 @@ import io.airbyte.config.AllowedHosts; import io.airbyte.config.constants.AlwaysAllowedHosts; import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -64,7 +67,7 @@ public AllowedHosts getAllowedHosts(final AllowedHosts allowedHosts, final JsonN } if (value != null) { - valuesMap.put(fullKey, value); + valuesMap.put(fullKey, sanitize(value)); } } else if (type == JsonToken.START_OBJECT) { if (jsonParser.getCurrentName() != null) { @@ -98,4 +101,30 @@ public AllowedHosts getAllowedHosts(final AllowedHosts allowedHosts, final JsonN return resolvedAllowedHosts; } + public String sanitize(String s) { + try { + final String withProtocol = s.contains("://") ? s : "x://" + s; + final URI uri = new URI(withProtocol); + return uri.toURL().getHost(); + } catch (MalformedURLException | URISyntaxException e) { + // some hosts will be provided from the connector config with a protocol, like ftp://site.com or + // mongodb+srv://cluster0.abcd1.mongodb.net + String[] parts = s.split("://"); + s = parts.length > 1 ? parts[1] : parts[0]; + + // some hosts might have a trailing path. We only want the first chunk in all cases (e.g. + // http://site.com/path/foo/bar) + parts = s.split("/"); + s = parts[0]; + + // some hosts will have a username or password, like https://user:password@site.com + parts = s.split("@"); + s = parts.length > 1 ?
parts[1] : parts[0]; + + // remove slashes - we only want hostnames, not paths + s = s.replace("/", ""); + return s; + } + } + } diff --git a/airbyte-commons-license/build.gradle.kts b/airbyte-commons-license/build.gradle.kts index 7fc4b9b0ffa..e9c5f9dfe5e 100644 --- a/airbyte-commons-license/build.gradle.kts +++ b/airbyte-commons-license/build.gradle.kts @@ -1,29 +1,29 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.guava) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.guava) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-micronaut")) - implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-micronaut")) + implementation(project(":airbyte-config:config-models")) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.mockito.inline) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockito.inline) } tasks.named("test") { - maxHeapSize = "2g" + maxHeapSize = "2g" } diff --git a/airbyte-commons-micronaut-security/build.gradle.kts b/airbyte-commons-micronaut-security/build.gradle.kts index 51f6bc23022..3e725a866d8 100644 --- a/airbyte-commons-micronaut-security/build.gradle.kts +++ b/airbyte-commons-micronaut-security/build.gradle.kts @@ -1,30 +1,30 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.micronaut.security) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.micronaut.security) - implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons")) - testCompileOnly(libs.lombok) - testAnnotationProcessor(libs.lombok) - testAnnotationProcessor(platform(libs.micronaut.platform)) - 
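One observation on the `ConfigReplacer.sanitize` method added above: the happy path prefixes scheme-less values with `x://` and calls `URI.toURL()`, which rejects unregistered protocols, so plain hostnames appear to land in the string-parsing fallback as well (and the final `replace("/", "")` looks redundant after splitting on `/`). A Kotlin transliteration of that fallback with illustrative inputs:

```kotlin
// Transliteration of the fallback branch of ConfigReplacer.sanitize; the
// sample hosts in main() are illustrative, not taken from the diff.
fun sanitizeFallback(input: String): String {
  // strip a scheme such as ftp:// or mongodb+srv://
  var s = input.split("://").let { if (it.size > 1) it[1] else it[0] }
  // keep only the host chunk, dropping any trailing path
  s = s.split("/")[0]
  // drop user-info such as user:password@
  s = s.split("@").let { if (it.size > 1) it[1] else it[0] }
  return s
}

fun main() {
  println(sanitizeFallback("mongodb+srv://cluster0.abcd1.mongodb.net")) // cluster0.abcd1.mongodb.net
  println(sanitizeFallback("http://site.com/path/foo/bar")) // site.com
  println(sanitizeFallback("https://user:password@site.com")) // site.com
}
```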
testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testCompileOnly(libs.lombok) + testAnnotationProcessor(libs.lombok) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.mockito.inline) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockito.inline) } tasks.named("test") { - maxHeapSize = "2g" + maxHeapSize = "2g" } diff --git a/airbyte-commons-micronaut/build.gradle.kts b/airbyte-commons-micronaut/build.gradle.kts index 6ff31d9bf6b..972c5096088 100644 --- a/airbyte-commons-micronaut/build.gradle.kts +++ b/airbyte-commons-micronaut/build.gradle.kts @@ -1,28 +1,28 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.micronaut.security) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.micronaut.security) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-config:config-models")) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.mockito.inline) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockito.inline) } tasks.named("test") { - maxHeapSize = "2g" + maxHeapSize = "2g" } diff --git a/airbyte-commons-protocol/build.gradle.kts b/airbyte-commons-protocol/build.gradle.kts index e337070f6e3..22a55a5b0f4 100644 --- a/airbyte-commons-protocol/build.gradle.kts +++ b/airbyte-commons-protocol/build.gradle.kts @@ -1,27 +1,27 @@ plugins { - id("io.airbyte.gradle.jvm") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm") + id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-json-validation")) - implementation(libs.bundles.micronaut.annotation) - implementation(libs.airbyte.protocol) - implementation(libs.guava) - implementation(libs.bundles.jackson) + 
implementation(libs.bundles.micronaut.annotation) + implementation(libs.airbyte.protocol) + implementation(libs.guava) + implementation(libs.bundles.jackson) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) - testRuntimeOnly(libs.junit.jupiter.engine) + testRuntimeOnly(libs.junit.jupiter.engine) } diff --git a/airbyte-commons-server/build.gradle.kts b/airbyte-commons-server/build.gradle.kts index 5ee46f69885..d4632552000 100644 --- a/airbyte-commons-server/build.gradle.kts +++ b/airbyte-commons-server/build.gradle.kts @@ -1,98 +1,100 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - id("org.jetbrains.kotlin.jvm") - id("org.jetbrains.kotlin.kapt") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + kotlin("jvm") + kotlin("kapt") } configurations.all { - resolutionStrategy { - // Ensure that the versions defined in deps.toml are used - // instead of versions from transitive dependencies - force(libs.flyway.core, libs.s3, libs.aws.java.sdk.s3) - } + resolutionStrategy { + // Ensure that the versions defined in deps.toml are used + // instead of versions from transitive dependencies + force(libs.flyway.core, libs.s3, libs.aws.java.sdk.s3) + } } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - kapt(platform(libs.micronaut.platform)) - kapt(libs.bundles.micronaut.annotation.processor) - kapt(libs.micronaut.jaxrs.processor) + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) + kapt(libs.micronaut.jaxrs.processor) - kaptTest(platform(libs.micronaut.platform)) - kaptTest(libs.bundles.micronaut.test.annotation.processor) + kaptTest(platform(libs.micronaut.platform)) + kaptTest(libs.bundles.micronaut.test.annotation.processor) - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) - annotationProcessor(libs.micronaut.jaxrs.processor) + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) + annotationProcessor(libs.micronaut.jaxrs.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.micronaut.cache.caffeine) - implementation(libs.micronaut.inject) - implementation(libs.micronaut.jaxrs.server) - implementation(libs.micronaut.security) - implementation(libs.bundles.flyway) - implementation(libs.s3) - implementation(libs.aws.java.sdk.s3) - implementation(libs.sts) - implementation(libs.bundles.apache) - implementation(libs.slugify) - implementation(libs.quartz.scheduler) - implementation(libs.temporal.sdk) - implementation(libs.swagger.annotations) - implementation(libs.bundles.log4j) - implementation(libs.commons.io) - implementation(project(":airbyte-analytics")) - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons")) - 
implementation(project(":airbyte-commons-auth")) - implementation(project(":airbyte-commons-converters")) - implementation(project(":airbyte-commons-license")) - implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-commons-temporal-core")) - implementation(project(":airbyte-commons-with-dependencies")) - implementation(project(":airbyte-config:init")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-config:config-secrets")) - implementation(project(":airbyte-config:specs")) - implementation(project(":airbyte-data")) - implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-oauth")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-persistence:job-persistence")) - implementation(project(":airbyte-worker-models")) - implementation(project(":airbyte-notification")) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.micronaut.cache.caffeine) + implementation(libs.micronaut.inject) + implementation(libs.micronaut.jaxrs.server) + implementation(libs.micronaut.security) + implementation(libs.bundles.micronaut.data.jdbc) + implementation(libs.bundles.micronaut.kotlin) + implementation(libs.bundles.flyway) + implementation(libs.s3) + implementation(libs.aws.java.sdk.s3) + implementation(libs.sts) + implementation(libs.bundles.apache) + implementation(libs.slugify) + implementation(libs.quartz.scheduler) + implementation(libs.temporal.sdk) + implementation(libs.swagger.annotations) + implementation(libs.bundles.log4j) + implementation(libs.commons.io) + implementation(project(":airbyte-analytics")) + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-auth")) + implementation(project(":airbyte-commons-converters")) + implementation(project(":airbyte-commons-license")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-commons-temporal-core")) + implementation(project(":airbyte-commons-with-dependencies")) + implementation(project(":airbyte-config:init")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-config:config-secrets")) + implementation(project(":airbyte-config:specs")) + implementation(project(":airbyte-data")) + implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-oauth")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-persistence:job-persistence")) + implementation(project(":airbyte-worker-models")) + implementation(project(":airbyte-notification")) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.postgresql) - testImplementation(libs.platform.testcontainers.postgresql) - testImplementation(libs.mockwebserver) - testImplementation(libs.mockito.inline) - 
testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.micronaut.http) - testImplementation(libs.mockk) + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.postgresql) + testImplementation(libs.platform.testcontainers.postgresql) + testImplementation(libs.mockwebserver) + testImplementation(libs.mockito.inline) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.micronaut.http) + testImplementation(libs.mockk) - testRuntimeOnly(libs.junit.jupiter.engine) + testRuntimeOnly(libs.junit.jupiter.engine) } // Even though Kotlin is excluded on Spotbugs, this project // still runs into spotbug issues. Working theory is that // generated code is being picked up. Disable as a short-term fix. tasks.named("spotbugsMain") { - enabled = false + enabled = false } tasks.withType() { - duplicatesStrategy = DuplicatesStrategy.EXCLUDE + duplicatesStrategy = DuplicatesStrategy.EXCLUDE } \ No newline at end of file diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java index ef22be86620..1f05e99c42d 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java @@ -5,8 +5,6 @@ package io.airbyte.commons.server.handlers; import static io.airbyte.commons.converters.ConnectionHelper.validateCatalogDoesntContainDuplicateStreamNames; -import static io.airbyte.persistence.job.JobNotifier.CONNECTION_DISABLED_NOTIFICATION; -import static io.airbyte.persistence.job.JobNotifier.CONNECTION_DISABLED_WARNING_NOTIFICATION; import static io.airbyte.persistence.job.models.Job.REPLICATION_TYPES; import static java.time.temporal.ChronoUnit.DAYS; @@ -148,9 +146,10 @@ public class ConnectionsHandler { private final Integer maxDaysOfOnlyFailedJobsBeforeConnectionDisable; private final Integer maxFailedJobsInARowBeforeConnectionDisable; private final int maxJobLookback = 10; + private final StreamRefreshesHandler streamRefreshesHandler; @Inject - public ConnectionsHandler( + public ConnectionsHandler(final StreamRefreshesHandler streamRefreshesHandler, final JobPersistence jobPersistence, final ConfigRepository configRepository, @Named("uuidGenerator") final Supplier uuidGenerator, @@ -177,6 +176,7 @@ public ConnectionsHandler( this.jobNotifier = jobNotifier; this.maxDaysOfOnlyFailedJobsBeforeConnectionDisable = maxDaysOfOnlyFailedJobsBeforeConnectionDisable; this.maxFailedJobsInARowBeforeConnectionDisable = maxFailedJobsInARowBeforeConnectionDisable; + this.streamRefreshesHandler = streamRefreshesHandler; } /** @@ -338,9 +338,6 @@ InternalOperationResult autoDisableConnection(final UUID connectionId, final Ins } else if (numFailures == maxFailedJobsInARowBeforeConnectionDisableWarning && !warningPreviouslySentForMaxDays) { // warn if number of consecutive failures hits 50% of MaxFailedJobsInARow jobNotifier.autoDisableConnectionWarning(optionalLastJob.get(), attemptStats); - // explicitly send to email if customer.io api key is set, since email notification cannot be set by - // configs through UI yet - 
jobNotifier.notifyJobByEmail(null, CONNECTION_DISABLED_WARNING_NOTIFICATION, optionalLastJob.get(), attemptStats); return new InternalOperationResult().succeeded(false); } @@ -372,9 +369,6 @@ InternalOperationResult autoDisableConnection(final UUID connectionId, final Ins if (firstReplicationOlderThanMaxDisableWarningDays && successOlderThanPrevFailureByMaxWarningDays) { jobNotifier.autoDisableConnectionWarning(optionalLastJob.get(), attemptStats); - // explicitly send to email if customer.io api key is set, since email notification cannot be set by - // configs through UI yet - jobNotifier.notifyJobByEmail(null, CONNECTION_DISABLED_WARNING_NOTIFICATION, optionalLastJob.get(), attemptStats); } return new InternalOperationResult().succeeded(false); } @@ -388,9 +382,6 @@ private void disableConnection(final StandardSync standardSync, final Job lastJo attemptStats.add(jobPersistence.getAttemptStats(lastJob.getId(), attempt.getAttemptNumber())); } jobNotifier.autoDisableConnection(lastJob, attemptStats); - // explicitly send to email if customer.io api key is set, since email notification cannot be set by - // configs through UI yet - jobNotifier.notifyJobByEmail(null, CONNECTION_DISABLED_NOTIFICATION, lastJob, attemptStats); } private int getDaysSinceTimestamp(final long currentTimestampInSeconds, final long timestampInSeconds) { @@ -806,6 +797,7 @@ public Optional getConnectionAirbyteCatalog(final UUID connectio public void deleteConnection(final UUID connectionId) throws JsonValidationException, ConfigNotFoundException, IOException { connectionHelper.deleteConnection(connectionId); eventRunner.forceDeleteConnection(connectionId); + streamRefreshesHandler.deleteRefreshesForConnection(connectionId); } private ConnectionRead buildConnectionRead(final UUID connectionId) diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandler.java index d828a355ca6..17c1339f485 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandler.java @@ -27,6 +27,7 @@ import io.airbyte.api.model.generated.SourceDefinitionIdBody; import io.airbyte.api.model.generated.WorkspaceIdRequestBody; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.server.handlers.helpers.BuilderProjectUpdater; import io.airbyte.commons.server.handlers.helpers.DeclarativeSourceManifestInjector; import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.ConfigSchema; @@ -77,6 +78,8 @@ public class ConnectorBuilderProjectsHandler { private final ConfigRepository configRepository; + + private final BuilderProjectUpdater buildProjectUpdater; private final Supplier uuidSupplier; private final DeclarativeSourceManifestInjector manifestInjector; private final CdkVersionProvider cdkVersionProvider; @@ -89,11 +92,12 @@ public class ConnectorBuilderProjectsHandler { private final JsonSecretsProcessor secretsProcessor; private final ConnectorBuilderServerApi connectorBuilderServerApiClient; - static final String SPEC_FIELD = "spec"; - static final String CONNECTION_SPECIFICATION_FIELD = "connection_specification"; + public static final String SPEC_FIELD = "spec"; + public static final String CONNECTION_SPECIFICATION_FIELD = "connection_specification"; @Inject public 
ConnectorBuilderProjectsHandler(final ConfigRepository configRepository, + final BuilderProjectUpdater builderProjectUpdater, final CdkVersionProvider cdkVersionProvider, @Named("uuidGenerator") final Supplier uuidSupplier, final DeclarativeSourceManifestInjector manifestInjector, @@ -106,6 +110,7 @@ public ConnectorBuilderProjectsHandler(final ConfigRepository configRepository, @Named("jsonSecretsProcessorWithCopy") final JsonSecretsProcessor secretsProcessor, final ConnectorBuilderServerApi connectorBuilderServerApiClient) { this.configRepository = configRepository; + this.buildProjectUpdater = builderProjectUpdater; this.cdkVersionProvider = cdkVersionProvider; this.uuidSupplier = uuidSupplier; this.manifestInjector = manifestInjector; @@ -154,22 +159,12 @@ public ConnectorBuilderProjectIdWithWorkspaceId createConnectorBuilderProject(fi } public void updateConnectorBuilderProject(final ExistingConnectorBuilderProjectWithWorkspaceId projectUpdate) - throws IOException, ConfigNotFoundException { + throws ConfigNotFoundException, IOException { + final ConnectorBuilderProject connectorBuilderProject = configRepository.getConnectorBuilderProject(projectUpdate.getBuilderProjectId(), false); validateProjectUnderRightWorkspace(connectorBuilderProject, projectUpdate.getWorkspaceId()); - if (connectorBuilderProject.getActorDefinitionId() != null) { - configRepository.updateBuilderProjectAndActorDefinition(projectUpdate.getBuilderProjectId(), - projectUpdate.getWorkspaceId(), - projectUpdate.getBuilderProject().getName(), - projectUpdate.getBuilderProject().getDraftManifest(), - connectorBuilderProject.getActorDefinitionId()); - } else { - configRepository.writeBuilderProjectDraft(projectUpdate.getBuilderProjectId(), - projectUpdate.getWorkspaceId(), - projectUpdate.getBuilderProject().getName(), - projectUpdate.getBuilderProject().getDraftManifest()); - } + buildProjectUpdater.persistBuilderProjectUpdate(projectUpdate); } public void deleteConnectorBuilderProject(final ConnectorBuilderProjectIdWithWorkspaceId projectDelete) @@ -392,7 +387,6 @@ private ConnectorBuilderHttpRequest convertHttpRequest(@Nullable final HttpReque ? 
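`updateConnectorBuilderProject` above now delegates its published-versus-draft branching to the injected `BuilderProjectUpdater`. A compact sketch of that seam; only the `BuilderProjectUpdater` name comes from the diff, while the store interface and data class are stand-ins so the example compiles on its own:

```kotlin
import java.util.UUID

// Stand-ins so the sketch is self-contained; the real types live in
// airbyte-config and the generated API models.
data class BuilderProject(val actorDefinitionId: UUID?, val name: String, val draftManifest: String)

interface ProjectStore {
  fun get(projectId: UUID): BuilderProject
  fun updateProjectAndActorDefinition(projectId: UUID, update: BuilderProject)
  fun writeDraft(projectId: UUID, name: String, draftManifest: String)
}

interface BuilderProjectUpdater {
  fun persistBuilderProjectUpdate(projectId: UUID, update: BuilderProject)
}

// Assumed default implementation restating the removed if/else: projects that
// already back an actor definition update both records; pure drafts just
// persist the draft manifest.
class StoreBackedUpdater(private val store: ProjectStore) : BuilderProjectUpdater {
  override fun persistBuilderProjectUpdate(projectId: UUID, update: BuilderProject) {
    val existing = store.get(projectId)
    if (existing.actorDefinitionId != null) {
      store.updateProjectAndActorDefinition(projectId, update)
    } else {
      store.writeDraft(projectId, update.name, update.draftManifest)
    }
  }
}
```

The seam presumably lets alternative update strategies be injected without touching the handler.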
new ConnectorBuilderHttpRequest() .url(request.getUrl()) .httpMethod(ConnectorBuilderHttpRequest.HttpMethodEnum.fromString(request.getHttpMethod().getValue())) - .parameters(request.getParameters()) .body(request.getBody()) .headers(request.getHeaders()) : null; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java index cde865497c9..a718ae4a962 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java @@ -226,8 +226,6 @@ public Object getJobInput(final SyncInput input) { .withDestinationConfiguration(attemptSyncConfig.getDestinationConfiguration()) .withOperationSequence(config.getOperationSequence()) .withWebhookOperationConfigs(config.getWebhookOperationConfigs()) - .withCatalog(config.getConfiguredAirbyteCatalog()) - .withState(attemptSyncConfig.getState()) .withSyncResourceRequirements(config.getSyncResourceRequirements()) .withConnectionId(connectionId) .withWorkspaceId(config.getWorkspaceId()) diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobsHandler.java index e042e4bf5e2..a7ab2251e42 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobsHandler.java @@ -80,7 +80,7 @@ public InternalOperationResult jobFailure(final JobFailureRequest input) { if (!job.getConfigType().equals(JobConfig.ConfigType.RESET_CONNECTION)) { jobNotifier.failJob(job, attemptStats); } - jobCreationAndStatusUpdateHelper.emitJobToReleaseStagesMetric(OssMetricsRegistry.JOB_FAILED_BY_RELEASE_STAGE, job); + jobCreationAndStatusUpdateHelper.emitJobToReleaseStagesMetric(OssMetricsRegistry.JOB_FAILED_BY_RELEASE_STAGE, job, input); final UUID connectionId = UUID.fromString(job.getScope()); if (!connectionId.equals(input.getConnectionId())) { @@ -160,7 +160,7 @@ public InternalOperationResult jobSuccessWithAttemptNumber(final JobSuccessWithA if (!job.getConfigType().equals(JobConfig.ConfigType.RESET_CONNECTION)) { jobNotifier.successJob(job, attemptStats); } - jobCreationAndStatusUpdateHelper.emitJobToReleaseStagesMetric(OssMetricsRegistry.JOB_SUCCEEDED_BY_RELEASE_STAGE, job); + jobCreationAndStatusUpdateHelper.emitJobToReleaseStagesMetric(OssMetricsRegistry.JOB_SUCCEEDED_BY_RELEASE_STAGE, job, input); jobCreationAndStatusUpdateHelper.trackCompletion(job, JobStatus.SUCCEEDED); return new InternalOperationResult().succeeded(true); @@ -238,7 +238,6 @@ public void persistJobCancellation(final UUID connectionId, final long jobId, fi attemptStats.add(jobPersistence.getAttemptStats(jobId, attempt.getAttemptNumber())); } jobCreationAndStatusUpdateHelper.emitJobToReleaseStagesMetric(OssMetricsRegistry.JOB_CANCELLED_BY_RELEASE_STAGE, job); - jobNotifier.failJob(job, attemptStats); jobCreationAndStatusUpdateHelper.trackCompletion(job, JobStatus.FAILED); } catch (final IOException e) { jobCreationAndStatusUpdateHelper.trackCompletionForInternalFailure(jobId, connectionId, attemptNumber, diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OrganizationsHandler.java 
b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OrganizationsHandler.java index bbe77adfb96..8b4da769343 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OrganizationsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OrganizationsHandler.java @@ -10,6 +10,7 @@ import io.airbyte.api.model.generated.OrganizationRead; import io.airbyte.api.model.generated.OrganizationReadList; import io.airbyte.api.model.generated.OrganizationUpdateRequestBody; +import io.airbyte.commons.server.errors.ConflictException; import io.airbyte.config.ConfigSchema; import io.airbyte.config.Organization; import io.airbyte.config.Permission; @@ -17,7 +18,8 @@ import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository.ResourcesByUserQueryPaginated; import io.airbyte.config.persistence.OrganizationPersistence; -import io.airbyte.config.persistence.PermissionPersistence; +import io.airbyte.data.services.PermissionRedundantException; +import io.airbyte.data.services.PermissionService; import jakarta.inject.Inject; import jakarta.inject.Named; import jakarta.inject.Singleton; @@ -28,8 +30,6 @@ import java.util.function.Supplier; import java.util.stream.Collectors; import org.jooq.tools.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * OrganizationHandler for handling organization resource related operation. @@ -39,18 +39,17 @@ @Singleton public class OrganizationsHandler { - private static final Logger LOGGER = LoggerFactory.getLogger(OrganizationsHandler.class); - private final PermissionPersistence permissionPersistence; + private final PermissionService permissionService; private final OrganizationPersistence organizationPersistence; private final Supplier uuidGenerator; @Inject public OrganizationsHandler(final OrganizationPersistence organizationPersistence, - final PermissionPersistence permissionPersistence, + final PermissionService permissionService, @Named("uuidGenerator") final Supplier uuidGenerator) { this.organizationPersistence = organizationPersistence; - this.permissionPersistence = permissionPersistence; + this.permissionService = permissionService; this.uuidGenerator = uuidGenerator; } @@ -80,13 +79,17 @@ public OrganizationRead createOrganization(final OrganizationCreateRequestBody o .withPba(pba) .withOrgLevelBilling(orgLevelBilling); organizationPersistence.createOrganization(organization); - // Also create an OrgAdmin permission. - final Permission orgAdminPermission = new Permission() - .withPermissionId(uuidGenerator.get()) - .withUserId(userId) - .withOrganizationId(orgId) - .withPermissionType(PermissionType.ORGANIZATION_ADMIN); - permissionPersistence.writePermission(orgAdminPermission); + + try { + // Also create an OrgAdmin permission. 
+ permissionService.createPermission(new Permission() + .withPermissionId(uuidGenerator.get()) + .withUserId(userId) + .withOrganizationId(orgId) + .withPermissionType(PermissionType.ORGANIZATION_ADMIN)); + } catch (final PermissionRedundantException e) { + throw new ConflictException(e.getMessage(), e); + } return buildOrganizationRead(organization); } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/PermissionHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/PermissionHandler.java index 8bebee24eac..2cbe9121542 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/PermissionHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/PermissionHandler.java @@ -17,13 +17,16 @@ import io.airbyte.api.model.generated.PermissionsCheckMultipleWorkspacesRequest; import io.airbyte.commons.enums.Enums; import io.airbyte.commons.lang.Exceptions; +import io.airbyte.commons.server.errors.ConflictException; import io.airbyte.commons.server.errors.OperationNotAllowedException; import io.airbyte.config.ConfigSchema; import io.airbyte.config.Permission; import io.airbyte.config.helpers.PermissionHelper; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.PermissionPersistence; -import io.airbyte.config.persistence.SQLOperationNotAllowedException; +import io.airbyte.data.services.PermissionRedundantException; +import io.airbyte.data.services.PermissionService; +import io.airbyte.data.services.RemoveLastOrgAdminPermissionException; import io.airbyte.data.services.WorkspaceService; import io.airbyte.validation.json.JsonValidationException; import jakarta.inject.Named; @@ -35,7 +38,6 @@ import java.util.UUID; import java.util.function.Supplier; import java.util.stream.Collectors; -import org.jooq.exception.DataAccessException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -50,14 +52,17 @@ public class PermissionHandler { private final Supplier uuidGenerator; private final PermissionPersistence permissionPersistence; private final WorkspaceService workspaceService; + private final PermissionService permissionService; public PermissionHandler( final PermissionPersistence permissionPersistence, final WorkspaceService workspaceService, - @Named("uuidGenerator") final Supplier uuidGenerator) { + @Named("uuidGenerator") final Supplier uuidGenerator, + final PermissionService permissionService) { this.uuidGenerator = uuidGenerator; this.permissionPersistence = permissionPersistence; this.workspaceService = workspaceService; + this.permissionService = permissionService; } /** @@ -91,15 +96,11 @@ public PermissionRead createPermission(final PermissionCreate permissionCreate) .withWorkspaceId(permissionCreate.getWorkspaceId()) .withOrganizationId(permissionCreate.getOrganizationId()); - permissionPersistence.writePermission(permission); - final PermissionRead result; try { - result = buildPermissionRead(permissionId); - } catch (final ConfigNotFoundException ex) { - LOGGER.error("Config not found for permissionId: {} in CreatePermission.", permissionId); - throw new IOException(ex); + return buildPermissionRead(permissionService.createPermission(permission)); + } catch (final PermissionRedundantException e) { + throw new ConflictException(e.getMessage(), e); } - return result; } private Permission getPermissionById(final UUID permissionId) throws ConfigNotFoundException, IOException { @@ -185,12 +186,11 @@ public 
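A pattern repeats across `OrganizationsHandler` and `PermissionHandler` in this change: the new `PermissionService` raises domain exceptions (`PermissionRedundantException`, `RemoveLastOrgAdminPermissionException`), and the handlers translate them into an API-facing `ConflictException`, presumably surfacing as HTTP 409. Schematically, with simplified stand-in types:

```kotlin
// Simplified stand-ins; the real exceptions live in io.airbyte.data.services
// and io.airbyte.commons.server.errors.
class PermissionRedundantException(message: String) : Exception(message)
class ConflictException(message: String?, cause: Throwable) : RuntimeException(message, cause)

interface PermissionServiceSketch {
  @Throws(PermissionRedundantException::class)
  fun createPermission(userId: String, organizationId: String, permissionType: String)
}

class PermissionHandlerSketch(private val service: PermissionServiceSketch) {
  fun createPermission(userId: String, organizationId: String, permissionType: String) {
    try {
      service.createPermission(userId, organizationId, permissionType)
    } catch (e: PermissionRedundantException) {
      // Persistence-layer invariants surface to API clients as a conflict,
      // replacing the old DataAccessException unwrapping.
      throw ConflictException(e.message, e)
    }
  }
}
```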
PermissionRead getPermission(final PermissionIdRequestBody permissionIdRe * "workspace_xxx"/"instance_admin" * * @param permissionUpdate The permission update. - * @return The updated permission. * @throws IOException if unable to update the permissions. * @throws ConfigNotFoundException if unable to update the permissions. * @throws OperationNotAllowedException if update is prevented by business logic. */ - public PermissionRead updatePermission(final PermissionUpdate permissionUpdate) + public void updatePermission(final PermissionUpdate permissionUpdate) throws IOException, ConfigNotFoundException, OperationNotAllowedException, JsonValidationException { // INSTANCE_ADMIN permissions are only created in special cases, so we block them here. @@ -207,15 +207,10 @@ public PermissionRead updatePermission(final PermissionUpdate permissionUpdate) .withWorkspaceId(existingPermission.getWorkspaceId()) // cannot be updated .withUserId(existingPermission.getUserId()); // cannot be updated try { - permissionPersistence.writePermission(updatedPermission); - } catch (final DataAccessException e) { - if (e.getCause() instanceof SQLOperationNotAllowedException) { - throw new OperationNotAllowedException(e.getCause().getMessage(), e); - } else { - throw new IOException(e); - } + permissionService.updatePermission(updatedPermission); + } catch (final RemoveLastOrgAdminPermissionException e) { + throw new ConflictException(e.getMessage(), e); } - return buildPermissionRead(permissionUpdate.getPermissionId()); } /** @@ -399,32 +394,35 @@ public PermissionReadList listPermissionsByUser(final UUID userId) throws IOExce * Deletes a permission. * * @param permissionIdRequestBody The permission to be deleted. - * @throws IOException if unable to delete the permission. - * @throws OperationNotAllowedException if deletion is prevented by business logic. + * @throws ConflictException if deletion is prevented by business logic. */ - public void deletePermission(final PermissionIdRequestBody permissionIdRequestBody) throws IOException { + public void deletePermission(final PermissionIdRequestBody permissionIdRequestBody) { try { - permissionPersistence.deletePermissionById(permissionIdRequestBody.getPermissionId()); - } catch (final DataAccessException e) { - if (e.getCause() instanceof SQLOperationNotAllowedException) { - throw new OperationNotAllowedException(e.getCause().getMessage(), e); - } else { - throw new IOException(e); - } + permissionService.deletePermission(permissionIdRequestBody.getPermissionId()); + } catch (final RemoveLastOrgAdminPermissionException e) { + throw new ConflictException(e.getMessage(), e); } } /** * Delete all permission records that match a particular userId and workspaceId. 
*/ - public void deleteUserFromWorkspace(final PermissionDeleteUserFromWorkspaceRequestBody deleteUserFromWorkspaceRequestBody) throws IOException { + public void deleteUserFromWorkspace(final PermissionDeleteUserFromWorkspaceRequestBody deleteUserFromWorkspaceRequestBody) + throws IOException { final UUID userId = deleteUserFromWorkspaceRequestBody.getUserIdToRemove(); final UUID workspaceId = deleteUserFromWorkspaceRequestBody.getWorkspaceId(); // delete all workspace-level permissions that match the userId and workspaceId - permissionPersistence.listPermissionsByUser(userId).stream() + final List userWorkspacePermissionIds = permissionPersistence.listPermissionsByUser(userId).stream() .filter(permission -> permission.getWorkspaceId() != null && permission.getWorkspaceId().equals(workspaceId)) - .forEach(permission -> Exceptions.toRuntime(() -> permissionPersistence.deletePermissionById(permission.getPermissionId()))); + .map(Permission::getPermissionId) + .toList(); + + try { + permissionService.deletePermissions(userWorkspacePermissionIds); + } catch (final RemoveLastOrgAdminPermissionException e) { + throw new ConflictException(e.getMessage(), e); + } } } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java index 28eb8372761..f975f7da396 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java @@ -82,6 +82,7 @@ import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.StreamResetPersistence; +import io.airbyte.config.persistence.domain.StreamRefresh; import io.airbyte.config.secrets.SecretsRepositoryWriter; import io.airbyte.config.secrets.persistence.RuntimeSecretPersistence; import io.airbyte.data.services.SecretPersistenceConfigService; @@ -161,6 +162,7 @@ public class SchedulerHandler { private final ConnectorDefinitionSpecificationHandler connectorDefinitionSpecificationHandler; private final WorkspaceService workspaceService; private final SecretPersistenceConfigService secretPersistenceConfigService; + private final StreamRefreshesHandler streamRefreshesHandler; @VisibleForTesting public SchedulerHandler(final ConfigRepository configRepository, @@ -184,7 +186,8 @@ public SchedulerHandler(final ConfigRepository configRepository, final JobTracker jobTracker, final ConnectorDefinitionSpecificationHandler connectorDefinitionSpecificationHandler, final WorkspaceService workspaceService, - final SecretPersistenceConfigService secretPersistenceConfigService) { + final SecretPersistenceConfigService secretPersistenceConfigService, + final StreamRefreshesHandler streamRefreshesHandler) { this.configRepository = configRepository; this.secretsRepositoryWriter = secretsRepositoryWriter; this.synchronousSchedulerClient = synchronousSchedulerClient; @@ -210,6 +213,7 @@ public SchedulerHandler(final ConfigRepository configRepository, configRepository, jobNotifier, jobTracker); + this.streamRefreshesHandler = streamRefreshesHandler; } public CheckConnectionRead checkSourceConnectionFromSourceId(final SourceIdRequestBody sourceIdRequestBody) @@ -585,6 +589,7 @@ public JobInfoRead createJob(final JobCreate jobCreate) throws JsonValidationExc final StandardSync standardSync = 
configRepository.getStandardSync(jobCreate.getConnectionId()); final List streamsToReset = streamResetPersistence.getStreamResets(jobCreate.getConnectionId()); log.info("Found the following streams to reset for connection {}: {}", jobCreate.getConnectionId(), streamsToReset); + final List streamsToRefresh = streamRefreshesHandler.getRefreshesForConnection(jobCreate.getConnectionId()); if (!streamsToReset.isEmpty()) { final DestinationConnection destination = configRepository.getDestinationConnection(standardSync.getDestinationId()); @@ -631,9 +636,17 @@ public JobInfoRead createJob(final JobCreate jobCreate) throws JsonValidationExc ? jobPersistence.getLastReplicationJob(standardSync.getConnectionId()).orElseThrow(() -> new RuntimeException("No job available")).getId() : jobIdOptional.get(); + return jobConverter.getJobInfoRead(jobPersistence.getJob(jobId)); + } else if (!streamsToRefresh.isEmpty()) { + final long jobId = jobFactory.createRefresh(jobCreate.getConnectionId(), streamsToRefresh); + + log.info("New refresh job created, with id: " + jobId); + final Job job = jobPersistence.getJob(jobId); + jobCreationAndStatusUpdateHelper.emitJobToReleaseStagesMetric(OssMetricsRegistry.JOB_CREATED_BY_RELEASE_STAGE, job); + return jobConverter.getJobInfoRead(jobPersistence.getJob(jobId)); } else { - final long jobId = jobFactory.create(jobCreate.getConnectionId()); + final long jobId = jobFactory.createSync(jobCreate.getConnectionId()); log.info("New job created, with id: " + jobId); final Job job = jobPersistence.getJob(jobId); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/UserHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/UserHandler.java index 7b4178aaa54..44158f34d09 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/UserHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/UserHandler.java @@ -25,6 +25,7 @@ import io.airbyte.api.model.generated.UserUpdate; import io.airbyte.api.model.generated.UserWithPermissionInfoRead; import io.airbyte.api.model.generated.UserWithPermissionInfoReadList; +import io.airbyte.api.model.generated.WorkspaceCreateWithId; import io.airbyte.api.model.generated.WorkspaceIdRequestBody; import io.airbyte.api.model.generated.WorkspaceRead; import io.airbyte.api.model.generated.WorkspaceReadList; @@ -35,7 +36,9 @@ import io.airbyte.commons.auth.config.InitialUserConfiguration; import io.airbyte.commons.enums.Enums; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.server.errors.ConflictException; import io.airbyte.commons.server.errors.OperationNotAllowedException; +import io.airbyte.commons.server.handlers.helpers.WorkspaceHelpersKt; import io.airbyte.commons.server.support.UserAuthenticationResolver; import io.airbyte.config.ConfigSchema; import io.airbyte.config.Organization; @@ -49,6 +52,8 @@ import io.airbyte.config.persistence.PermissionPersistence; import io.airbyte.config.persistence.SQLOperationNotAllowedException; import io.airbyte.config.persistence.UserPersistence; +import io.airbyte.data.services.PermissionRedundantException; +import io.airbyte.data.services.PermissionService; import io.airbyte.validation.json.JsonValidationException; import jakarta.inject.Named; import jakarta.inject.Singleton; @@ -74,31 +79,37 @@ public class UserHandler { private final Supplier uuidGenerator; private final UserPersistence userPersistence; private final PermissionPersistence permissionPersistence; + private 
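`createJob` above now dispatches three ways: pending stream resets still win, pending refreshes create the new refresh job type, and only then is a plain sync created (note the rename from `jobFactory.create` to `jobFactory.createSync`). In outline, with simplified stand-ins around the factory calls named in the diff:

```kotlin
import java.util.UUID

// createRefresh and createSync mirror the diff; createReset simplifies the
// reset branch, which in the real handler also resolves the destination.
interface JobFactorySketch {
  fun createReset(connectionId: UUID): Long
  fun createRefresh(connectionId: UUID, streamsToRefresh: List<String>): Long
  fun createSync(connectionId: UUID): Long
}

fun createJobId(
  connectionId: UUID,
  streamsToReset: List<String>,
  streamsToRefresh: List<String>,
  jobFactory: JobFactorySketch,
): Long =
  when {
    // 1) outstanding stream resets take priority
    streamsToReset.isNotEmpty() -> jobFactory.createReset(connectionId)
    // 2) then outstanding refreshes produce the new refresh job type
    streamsToRefresh.isNotEmpty() -> jobFactory.createRefresh(connectionId, streamsToRefresh)
    // 3) otherwise a regular sync job
    else -> jobFactory.createSync(connectionId)
  }
```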
final PermissionService permissionService; private final PermissionHandler permissionHandler; private final WorkspacesHandler workspacesHandler; private final OrganizationPersistence organizationPersistence; private final UserAuthenticationResolver userAuthenticationResolver; private final Optional initialUserConfiguration; + private final ResourceBootstrapHandlerInterface resourceBootstrapHandler; @VisibleForTesting public UserHandler( final UserPersistence userPersistence, final PermissionPersistence permissionPersistence, + final PermissionService permissionService, final OrganizationPersistence organizationPersistence, final PermissionHandler permissionHandler, final WorkspacesHandler workspacesHandler, @Named("uuidGenerator") final Supplier uuidGenerator, final UserAuthenticationResolver userAuthenticationResolver, - final Optional initialUserConfiguration) { + final Optional initialUserConfiguration, + final ResourceBootstrapHandlerInterface resourceBootstrapHandler) { this.uuidGenerator = uuidGenerator; this.userPersistence = userPersistence; this.organizationPersistence = organizationPersistence; this.permissionPersistence = permissionPersistence; + this.permissionService = permissionService; this.workspacesHandler = workspacesHandler; this.permissionHandler = permissionHandler; this.userAuthenticationResolver = userAuthenticationResolver; this.initialUserConfiguration = initialUserConfiguration; + this.resourceBootstrapHandler = resourceBootstrapHandler; } /** @@ -221,18 +232,13 @@ public UserRead updateUser(final UserUpdate userUpdate) throws ConfigNotFoundExc final User user = buildUser(userRead); - // We do not allow update on these fields: userId, authUserId, authProvider. + // We do not allow update on these fields: userId, authUserId, authProvider, and email boolean hasUpdate = false; if (userUpdate.getName() != null) { user.setName(userUpdate.getName()); hasUpdate = true; } - if (userUpdate.getEmail() != null) { - user.setEmail(userUpdate.getEmail()); - hasUpdate = true; - } - if (userUpdate.getCompanyName() != null) { user.setCompanyName(userUpdate.getCompanyName()); hasUpdate = true; @@ -299,7 +305,6 @@ private void deleteUser(final UserRead userRead) throws ConfigNotFoundException, .authProvider(userRead.getAuthProvider()) .status(UserStatus.DISABLED) .companyName(userRead.getCompanyName()) - .email(userRead.getEmail()) .news(userRead.getNews()); updateUser(userUpdate); } @@ -381,13 +386,16 @@ private UserRead createUserFromIncomingUser(final User incomingUser, final UserA return createUser(userCreate); } - private void handleUserPermissionsAndWorkspace(final UserRead createdUser) throws IOException, JsonValidationException, ConfigNotFoundException { + private void handleUserPermissionsAndWorkspace(final UserRead createdUser) + throws IOException, JsonValidationException, ConfigNotFoundException { createInstanceAdminPermissionIfInitialUser(createdUser); final Optional ssoOrg = getSsoOrganizationIfExists(); if (ssoOrg.isPresent()) { + // SSO users will have some additional logic but will ultimately call createDefaultWorkspaceForUser handleSsoUser(createdUser, ssoOrg.get()); } else { - handleNonSsoUser(createdUser); + // non-SSO users will just create a default workspace + createDefaultWorkspaceForUser(createdUser, Optional.empty()); } } @@ -415,28 +423,42 @@ private void handleSsoUser(final UserRead user, final Organization organization) new ListWorkspacesInOrganizationRequestBody().organizationId(organization.getOrganizationId())); if 
(orgWorkspaces.getWorkspaces().isEmpty()) { - final WorkspaceRead defaultWorkspace = createDefaultWorkspaceForUser(user, Optional.of(organization)); - createPermissionForUserAndWorkspace(user.getUserId(), defaultWorkspace.getWorkspaceId(), PermissionType.WORKSPACE_ADMIN); + // Now calls bootstrap which includes all permissions and updates userRead. + createDefaultWorkspaceForUser(user, Optional.of(organization)); } } - private void handleNonSsoUser(final UserRead user) throws JsonValidationException, ConfigNotFoundException, IOException { - final WorkspaceRead defaultWorkspace = createDefaultWorkspaceForUser(user, Optional.empty()); - createPermissionForUserAndWorkspace(user.getUserId(), defaultWorkspace.getWorkspaceId(), PermissionType.WORKSPACE_ADMIN); - } - - private WorkspaceRead createDefaultWorkspaceForUser(final UserRead createdUser, final Optional organization) + protected void createDefaultWorkspaceForUser(final UserRead user, final Optional organization) throws JsonValidationException, IOException, ConfigNotFoundException { - final WorkspaceRead defaultWorkspace = workspacesHandler.createDefaultWorkspaceForUser(createdUser, organization); + // Only do this if the user doesn't already have a default workspace. + if (user.getDefaultWorkspaceId() != null) { + return; + } + + // Logic stolen from workspaceHandler.createDefaultWorkspaceForUser + final String companyName = user.getCompanyName(); + final String email = user.getEmail(); + final Boolean news = user.getNews(); + // otherwise, create a default workspace for this user + final WorkspaceCreateWithId workspaceCreate = new WorkspaceCreateWithId() + .name(WorkspaceHelpersKt.getDefaultWorkspaceName(organization, companyName, email)) + .organizationId(organization.map(Organization::getOrganizationId).orElse(null)) + .email(email) + .news(news) + .anonymousDataCollection(false) + .securityUpdates(false) + .displaySetupWizard(true) + .id(uuidGenerator.get()); + + final WorkspaceRead defaultWorkspace = resourceBootstrapHandler.bootStrapWorkspaceForCurrentUser(workspaceCreate); // set default workspace id in User table final UserUpdate userUpdateDefaultWorkspace = new UserUpdate() - .userId(createdUser.getUserId()) + .userId(user.getUserId()) .defaultWorkspaceId(defaultWorkspace.getWorkspaceId()); updateUser(userUpdateDefaultWorkspace); - return defaultWorkspace; } private Optional getSsoOrganizationIfExists() throws IOException { @@ -460,7 +482,7 @@ private void createPermissionForUserAndWorkspace(final UUID userId, final UUID w .permissionType(permissionType)); } - private void createInstanceAdminPermissionIfInitialUser(final UserRead createdUser) throws IOException, JsonValidationException { + private void createInstanceAdminPermissionIfInitialUser(final UserRead createdUser) { if (initialUserConfiguration.isEmpty()) { // do nothing if initial_user bean is not present. 
return; @@ -482,12 +504,16 @@ private void createInstanceAdminPermissionIfInitialUser(final UserRead createdUs LOGGER.info("creating instance_admin permission for user ID {} because their email matches this instance's configured initial_user", createdUser.getUserId()); - permissionPersistence.writePermission(new Permission() - .withPermissionId(uuidGenerator.get()) - .withUserId(createdUser.getUserId()) - .withPermissionType(Permission.PermissionType.INSTANCE_ADMIN) - .withOrganizationId(null) - .withWorkspaceId(null)); + try { + permissionService.createPermission(new Permission() + .withPermissionId(uuidGenerator.get()) + .withUserId(createdUser.getUserId()) + .withPermissionType(Permission.PermissionType.INSTANCE_ADMIN) + .withOrganizationId(null) + .withWorkspaceId(null)); + } catch (final PermissionRedundantException e) { + throw new ConflictException(e.getMessage(), e); + } } private WorkspaceUserReadList buildWorkspaceUserReadList(final List userPermissions, final UUID workspaceId) { diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java index 90d479bb149..d9e24e9db0f 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java @@ -63,6 +63,11 @@ import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.ConfigRepository.StandardSyncQuery; +import io.airbyte.featureflag.Connection; +import io.airbyte.featureflag.FeatureFlagClient; +import io.airbyte.featureflag.Multi; +import io.airbyte.featureflag.UseClear; +import io.airbyte.featureflag.Workspace; import io.airbyte.persistence.job.models.JobStatusSummary; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.validation.json.JsonValidationException; @@ -101,6 +106,7 @@ public class WebBackendConnectionsHandler { @Deprecated private final ConfigRepository configRepositoryDoNotUse; private final ActorDefinitionVersionHelper actorDefinitionVersionHelper; + private final FeatureFlagClient featureFlagClient; public WebBackendConnectionsHandler(final ConnectionsHandler connectionsHandler, final StateHandler stateHandler, @@ -111,7 +117,8 @@ public WebBackendConnectionsHandler(final ConnectionsHandler connectionsHandler, final OperationsHandler operationsHandler, final EventRunner eventRunner, final ConfigRepository configRepositoryDoNotUse, - final ActorDefinitionVersionHelper actorDefinitionVersionHelper) { + final ActorDefinitionVersionHelper actorDefinitionVersionHelper, + final FeatureFlagClient featureFlagClient) { this.connectionsHandler = connectionsHandler; this.stateHandler = stateHandler; this.sourceHandler = sourceHandler; @@ -122,6 +129,7 @@ public WebBackendConnectionsHandler(final ConnectionsHandler connectionsHandler, this.eventRunner = eventRunner; this.configRepositoryDoNotUse = configRepositoryDoNotUse; this.actorDefinitionVersionHelper = actorDefinitionVersionHelper; + this.featureFlagClient = featureFlagClient; } public WebBackendWorkspaceStateResult getWorkspaceState(final WebBackendWorkspaceState webBackendWorkspaceState) throws IOException { @@ -553,6 +561,7 @@ public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConne final UUID connectionId 
= webBackendConnectionPatch.getConnectionId(); final ConnectionRead originalConnectionRead = connectionsHandler.getConnection(connectionId); boolean breakingChange = originalConnectionRead.getBreakingChange() != null && originalConnectionRead.getBreakingChange(); + boolean shouldRunSyncAfterClear = false; // If there have been changes to the sync catalog, check whether these changes result in or fix a // broken connection @@ -577,6 +586,10 @@ public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConne connectionsHandler.getDiff(newAirbyteCatalog, CatalogConverter.toApi(mostRecentAirbyteCatalog, sourceVersion), CatalogConverter.toConfiguredProtocol(newAirbyteCatalog)); breakingChange = containsBreakingChange(catalogDiff); + + shouldRunSyncAfterClear = !featureFlagClient.boolVariation(UseClear.INSTANCE, new Multi(List.of( + new Connection(connectionId), + new Workspace(source.getWorkspaceId())))); } } @@ -595,7 +608,7 @@ public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConne final ConnectionRead updatedConnectionRead = connectionsHandler.updateConnection(connectionPatch); // detect if any streams need to be reset based on the patch and initial catalog, if so, reset them - resetStreamsIfNeeded(webBackendConnectionPatch, oldConfiguredCatalog, updatedConnectionRead, originalConnectionRead); + resetStreamsIfNeeded(webBackendConnectionPatch, oldConfiguredCatalog, updatedConnectionRead, originalConnectionRead, shouldRunSyncAfterClear); /* * This catalog represents the full catalog that was used to create the configured catalog. It will * have all streams that were present at the time. It will have no configuration set. @@ -621,7 +634,8 @@ public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConne private void resetStreamsIfNeeded(final WebBackendConnectionUpdate webBackendConnectionPatch, final ConfiguredAirbyteCatalog oldConfiguredCatalog, final ConnectionRead updatedConnectionRead, - final ConnectionRead oldConnectionRead) + final ConnectionRead oldConnectionRead, + final boolean shouldRunSyncAfterClear) throws IOException, JsonValidationException, ConfigNotFoundException { final UUID connectionId = webBackendConnectionPatch.getConnectionId(); @@ -650,7 +664,8 @@ private void resetStreamsIfNeeded(final WebBackendConnectionUpdate webBackendCon } eventRunner.resetConnection( connectionId, - streamsToReset, true); + streamsToReset, + shouldRunSyncAfterClear); } } } @@ -798,7 +813,8 @@ private record Stream(String name, String namespace) { } - private boolean containsBreakingChange(final CatalogDiff diff) { + @VisibleForTesting + protected boolean containsBreakingChange(final CatalogDiff diff) { for (final StreamTransform streamTransform : diff.getTransforms()) { if (streamTransform.getTransformType() != TransformTypeEnum.UPDATE_STREAM) { continue; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WorkspacesHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WorkspacesHandler.java index bcc55a67c84..1ca54aceb7b 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WorkspacesHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WorkspacesHandler.java @@ -39,8 +39,10 @@ import io.airbyte.commons.server.converters.NotificationSettingsConverter; import io.airbyte.commons.server.converters.WorkspaceConverter; import io.airbyte.commons.server.converters.WorkspaceWebhookConfigsConverter; +import 
io.airbyte.commons.server.errors.BadObjectSchemaKnownException;
 import io.airbyte.commons.server.errors.InternalServerKnownException;
 import io.airbyte.commons.server.errors.ValueConflictKnownException;
+import io.airbyte.commons.server.handlers.helpers.WorkspaceHelpersKt;
 import io.airbyte.config.Organization;
 import io.airbyte.config.StandardWorkspace;
 import io.airbyte.config.persistence.ConfigNotFoundException;
@@ -137,6 +139,13 @@ public WorkspaceRead createWorkspace(final WorkspaceCreate workspaceCreate)
   public WorkspaceRead createWorkspaceIfNotExist(final WorkspaceCreateWithId workspaceCreateWithId)
       throws JsonValidationException, IOException, ValueConflictKnownException, ConfigNotFoundException {
+    // We expect the caller to specify the workspace ID, and an organization ID is required here.
+    // Since this code is currently only called by OSS, this is enforced in the public API and the UI today.
+    if (workspaceCreateWithId.getOrganizationId() == null) {
+      throw new BadObjectSchemaKnownException("Workspace missing org ID.");
+    }
+
     final String email = workspaceCreateWithId.getEmail();
     final Boolean anonymousDataCollection = workspaceCreateWithId.getAnonymousDataCollection();
     final Boolean news = workspaceCreateWithId.getNews();
@@ -187,7 +196,7 @@ public WorkspaceRead createDefaultWorkspaceForUser(final UserRead user, final Op
     final Boolean news = user.getNews();
     // otherwise, create a default workspace for this user
     final WorkspaceCreate workspaceCreate = new WorkspaceCreate()
-        .name(getDefaultWorkspaceName(organization, companyName, email))
+        .name(WorkspaceHelpersKt.getDefaultWorkspaceName(organization, companyName, email))
         .organizationId(organization.map(Organization::getOrganizationId).orElse(null))
         .email(email)
         .news(news)
@@ -197,24 +206,6 @@ public WorkspaceRead createDefaultWorkspaceForUser(final UserRead user, final Op
     return createWorkspace(workspaceCreate);
   }

-  private String getDefaultWorkspaceName(final Optional<Organization> organization, final String companyName, final String email) {
-    String defaultWorkspaceName = "";
-    if (organization.isPresent()) {
-      // use organization name as default workspace name
-      defaultWorkspaceName = organization.get().getName().trim();
-    }
-    // if organization name is not available or empty, use user's company name (note: this is an
-    // optional field)
-    if (defaultWorkspaceName.isEmpty() && companyName != null) {
-      defaultWorkspaceName = companyName.trim();
-    }
-    // if company name is still empty, use user's email (note: this is a required field)
-    if (defaultWorkspaceName.isEmpty()) {
-      defaultWorkspaceName = email;
-    }
-    return defaultWorkspaceName;
-  }
-
   public void deleteWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody)
       throws JsonValidationException, IOException, ConfigNotFoundException {
     // get existing implementation
diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/BuilderProjectUpdater.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/BuilderProjectUpdater.java
new file mode 100644
index 00000000000..824a8d02c3c
--- /dev/null
+++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/BuilderProjectUpdater.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.commons.server.handlers.helpers;
+
+import io.airbyte.api.model.generated.ExistingConnectorBuilderProjectWithWorkspaceId;
+import io.airbyte.config.persistence.ConfigNotFoundException;
+import java.io.IOException;
+
+public interface BuilderProjectUpdater {
+
+  void persistBuilderProjectUpdate(final ExistingConnectorBuilderProjectWithWorkspaceId projectUpdate) throws ConfigNotFoundException, IOException;
+
+}
diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CompositeBuilderProjectUpdater.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CompositeBuilderProjectUpdater.java
new file mode 100644
index 00000000000..caf2e9a3249
--- /dev/null
+++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CompositeBuilderProjectUpdater.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.commons.server.handlers.helpers;
+
+import io.airbyte.api.model.generated.ExistingConnectorBuilderProjectWithWorkspaceId;
+import io.airbyte.config.persistence.ConfigNotFoundException;
+import java.io.IOException;
+import java.util.List;
+
+public class CompositeBuilderProjectUpdater implements BuilderProjectUpdater {
+
+  /*
+   * Runs multiple builder project updaters sequentially. The update is intentionally not atomic
+   * because this is an experimental feature: we don't want one problematic updater to prevent the
+   * others from succeeding. This means it is possible for them to get out of sync.
+   */
+
+  private final List<BuilderProjectUpdater> updaters;
+
+  public CompositeBuilderProjectUpdater(final List<BuilderProjectUpdater> updaters) {
+    this.updaters = updaters;
+  }
+
+  @Override
+  public void persistBuilderProjectUpdate(ExistingConnectorBuilderProjectWithWorkspaceId projectUpdate) throws ConfigNotFoundException, IOException {
+    for (BuilderProjectUpdater updater : updaters) {
+      updater.persistBuilderProjectUpdate(projectUpdate);
+    }
+  }
+
+}
diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConfigRepositoryBuilderProjectUpdater.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConfigRepositoryBuilderProjectUpdater.java
new file mode 100644
index 00000000000..2225a2316e8
--- /dev/null
+++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConfigRepositoryBuilderProjectUpdater.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.commons.server.handlers.helpers;
+
+import io.airbyte.api.model.generated.ExistingConnectorBuilderProjectWithWorkspaceId;
+import io.airbyte.config.ConnectorBuilderProject;
+import io.airbyte.config.persistence.ConfigNotFoundException;
+import io.airbyte.config.persistence.ConfigRepository;
+import java.io.IOException;
+
+public class ConfigRepositoryBuilderProjectUpdater implements BuilderProjectUpdater {
+
+  private final ConfigRepository configRepository;
+
+  public ConfigRepositoryBuilderProjectUpdater(final ConfigRepository configRepository) {
+    this.configRepository = configRepository;
+  }
+
+  @Override
+  public void persistBuilderProjectUpdate(ExistingConnectorBuilderProjectWithWorkspaceId projectUpdate) throws ConfigNotFoundException, IOException {
+    final ConnectorBuilderProject connectorBuilderProject = configRepository.getConnectorBuilderProject(projectUpdate.getBuilderProjectId(), false);
+
+    if (connectorBuilderProject.getActorDefinitionId() != null) {
+      configRepository.updateBuilderProjectAndActorDefinition(projectUpdate.getBuilderProjectId(),
+          projectUpdate.getWorkspaceId(),
+          projectUpdate.getBuilderProject().getName(),
+          projectUpdate.getBuilderProject().getDraftManifest(),
+          connectorBuilderProject.getActorDefinitionId());
+    } else {
+      configRepository.writeBuilderProjectDraft(projectUpdate.getBuilderProjectId(),
+          projectUpdate.getWorkspaceId(),
+          projectUpdate.getBuilderProject().getName(),
+          projectUpdate.getBuilderProject().getDraftManifest());
+    }
+  }
+
+}
diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelper.java
index 83c7457e28a..850f3c37f15 100644
--- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelper.java
+++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelper.java
@@ -10,6 +10,8 @@
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
+import io.airbyte.api.model.generated.JobFailureRequest;
+import io.airbyte.api.model.generated.JobSuccessWithAttemptNumberRequest;
 import io.airbyte.commons.enums.Enums;
 import io.airbyte.commons.server.JobStatus;
 import io.airbyte.config.ActorDefinitionVersion;
@@ -251,6 +253,8 @@ public List<ReleaseStage> getJobToReleaseStages(final Job job) throws IOExceptio
     final List<UUID> actorDefVersionIds = switch (job.getConfig().getConfigType()) {
       case SYNC -> List.of(job.getConfig().getSync().getDestinationDefinitionVersionId(), job.getConfig().getSync().getSourceDefinitionVersionId());
       case RESET_CONNECTION -> List.of(job.getConfig().getResetConnection().getDestinationDefinitionVersionId());
+      case REFRESH -> List.of(job.getConfig().getRefresh().getSourceDefinitionVersionId(),
+          job.getConfig().getRefresh().getDestinationDefinitionVersionId());
       default -> throw new IllegalArgumentException("Unexpected config type: " + job.getConfigType());
     };
@@ -258,6 +262,38 @@
   }

   public void emitJobToReleaseStagesMetric(final OssMetricsRegistry metric, final Job job) throws IOException {
+    emitToReleaseStagesMetricHelper(metric, job, Collections.emptyList());
+  }
+
+  public void emitJobToReleaseStagesMetric(final OssMetricsRegistry metric, final Job job, final JobSuccessWithAttemptNumberRequest input)
+      throws IOException {
+    List<MetricAttribute> additionalAttributes = new ArrayList<>();
+    if (job.getConfigType() == SYNC) {
+      final var sync = job.getConfig().getSync();
+      additionalAttributes.add(new MetricAttribute(MetricTags.SOURCE_ID, sync.getSourceDefinitionVersionId().toString()));
+      additionalAttributes.add(new MetricAttribute(MetricTags.SOURCE_IMAGE, sync.getSourceDockerImage()));
+      additionalAttributes.add(new MetricAttribute(MetricTags.DESTINATION_IMAGE, sync.getDestinationDockerImage()));
+      additionalAttributes.add(new MetricAttribute(MetricTags.WORKSPACE_ID, sync.getWorkspaceId().toString()));
+      additionalAttributes.add(new MetricAttribute(MetricTags.CONNECTION_ID, input.getConnectionId().toString()));
+    }
+    emitToReleaseStagesMetricHelper(metric, job, additionalAttributes);
+  }
+
+  public void emitJobToReleaseStagesMetric(final OssMetricsRegistry metric, final Job job, final JobFailureRequest input) throws IOException {
+    List<MetricAttribute> additionalAttributes = new ArrayList<>();
+    if (job.getConfigType() == SYNC) {
+      final var sync = job.getConfig().getSync();
+      additionalAttributes.add(new MetricAttribute(MetricTags.SOURCE_ID, sync.getSourceDefinitionVersionId().toString()));
+      additionalAttributes.add(new MetricAttribute(MetricTags.SOURCE_IMAGE, sync.getSourceDockerImage()));
+      additionalAttributes.add(new MetricAttribute(MetricTags.DESTINATION_IMAGE, sync.getDestinationDockerImage()));
+      additionalAttributes.add(new MetricAttribute(MetricTags.WORKSPACE_ID, sync.getWorkspaceId().toString()));
+      additionalAttributes.add(new MetricAttribute(MetricTags.CONNECTION_ID, input.getConnectionId().toString()));
+    }
+    emitToReleaseStagesMetricHelper(metric, job, additionalAttributes);
+  }
+
+  private void emitToReleaseStagesMetricHelper(final OssMetricsRegistry metric, final Job job, List<MetricAttribute> additionalAttributes)
+      throws IOException {
     final var releaseStages = getJobToReleaseStages(job);
     if (releaseStages.isEmpty()) {
       return;
@@ -265,8 +301,11 @@ public void emitJobToReleaseStagesMetric(final OssMetricsRegistry metric, final

     for (final ReleaseStage stage : releaseStages) {
       if (stage != null) {
-        MetricClientFactory.getMetricClient().count(metric, 1,
-            new MetricAttribute(MetricTags.RELEASE_STAGE, MetricTags.getReleaseStage(stage)));
+        List<MetricAttribute> attributes = new ArrayList<>();
+        attributes.add(new MetricAttribute(MetricTags.RELEASE_STAGE, MetricTags.getReleaseStage(stage)));
+        attributes.addAll(additionalAttributes);
+
+        MetricClientFactory.getMetricClient().count(metric, 1, attributes.toArray(new MetricAttribute[0]));
       }
     }
   }
diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/LocalFileSystemBuilderProjectUpdater.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/LocalFileSystemBuilderProjectUpdater.java
new file mode 100644
index 00000000000..8bf1cb47f77
--- /dev/null
+++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/LocalFileSystemBuilderProjectUpdater.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.commons.server.handlers.helpers;
+
+import io.airbyte.api.model.generated.ExistingConnectorBuilderProjectWithWorkspaceId;
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class LocalFileSystemBuilderProjectUpdater implements BuilderProjectUpdater {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(LocalFileSystemBuilderProjectUpdater.class);
+
+  @Override
+  public void persistBuilderProjectUpdate(ExistingConnectorBuilderProjectWithWorkspaceId projectUpdate) throws IOException {
+    try {
+      writeJsonNodeToYamlFile(projectUpdate.getBuilderProject().getYamlManifest(), "/connectors", projectUpdate.getBuilderProject().getName());
+    } catch (Exception e) {
+      /*
+       * While this flow is only meant to be used for local development, we swallow all exceptions to
+       * ensure this cannot affect the platform. Users can look through the logs if they suspect it is
+       * failing.
+       */
+      LOGGER.warn("Error writing manifest to local filesystem. Exception: {}. Builder Project: {}", e, projectUpdate.getBuilderProject());
+    }
+  }
+
+  public static void writeJsonNodeToYamlFile(String manifest, String basePath, String projectName) throws IOException {
+    // Construct the file path.
+    String filePath = Paths.get(basePath, "source-" + projectName, "source_" + projectName, "manifest.yaml").toString();
+
+    File file = new File(filePath);
+
+    // Only try writing if the file already exists; this isn't meant to be used for creating new
+    // connectors. We can revisit the flow in the future.
+    if (file.exists()) {
+      Files.write(Paths.get(filePath), manifest.getBytes());
+    }
+  }
+
+}
diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthNettyServerCustomizer.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthNettyServerCustomizer.java
index d45abaf9bd5..ca50a04e5ad 100644
--- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthNettyServerCustomizer.java
+++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthNettyServerCustomizer.java
@@ -30,11 +30,20 @@ public class AuthNettyServerCustomizer implements BeanCreatedEventListener
+    allowedPrimaryKey: List<List<String>>,
+  ): ConnectionConfigurationProblem {
     return ConnectionConfigurationProblem(
-      "Primary key for stream: $streamName is already pre-defined. Please do NOT include a primary key configuration for this stream.",
+      "Primary key for stream: $streamName is already pre-defined. Please remove the primaryKey or provide the value as $allowedPrimaryKey.",
     )
   }
@@ -88,6 +91,15 @@ class ConnectionConfigurationProblem private constructor(message: String) : Abst
     )
   }

+  fun duplicatePrimaryKey(
+    streamName: String,
+    key: List<List<String?>>,
+  ): ConnectionConfigurationProblem {
+    return ConnectionConfigurationProblem(
+      "Duplicate primary key detected for stream: $streamName, please don't provide the same column more than once. Key: $key",
+    )
+  }
+
   fun invalidCronExpressionUnderOneHour(cronExpression: String): ConnectionConfigurationProblem {
     return ConnectionConfigurationProblem(
       "The cron expression " + cronExpression +
diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandler.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandler.kt
index 30fac69b663..373638d4587 100644
--- a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandler.kt
+++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandler.kt
@@ -14,9 +14,10 @@
 import io.airbyte.config.Organization
 import io.airbyte.config.Permission
 import io.airbyte.config.Permission.PermissionType
 import io.airbyte.config.User
-import io.airbyte.config.persistence.PermissionPersistence
 import io.airbyte.data.exceptions.ConfigNotFoundException
 import io.airbyte.data.services.OrganizationService
+import io.airbyte.data.services.PermissionRedundantException
+import io.airbyte.data.services.PermissionService
 import io.airbyte.data.services.WorkspaceService
 import jakarta.inject.Named
 import jakarta.inject.Singleton
@@ -32,10 +33,10 @@ open class ResourceBootstrapHandler(
   @Named("uuidGenerator") private val uuidSupplier: Supplier<UUID>,
   private val workspaceService: WorkspaceService,
   private val organizationService: OrganizationService,
-  private val permissionPersistence: PermissionPersistence,
+  private val permissionService: PermissionService,
   private val currentUserService: CurrentUserService,
   private val apiAuthorizationHelper: ApiAuthorizationHelper,
-) {
+) : ResourceBootstrapHandlerInterface {
   companion object {
     val LOGGER = LoggerFactory.getLogger(ResourceBootstrapHandler::class.java)
   }
@@ -43,7 +44,7 @@ open class ResourceBootstrapHandler(
   /**
   * This is for bootstrapping a workspace and all the necessary links (organization) and permissions (workspace & organization).
*/ - fun bootStrapWorkspaceForCurrentUser(workspaceCreateWithId: WorkspaceCreateWithId): WorkspaceRead { + override fun bootStrapWorkspaceForCurrentUser(workspaceCreateWithId: WorkspaceCreateWithId): WorkspaceRead { val user = currentUserService.getCurrentUser() // The organization to use to set up the new workspace val organization = @@ -66,12 +67,21 @@ open class ResourceBootstrapHandler( workspaceService.writeWorkspaceWithSecrets(standardWorkspace) val workspacePermission = buildDefaultWorkspacePermission(user.userId, standardWorkspace.workspaceId) - permissionPersistence.writePermission(workspacePermission) + + kotlin.runCatching { permissionService.createPermission(workspacePermission) }.onFailure { e -> + when (e) { + is PermissionRedundantException -> + LOGGER.info( + "Skipped redundant workspace permission creation for workspace ${standardWorkspace.workspaceId}", + ) + else -> throw e + } + } return WorkspaceConverter.domainToApiModel(standardWorkspace) } - private fun findOrCreateOrganizationAndPermission(user: User): Organization { + public fun findOrCreateOrganizationAndPermission(user: User): Organization { findExistingOrganization(user)?.let { return it } val organization = @@ -86,7 +96,7 @@ open class ResourceBootstrapHandler( organizationService.writeOrganization(organization) val organizationPermission = buildDefaultOrganizationPermission(user.userId, organization.organizationId) - permissionPersistence.writePermission(organizationPermission) + permissionService.createPermission(organizationPermission) return organization } @@ -94,7 +104,7 @@ open class ResourceBootstrapHandler( * Tries to find an existing organization for the user. Permission checks will happen elsewhere. */ open fun findExistingOrganization(user: User): Organization? 
{
-    val organizationPermissionList = permissionPersistence.listPermissionsByUser(user.userId).filter { it.organizationId != null }
+    val organizationPermissionList = permissionService.getPermissionsForUser(user.userId).filter { it.organizationId != null }

     val hasSingleOrganization = organizationPermissionList.size == 1
     val hasNoOrganization = organizationPermissionList.isEmpty()
diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandlerInterface.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandlerInterface.kt
new file mode 100644
index 00000000000..bd17e0de4c0
--- /dev/null
+++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandlerInterface.kt
@@ -0,0 +1,8 @@
+package io.airbyte.commons.server.handlers
+
+import io.airbyte.api.model.generated.WorkspaceCreateWithId
+import io.airbyte.api.model.generated.WorkspaceRead
+
+interface ResourceBootstrapHandlerInterface {
+  fun bootStrapWorkspaceForCurrentUser(workspaceCreateWithId: WorkspaceCreateWithId): WorkspaceRead
+}
diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/StreamRefreshesHandler.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/StreamRefreshesHandler.kt
new file mode 100644
index 00000000000..3b07e1b3138
--- /dev/null
+++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/StreamRefreshesHandler.kt
@@ -0,0 +1,95 @@
+package io.airbyte.commons.server.handlers
+
+import io.airbyte.api.model.generated.ConnectionStream
+import io.airbyte.commons.server.scheduler.EventRunner
+import io.airbyte.config.persistence.StreamRefreshesRepository
+import io.airbyte.config.persistence.domain.StreamRefresh
+import io.airbyte.data.services.ConnectionService
+import io.airbyte.data.services.WorkspaceService
+import io.airbyte.featureflag.ActivateRefreshes
+import io.airbyte.featureflag.Connection
+import io.airbyte.featureflag.FeatureFlagClient
+import io.airbyte.featureflag.Multi
+import io.airbyte.featureflag.Workspace
+import io.airbyte.protocol.models.StreamDescriptor
+import jakarta.inject.Singleton
+import java.util.UUID
+
+@Singleton
+class StreamRefreshesHandler(
+  private val connectionService: ConnectionService,
+  private val streamRefreshesRepository: StreamRefreshesRepository,
+  private val eventRunner: EventRunner,
+  private val workspaceService: WorkspaceService,
+  private val featureFlagClient: FeatureFlagClient,
+) {
+  fun deleteRefreshesForConnection(connectionId: UUID) {
+    streamRefreshesRepository.deleteByConnectionId(connectionId)
+  }
+
+  fun createRefreshesForConnection(
+    connectionId: UUID,
+    streams: List<ConnectionStream>,
+  ): Boolean {
+    val workspaceId = workspaceService.getStandardWorkspaceFromConnection(connectionId, false).workspaceId
+    val shouldRunRefresh =
+      featureFlagClient.boolVariation(
+        ActivateRefreshes,
+        Multi(
+          listOf(
+            Workspace(workspaceId),
+            Connection(connectionId),
+          ),
+        ),
+      )
+
+    if (!shouldRunRefresh) {
+      return false
+    }
+
+    val streamDescriptors: List<StreamDescriptor> =
+      if (streams.isNotEmpty()) {
+        connectionStreamsToStreamDescriptors(streams)
+      } else {
+        connectionService.getAllStreamsForConnection(connectionId)
+      }
+
+    createRefreshesForStreams(connectionId, streamDescriptors)
+
+    eventRunner.startNewManualSync(connectionId)
+
+    return true
+  }
+
+  fun getRefreshesForConnection(connectionId: UUID): List<StreamRefresh> {
+    return streamRefreshesRepository.findByConnectionId(connectionId)
+  }
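+
+  /**
+   * Persists a StreamRefresh row for each of the given stream descriptors for this connection.
+   */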
+  private fun createRefreshesForStreams(
+    connectionId: UUID,
+    streams: List<StreamDescriptor>,
+  ) {
+    val streamRefreshes: List<StreamRefresh> = streamDescriptorsToStreamRefreshes(connectionId, streams)
+
+    streamRefreshesRepository.saveAll(streamRefreshes)
+  }
+
+  companion object {
+    fun connectionStreamsToStreamDescriptors(connectionStreams: List<ConnectionStream>): List<StreamDescriptor> {
+      return connectionStreams.map { connectionStream ->
+        StreamDescriptor()
+          .withName(connectionStream.streamName)
+          .withNamespace(connectionStream.streamNamespace)
+      }
+    }
+
+    fun streamDescriptorsToStreamRefreshes(
+      connectionId: UUID,
+      streamDescriptors: List<StreamDescriptor>,
+    ): List<StreamRefresh> {
+      return streamDescriptors.map { streamDescriptor ->
+        StreamRefresh(connectionId = connectionId, streamName = streamDescriptor.name, streamNamespace = streamDescriptor.namespace)
+      }
+    }
+  }
+}
diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/helpers/WorkspaceHelpers.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/helpers/WorkspaceHelpers.kt
index 9adb9dbd112..40b697b2146 100644
--- a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/helpers/WorkspaceHelpers.kt
+++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/helpers/WorkspaceHelpers.kt
@@ -11,9 +11,12 @@
 import io.airbyte.commons.server.converters.WorkspaceWebhookConfigsConverter
 import io.airbyte.config.Geography
 import io.airbyte.config.Organization
 import io.airbyte.config.StandardWorkspace
+import java.util.Optional
 import java.util.UUID
 import java.util.function.Supplier

+// These helpers exist so that we can get some of the utility of working with workspaces without needing to inject WorkspacesHandler.
+
 fun buildStandardWorkspace(
   workspaceCreateWithId: WorkspaceCreateWithId,
   organization: Organization,
@@ -40,8 +43,7 @@ fun buildStandardWorkspace(
   val notificationSettings: NotificationSettings = patchNotificationSettingsWithDefaultValue(workspaceCreateWithId)

   return StandardWorkspace().apply {
-    this.workspaceId = uuidSupplier.get()
-    this.workspaceId = workspaceCreateWithId.id
+    this.workspaceId = workspaceCreateWithId.id ?: uuidSupplier.get()
     this.customerId = uuidSupplier.get() // "customer_id" should be deprecated
     this.name = workspaceCreateWithId.name
     this.slug = uuidSupplier.get().toString()
@@ -78,3 +80,25 @@ private fun patchNotificationSettingsWithDefaultValue(workspaceCreateWithId: Wor
     workspaceCreateWithId.notificationSettings?.sendOnBreakingChangeSyncsDisabled ?: defaultNotificationType
   }
 }
+
+fun getDefaultWorkspaceName(
+  organization: Optional<Organization>,
+  companyName: String?,
+  email: String,
+): String {
+  var defaultWorkspaceName = ""
+  if (organization.isPresent) {
+    // use organization name as default workspace name
+    defaultWorkspaceName = organization.get().name.trim()
+  }
+  // if organization name is not available or empty, use the user's company name (note: this is an
+  // optional field)
+  if (defaultWorkspaceName.isEmpty() && companyName != null) {
+    defaultWorkspaceName = companyName.trim()
+  }
+  // if company name is still empty, use the user's email (note: this is a required field)
+  if (defaultWorkspaceName.isEmpty()) {
+    defaultWorkspaceName = email
+  }
+  return defaultWorkspaceName
+}
diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java
index bbc3bc6090c..1aa02eae445 100644
---
a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java @@ -232,6 +232,7 @@ class ConnectionsHandlerTest { private DestinationHandler destinationHandler; private SourceHandler sourceHandler; + private StreamRefreshesHandler streamRefreshesHandler; private JobNotifier jobNotifier; private Job job; @@ -319,6 +320,7 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio .withGeography(Geography.US); jobPersistence = mock(JobPersistence.class); + streamRefreshesHandler = mock(StreamRefreshesHandler.class); configRepository = mock(ConfigRepository.class); uuidGenerator = mock(Supplier.class); workspaceHelper = mock(WorkspaceHelper.class); @@ -383,6 +385,7 @@ class UnMockedConnectionHelper { @BeforeEach void setUp() throws JsonValidationException, ConfigNotFoundException, IOException { connectionsHandler = new ConnectionsHandler( + streamRefreshesHandler, jobPersistence, configRepository, uuidGenerator, @@ -641,6 +644,7 @@ void testDeleteConnection() throws JsonValidationException, ConfigNotFoundExcept connectionsHandler.deleteConnection(connectionId); verify(connectionHelper).deleteConnection(connectionId); + verify(streamRefreshesHandler).deleteRefreshesForConnection(connectionId); } @Test @@ -718,7 +722,6 @@ void testWarningNotificationsForAutoDisablingMaxNumFailures() throws IOException assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, times(1)).notifyJobByEmail(any(), any(), any(), any()); verify(jobNotifier, times(1)).autoDisableConnectionWarning(any(), any()); } @@ -738,7 +741,6 @@ void testWarningNotificationsForAutoDisablingMaxDaysOfFailure() throws IOExcepti assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, times(1)).notifyJobByEmail(any(), any(), any(), any()); verify(jobNotifier, times(1)).autoDisableConnectionWarning(any(), any()); } @@ -760,7 +762,6 @@ void testWarningNotificationsDoesNotSpam() throws IOException, JsonValidationExc assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, Mockito.never()).notifyJobByEmail(any(), any(), any(), any()); verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); } @@ -782,7 +783,6 @@ void testWarningNotificationsDoesNotSpamAfterConsecutiveFailures() throws IOExce assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, Mockito.never()).notifyJobByEmail(any(), any(), any(), any()); verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); } @@ -801,7 +801,6 @@ void testOnlyFailuresButFirstJobYoungerThanMaxDaysWarning() throws IOException, assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, 
Mockito.never()).notifyJobByEmail(any(), any(), any(), any()); verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); } @@ -841,7 +840,6 @@ void testLessThanMaxFailuresInARow() throws IOException, JsonValidationException assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, Mockito.never()).notifyJobByEmail(any(), any(), any(), any()); verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); } @@ -857,7 +855,6 @@ void testNoRuns() throws IOException, JsonValidationException, ConfigNotFoundExc assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, Mockito.never()).notifyJobByEmail(any(), any(), any(), any()); verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); } @@ -890,7 +887,6 @@ void testIgnoreOnlyCancelledRuns() throws IOException, JsonValidationException, assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, Mockito.never()).notifyJobByEmail(any(), any(), any(), any()); } private void verifyDisabled() throws IOException { @@ -898,7 +894,6 @@ private void verifyDisabled() throws IOException { argThat(standardSync -> (standardSync.getStatus().equals(Status.INACTIVE) && standardSync.getConnectionId().equals(connectionId)))); verify(configRepository, times(1)).writeStandardSync(standardSync); verify(jobNotifier, times(1)).autoDisableConnection(eq(job), any()); - verify(jobNotifier, times(1)).notifyJobByEmail(any(), any(), eq(job), any()); verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); } @@ -1502,6 +1497,7 @@ class ConnectionHistory { @BeforeEach void setUp() { connectionsHandler = new ConnectionsHandler( + streamRefreshesHandler, jobPersistence, configRepository, uuidGenerator, @@ -1752,6 +1748,7 @@ class StreamConfigurationDiff { @BeforeEach void setUp() { connectionsHandler = new ConnectionsHandler( + streamRefreshesHandler, jobPersistence, configRepository, uuidGenerator, @@ -2235,6 +2232,7 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio when(workspaceHelper.getWorkspaceForSourceIdIgnoreExceptions(SOURCE_ID)).thenReturn(WORKSPACE_ID); when(workspaceHelper.getWorkspaceForDestinationIdIgnoreExceptions(DESTINATION_ID)).thenReturn(WORKSPACE_ID); connectionsHandler = new ConnectionsHandler( + streamRefreshesHandler, jobPersistence, configRepository, uuidGenerator, diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java index d7b84353f89..a6d82215a73 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java @@ -42,6 +42,7 @@ import io.airbyte.api.model.generated.SourceDefinitionIdBody; import io.airbyte.api.model.generated.WorkspaceIdRequestBody; import io.airbyte.commons.json.Jsons; +import 
io.airbyte.commons.server.handlers.helpers.BuilderProjectUpdater; import io.airbyte.commons.server.handlers.helpers.DeclarativeSourceManifestInjector; import io.airbyte.config.ActorDefinitionConfigInjection; import io.airbyte.config.ActorDefinitionVersion; @@ -113,6 +114,7 @@ class ConnectorBuilderProjectsHandlerTest { } private ConfigRepository configRepository; + private BuilderProjectUpdater builderProjectUpdater; private ConnectorBuilderProjectsHandler connectorBuilderProjectsHandler; private Supplier uuidSupplier; private DeclarativeSourceManifestInjector manifestInjector; @@ -167,6 +169,7 @@ class ConnectorBuilderProjectsHandlerTest { @BeforeEach void setUp() throws JsonProcessingException { configRepository = mock(ConfigRepository.class); + builderProjectUpdater = mock(BuilderProjectUpdater.class); uuidSupplier = mock(Supplier.class); manifestInjector = mock(DeclarativeSourceManifestInjector.class); cdkVersionProvider = mock(CdkVersionProvider.class); @@ -184,7 +187,8 @@ void setUp() throws JsonProcessingException { workspaceId = UUID.randomUUID(); connectorBuilderProjectsHandler = - new ConnectorBuilderProjectsHandler(configRepository, cdkVersionProvider, uuidSupplier, manifestInjector, workspaceService, featureFlagClient, + new ConnectorBuilderProjectsHandler(configRepository, builderProjectUpdater, cdkVersionProvider, uuidSupplier, manifestInjector, + workspaceService, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, connectorBuilderService, secretsProcessor, connectorBuilderServerApiClient); } @@ -233,46 +237,9 @@ void testUpdateConnectorBuilderProject() throws IOException, ConfigNotFoundExcep connectorBuilderProjectsHandler.updateConnectorBuilderProject(update); - verify(configRepository, times(1)) - .writeBuilderProjectDraft( - project.getBuilderProjectId(), project.getWorkspaceId(), project.getName(), project.getManifestDraft()); - } - - @Test - void givenActorDefinitionAssociatedWithProjectWhenUpdateConnectorBuilderProjectThenUpdateProjectAndDefinition() throws Exception { - when(configRepository.getConnectorBuilderProject(A_BUILDER_PROJECT_ID, false)).thenReturn(anyBuilderProject() - .withBuilderProjectId(A_BUILDER_PROJECT_ID) - .withWorkspaceId(A_WORKSPACE_ID) - .withActorDefinitionId(A_SOURCE_DEFINITION_ID)); - - connectorBuilderProjectsHandler.updateConnectorBuilderProject(new ExistingConnectorBuilderProjectWithWorkspaceId() - .builderProject(new ConnectorBuilderProjectDetails() - .name(A_SOURCE_NAME) - .draftManifest(A_MANIFEST)) - .workspaceId(A_WORKSPACE_ID) - .builderProjectId(A_BUILDER_PROJECT_ID)); - - verify(configRepository, times(1)) - .updateBuilderProjectAndActorDefinition( - A_BUILDER_PROJECT_ID, A_WORKSPACE_ID, A_SOURCE_NAME, A_MANIFEST, A_SOURCE_DEFINITION_ID); - } - - @Test - @DisplayName("updateConnectorBuilderProject should update an existing project removing the draft") - void testUpdateConnectorBuilderProjectWipeDraft() throws IOException, ConfigNotFoundException { - final ConnectorBuilderProject project = generateBuilderProject(); - - when(configRepository.getConnectorBuilderProject(project.getBuilderProjectId(), false)).thenReturn(project); - - final ExistingConnectorBuilderProjectWithWorkspaceId update = new ExistingConnectorBuilderProjectWithWorkspaceId() - .builderProject(new ConnectorBuilderProjectDetails().name(project.getName())) - .workspaceId(workspaceId).builderProjectId(project.getBuilderProjectId()); - - connectorBuilderProjectsHandler.updateConnectorBuilderProject(update); - - 
verify(configRepository, times(1)) - .writeBuilderProjectDraft( - project.getBuilderProjectId(), project.getWorkspaceId(), project.getName(), null); + verify(builderProjectUpdater, times(1)) + .persistBuilderProjectUpdate( + update); } @Test @@ -664,7 +631,7 @@ private void testStreamReadForProject(ConnectorBuilderProject project, JsonNode final String responseBody = "[" + Jsons.serialize(record1) + "," + Jsons.serialize(record2) + "]"; final String requestUrl = "https://api.com/users"; final int responseStatus = 200; - final HttpRequest httpRequest = new HttpRequest(requestUrl, HttpMethod.GET, null, null, null); + final HttpRequest httpRequest = new HttpRequest(requestUrl, HttpMethod.GET, null, null); final HttpResponse httpResponse = new HttpResponse(responseStatus, responseBody, null); final StreamRead streamRead = new StreamRead(Collections.emptyList(), List.of( new StreamReadSlicesInner(List.of(new StreamReadSlicesInnerPagesInner(List.of(record1, record2), httpRequest, httpResponse)), null, null)), diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobInputHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobInputHandlerTest.java index e9096cd61af..9ecdcda9a96 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobInputHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobInputHandlerTest.java @@ -191,8 +191,6 @@ void testGetSyncWorkflowInput() throws JsonValidationException, ConfigNotFoundEx .withDestinationId(DESTINATION_ID) .withSourceConfiguration(SOURCE_CONFIG_WITH_OAUTH_AND_INJECTED_CONFIG) .withDestinationConfiguration(DESTINATION_CONFIG_WITH_OAUTH) - .withState(STATE) - .withCatalog(jobSyncConfig.getConfiguredAirbyteCatalog()) .withIsReset(false); final JobRunConfig expectedJobRunConfig = new JobRunConfig() @@ -266,8 +264,6 @@ void testGetResetSyncWorkflowInput() throws IOException, ApiException, JsonValid .withDestinationId(DESTINATION_ID) .withSourceConfiguration(Jsons.emptyObject()) .withDestinationConfiguration(DESTINATION_CONFIG_WITH_OAUTH) - .withState(STATE) - .withCatalog(jobResetConfig.getConfiguredAirbyteCatalog()) .withWebhookOperationConfigs(jobResetConfig.getWebhookOperationConfigs()) .withIsReset(true); diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobsHandlerTest.java index 3e4643ad1bd..d1a3481d7db 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobsHandlerTest.java @@ -211,7 +211,6 @@ void persistJobCancellationSuccess() throws Exception { verify(jobPersistence).failAttempt(JOB_ID, ATTEMPT_NUMBER); verify(jobPersistence).writeAttemptFailureSummary(JOB_ID, ATTEMPT_NUMBER, failureSummary); verify(jobPersistence).cancelJob(JOB_ID); - verify(jobNotifier).failJob(eq(mockJob), any()); verify(helper).trackCompletion(any(), eq(JobStatus.FAILED)); } @@ -328,4 +327,29 @@ void setJobFailureWithNullJobSyncConfig() throws IOException { verify(jobErrorReporter).reportSyncJobFailure(eq(CONNECTION_ID), eq(failureSummary), Mockito.any(), Mockito.any()); } + @Test + void testCancelledJobsDoNotNotify() throws IOException { + + final AttemptFailureSummary failureSummary = new AttemptFailureSummary() + .withFailures(Collections.singletonList( + new FailureReason() + 
.withFailureOrigin(FailureOrigin.SOURCE))); + + final Attempt mAttempt = Mockito.mock(Attempt.class); + Mockito.when(mAttempt.getFailureSummary()).thenReturn(Optional.of(failureSummary)); + + final JobConfig mJobConfig = Mockito.mock(JobConfig.class); + Mockito.when(mJobConfig.getSync()).thenReturn(null); + + final Job mJob = Mockito.mock(Job.class); + Mockito.when(mJob.getScope()).thenReturn(CONNECTION_ID.toString()); + Mockito.when(mJob.getConfig()).thenReturn(mJobConfig); + Mockito.when(mJob.getLastFailedAttempt()).thenReturn(Optional.of(mAttempt)); + Mockito.when(mJob.getConfigType()).thenReturn(SYNC); + Mockito.when(jobPersistence.getJob(JOB_ID)).thenReturn(mJob); + + jobsHandler.persistJobCancellation(CONNECTION_ID, JOB_ID, ATTEMPT_NUMBER, failureSummary); + verify(jobNotifier, never()).failJob(Mockito.any(), any()); + } + } diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OrganizationsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OrganizationsHandlerTest.java index 45cc226a9a0..d8d3c318bc3 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OrganizationsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OrganizationsHandlerTest.java @@ -6,7 +6,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -19,7 +18,7 @@ import io.airbyte.api.model.generated.Pagination; import io.airbyte.config.Organization; import io.airbyte.config.persistence.OrganizationPersistence; -import io.airbyte.config.persistence.PermissionPersistence; +import io.airbyte.data.services.PermissionService; import java.util.List; import java.util.Optional; import java.util.UUID; @@ -36,17 +35,17 @@ class OrganizationsHandlerTest { private static final String ORGANIZATION_SSO_REALM = "realm"; private static final Organization ORGANIZATION = new Organization().withOrganizationId(ORGANIZATION_ID_1).withEmail(ORGANIZATION_EMAIL).withName(ORGANIZATION_NAME); - private PermissionPersistence permissionPersistence; + private PermissionService permissionService; private OrganizationPersistence organizationPersistence; private Supplier uuidSupplier; private OrganizationsHandler organizationsHandler; @BeforeEach void setup() { - permissionPersistence = mock(PermissionPersistence.class); + permissionService = mock(PermissionService.class); uuidSupplier = mock(Supplier.class); organizationPersistence = mock(OrganizationPersistence.class); - organizationsHandler = new OrganizationsHandler(organizationPersistence, permissionPersistence, uuidSupplier); + organizationsHandler = new OrganizationsHandler(organizationPersistence, permissionService, uuidSupplier); } @Test @@ -56,7 +55,6 @@ void testCreateOrganization() throws Exception { .withName(ORGANIZATION_NAME); when(uuidSupplier.get()).thenReturn(ORGANIZATION_ID_1); when(organizationPersistence.createOrganization(newOrganization)).thenReturn(newOrganization); - doNothing().when(permissionPersistence).writePermission(any()); final OrganizationRead result = organizationsHandler.createOrganization( new OrganizationCreateRequestBody().organizationName(ORGANIZATION_NAME).email(ORGANIZATION_EMAIL)); diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/PermissionHandlerTest.java 
b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/PermissionHandlerTest.java index 379b517b139..3eaf74740b2 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/PermissionHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/PermissionHandlerTest.java @@ -9,8 +9,8 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import io.airbyte.api.model.generated.PermissionCheckRead; @@ -23,14 +23,15 @@ import io.airbyte.api.model.generated.PermissionUpdate; import io.airbyte.api.model.generated.PermissionsCheckMultipleWorkspacesRequest; import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.server.errors.OperationNotAllowedException; +import io.airbyte.commons.server.errors.ConflictException; import io.airbyte.config.Permission; import io.airbyte.config.Permission.PermissionType; import io.airbyte.config.StandardWorkspace; import io.airbyte.config.User; import io.airbyte.config.persistence.PermissionPersistence; -import io.airbyte.config.persistence.SQLOperationNotAllowedException; import io.airbyte.data.exceptions.ConfigNotFoundException; +import io.airbyte.data.services.PermissionService; +import io.airbyte.data.services.RemoveLastOrgAdminPermissionException; import io.airbyte.data.services.WorkspaceService; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -39,7 +40,6 @@ import java.util.Set; import java.util.UUID; import java.util.function.Supplier; -import org.jooq.exception.DataAccessException; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Nested; @@ -50,18 +50,19 @@ @SuppressWarnings("PMD.AvoidDuplicateLiterals") class PermissionHandlerTest { - public static final String BLOCKED = "blocked"; private Supplier uuidSupplier; private PermissionPersistence permissionPersistence; private WorkspaceService workspaceService; private PermissionHandler permissionHandler; + private PermissionService permissionService; @BeforeEach void setUp() { permissionPersistence = mock(PermissionPersistence.class); uuidSupplier = mock(Supplier.class); workspaceService = mock(WorkspaceService.class); - permissionHandler = new PermissionHandler(permissionPersistence, workspaceService, uuidSupplier); + permissionService = mock(PermissionService.class); + permissionHandler = new PermissionHandler(permissionPersistence, workspaceService, uuidSupplier, permissionService); } @Test @@ -88,15 +89,15 @@ class CreatePermission { .withPermissionType(PermissionType.WORKSPACE_ADMIN); @Test - void testCreatePermission() throws IOException, JsonValidationException { + void testCreatePermission() throws Exception { final List existingPermissions = List.of(); - when(permissionPersistence.listPermissionsByUser(any())).thenReturn(existingPermissions); + when(permissionService.getPermissionsForUser(any())).thenReturn(existingPermissions); when(uuidSupplier.get()).thenReturn(PERMISSION_ID); - when(permissionPersistence.getPermission(any())).thenReturn(Optional.of(PERMISSION)); final PermissionCreate permissionCreate = new PermissionCreate() .permissionType(io.airbyte.api.model.generated.PermissionType.WORKSPACE_OWNER) .userId(USER_ID) .workspaceId(WORKSPACE_ID); + 
when(permissionService.createPermission(any())).thenReturn(PERMISSION); final PermissionRead actualRead = permissionHandler.createPermission(permissionCreate); final PermissionRead expectedRead = new PermissionRead() .permissionId(PERMISSION_ID) @@ -163,30 +164,14 @@ void updatesPermission() throws Exception { .permissionId(PERMISSION_WORKSPACE_READER.getPermissionId()) .permissionType(io.airbyte.api.model.generated.PermissionType.WORKSPACE_ADMIN); // changing to workspace_admin - final PermissionRead expectedPermissionRead = new PermissionRead() - .permissionId(PERMISSION_WORKSPACE_READER.getPermissionId()) - .permissionType(io.airbyte.api.model.generated.PermissionType.WORKSPACE_ADMIN) - .userId(PERMISSION_WORKSPACE_READER.getUserId()) - .workspaceId(PERMISSION_WORKSPACE_READER.getWorkspaceId()); - - // after the update, getPermission will be called to build the response, so we need to mock it with - // the updated permission type - when(permissionPersistence.getPermission(PERMISSION_WORKSPACE_READER.getPermissionId())) - .thenReturn(Optional.of(new Permission() - .withPermissionId(PERMISSION_WORKSPACE_READER.getPermissionId()) - .withPermissionType(PermissionType.WORKSPACE_ADMIN) // updated - .withWorkspaceId(PERMISSION_WORKSPACE_READER.getWorkspaceId()) - .withUserId(PERMISSION_WORKSPACE_READER.getUserId()))); - - final PermissionRead actualPermissionRead = permissionHandler.updatePermission(update); + permissionHandler.updatePermission(update); - verify(permissionPersistence).writePermission(new Permission() + verify(permissionService).updatePermission(new Permission() .withPermissionId(PERMISSION_WORKSPACE_READER.getPermissionId()) .withPermissionType(PermissionType.WORKSPACE_ADMIN) .withUserId(PERMISSION_WORKSPACE_READER.getUserId()) .withWorkspaceId(PERMISSION_WORKSPACE_READER.getWorkspaceId()) .withOrganizationId(null)); - assertEquals(expectedPermissionRead, actualPermissionRead); } @Test @@ -198,25 +183,24 @@ void testUpdateToInstanceAdminPermissionThrows() { } @Test - void throwsOperationNotAllowedIfPersistenceBlocksUpdate() throws Exception { + void throwsConflictExceptionIfServiceBlocksUpdate() throws Exception { final PermissionUpdate update = new PermissionUpdate() .permissionId(PERMISSION_ORGANIZATION_ADMIN.getPermissionId()) .permissionType(io.airbyte.api.model.generated.PermissionType.ORGANIZATION_EDITOR); // changing to organization_editor - doThrow(new DataAccessException(BLOCKED, new SQLOperationNotAllowedException(BLOCKED))).when(permissionPersistence).writePermission(any()); - assertThrows(OperationNotAllowedException.class, () -> permissionHandler.updatePermission(update)); + doThrow(RemoveLastOrgAdminPermissionException.class).when(permissionService).updatePermission(any()); + assertThrows(ConflictException.class, () -> permissionHandler.updatePermission(update)); } @Test - void workspacePermissionUpdatesDoNotModifyIdFields() - throws JsonValidationException, io.airbyte.config.persistence.ConfigNotFoundException, IOException { + void workspacePermissionUpdatesDoNotModifyIdFields() throws Exception { final PermissionUpdate workspacePermissionUpdate = new PermissionUpdate() .permissionId(PERMISSION_WORKSPACE_READER.getPermissionId()) .permissionType(io.airbyte.api.model.generated.PermissionType.WORKSPACE_EDITOR); // changing to workspace_editor permissionHandler.updatePermission(workspacePermissionUpdate); - verify(permissionPersistence).writePermission(new Permission() + verify(permissionService).updatePermission(new Permission() 
.withPermissionId(PERMISSION_WORKSPACE_READER.getPermissionId()) .withPermissionType(PermissionType.WORKSPACE_EDITOR) .withWorkspaceId(PERMISSION_WORKSPACE_READER.getWorkspaceId()) // workspace ID preserved from original permission @@ -224,15 +208,14 @@ void workspacePermissionUpdatesDoNotModifyIdFields() } @Test - void organizationPermissionUpdatesDoNotModifyIdFields() - throws JsonValidationException, io.airbyte.config.persistence.ConfigNotFoundException, IOException { + void organizationPermissionUpdatesDoNotModifyIdFields() throws Exception { final PermissionUpdate orgPermissionUpdate = new PermissionUpdate() .permissionId(PERMISSION_ORGANIZATION_ADMIN.getPermissionId()) .permissionType(io.airbyte.api.model.generated.PermissionType.ORGANIZATION_EDITOR); // changing to organization_editor permissionHandler.updatePermission(orgPermissionUpdate); - verify(permissionPersistence).writePermission(new Permission() + verify(permissionService).updatePermission(new Permission() .withPermissionId(PERMISSION_ORGANIZATION_ADMIN.getPermissionId()) .withPermissionType(PermissionType.ORGANIZATION_EDITOR) .withOrganizationId(PERMISSION_ORGANIZATION_ADMIN.getOrganizationId()) // organization ID preserved from original permission @@ -271,14 +254,14 @@ void deletesPermission() throws Exception { permissionHandler.deletePermission(new PermissionIdRequestBody().permissionId(PERMISSION_WORKSPACE_READER.getPermissionId())); - verify(permissionPersistence).deletePermissionById(PERMISSION_WORKSPACE_READER.getPermissionId()); + verify(permissionService).deletePermission(PERMISSION_WORKSPACE_READER.getPermissionId()); } @Test - void throwsOperationNotAllowedIfPersistenceBlocks() throws Exception { - doThrow(new DataAccessException(BLOCKED, new SQLOperationNotAllowedException(BLOCKED))).when(permissionPersistence) - .deletePermissionById(any()); - assertThrows(OperationNotAllowedException.class, () -> permissionHandler.deletePermission( + void throwsConflictIfPersistenceBlocks() throws Exception { + doThrow(RemoveLastOrgAdminPermissionException.class).when(permissionService).deletePermission(any()); + + assertThrows(ConflictException.class, () -> permissionHandler.deletePermission( new PermissionIdRequestBody().permissionId(PERMISSION_ORGANIZATION_ADMIN.getPermissionId()))); } @@ -714,7 +697,7 @@ class DeleteUserFromWorkspace { private static final UUID USER_ID = UUID.randomUUID(); @Test - void testDeleteUserFromWorkspace() throws IOException { + void testDeleteUserFromWorkspace() throws Exception { // should be deleted final Permission workspacePermission = new Permission() .withPermissionId(UUID.randomUUID()) @@ -736,42 +719,16 @@ void testDeleteUserFromWorkspace() throws IOException { .withOrganizationId(UUID.randomUUID()) .withPermissionType(PermissionType.ORGANIZATION_ADMIN); - // should not be deleted, different user - final Permission otherUserPermission = new Permission() - .withPermissionId(UUID.randomUUID()) - .withUserId(UUID.randomUUID()) - .withWorkspaceId(WORKSPACE_ID) - .withPermissionType(PermissionType.WORKSPACE_ADMIN); - when(permissionPersistence.listPermissionsByUser(USER_ID)).thenReturn( List.of(workspacePermission, otherWorkspacePermission, orgPermission)); permissionHandler.deleteUserFromWorkspace(new PermissionDeleteUserFromWorkspaceRequestBody().userIdToRemove(USER_ID).workspaceId(WORKSPACE_ID)); // verify the intended permission was deleted - verify(permissionPersistence).deletePermissionById(workspacePermission.getPermissionId()); + 
verify(permissionService).deletePermissions(List.of(workspacePermission.getPermissionId())); // verify the other permissions were not deleted - verify(permissionPersistence, never()).deletePermissionById(otherWorkspacePermission.getPermissionId()); - verify(permissionPersistence, never()).deletePermissionById(otherUserPermission.getPermissionId()); - verify(permissionPersistence, never()).deletePermissionById(orgPermission.getPermissionId()); - } - - @Test - void testDeleteUserFromWorkspaceThrows() throws IOException { - final Permission permission = new Permission() - .withPermissionId(UUID.randomUUID()) - .withUserId(USER_ID) - .withWorkspaceId(WORKSPACE_ID) - .withPermissionType(PermissionType.WORKSPACE_ADMIN); - - when(permissionPersistence.listPermissionsByUser(USER_ID)).thenReturn(List.of(permission)); - - doThrow(new IOException()).when(permissionPersistence).deletePermissionById(permission.getPermissionId()); - - assertThrows(RuntimeException.class, () -> permissionHandler.deleteUserFromWorkspace(new PermissionDeleteUserFromWorkspaceRequestBody() - .userIdToRemove(USER_ID) - .workspaceId(WORKSPACE_ID))); + verifyNoMoreInteractions(permissionService); } } diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java index 41611f5a757..c679057a28c 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java @@ -108,6 +108,7 @@ import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.StreamResetPersistence; +import io.airbyte.config.persistence.domain.StreamRefresh; import io.airbyte.config.secrets.SecretsRepositoryWriter; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.WorkspaceService; @@ -265,6 +266,7 @@ class SchedulerHandlerTest { private ConnectorDefinitionSpecificationHandler connectorDefinitionSpecificationHandler; private WorkspaceService workspaceService; private SecretPersistenceConfigService secretPersistenceConfigService; + private StreamRefreshesHandler streamRefreshesHandler; @BeforeEach void setup() throws JsonValidationException, ConfigNotFoundException, IOException { @@ -313,6 +315,9 @@ void setup() throws JsonValidationException, ConfigNotFoundException, IOExceptio .supportedDestinationSyncModes( List.of(io.airbyte.api.model.generated.DestinationSyncMode.OVERWRITE, io.airbyte.api.model.generated.DestinationSyncMode.APPEND))); + streamRefreshesHandler = mock(StreamRefreshesHandler.class); + when(streamRefreshesHandler.getRefreshesForConnection(any())).thenReturn(new ArrayList<>()); + schedulerHandler = new SchedulerHandler( configRepository, secretsRepositoryWriter, @@ -335,13 +340,14 @@ void setup() throws JsonValidationException, ConfigNotFoundException, IOExceptio jobTracker, connectorDefinitionSpecificationHandler, workspaceService, - secretPersistenceConfigService); + secretPersistenceConfigService, + streamRefreshesHandler); } @Test @DisplayName("Test job creation") void createJob() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(jobFactory.create(CONNECTION_ID)) + Mockito.when(jobFactory.createSync(CONNECTION_ID)) .thenReturn(JOB_ID); 
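+ // With no pending stream refreshes (setup stubs getRefreshesForConnection to an empty list), createJob is expected to fall back to a plain sync job via createSync; the createRefreshJob test below covers the non-empty case.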
Mockito.when(configRepository.getStandardSync(CONNECTION_ID)) .thenReturn(Mockito.mock(StandardSync.class)); @@ -355,6 +361,27 @@ void createJob() throws JsonValidationException, ConfigNotFoundException, IOExce Assertions.assertThat(output.getJob().getId()).isEqualTo(JOB_ID); } + @Test + @DisplayName("Test refresh job creation") + void createRefreshJob() throws JsonValidationException, ConfigNotFoundException, IOException { + when(jobFactory.createRefresh(eq(CONNECTION_ID), any())) + .thenReturn(JOB_ID); + when(configRepository.getStandardSync(CONNECTION_ID)) + .thenReturn(mock(StandardSync.class)); + when(jobPersistence.getJob(JOB_ID)) + .thenReturn(job); + when(jobConverter.getJobInfoRead(job)) + .thenReturn(new JobInfoRead().job(new JobRead().id(JOB_ID))); + when(streamRefreshesHandler.getRefreshesForConnection(CONNECTION_ID)) + .thenReturn(List.of( + new StreamRefresh(UUID.randomUUID(), CONNECTION_ID, "name", "namespace", null))); + + final JobInfoRead output = schedulerHandler.createJob(new JobCreate().connectionId(CONNECTION_ID)); + + verify(jobFactory).createRefresh(eq(CONNECTION_ID), any()); + Assertions.assertThat(output.getJob().getId()).isEqualTo(JOB_ID); + } + @Test @DisplayName("Test reset job creation") void createResetJob() throws JsonValidationException, ConfigNotFoundException, IOException { diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/UserHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/UserHandlerTest.java index d44cbb83480..3725d23cd04 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/UserHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/UserHandlerTest.java @@ -10,7 +10,6 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.argThat; -import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -50,6 +49,7 @@ import io.airbyte.config.persistence.OrganizationPersistence; import io.airbyte.config.persistence.PermissionPersistence; import io.airbyte.config.persistence.UserPersistence; +import io.airbyte.data.services.PermissionService; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.util.Arrays; @@ -83,6 +83,7 @@ class UserHandlerTest { OrganizationsHandler organizationsHandler; JwtUserAuthenticationResolver jwtUserAuthenticationResolver; InitialUserConfiguration initialUserConfiguration; + PermissionService permissionService; private static final UUID USER_ID = UUID.randomUUID(); private static final String USER_NAME = "user 1"; @@ -98,11 +99,13 @@ class UserHandlerTest { .withAuthProvider(AuthProvider.GOOGLE_IDENTITY_PLATFORM) .withStatus(Status.INVITED) .withName(USER_NAME); + private ResourceBootstrapHandler resourceBootstrapHandler; @BeforeEach void setUp() { userPersistence = mock(UserPersistence.class); permissionPersistence = mock(PermissionPersistence.class); + permissionService = mock(PermissionService.class); permissionHandler = mock(PermissionHandler.class); workspacesHandler = mock(WorkspacesHandler.class); organizationPersistence = mock(OrganizationPersistence.class); @@ -110,9 +113,11 @@ void setUp() { uuidSupplier = mock(Supplier.class); jwtUserAuthenticationResolver = mock(JwtUserAuthenticationResolver.class); initialUserConfiguration = 
mock(InitialUserConfiguration.class); + resourceBootstrapHandler = mock(ResourceBootstrapHandler.class); - userHandler = new UserHandler(userPersistence, permissionPersistence, organizationPersistence, permissionHandler, workspacesHandler, - uuidSupplier, jwtUserAuthenticationResolver, Optional.of(initialUserConfiguration)); + userHandler = + new UserHandler(userPersistence, permissionPersistence, permissionService, organizationPersistence, permissionHandler, workspacesHandler, + uuidSupplier, jwtUserAuthenticationResolver, Optional.of(initialUserConfiguration), resourceBootstrapHandler); } @Test @@ -324,7 +329,7 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio when(jwtUserAuthenticationResolver.resolveUser(NEW_AUTH_USER_ID)).thenReturn(newUser); when(uuidSupplier.get()).thenReturn(NEW_USER_ID); when(userPersistence.getUser(NEW_USER_ID)).thenReturn(Optional.of(newUser)); - when(workspacesHandler.createDefaultWorkspaceForUser(any(), any())).thenReturn(defaultWorkspace); + when(resourceBootstrapHandler.bootStrapWorkspaceForCurrentUser(any())).thenReturn(defaultWorkspace); } @ParameterizedTest @@ -351,8 +356,9 @@ void testNewUserCreation(final AuthProvider authProvider, } else { // replace default user handler with one that doesn't use initial user config (ie to test what // happens in Cloud) - userHandler = new UserHandler(userPersistence, permissionPersistence, organizationPersistence, permissionHandler, workspacesHandler, - uuidSupplier, jwtUserAuthenticationResolver, Optional.empty()); + userHandler = new UserHandler(userPersistence, permissionPersistence, permissionService, organizationPersistence, permissionHandler, + workspacesHandler, + uuidSupplier, jwtUserAuthenticationResolver, Optional.empty(), resourceBootstrapHandler); } if (isFirstOrgUser) { @@ -371,6 +377,9 @@ void testNewUserCreation(final AuthProvider authProvider, when(workspacesHandler.listWorkspacesInOrganization( new ListWorkspacesInOrganizationRequestBody().organizationId(ORGANIZATION.getOrganizationId()))).thenReturn( new WorkspaceReadList().workspaces(List.of(defaultWorkspace))); + if (newUser.getDefaultWorkspaceId() == null) { + newUser.setDefaultWorkspaceId(defaultWorkspace.getWorkspaceId()); + } } else { when(workspacesHandler.listWorkspacesInOrganization(any())).thenReturn(new WorkspaceReadList().workspaces(List.of())); } @@ -390,7 +399,7 @@ void testNewUserCreation(final AuthProvider authProvider, verifyUserRead(userRead, apiAuthProvider); verifyInstanceAdminPermissionCreation(initialUserEmail, initialUserPresent); verifyOrganizationPermissionCreation(ssoRealm, isFirstOrgUser); - verifyDefaultWorkspaceCreation(ssoRealm, isDefaultWorkspaceForOrgPresent, userPersistenceInOrder); + verifyDefaultWorkspaceCreation(isDefaultWorkspaceForOrgPresent, userPersistenceInOrder); } private void verifyCreatedUser(final AuthProvider expectedAuthProvider, final InOrder inOrder) throws IOException { @@ -400,38 +409,19 @@ private void verifyCreatedUser(final AuthProvider expectedAuthProvider, final In && user.getAuthProvider().equals(expectedAuthProvider))); } - private void verifyDefaultWorkspaceCreation(final String ssoRealm, final Boolean isDefaultWorkspaceForOrgPresent, final InOrder inOrder) - throws IOException, JsonValidationException, ConfigNotFoundException { - boolean workspaceCreated = false; - - if (ssoRealm == null) { - // always create a default workspace for non-SSO users - verify(workspacesHandler).createDefaultWorkspaceForUser( - argThat(user -> 
user.getUserId().equals(NEW_USER_ID)), - eq(Optional.empty())); - workspaceCreated = true; - - } else { - if (!isDefaultWorkspaceForOrgPresent) { - // create a default workspace for the org if one doesn't yet exist - verify(workspacesHandler).createDefaultWorkspaceForUser( - argThat(user -> user.getUserId().equals(NEW_USER_ID)), - argThat(org -> org.orElseThrow().getOrganizationId().equals(ORGANIZATION.getOrganizationId()))); - workspaceCreated = true; - - } else { - // never create an additional workspace for the org if one already exists. - verify(workspacesHandler, never()).createDefaultWorkspaceForUser(any(), any()); - } - } - if (workspaceCreated) { + private void verifyDefaultWorkspaceCreation(final Boolean isDefaultWorkspaceForOrgPresent, final InOrder inOrder) + throws IOException { + // No need to deal with other vars because SSO users and first org users etc. are all directed + // through the same codepath now. + if (!isDefaultWorkspaceForOrgPresent) { + // create a default workspace for the org if one doesn't yet exist + verify(resourceBootstrapHandler).bootStrapWorkspaceForCurrentUser(any()); // if a workspace was created, verify that the user's defaultWorkspaceId was updated // and that a workspaceAdmin permission was created for them. inOrder.verify(userPersistence).writeUser(argThat(user -> user.getDefaultWorkspaceId().equals(WORKSPACE_ID))); - verify(permissionHandler).createPermission(new PermissionCreate() - .permissionType(io.airbyte.api.model.generated.PermissionType.WORKSPACE_ADMIN) - .workspaceId(WORKSPACE_ID) - .userId(NEW_USER_ID)); + } else { + // never create an additional workspace for the org if one already exists. + verify(resourceBootstrapHandler, never()).bootStrapWorkspaceForCurrentUser(any()); } } @@ -443,17 +433,17 @@ private void verifyUserRead(final UserRead userRead, final io.airbyte.api.model. } private void verifyInstanceAdminPermissionCreation(final String initialUserEmail, final boolean initialUserPresent) - throws IOException { + throws Exception { // instance_admin permissions should only ever be created when the initial user config is present // (which should never be true in Cloud). 
// also, if the initial user email is null or doesn't match the new user's email, no instance_admin // permission should be created if (!initialUserPresent || initialUserEmail == null || !initialUserEmail.equalsIgnoreCase(NEW_EMAIL)) { - verify(permissionPersistence, never()) - .writePermission(argThat(permission -> permission.getPermissionType().equals(PermissionType.INSTANCE_ADMIN))); + verify(permissionService, never()) + .createPermission(argThat(permission -> permission.getPermissionType().equals(PermissionType.INSTANCE_ADMIN))); } else { // otherwise, instance_admin permission should be created - verify(permissionPersistence).writePermission(argThat( + verify(permissionService).createPermission(argThat( permission -> permission.getPermissionType().equals(PermissionType.INSTANCE_ADMIN) && permission.getUserId().equals(NEW_USER_ID))); } } diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java index 36db591806f..5652e2d3b6a 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java @@ -11,9 +11,11 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -105,7 +107,9 @@ import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; +import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.TestClient; +import io.airbyte.featureflag.UseClear; import io.airbyte.featureflag.UseIconUrlInApiResponse; import io.airbyte.featureflag.Workspace; import io.airbyte.persistence.job.factory.OAuthConfigSupplier; @@ -130,6 +134,8 @@ import java.util.stream.Collectors; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.mockito.ArgumentCaptor; import org.mockito.InOrder; @@ -155,6 +161,7 @@ class WebBackendConnectionsHandlerTest { private ConfigRepository configRepository; private ActorDefinitionVersionHelper actorDefinitionVersionHelper; private ActorDefinitionHandlerHelper actorDefinitionHandlerHelper; + private final FeatureFlagClient featureFlagClient = mock(TestClient.class); private static final String STREAM1 = "stream1"; private static final String STREAM2 = "stream2"; @@ -189,7 +196,6 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio final WorkspaceService workspaceService = mock(WorkspaceService.class); final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); - final TestClient featureFlagClient = mock(TestClient.class); final Supplier uuidGenerator = mock(Supplier.class); when(featureFlagClient.boolVariation(UseIconUrlInApiResponse.INSTANCE, new Workspace(ANONYMOUS))) 
.thenReturn(true); @@ -219,7 +225,7 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio actorDefinitionHandlerHelper, actorDefinitionVersionUpdater); - wbHandler = new WebBackendConnectionsHandler( + wbHandler = spy(new WebBackendConnectionsHandler( connectionsHandler, stateHandler, sourceHandler, @@ -229,7 +235,8 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio operationsHandler, eventRunner, configRepository, - actorDefinitionVersionHelper); + actorDefinitionVersionHelper, + featureFlagClient)); final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() .withSourceDefinitionId(UUID.randomUUID()) @@ -927,8 +934,11 @@ void testUpdateConnectionWithOperations() throws JsonValidationException, Config verify(operationsHandler, times(1)).updateOperation(operationUpdate); } - @Test - void testUpdateConnectionWithUpdatedSchemaLegacy() throws JsonValidationException, ConfigNotFoundException, IOException { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testUpdateConnectionWithUpdatedSchemaLegacy(boolean isClear) throws JsonValidationException, ConfigNotFoundException, IOException { + when(featureFlagClient.boolVariation(eq(UseClear.INSTANCE), any())).thenReturn(isClear); + final WebBackendConnectionUpdate updateBody = new WebBackendConnectionUpdate() .namespaceDefinition(expected.getNamespaceDefinition()) .namespaceFormat(expected.getNamespaceFormat()) @@ -972,9 +982,11 @@ void testUpdateConnectionWithUpdatedSchemaLegacy() throws JsonValidationExceptio when(configRepository.getAllStreamsForConnection(expected.getConnectionId())).thenReturn(connectionStreams); final ManualOperationResult successfulResult = ManualOperationResult.builder().jobId(Optional.empty()).failingReason(Optional.empty()).build(); - when(eventRunner.resetConnection(any(), any(), anyBoolean())).thenReturn(successfulResult); + when(eventRunner.resetConnection(any(), any(), eq(!isClear))).thenReturn(successfulResult); when(eventRunner.startNewManualSync(any())).thenReturn(successfulResult); + when(configRepository.getMostRecentActorCatalogForSource(any())).thenReturn(Optional.of(new ActorCatalog().withCatalog(Jsons.emptyObject()))); + final WebBackendConnectionRead result = wbHandler.webBackendUpdateConnection(updateBody); assertEquals(expectedWithNewSchema.getSyncCatalog(), result.getSyncCatalog()); @@ -984,11 +996,14 @@ void testUpdateConnectionWithUpdatedSchemaLegacy() throws JsonValidationExceptio verify(schedulerHandler, times(0)).syncConnection(connectionId); verify(connectionsHandler, times(1)).updateConnection(any()); final InOrder orderVerifier = inOrder(eventRunner); - orderVerifier.verify(eventRunner, times(1)).resetConnection(connectionId.getConnectionId(), connectionStreams, true); + orderVerifier.verify(eventRunner, times(1)).resetConnection(connectionId.getConnectionId(), connectionStreams, !isClear); } - @Test - void testUpdateConnectionWithUpdatedSchemaPerStream() throws JsonValidationException, ConfigNotFoundException, IOException { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testUpdateConnectionWithUpdatedSchemaPerStream(boolean isClear) throws JsonValidationException, ConfigNotFoundException, IOException { + when(featureFlagClient.boolVariation(eq(UseClear.INSTANCE), any())).thenReturn(isClear); + final WebBackendConnectionUpdate updateBody = new WebBackendConnectionUpdate() .namespaceDefinition(expected.getNamespaceDefinition()) .namespaceFormat(expected.getNamespaceFormat()) 
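+ // The UseClear flag appears to control the final boolean passed to resetConnection (whether a sync should run after the reset): with the flag on, the reset is expected to run without a follow-up sync, hence the eq(!isClear) stubs and verifications in these tests.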
@@ -1038,9 +1053,12 @@ void testUpdateConnectionWithUpdatedSchemaPerStream() throws JsonValidationExcep when(connectionsHandler.getConnection(expected.getConnectionId())).thenReturn(connectionRead); final ManualOperationResult successfulResult = ManualOperationResult.builder().jobId(Optional.empty()).failingReason(Optional.empty()).build(); - when(eventRunner.resetConnection(any(), any(), anyBoolean())).thenReturn(successfulResult); + when(eventRunner.resetConnection(any(), any(), eq(!isClear))).thenReturn(successfulResult); when(eventRunner.startNewManualSync(any())).thenReturn(successfulResult); + when(configRepository.getMostRecentActorCatalogForSource(any())).thenReturn(Optional.of(new ActorCatalog().withCatalog(Jsons.emptyObject()))); + doReturn(false).when(wbHandler).containsBreakingChange(any()); + final WebBackendConnectionRead result = wbHandler.webBackendUpdateConnection(updateBody); assertEquals(expectedWithNewSchema.getSyncCatalog(), result.getSyncCatalog()); @@ -1055,7 +1073,7 @@ void testUpdateConnectionWithUpdatedSchemaPerStream() throws JsonValidationExcep new io.airbyte.protocol.models.StreamDescriptor().withName("updateStream"), new io.airbyte.protocol.models.StreamDescriptor().withName("configUpdateStream"), new io.airbyte.protocol.models.StreamDescriptor().withName("removeStream")), - true); + !isClear); } @Test diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WorkspacesHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WorkspacesHandlerTest.java index 7c9a20629c2..58a848a36e1 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WorkspacesHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WorkspacesHandlerTest.java @@ -345,7 +345,8 @@ void testCreateWorkspaceWithMinimumInput() throws JsonValidationException, IOExc final WorkspaceCreate workspaceCreate = new WorkspaceCreate() .name(NEW_WORKSPACE) - .email(TEST_EMAIL); + .email(TEST_EMAIL) + .organizationId(ORGANIZATION_ID); final WorkspaceRead actualRead = workspacesHandler.createWorkspace(workspaceCreate); final WorkspaceRead expectedRead = new WorkspaceRead() @@ -363,7 +364,8 @@ void testCreateWorkspaceWithMinimumInput() throws JsonValidationException, IOExc .notificationSettings(generateDefaultApiNotificationSettings()) .defaultGeography(GEOGRAPHY_AUTO) .webhookConfigs(Collections.emptyList()) - .tombstone(false); + .tombstone(false) + .organizationId(ORGANIZATION_ID); assertEquals(expectedRead, actualRead); } @@ -387,7 +389,8 @@ void testCreateWorkspaceDuplicateSlug() throws JsonValidationException, IOExcept .news(false) .anonymousDataCollection(false) .securityUpdates(false) - .notifications(Collections.emptyList()); + .notifications(Collections.emptyList()) + .organizationId(ORGANIZATION_ID); final WorkspaceRead actualRead = workspacesHandler.createWorkspace(workspaceCreate); final WorkspaceRead expectedRead = new WorkspaceRead() @@ -405,7 +408,8 @@ void testCreateWorkspaceDuplicateSlug() throws JsonValidationException, IOExcept .notificationSettings(generateDefaultApiNotificationSettings()) .defaultGeography(GEOGRAPHY_AUTO) .webhookConfigs(Collections.emptyList()) - .tombstone(false); + .tombstone(false) + .organizationId(ORGANIZATION_ID); assertTrue(actualRead.getSlug().startsWith(workspace.getSlug())); assertNotEquals(workspace.getSlug(), actualRead.getSlug()); @@ -876,7 +880,8 @@ void testWorkspaceIsWrittenThroughSecretsWriter() .securityUpdates(false) 
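+ // WorkspaceCreate now appears to require an owning organization, so these tests supply ORGANIZATION_ID on creation and expect it back on the read model.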
.notifications(List.of(generateApiNotification())) .notificationSettings(generateApiNotificationSettings()) - .defaultGeography(GEOGRAPHY_US); + .defaultGeography(GEOGRAPHY_US) + .organizationId(ORGANIZATION_ID); final WorkspaceRead actualRead = workspacesHandler.createWorkspace(workspaceCreate); final WorkspaceRead expectedRead = new WorkspaceRead() @@ -894,7 +899,8 @@ void testWorkspaceIsWrittenThroughSecretsWriter() .notificationSettings(generateApiNotificationSettingsWithDefaultValue()) .defaultGeography(GEOGRAPHY_US) .webhookConfigs(Collections.emptyList()) - .tombstone(false); + .tombstone(false) + .organizationId(ORGANIZATION_ID); assertEquals(expectedRead, actualRead); verify(workspaceService, times(1)).writeWorkspaceWithSecrets(any()); diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/CompositeBuilderProjectUpdaterTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/CompositeBuilderProjectUpdaterTest.java new file mode 100644 index 00000000000..14c351ff77d --- /dev/null +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/CompositeBuilderProjectUpdaterTest.java @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.server.handlers.helpers; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +import io.airbyte.api.model.generated.ExistingConnectorBuilderProjectWithWorkspaceId; +import io.airbyte.config.persistence.ConfigNotFoundException; +import java.io.IOException; +import java.util.List; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class CompositeBuilderProjectUpdaterTest { + + @Test + @DisplayName("updateConnectorBuilderProject should call updateConnectorBuilderProject on underlying updaters") + public void testUpdateCompositeBuilderProjectUpdaterDelegates() throws ConfigNotFoundException, IOException { + final ExistingConnectorBuilderProjectWithWorkspaceId update = mock(ExistingConnectorBuilderProjectWithWorkspaceId.class); + final BuilderProjectUpdater updaterA = mock(BuilderProjectUpdater.class); + final BuilderProjectUpdater updaterB = mock(BuilderProjectUpdater.class); + CompositeBuilderProjectUpdater projectUpdater = new CompositeBuilderProjectUpdater(List.of(updaterA, updaterB)); + projectUpdater.persistBuilderProjectUpdate(update); + + verify(updaterA, times(1)) + .persistBuilderProjectUpdate(update); + verify(updaterB, times(1)) + .persistBuilderProjectUpdate(update); + } + +} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/ConfigRepositoryBuilderProjectUpdaterTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/ConfigRepositoryBuilderProjectUpdaterTest.java new file mode 100644 index 00000000000..b29b633cd48 --- /dev/null +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/ConfigRepositoryBuilderProjectUpdaterTest.java @@ -0,0 +1,151 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */ + +package io.airbyte.commons.server.handlers.helpers; + +import static io.airbyte.commons.server.handlers.ConnectorBuilderProjectsHandler.CONNECTION_SPECIFICATION_FIELD; +import static io.airbyte.commons.server.handlers.ConnectorBuilderProjectsHandler.SPEC_FIELD; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.api.model.generated.ConnectorBuilderProjectDetails; +import io.airbyte.api.model.generated.ExistingConnectorBuilderProjectWithWorkspaceId; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.ConnectorBuilderProject; +import io.airbyte.config.persistence.ConfigNotFoundException; +import io.airbyte.config.persistence.ConfigRepository; +import java.io.IOException; +import java.util.UUID; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class ConfigRepositoryBuilderProjectUpdaterTest { + + private final JsonNode draftManifest = addSpec(Jsons.deserialize("{\"test\":123,\"empty\":{\"array_in_object\":[]}}")); + + private static final UUID A_SOURCE_DEFINITION_ID = UUID.randomUUID(); + private static final UUID A_BUILDER_PROJECT_ID = UUID.randomUUID(); + private static final UUID A_WORKSPACE_ID = UUID.randomUUID(); + private static final String A_DESCRIPTION = "a description"; + private static final String A_SOURCE_NAME = "a source name"; + private static final String A_NAME = "a name"; + private static final String A_DOCUMENTATION_URL = "http://documentation.url"; + private static final JsonNode A_MANIFEST; + private static final JsonNode A_SPEC; + + static { + try { + A_MANIFEST = new ObjectMapper().readTree("{\"a_manifest\": \"manifest_value\"}"); + A_SPEC = new ObjectMapper().readTree("{\"a_spec\": \"spec_value\"}"); + } catch (final JsonProcessingException e) { + throw new RuntimeException(e); + } + } + + private final String specString = + """ + { + "type": "object", + "properties": { + "username": { + "type": "string" + }, + "password": { + "type": "string", + "airbyte_secret": true + } + } + }"""; + + private ConfigRepository configRepository; + private UUID workspaceId; + private ConfigRepositoryBuilderProjectUpdater projectUpdater; + + @BeforeEach + void setUp() { + configRepository = mock(ConfigRepository.class); + projectUpdater = new ConfigRepositoryBuilderProjectUpdater(configRepository); + } + + @Test + @DisplayName("updateConnectorBuilderProject should update an existing project removing the draft") + void testUpdateConnectorBuilderProjectWipeDraft() throws IOException, ConfigNotFoundException { + final ConnectorBuilderProject project = generateBuilderProject(); + + when(configRepository.getConnectorBuilderProject(project.getBuilderProjectId(), false)).thenReturn(project); + + final ExistingConnectorBuilderProjectWithWorkspaceId update = new ExistingConnectorBuilderProjectWithWorkspaceId() + .builderProject(new ConnectorBuilderProjectDetails().name(project.getName())) + .workspaceId(workspaceId).builderProjectId(project.getBuilderProjectId()); + + projectUpdater.persistBuilderProjectUpdate(update); + + verify(configRepository, times(1)) + .writeBuilderProjectDraft( + project.getBuilderProjectId(), project.getWorkspaceId(), 
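+ // The null draft argument below is the point of this test: an update that carries no draftManifest should wipe the stored draft.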
project.getName(), null); + } + + @Test + @DisplayName("updateConnectorBuilderProject should update an existing project") + void testUpdateConnectorBuilderProject() throws IOException, ConfigNotFoundException { + final ConnectorBuilderProject project = generateBuilderProject(); + + when(configRepository.getConnectorBuilderProject(project.getBuilderProjectId(), false)).thenReturn(project); + + final ExistingConnectorBuilderProjectWithWorkspaceId update = new ExistingConnectorBuilderProjectWithWorkspaceId() + .builderProject(new ConnectorBuilderProjectDetails() + .name(project.getName()) + .draftManifest(project.getManifestDraft())) + .workspaceId(workspaceId) + .builderProjectId(project.getBuilderProjectId()); + + projectUpdater.persistBuilderProjectUpdate(update); + + verify(configRepository, times(1)) + .writeBuilderProjectDraft( + project.getBuilderProjectId(), project.getWorkspaceId(), project.getName(), project.getManifestDraft()); + } + + @Test + void givenActorDefinitionAssociatedWithProjectWhenUpdateConnectorBuilderProjectThenUpdateProjectAndDefinition() throws Exception { + when(configRepository.getConnectorBuilderProject(A_BUILDER_PROJECT_ID, false)).thenReturn(anyBuilderProject() + .withBuilderProjectId(A_BUILDER_PROJECT_ID) + .withWorkspaceId(A_WORKSPACE_ID) + .withActorDefinitionId(A_SOURCE_DEFINITION_ID)); + + projectUpdater.persistBuilderProjectUpdate(new ExistingConnectorBuilderProjectWithWorkspaceId() + .builderProject(new ConnectorBuilderProjectDetails() + .name(A_SOURCE_NAME) + .draftManifest(A_MANIFEST)) + .workspaceId(A_WORKSPACE_ID) + .builderProjectId(A_BUILDER_PROJECT_ID)); + + verify(configRepository, times(1)) + .updateBuilderProjectAndActorDefinition( + A_BUILDER_PROJECT_ID, A_WORKSPACE_ID, A_SOURCE_NAME, A_MANIFEST, A_SOURCE_DEFINITION_ID); + } + + private ConnectorBuilderProject generateBuilderProject() throws JsonProcessingException { + final UUID projectId = UUID.randomUUID(); + return new ConnectorBuilderProject().withBuilderProjectId(projectId).withWorkspaceId(workspaceId).withName("Test project") + .withHasDraft(true).withManifestDraft(draftManifest); + } + + private JsonNode addSpec(JsonNode manifest) { + final JsonNode spec = Jsons.deserialize("{\"" + CONNECTION_SPECIFICATION_FIELD + "\":" + specString + "}"); + return ((ObjectNode) Jsons.clone(manifest)).set(SPEC_FIELD, spec); + } + + private static ConnectorBuilderProject anyBuilderProject() { + return new ConnectorBuilderProject(); + } + +} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/support/AuthNettyServerCustomizerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/support/AuthNettyServerCustomizerTest.java index 6028acb482c..29aefde0401 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/support/AuthNettyServerCustomizerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/support/AuthNettyServerCustomizerTest.java @@ -24,6 +24,9 @@ class AuthNettyServerCustomizerTest { private static final Integer MAX_CONTENT_LENGTH = 1024; + private static final Integer MAX_INITIAL_LINE_LENGTH = 4096; + private static final Integer MAX_HEADER_SIZE = 8192; + private static final Integer MAX_CHUNK_SIZE = 8192; private AuthorizationServerHandler authorizationServerHandler; @@ -32,7 +35,8 @@ class AuthNettyServerCustomizerTest { @BeforeEach void setup() { authorizationServerHandler = Mockito.mock(AuthorizationServerHandler.class); - customizer = new AuthNettyServerCustomizer(authorizationServerHandler, 
MAX_CONTENT_LENGTH); + customizer = new AuthNettyServerCustomizer( + authorizationServerHandler, MAX_CONTENT_LENGTH, MAX_INITIAL_LINE_LENGTH, MAX_HEADER_SIZE, MAX_CHUNK_SIZE); } @Test diff --git a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/StreamRefreshesHandlerTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/StreamRefreshesHandlerTest.kt new file mode 100644 index 00000000000..d5290e16f61 --- /dev/null +++ b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/StreamRefreshesHandlerTest.kt @@ -0,0 +1,161 @@ +package io.airbyte.commons.server.handlers + +import io.airbyte.api.model.generated.ConnectionStream +import io.airbyte.commons.server.handlers.StreamRefreshesHandler.Companion.connectionStreamsToStreamDescriptors +import io.airbyte.commons.server.handlers.StreamRefreshesHandler.Companion.streamDescriptorsToStreamRefreshes +import io.airbyte.commons.server.scheduler.EventRunner +import io.airbyte.config.StandardWorkspace +import io.airbyte.config.persistence.StreamRefreshesRepository +import io.airbyte.config.persistence.domain.StreamRefresh +import io.airbyte.data.services.ConnectionService +import io.airbyte.data.services.WorkspaceService +import io.airbyte.featureflag.ActivateRefreshes +import io.airbyte.featureflag.Connection +import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.Multi +import io.airbyte.featureflag.Workspace +import io.airbyte.protocol.models.StreamDescriptor +import io.mockk.called +import io.mockk.clearAllMocks +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import io.mockk.verifyOrder +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertFalse +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import java.util.UUID + +internal class StreamRefreshesHandlerTest { + private val connectionService: ConnectionService = mockk() + private val streamRefreshesRepository: StreamRefreshesRepository = mockk() + private val eventRunner: EventRunner = mockk() + private val workspaceService: WorkspaceService = mockk() + private val featureFlagClient: FeatureFlagClient = mockk() + + private val streamRefreshesHandler = + StreamRefreshesHandler( + connectionService, + streamRefreshesRepository, + eventRunner, + workspaceService, + featureFlagClient, + ) + + private val workspaceId = UUID.randomUUID() + private val connectionId = UUID.randomUUID() + private val ffContext = + Multi( + listOf( + Workspace(workspaceId), + Connection(connectionId), + ), + ) + private val connectionStream = + listOf( + ConnectionStream().streamName("name1").streamNamespace("namespace1"), + ConnectionStream().streamName("name2"), + ) + private val streamDescriptors = + listOf( + StreamDescriptor().withName("name1").withNamespace("namespace1"), + StreamDescriptor().withName("name2"), + ) + + @BeforeEach + fun reset() { + clearAllMocks() + every { + workspaceService.getStandardWorkspaceFromConnection(connectionId, false) + } returns StandardWorkspace().withWorkspaceId(workspaceId) + } + + @Test + fun `test that nothing is submitted if the flag is disabled`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, ffContext) } returns false + + val result = streamRefreshesHandler.createRefreshesForConnection(connectionId, listOf()) + + assertFalse(result) + + verify { + listOf( + streamRefreshesRepository.saveAll(any<List<StreamRefresh>>()),
eventRunner.startNewManualSync(connectionId), + ) wasNot called + } + } + + @Test + fun `test that the refreshes entries are properly created`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, ffContext) } returns true + every { streamRefreshesRepository.saveAll(any<List<StreamRefresh>>()) } returns listOf() + every { eventRunner.startNewManualSync(connectionId) } returns null + + val result = streamRefreshesHandler.createRefreshesForConnection(connectionId, connectionStream) + + assertTrue(result) + + verifyOrder { + streamRefreshesRepository.saveAll(any<List<StreamRefresh>>()) + eventRunner.startNewManualSync(connectionId) + } + } + + @Test + fun `test that the refreshes entries are properly created for all the streams if the provided list is empty`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, ffContext) } returns true + every { streamRefreshesRepository.saveAll(any<List<StreamRefresh>>()) } returns listOf() + every { eventRunner.startNewManualSync(connectionId) } returns null + every { connectionService.getAllStreamsForConnection(connectionId) } returns streamDescriptors + + val result = streamRefreshesHandler.createRefreshesForConnection(connectionId, listOf()) + + assertTrue(result) + + verifyOrder { + streamRefreshesRepository.saveAll(any<List<StreamRefresh>>()) + eventRunner.startNewManualSync(connectionId) + } + } + + @Test + fun `test the conversion from connection stream to stream descriptors`() { + val result = connectionStreamsToStreamDescriptors(connectionStream) + + assertEquals(streamDescriptors, result) + } + + @Test + fun `test the conversion from stream descriptors to stream refreshes`() { + val expected = + listOf( + StreamRefresh(connectionId = connectionId, streamName = "name1", streamNamespace = "namespace1"), + StreamRefresh(connectionId = connectionId, streamName = "name2", streamNamespace = null), + ) + + val result = streamDescriptorsToStreamRefreshes(connectionId, streamDescriptors) + + assertEquals(2, result.size) + result.stream().forEach({ + assertEquals(connectionId, it.connectionId) + if (it.streamNamespace == null) { + assertEquals("name2", it.streamName) + } else if (it.streamNamespace == "namespace1") { + assertEquals("name1", it.streamName) + } else { + throw RuntimeException("Unexpected streamNamespace {${it.streamNamespace}}") + } + }) + } + + @Test + fun `test delete`() { + val connectionId: UUID = UUID.randomUUID() + every { streamRefreshesRepository.deleteByConnectionId(connectionId) }.returns(Unit) + streamRefreshesHandler.deleteRefreshesForConnection(connectionId) + verify { streamRefreshesRepository.deleteByConnectionId(connectionId) } + } +} diff --git a/airbyte-commons-temporal-core/build.gradle.kts b/airbyte-commons-temporal-core/build.gradle.kts index 1ba09a2244d..c2bcd6a3ca5 100644 --- a/airbyte-commons-temporal-core/build.gradle.kts +++ b/airbyte-commons-temporal-core/build.gradle.kts @@ -1,21 +1,21 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - kotlin("jvm") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + kotlin("jvm") } dependencies { - implementation(libs.bundles.temporal) - implementation(libs.failsafe) + implementation(libs.bundles.temporal) + implementation(libs.failsafe) - // We do not want a dependency on databases from this library. - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-metrics:metrics-lib")) + // We do not want a dependency on databases from this library.
+ implementation(project(":airbyte-commons")) + implementation(project(":airbyte-metrics:metrics-lib")) - testImplementation(libs.assertj.core) - testImplementation(libs.bundles.junit) - testImplementation(libs.junit.pioneer) - testImplementation(libs.mockito.inline) - testImplementation(libs.temporal.testing) - testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.assertj.core) + testImplementation(libs.bundles.junit) + testImplementation(libs.junit.pioneer) + testImplementation(libs.mockito.inline) + testImplementation(libs.temporal.testing) + testRuntimeOnly(libs.junit.jupiter.engine) } diff --git a/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/utils/ActivityFailureClassifier.kt b/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/utils/ActivityFailureClassifier.kt new file mode 100644 index 00000000000..cfcc55ca9bc --- /dev/null +++ b/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/utils/ActivityFailureClassifier.kt @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.temporal.utils + +import io.temporal.api.enums.v1.TimeoutType +import io.temporal.failure.ActivityFailure +import io.temporal.failure.TimeoutFailure + +object ActivityFailureClassifier { + @JvmStatic + fun classifyException(e: Exception): TemporalFailureReason = + when (e) { + is ActivityFailure -> + when (e.cause) { + is TimeoutFailure -> + when ((e.cause as TimeoutFailure).timeoutType) { + // ScheduleToClose or StartToClose happen when the activity runs longer than the configured timeout. + // This is most likely an issue with the computation itself more than the infra. + TimeoutType.TIMEOUT_TYPE_SCHEDULE_TO_CLOSE, TimeoutType.TIMEOUT_TYPE_START_TO_CLOSE -> TemporalFailureReason.OPERATION_TIMEOUT + + // This is because we failed our background heartbeat. + // Either the app in charge of heartbeat disappeared or got stuck. + TimeoutType.TIMEOUT_TYPE_HEARTBEAT -> TemporalFailureReason.HEARTBEAT + + // We consider the rest an infra issue; we were most likely not able to start the task within the allocated time. + // This is most likely TimeoutType.TIMEOUT_TYPE_SCHEDULE_TO_START or TimeoutType.UNRECOGNIZED + else -> TemporalFailureReason.SCHEDULER_OVERLOADED + } + + // This is a temporal error unrelated to a timeout. We do not have a more precise classification at the moment.
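+ // For example, an ApplicationFailure thrown by the activity code itself would land here.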
+ else -> TemporalFailureReason.NOT_A_TIMEOUT + } + + // This isn't an ActivityFailure exception; it should be classified outside of this method. + else -> TemporalFailureReason.UNKNOWN + } + + enum class TemporalFailureReason { + UNKNOWN, + NOT_A_TIMEOUT, + SCHEDULER_OVERLOADED, + HEARTBEAT, + OPERATION_TIMEOUT, + } +} diff --git a/airbyte-commons-temporal/build.gradle.kts b/airbyte-commons-temporal/build.gradle.kts index 3b3d55857c7..421fe0e64e0 100644 --- a/airbyte-commons-temporal/build.gradle.kts +++ b/airbyte-commons-temporal/build.gradle.kts @@ -1,42 +1,42 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.bundles.temporal) - implementation(libs.bundles.apache) - implementation(libs.failsafe) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.temporal) + implementation(libs.bundles.apache) + implementation(libs.failsafe) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-temporal-core")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(project(":airbyte-notification")) - implementation(project(":airbyte-persistence:job-persistence")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-worker-models")) - implementation(project(":airbyte-api")) - implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-temporal-core")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(project(":airbyte-notification")) + implementation(project(":airbyte-persistence:job-persistence")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-worker-models")) + implementation(project(":airbyte-api")) + implementation(project(":airbyte-json-validation")) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.temporal.testing) - // Needed to be able to mock final classes - testImplementation(libs.mockito.inline) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) + testImplementation(libs.temporal.testing) + // Needed to be able to mock final classes + testImplementation(libs.mockito.inline) +
+ testRuntimeOnly(libs.junit.jupiter.engine)
+ testImplementation(libs.bundles.junit)
+ testImplementation(libs.assertj.core)

- testImplementation(libs.junit.pioneer)
+ testImplementation(libs.junit.pioneer)
}
diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java
index 78d60f76ca1..79e8886af28 100644
--- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java
+++ b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java
@@ -77,27 +77,6 @@ public ConnectionManagerWorkflow signalWorkflowAndRepairIfNecessary(final UUID c
return signalWorkflowAndRepairIfNecessary(connectionId, signalMethod, Optional.empty());
}

- /**
- * Attempts to send a signal to the existing ConnectionManagerWorkflow for the provided connection.
- *
- * If the workflow is unreachable, this will restart the workflow and send the signal in a single
- * batched request. Batching is used to avoid race conditions between starting the workflow and
- * executing the signal.
- *
- * @param connectionId the connection ID to execute this operation for
- * @param signalMethod a function that takes in a connection manager workflow and executes a signal
- * method on it, with 1 argument
- * @param signalArgument the single argument to be input to the signal
- * @return the healthy connection manager workflow that was signaled
- * @throws DeletedWorkflowException if the connection manager workflow was deleted
- */
- public ConnectionManagerWorkflow signalWorkflowAndRepairIfNecessary(final UUID connectionId,
- final Function> signalMethod,
- final T signalArgument)
- throws DeletedWorkflowException {
- return signalWorkflowAndRepairIfNecessary(connectionId, signalMethod, Optional.of(signalArgument));
- }
-
// This method unifies the logic of the above two, by using the optional signalArgument parameter to
// indicate if an argument is being provided to the signal or not.
// Keeping this private and only exposing the above methods outside this class provides a strict
diff --git a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/ConnectionManageUtilsTest.java b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/ConnectionManageUtilsTest.java
new file mode 100644
index 00000000000..2e2ffdf8e00
--- /dev/null
+++ b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/ConnectionManageUtilsTest.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.commons.temporal;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import io.airbyte.commons.temporal.exception.DeletedWorkflowException;
+import io.airbyte.commons.temporal.scheduling.ConnectionManagerWorkflow;
+import io.airbyte.metrics.lib.MetricClient;
+import io.temporal.client.BatchRequest;
+import io.temporal.client.WorkflowOptions;
+import java.util.UUID;
+import org.junit.jupiter.api.Test;
+
+public class ConnectionManageUtilsTest {
+
+  @Test
+  void signalAndRepairIfNecessaryWhenNoWorkflowWillCreate() throws DeletedWorkflowException {
+    final var mWorkflow = mock(WorkflowClientWrapped.class);
+    final var mMetric = mock(MetricClient.class);
+    final var cid = UUID.randomUUID();
+
+    when(mWorkflow.newWorkflowStub(any(), any(WorkflowOptions.class)))
+        .thenReturn(mock(ConnectionManagerWorkflow.class));
+    when(mWorkflow.newSignalWithStartRequest()).thenReturn(mock(BatchRequest.class));
+
+    final var utils = new ConnectionManagerUtils(mWorkflow, mMetric);
+    utils.signalWorkflowAndRepairIfNecessary(cid, (workflow) -> null);
+    // Because we do not mock the getConnectionManagerWorkflow call, the underlying call throws an
+    // exception and the repair logic recreates the workflow.
+    verify(mWorkflow).signalWithStart(any());
+  }
+
+}
diff --git a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java
index 97804c368e7..70b1b1e9f50 100644
--- a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java
+++ b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java
@@ -426,8 +426,9 @@ void testStartNewManualSyncAlreadyRunning() {
@DisplayName("Test startNewManualSync repairs the workflow if it is in a bad state")
void testStartNewManualSyncRepairsBadWorkflowState() {
final ConnectionManagerWorkflow mTerminatedConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class);
- when(mTerminatedConnectionManagerWorkflow.getState())
- .thenThrow(new IllegalStateException(EXCEPTION_MESSAGE));
+
+ // This simulates a workflow that is in a bad state.
+ when(mTerminatedConnectionManagerWorkflow.getState()).thenThrow(new IllegalStateException(EXCEPTION_MESSAGE));
when(mTerminatedConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID));

final ConnectionManagerWorkflow mNewConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class);
@@ -631,8 +632,9 @@ void testResetConnectionSuccessAndContinue() throws IOException {
@DisplayName("Test resetConnection repairs the workflow if it is in a bad state")
void testResetConnectionRepairsBadWorkflowState() throws IOException {
final ConnectionManagerWorkflow mTerminatedConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class);
- when(mTerminatedConnectionManagerWorkflow.getState())
- .thenThrow(new IllegalStateException(EXCEPTION_MESSAGE));
+
+ // This simulates a workflow that is in a bad state.
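+ // Stubbing getState() to throw mimics a workflow stub whose underlying Temporal execution is
+ // unreachable, which is what should force resetConnection down the repair path.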
+ when(mTerminatedConnectionManagerWorkflow.getState()).thenThrow(new IllegalStateException(EXCEPTION_MESSAGE)); when(mTerminatedConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); final ConnectionManagerWorkflow mNewConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); diff --git a/airbyte-commons-with-dependencies/build.gradle.kts b/airbyte-commons-with-dependencies/build.gradle.kts index 8e1f72bf6ea..b10042f6fdb 100644 --- a/airbyte-commons-with-dependencies/build.gradle.kts +++ b/airbyte-commons-with-dependencies/build.gradle.kts @@ -1,21 +1,21 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-config:config-models")) - implementation(libs.guava) + implementation(libs.guava) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.mockito.core) - testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockito.core) + testImplementation(libs.bundles.micronaut.test) } diff --git a/airbyte-commons-worker/build.gradle.kts b/airbyte-commons-worker/build.gradle.kts index bea90cf1601..871e1a1a6eb 100644 --- a/airbyte-commons-worker/build.gradle.kts +++ b/airbyte-commons-worker/build.gradle.kts @@ -1,112 +1,112 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + kotlin("jvm") + kotlin("kapt") } configurations.all { - resolutionStrategy { - force(libs.platform.testcontainers.postgresql) - } + resolutionStrategy { + force(libs.platform.testcontainers.postgresql) + } } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - kapt(libs.bundles.micronaut.annotation.processor) + kapt(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.bundles.micronaut.metrics) - implementation(libs.micronaut.http) - implementation(libs.kotlin.logging) - implementation(libs.bundles.micronaut.kotlin) - implementation(libs.micronaut.jackson.databind) - implementation(libs.bundles.kubernetes.client) - implementation(libs.java.jwt) - implementation(libs.gson) - implementation(libs.guava) - implementation(libs.temporal.sdk) { - exclude(module = 
"guava") - } - implementation(libs.apache.ant) - implementation(libs.apache.commons.text) - implementation(libs.bundles.datadog) - implementation(libs.commons.io) - implementation(libs.bundles.apache) - implementation(libs.bundles.log4j) - implementation(libs.failsafe.okhttp) - implementation(libs.google.cloud.storage) - implementation(libs.okhttp) - implementation(libs.aws.java.sdk.s3) - implementation(libs.aws.java.sdk.sts) - implementation(libs.s3) - implementation(libs.sts) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.micronaut.metrics) + implementation(libs.micronaut.http) + implementation(libs.kotlin.logging) + implementation(libs.bundles.micronaut.kotlin) + implementation(libs.micronaut.jackson.databind) + implementation(libs.bundles.kubernetes.client) + implementation(libs.java.jwt) + implementation(libs.gson) + implementation(libs.guava) + implementation(libs.temporal.sdk) { + exclude(module = "guava") + } + implementation(libs.apache.ant) + implementation(libs.apache.commons.text) + implementation(libs.bundles.datadog) + implementation(libs.commons.io) + implementation(libs.bundles.apache) + implementation(libs.bundles.log4j) + implementation(libs.failsafe.okhttp) + implementation(libs.google.cloud.storage) + implementation(libs.okhttp) + implementation(libs.aws.java.sdk.s3) + implementation(libs.aws.java.sdk.sts) + implementation(libs.s3) + implementation(libs.sts) - implementation(project(":airbyte-api")) - implementation(project(":airbyte-analytics")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-auth")) - implementation(project(":airbyte-commons-converters")) - implementation(project(":airbyte-commons-protocol")) - implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-commons-temporal-core")) - implementation(project(":airbyte-commons-with-dependencies")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-config:config-secrets")) - implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(project(":airbyte-persistence:job-persistence")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-worker-models")) - implementation(libs.jakarta.validation.api) + implementation(project(":airbyte-api")) + implementation(project(":airbyte-analytics")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-auth")) + implementation(project(":airbyte-commons-converters")) + implementation(project(":airbyte-commons-protocol")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-commons-temporal-core")) + implementation(project(":airbyte-commons-with-dependencies")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-config:config-secrets")) + implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(project(":airbyte-persistence:job-persistence")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-worker-models")) + implementation(libs.jakarta.validation.api) - 
testCompileOnly(libs.lombok) - testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testAnnotationProcessor(libs.jmh.annotations) + testCompileOnly(libs.lombok) + testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(libs.jmh.annotations) - kaptTest(platform(libs.micronaut.platform)) - kaptTest(libs.bundles.micronaut.annotation.processor) - kaptTest(libs.bundles.micronaut.test.annotation.processor) + kaptTest(platform(libs.micronaut.platform)) + kaptTest(libs.bundles.micronaut.annotation.processor) + kaptTest(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.mockk) - testImplementation(libs.json.path) - testImplementation(libs.bundles.mockito.inline) - testImplementation(libs.mockk) - testImplementation(variantOf(libs.opentracing.util) { classifier("tests") }) - testImplementation(libs.postgresql) - testImplementation(libs.platform.testcontainers.postgresql) - testImplementation(libs.jmh.core) - testImplementation(libs.jmh.annotations) - testImplementation(libs.docker.java) - testImplementation(libs.docker.java.transport.httpclient5) - testImplementation(libs.reactor.test) - testImplementation(libs.mockk) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) - testImplementation(libs.mockk) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockk) + testImplementation(libs.json.path) + testImplementation(libs.bundles.mockito.inline) + testImplementation(libs.mockk) + testImplementation(variantOf(libs.opentracing.util) { classifier("tests") }) + testImplementation(libs.postgresql) + testImplementation(libs.platform.testcontainers.postgresql) + testImplementation(libs.jmh.core) + testImplementation(libs.jmh.annotations) + testImplementation(libs.docker.java) + testImplementation(libs.docker.java.transport.httpclient5) + testImplementation(libs.reactor.test) + testImplementation(libs.mockk) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) + testImplementation(libs.mockk) - testRuntimeOnly(libs.junit.jupiter.engine) - testRuntimeOnly(libs.javax.databind) + testRuntimeOnly(libs.junit.jupiter.engine) + testRuntimeOnly(libs.javax.databind) } tasks.named("test") { - maxHeapSize = "10g" + maxHeapSize = "10g" - useJUnitPlatform { - excludeTags("cloud-storage") - } + useJUnitPlatform { + excludeTags("cloud-storage") + } } // The DuplicatesStrategy will be required while this module is mixture of kotlin and java _with_ lombok dependencies.) @@ -115,5 +115,5 @@ tasks.named("test") { // keepJavacAnnotationProcessors enabled, which causes duplicate META-INF files to be generated.) // Once lombok has been removed, this can also be removed.) 
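// (DuplicatesStrategy.EXCLUDE keeps the first copy of each duplicated entry and silently drops later ones.)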
tasks.withType().configureEach { - duplicatesStrategy = DuplicatesStrategy.EXCLUDE + duplicatesStrategy = DuplicatesStrategy.EXCLUDE } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/BufferedReplicationWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/BufferedReplicationWorker.java index 5b5547bbcdd..1531ca8a6f0 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/BufferedReplicationWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/BufferedReplicationWorker.java @@ -23,10 +23,12 @@ import io.airbyte.persistence.job.models.ReplicationInput; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; +import io.airbyte.protocol.models.AirbyteTraceMessage; import io.airbyte.workers.RecordSchemaValidator; import io.airbyte.workers.context.ReplicationContext; import io.airbyte.workers.context.ReplicationFeatureFlags; import io.airbyte.workers.exception.WorkerException; +import io.airbyte.workers.helper.StreamStatusCompletionTracker; import io.airbyte.workers.internal.AirbyteDestination; import io.airbyte.workers.internal.AirbyteSource; import io.airbyte.workers.internal.DestinationTimeoutMonitor; @@ -35,9 +37,11 @@ import io.airbyte.workers.internal.exception.SourceException; import io.airbyte.workers.internal.syncpersistence.SyncPersistence; import java.nio.file.Path; +import java.util.List; import java.util.Map; import java.util.Optional; import java.util.OptionalInt; +import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionException; import java.util.concurrent.ExecutionException; @@ -80,6 +84,7 @@ public class BufferedReplicationWorker implements ReplicationWorker { private final Stopwatch writeToDestStopwatch; private final Stopwatch readFromDestStopwatch; private final Stopwatch processFromDestStopwatch; + private final StreamStatusCompletionTracker streamStatusCompletionTracker; private static final int sourceMaxBufferSize = 1000; private static final int destinationMaxBufferSize = 1000; @@ -96,9 +101,10 @@ public BufferedReplicationWorker(final String jobId, final ReplicationFeatureFlagReader replicationFeatureFlagReader, final ReplicationWorkerHelper replicationWorkerHelper, final DestinationTimeoutMonitor destinationTimeoutMonitor, - final BufferedReplicationWorkerType bufferedReplicationWorkerType) { + final BufferedReplicationWorkerType bufferedReplicationWorkerType, + final StreamStatusCompletionTracker streamStatusCompletionTracker) { this(jobId, attempt, source, destination, syncPersistence, recordSchemaValidator, srcHeartbeatTimeoutChaperone, replicationFeatureFlagReader, - replicationWorkerHelper, destinationTimeoutMonitor, bufferedReplicationWorkerType, OptionalInt.empty()); + replicationWorkerHelper, destinationTimeoutMonitor, bufferedReplicationWorkerType, OptionalInt.empty(), streamStatusCompletionTracker); } public BufferedReplicationWorker(final String jobId, @@ -112,7 +118,8 @@ public BufferedReplicationWorker(final String jobId, final ReplicationWorkerHelper replicationWorkerHelper, final DestinationTimeoutMonitor destinationTimeoutMonitor, final BufferedReplicationWorkerType bufferedReplicationWorkerType, - final OptionalInt pollTimeOutDurationForQueue) { + final OptionalInt pollTimeOutDurationForQueue, + final StreamStatusCompletionTracker streamStatusCompletionTracker) { this.jobId = jobId; this.attempt = attempt; this.source = source; @@ -140,6 +147,7 @@ 
public BufferedReplicationWorker(final String jobId, this.writeToDestStopwatch = new Stopwatch(); this.readFromDestStopwatch = new Stopwatch(); this.processFromDestStopwatch = new Stopwatch(); + this.streamStatusCompletionTracker = streamStatusCompletionTracker; } @Trace(operationName = WORKER_OPERATION_NAME) @@ -152,7 +160,7 @@ public ReplicationOutput run(final ReplicationInput replicationInput, final Path try { final ReplicationContext replicationContext = getReplicationContext(replicationInput); final ReplicationFeatureFlags flags = replicationFeatureFlagReader.readReplicationFeatureFlags(); - replicationWorkerHelper.initialize(replicationContext, flags, jobRoot); + replicationWorkerHelper.initialize(replicationContext, flags, jobRoot, replicationInput.getCatalog()); final CloseableWithTimeout destinationWithCloseTimeout = new CloseableWithTimeout(destination, mdc, flags); // note: resources are closed in the opposite order in which they are declared. thus source will be @@ -278,10 +286,13 @@ private void trackFailures(final V value, final Throwable t) { } private ReplicationContext getReplicationContext(final ReplicationInput replicationInput) { + + final UUID sourceDefinitionId = replicationWorkerHelper.getSourceDefinitionIdForSourceId(replicationInput.getSourceId()); + final UUID destinationDefinitionId = replicationWorkerHelper.getDestinationDefinitionIdForDestinationId(replicationInput.getDestinationId()); return new ReplicationContext(replicationInput.getIsReset(), replicationInput.getConnectionId(), replicationInput.getSourceId(), replicationInput.getDestinationId(), Long.parseLong(jobId), attempt, replicationInput.getWorkspaceId(), replicationInput.getSourceLauncherConfig().getDockerImage(), - replicationInput.getDestinationLauncherConfig().getDockerImage()); + replicationInput.getDestinationLauncherConfig().getDockerImage(), sourceDefinitionId, destinationDefinitionId); } @Override @@ -345,7 +356,12 @@ private void readFromSource() { while (!replicationWorkerHelper.getShouldAbort() && !(sourceIsFinished = sourceIsFinished()) && !messagesFromSourceQueue.isClosed()) { final Optional messageOptional = source.attemptRead(); if (messageOptional.isPresent()) { - while (!replicationWorkerHelper.getShouldAbort() && !messagesFromSourceQueue.add(messageOptional.get()) + final AirbyteMessage message = messageOptional.get(); + if (message.getType() == Type.TRACE && message.getTrace().getType() == AirbyteTraceMessage.Type.STREAM_STATUS) { + streamStatusCompletionTracker.track(message.getTrace().getStreamStatus()); + } + + while (!replicationWorkerHelper.getShouldAbort() && !messagesFromSourceQueue.add(message) && !messagesFromSourceQueue.isClosed()) { Thread.sleep(100); } @@ -434,6 +450,11 @@ private void writeToDestination() { destination.accept(message); } } + + final List statusMessageToSend = replicationWorkerHelper.getStreamStatusToSend(source.getExitValue()); + for (AirbyteMessage airbyteMessage : statusMessageToSend) { + destination.accept(airbyteMessage); + } } finally { destination.notifyEndOfInput(); } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java index ac18c0a58f0..6357a29666d 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java @@ -18,10 +18,12 @@ import 
io.airbyte.persistence.job.models.ReplicationInput; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; +import io.airbyte.protocol.models.AirbyteTraceMessage; import io.airbyte.workers.RecordSchemaValidator; import io.airbyte.workers.context.ReplicationContext; import io.airbyte.workers.context.ReplicationFeatureFlags; import io.airbyte.workers.exception.WorkerException; +import io.airbyte.workers.helper.StreamStatusCompletionTracker; import io.airbyte.workers.internal.AirbyteDestination; import io.airbyte.workers.internal.AirbyteSource; import io.airbyte.workers.internal.DestinationTimeoutMonitor; @@ -30,6 +32,7 @@ import io.airbyte.workers.internal.exception.SourceException; import io.airbyte.workers.internal.syncpersistence.SyncPersistence; import java.nio.file.Path; +import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; @@ -77,6 +80,7 @@ public class DefaultReplicationWorker implements ReplicationWorker { private final RecordSchemaValidator recordSchemaValidator; private final HeartbeatTimeoutChaperone srcHeartbeatTimeoutChaperone; private final ReplicationFeatureFlagReader replicationFeatureFlagReader; + private final StreamStatusCompletionTracker streamStatusCompletionTracker; private static final int executorShutdownGracePeriodInSeconds = 10; @@ -89,7 +93,8 @@ public DefaultReplicationWorker(final String jobId, final HeartbeatTimeoutChaperone srcHeartbeatTimeoutChaperone, final ReplicationFeatureFlagReader replicationFeatureFlagReader, final ReplicationWorkerHelper replicationWorkerHelper, - final DestinationTimeoutMonitor destinationTimeoutMonitor) { + final DestinationTimeoutMonitor destinationTimeoutMonitor, + final StreamStatusCompletionTracker streamStatusCompletionTracker) { this.jobId = jobId; this.attempt = attempt; this.destinationTimeoutMonitor = destinationTimeoutMonitor; @@ -102,6 +107,7 @@ public DefaultReplicationWorker(final String jobId, this.executors = Executors.newFixedThreadPool(5); this.recordSchemaValidator = recordSchemaValidator; this.srcHeartbeatTimeoutChaperone = srcHeartbeatTimeoutChaperone; + this.streamStatusCompletionTracker = streamStatusCompletionTracker; this.replicationFeatureFlagReader = replicationFeatureFlagReader; this.hasFailed = new AtomicBoolean(false); @@ -125,7 +131,6 @@ public final ReplicationOutput run(final ReplicationInput replicationInput, fina LOGGER.info("start sync worker. 
job id: {} attempt id: {}", jobId, attempt); LineGobbler.startSection("REPLICATION"); - try { LOGGER.info("configured sync modes: {}", replicationInput.getCatalog().getStreams() .stream() @@ -135,11 +140,12 @@ public final ReplicationOutput run(final ReplicationInput replicationInput, fina new ReplicationContext(replicationInput.getIsReset(), replicationInput.getConnectionId(), replicationInput.getSourceId(), replicationInput.getDestinationId(), Long.parseLong(jobId), attempt, replicationInput.getWorkspaceId(), replicationInput.getSourceLauncherConfig().getDockerImage(), - replicationInput.getDestinationLauncherConfig().getDockerImage()); + replicationInput.getDestinationLauncherConfig().getDockerImage(), + replicationWorkerHelper.getSourceDefinitionIdForSourceId(replicationInput.getSourceId()), + replicationWorkerHelper.getDestinationDefinitionIdForDestinationId(replicationInput.getDestinationId())); final ReplicationFeatureFlags flags = replicationFeatureFlagReader.readReplicationFeatureFlags(); - replicationWorkerHelper.initialize(replicationContext, flags, jobRoot); - + replicationWorkerHelper.initialize(replicationContext, flags, jobRoot, replicationInput.getCatalog()); replicate(jobRoot, replicationInput, flags); return replicationWorkerHelper.getReplicationOutput(); @@ -186,7 +192,8 @@ private void replicate(final Path jobRoot, source, destination, replicationWorkerHelper, - mdc), executors) + mdc, + streamStatusCompletionTracker), executors) .whenComplete((msg, ex) -> { if (ex != null) { ApmTraceUtils.addExceptionToTrace(ex); @@ -330,7 +337,8 @@ private static Runnable readFromDstRunnable(final AirbyteDestination destination private static Runnable readFromSrcAndWriteToDstRunnable(final AirbyteSource source, final AirbyteDestination destination, final ReplicationWorkerHelper replicationWorkerHelper, - final Map mdc) { + final Map mdc, + final StreamStatusCompletionTracker streamStatusCompletionTracker) { return () -> { MDC.setContextMap(mdc); LOGGER.info("Replication thread started."); @@ -346,6 +354,9 @@ private static Runnable readFromSrcAndWriteToDstRunnable(final AirbyteSource sou if (messageOptional.isPresent()) { final AirbyteMessage airbyteMessage = messageOptional.get(); + if (airbyteMessage.getType() == Type.TRACE && airbyteMessage.getTrace().getType() == AirbyteTraceMessage.Type.STREAM_STATUS) { + streamStatusCompletionTracker.track(airbyteMessage.getTrace().getStreamStatus()); + } final Optional processedAirbyteMessage = replicationWorkerHelper.processMessageFromSource(airbyteMessage); @@ -373,6 +384,12 @@ private static Runnable readFromSrcAndWriteToDstRunnable(final AirbyteSource sou } replicationWorkerHelper.endOfSource(); + List statusMessageToSend = replicationWorkerHelper.getStreamStatusToSend(source.getExitValue()); + + for (AirbyteMessage airbyteMessage : statusMessageToSend) { + destination.accept(airbyteMessage); + } + try { destination.notifyEndOfInput(); } catch (final Exception e) { diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/ReplicationWorkerFactory.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/ReplicationWorkerFactory.java index e62b0306fcb..8d09ef48652 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/ReplicationWorkerFactory.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/ReplicationWorkerFactory.java @@ -9,6 +9,8 @@ import io.airbyte.analytics.TrackingClient; import io.airbyte.api.client.AirbyteApiClient; +import 
io.airbyte.api.client.WorkloadApiClient; +import io.airbyte.api.client.generated.DestinationApi; import io.airbyte.api.client.generated.SourceApi; import io.airbyte.api.client.generated.SourceDefinitionApi; import io.airbyte.api.client.invoker.generated.ApiException; @@ -43,6 +45,7 @@ import io.airbyte.workers.WorkerMetricReporter; import io.airbyte.workers.WorkerUtils; import io.airbyte.workers.helper.AirbyteMessageDataExtractor; +import io.airbyte.workers.helper.StreamStatusCompletionTracker; import io.airbyte.workers.internal.AirbyteDestination; import io.airbyte.workers.internal.AirbyteMapper; import io.airbyte.workers.internal.AirbyteSource; @@ -58,10 +61,10 @@ import io.airbyte.workers.internal.syncpersistence.SyncPersistence; import io.airbyte.workers.internal.syncpersistence.SyncPersistenceFactory; import io.airbyte.workers.process.AirbyteIntegrationLauncherFactory; -import io.airbyte.workload.api.client.generated.WorkloadApi; import io.micronaut.context.annotation.Value; import io.micronaut.core.util.CollectionUtils; import jakarta.inject.Singleton; +import java.time.Clock; import java.time.Duration; import java.util.ArrayList; import java.util.List; @@ -92,8 +95,11 @@ public class ReplicationWorkerFactory { private final MetricClient metricClient; private final ReplicationAirbyteMessageEventPublishingHelper replicationAirbyteMessageEventPublishingHelper; private final TrackingClient trackingClient; - private final WorkloadApi workloadApi; + private final WorkloadApiClient workloadApiClient; private final boolean workloadEnabled; + private final DestinationApi destinationApi; + private final StreamStatusCompletionTracker streamStatusCompletionTracker; + private final Clock clock; public ReplicationWorkerFactory( final AirbyteIntegrationLauncherFactory airbyteIntegrationLauncherFactory, @@ -105,9 +111,12 @@ public ReplicationWorkerFactory( final FeatureFlags featureFlags, final ReplicationAirbyteMessageEventPublishingHelper replicationAirbyteMessageEventPublishingHelper, final MetricClient metricClient, - final WorkloadApi workloadApi, + final WorkloadApiClient workloadApiClient, final TrackingClient trackingClient, - @Value("${airbyte.workload.enabled}") final boolean workloadEnabled) { + @Value("${airbyte.workload.enabled}") final boolean workloadEnabled, + final DestinationApi destinationApi, + final StreamStatusCompletionTracker streamStatusCompletionTracker, + final Clock clock) { this.airbyteIntegrationLauncherFactory = airbyteIntegrationLauncherFactory; this.sourceApi = sourceApi; this.sourceDefinitionApi = sourceDefinitionApi; @@ -118,9 +127,12 @@ public ReplicationWorkerFactory( this.featureFlagClient = featureFlagClient; this.featureFlags = featureFlags; this.metricClient = metricClient; - this.workloadApi = workloadApi; + this.workloadApiClient = workloadApiClient; this.workloadEnabled = workloadEnabled; this.trackingClient = trackingClient; + this.destinationApi = destinationApi; + this.streamStatusCompletionTracker = streamStatusCompletionTracker; + this.clock = clock; } /** @@ -171,8 +183,8 @@ public ReplicationWorker create(final ReplicationInput replicationInput, return createReplicationWorker(airbyteSource, airbyteDestination, messageTracker, syncPersistence, recordSchemaValidator, fieldSelector, heartbeatTimeoutChaperone, featureFlagClient, jobRunConfig, replicationInput, airbyteMessageDataExtractor, replicationAirbyteMessageEventPublishingHelper, - onReplicationRunning, metricClient, destinationTimeout, workloadApi, workloadEnabled, analyticsMessageTracker, - 
workloadId); + onReplicationRunning, metricClient, destinationTimeout, workloadApiClient, workloadEnabled, analyticsMessageTracker, + workloadId, sourceApi, destinationApi, streamStatusCompletionTracker, clock); } /** @@ -311,10 +323,14 @@ private static ReplicationWorker createReplicationWorker(final AirbyteSource sou final VoidCallable onReplicationRunning, final MetricClient metricClient, final DestinationTimeoutMonitor destinationTimeout, - final WorkloadApi workloadApi, + final WorkloadApiClient workloadApiClient, final boolean workloadEnabled, final AnalyticsMessageTracker analyticsMessageTracker, - final Optional workloadId) { + final Optional workloadId, + final SourceApi sourceApi, + final DestinationApi destinationApi, + final StreamStatusCompletionTracker streamStatusCompletionTracker, + final Clock clock) { final Context flagContext = getFeatureFlagContext(replicationInput); final String workerImpl = featureFlagClient.stringVariation(ReplicationWorkerImpl.INSTANCE, flagContext); return buildReplicationWorkerInstance( @@ -338,10 +354,15 @@ private static ReplicationWorker createReplicationWorker(final AirbyteSource sou onReplicationRunning, metricClient, destinationTimeout, - workloadApi, + workloadApiClient, workloadEnabled, analyticsMessageTracker, - workloadId); + workloadId, + featureFlagClient, + sourceApi, + destinationApi, + streamStatusCompletionTracker, + clock); } private static Context getFeatureFlagContext(final ReplicationInput replicationInput) { @@ -383,25 +404,31 @@ private static ReplicationWorker buildReplicationWorkerInstance(final String wor final VoidCallable onReplicationRunning, final MetricClient metricClient, final DestinationTimeoutMonitor destinationTimeout, - final WorkloadApi workloadApi, + final WorkloadApiClient workloadApiClient, final boolean workloadEnabled, final AnalyticsMessageTracker analyticsMessageTracker, - final Optional workloadId) { + final Optional workloadId, + final FeatureFlagClient featureFlagClient, + final SourceApi sourceApi, + final DestinationApi destinationApi, + final StreamStatusCompletionTracker streamStatusCompletionTracker, + final Clock clock) { final ReplicationWorkerHelper replicationWorkerHelper = new ReplicationWorkerHelper(airbyteMessageDataExtractor, fieldSelector, mapper, messageTracker, syncPersistence, - messageEventPublishingHelper, new ThreadedTimeTracker(), onReplicationRunning, workloadApi, - workloadEnabled, analyticsMessageTracker, workloadId); + messageEventPublishingHelper, new ThreadedTimeTracker(), onReplicationRunning, workloadApiClient, + workloadEnabled, analyticsMessageTracker, workloadId, sourceApi, destinationApi, streamStatusCompletionTracker); final Optional bufferedReplicationWorkerType = bufferedReplicationWorkerType(workerImpl); if (bufferedReplicationWorkerType.isPresent()) { metricClient.count(OssMetricsRegistry.REPLICATION_WORKER_CREATED, 1, new MetricAttribute(MetricTags.IMPLEMENTATION, workerImpl)); return new BufferedReplicationWorker(jobId, attempt, source, destination, syncPersistence, recordSchemaValidator, srcHeartbeatTimeoutChaperone, replicationFeatureFlagReader, replicationWorkerHelper, destinationTimeout, - bufferedReplicationWorkerType.get()); + bufferedReplicationWorkerType.get(), streamStatusCompletionTracker); } else { metricClient.count(OssMetricsRegistry.REPLICATION_WORKER_CREATED, 1, new MetricAttribute(MetricTags.IMPLEMENTATION, "default")); return new DefaultReplicationWorker(jobId, attempt, source, destination, syncPersistence, recordSchemaValidator, - 
srcHeartbeatTimeoutChaperone, replicationFeatureFlagReader, replicationWorkerHelper, destinationTimeout); + srcHeartbeatTimeoutChaperone, replicationFeatureFlagReader, replicationWorkerHelper, destinationTimeout, + new StreamStatusCompletionTracker(featureFlagClient, clock)); } } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java index e10e4c94dfa..ec8f8f6cd56 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java @@ -75,9 +75,7 @@ public DefaultAirbyteDestination(final IntegrationLauncher integrationLauncher, VersionedAirbyteStreamFactory.noMigrationVersionedAirbyteStreamFactory( LOGGER, CONTAINER_LOG_MDC_BUILDER, - Optional.empty(), - Runtime.getRuntime().maxMemory(), - new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false), + new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false), new GsonPksExtractor()), new DefaultAirbyteMessageBufferedWriterFactory(), new DefaultProtocolSerializer(), diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactory.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactory.java index 9ac94943d96..0883cdc1e13 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactory.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactory.java @@ -31,16 +31,12 @@ import io.airbyte.workers.helper.GsonPksExtractor; import java.io.BufferedReader; import java.io.IOException; -import java.lang.reflect.InvocationTargetException; import java.nio.charset.StandardCharsets; -import java.text.CharacterIterator; -import java.text.StringCharacterIterator; import java.util.List; import java.util.Optional; import java.util.UUID; import java.util.function.Predicate; import java.util.stream.Stream; -import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -59,13 +55,10 @@ @SuppressWarnings("PMD.MoreThanOneLogger") public class VersionedAirbyteStreamFactory implements AirbyteStreamFactory { - public record InvalidLineFailureConfiguration(boolean failTooLongRecords, boolean printLongRecordPks) {} + public record InvalidLineFailureConfiguration(boolean printLongRecordPks) {} private static final Logger DEFAULT_LOGGER = LoggerFactory.getLogger(VersionedAirbyteStreamFactory.class); - private static final double MAX_SIZE_RATIO = 0.8; - @VisibleForTesting - static final long DEFAULT_MEMORY_LIMIT = Runtime.getRuntime().maxMemory(); @VisibleForTesting static final MdcScope.Builder DEFAULT_MDC_SCOPE = MdcScope.DEFAULT_BUILDER; @@ -82,11 +75,9 @@ public record InvalidLineFailureConfiguration(boolean failTooLongRecords, boolea // BASIC PROCESSING FIELDS protected final Logger logger; - private final long maxMemory; private final Optional connectionId; private final MdcScope.Builder containerLogMdcBuilder; - private final Optional> exceptionClass; // VERSION RELATED FIELDS private final AirbyteMessageSerDeProvider serDeProvider; @@ -108,9 +99,9 @@ public record InvalidLineFailureConfiguration(boolean failTooLongRecords, boolea * @return a VersionedAirbyteStreamFactory that does not perform any migration. 
*/ @VisibleForTesting - public static VersionedAirbyteStreamFactory noMigrationVersionedAirbyteStreamFactory(final boolean failTooLongRecords) { - return noMigrationVersionedAirbyteStreamFactory(DEFAULT_LOGGER, MdcScope.DEFAULT_BUILDER, Optional.empty(), Runtime.getRuntime().maxMemory(), - new InvalidLineFailureConfiguration(failTooLongRecords, false), new GsonPksExtractor()); + public static VersionedAirbyteStreamFactory noMigrationVersionedAirbyteStreamFactory() { + return noMigrationVersionedAirbyteStreamFactory(DEFAULT_LOGGER, MdcScope.DEFAULT_BUILDER, + new InvalidLineFailureConfiguration(false), new GsonPksExtractor()); } /** @@ -121,8 +112,6 @@ public static VersionedAirbyteStreamFactory noMigrationVersionedAirbyteStreamFac @VisibleForTesting public static VersionedAirbyteStreamFactory noMigrationVersionedAirbyteStreamFactory(final Logger logger, final MdcScope.Builder mdcBuilder, - final Optional> clazz, - final long maxMemory, final InvalidLineFailureConfiguration conf, final GsonPksExtractor gsonPksExtractor) { final AirbyteMessageSerDeProvider provider = new AirbyteMessageSerDeProvider( @@ -138,8 +127,7 @@ public static VersionedAirbyteStreamFactory noMigrationVersionedAirbyteStreamFac new AirbyteProtocolVersionedMigratorFactory(airbyteMessageMigrator, configuredAirbyteCatalogMigrator); return new VersionedAirbyteStreamFactory<>(provider, fac, AirbyteProtocolVersion.DEFAULT_AIRBYTE_PROTOCOL_VERSION, Optional.empty(), - Optional.empty(), logger, - mdcBuilder, clazz, maxMemory, conf, gsonPksExtractor); + Optional.empty(), logger, mdcBuilder, conf, gsonPksExtractor); } public VersionedAirbyteStreamFactory(final AirbyteMessageSerDeProvider serDeProvider, @@ -148,11 +136,10 @@ public VersionedAirbyteStreamFactory(final AirbyteMessageSerDeProvider serDeProv final Optional connectionId, final Optional configuredAirbyteCatalog, final MdcScope.Builder containerLogMdcBuilder, - final Optional> exceptionClass, final InvalidLineFailureConfiguration invalidLineFailureConfiguration, final GsonPksExtractor gsonPksExtractor) { this(serDeProvider, migratorFactory, protocolVersion, connectionId, configuredAirbyteCatalog, DEFAULT_LOGGER, containerLogMdcBuilder, - exceptionClass, Runtime.getRuntime().maxMemory(), invalidLineFailureConfiguration, gsonPksExtractor); + invalidLineFailureConfiguration, gsonPksExtractor); } public VersionedAirbyteStreamFactory(final AirbyteMessageSerDeProvider serDeProvider, @@ -160,11 +147,10 @@ public VersionedAirbyteStreamFactory(final AirbyteMessageSerDeProvider serDeProv final Version protocolVersion, final Optional connectionId, final Optional configuredAirbyteCatalog, - final Optional> exceptionClass, final InvalidLineFailureConfiguration invalidLineFailureConfiguration, final GsonPksExtractor gsonPksExtractor) { - this(serDeProvider, migratorFactory, protocolVersion, connectionId, configuredAirbyteCatalog, DEFAULT_LOGGER, DEFAULT_MDC_SCOPE, exceptionClass, - DEFAULT_MEMORY_LIMIT, invalidLineFailureConfiguration, gsonPksExtractor); + this(serDeProvider, migratorFactory, protocolVersion, connectionId, configuredAirbyteCatalog, DEFAULT_LOGGER, DEFAULT_MDC_SCOPE, + invalidLineFailureConfiguration, gsonPksExtractor); } public VersionedAirbyteStreamFactory(final AirbyteMessageSerDeProvider serDeProvider, @@ -174,15 +160,11 @@ public VersionedAirbyteStreamFactory(final AirbyteMessageSerDeProvider serDeProv final Optional configuredAirbyteCatalog, final Logger logger, final MdcScope.Builder containerLogMdcBuilder, - final Optional> exceptionClass, - final long 
maxMemory, final InvalidLineFailureConfiguration invalidLineFailureConfiguration, final GsonPksExtractor gsonPksExtractor) { // TODO AirbyteProtocolPredicate needs to be updated to be protocol version aware this.logger = logger; this.containerLogMdcBuilder = containerLogMdcBuilder; - this.exceptionClass = exceptionClass; - this.maxMemory = maxMemory; this.gsonPksExtractor = gsonPksExtractor; Preconditions.checkNotNull(protocolVersion); @@ -238,20 +220,6 @@ private Stream addLineReadLogic(final BufferedReader bufferedRea .peek(str -> { final long messageSize = str.getBytes(StandardCharsets.UTF_8).length; metricClient.distribution(OssMetricsRegistry.JSON_STRING_LENGTH, messageSize); - - if (exceptionClass.isPresent()) { - if (messageSize > maxMemory * MAX_SIZE_RATIO) { - connectionId.ifPresent(id -> metricClient.count(OssMetricsRegistry.RECORD_SIZE_ERROR, 1, - new MetricAttribute(MetricTags.CONNECTION_ID, id.toString()))); - final String errorMessage = String.format( - "Airbyte has received a message at %s UTC which is larger than %s (size: %s). " - + "The sync has been failed to prevent running out of memory.", - DateTime.now(), - humanReadableByteCountSI(maxMemory), - humanReadableByteCountSI(messageSize)); - throwExceptionClass(errorMessage); - } - } }) .flatMap(this::toAirbyteMessage) .filter(this::filterLog); @@ -341,21 +309,6 @@ protected void internalLog(final AirbyteLogMessage logMessage) { } } - // Human-readable byte size from - // https://stackoverflow.com/questions/3758606/how-can-i-convert-byte-size-into-a-human-readable-format-in-java - @SuppressWarnings("PMD.AvoidReassigningParameters") - private String humanReadableByteCountSI(long bytes) { - if (-1000 < bytes && bytes < 1000) { - return bytes + " B"; - } - final CharacterIterator ci = new StringCharacterIterator("kMGTPE"); - while (bytes <= -999_950 || bytes >= 999_950) { - bytes /= 1000; - ci.next(); - } - return String.format("%.1f %cB", bytes / 1000.0, ci.current()); - } - /** * For every incoming message, *

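For reference, the record-size guard deleted from VersionedAirbyteStreamFactory above behaved roughly as follows. This is a minimal Kotlin sketch reconstructed from the removed Java lines: `checkMessageSize` is a hypothetical wrapper name, and the real code threw a configurable `exceptionClass` via reflection (with a UTC timestamp in the message) rather than a plain RuntimeException.

```kotlin
import java.text.StringCharacterIterator

// Fail the sync when one serialized message exceeds 80% of the JVM max memory,
// mirroring the MAX_SIZE_RATIO check removed above.
const val MAX_SIZE_RATIO = 0.8

fun checkMessageSize(line: String, maxMemory: Long = Runtime.getRuntime().maxMemory()) {
  val messageSize = line.toByteArray(Charsets.UTF_8).size.toLong()
  if (messageSize > maxMemory * MAX_SIZE_RATIO) {
    // The removed code threw the configured exceptionClass here and also logged a RECORD_SIZE_ERROR metric.
    throw RuntimeException(
      "Airbyte has received a message which is larger than ${humanReadableByteCountSI(maxMemory)} " +
        "(size: ${humanReadableByteCountSI(messageSize)}). The sync has been failed to prevent running out of memory.",
    )
  }
}

// Human-readable byte size (SI units, base 1000), as in the removed helper.
fun humanReadableByteCountSI(bytes: Long): String {
  var b = bytes
  if (-1000 < b && b < 1000) {
    return "$b B"
  }
  val ci = StringCharacterIterator("kMGTPE")
  while (b <= -999_950 || b >= 999_950) {
    b /= 1000
    ci.next()
  }
  return String.format("%.1f %cB", b / 1000.0, ci.current())
}
```

This removal lines up with the FailSyncIfTooBig flag and the exceptionClass plumbing being dropped from AirbyteIntegrationLauncherFactory below.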
    @@ -441,15 +394,6 @@ private void logMalformedLogMessage(final String line) { } } - private void throwExceptionClass(final String message) { - try { - throw exceptionClass.get().getConstructor(String.class) - .newInstance(message); - } catch (final InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException ex) { - throw new RuntimeException(ex); - } - } - protected Stream upgradeMessage(final AirbyteMessage msg) { try { final AirbyteMessage message = migrator.upgrade(msg, configuredAirbyteCatalog); diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncherFactory.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncherFactory.java index e4e36e708a1..a0c344f464b 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncherFactory.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncherFactory.java @@ -11,7 +11,6 @@ import io.airbyte.commons.protocol.VersionedProtocolSerializer; import io.airbyte.config.SyncResourceRequirements; import io.airbyte.featureflag.Connection; -import io.airbyte.featureflag.FailSyncIfTooBig; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.Multi; import io.airbyte.featureflag.PrintLongRecordPks; @@ -29,8 +28,6 @@ import io.airbyte.workers.internal.HeartbeatMonitor; import io.airbyte.workers.internal.VersionedAirbyteMessageBufferedWriterFactory; import io.airbyte.workers.internal.VersionedAirbyteStreamFactory; -import io.airbyte.workers.internal.exception.DestinationException; -import io.airbyte.workers.internal.exception.SourceException; import jakarta.inject.Singleton; import java.util.Collections; import java.util.List; @@ -107,21 +104,14 @@ public AirbyteSource createAirbyteSource(final IntegrationLauncherConfig sourceL final HeartbeatMonitor heartbeatMonitor) { final IntegrationLauncher sourceLauncher = createIntegrationLauncher(sourceLauncherConfig, syncResourceRequirements); - final boolean failTooLongRecords = featureFlagClient.boolVariation(FailSyncIfTooBig.INSTANCE, - new Multi(List.of( - new Connection(sourceLauncherConfig.getConnectionId()), - new Workspace(sourceLauncherConfig.getWorkspaceId())))); - final boolean printLongRecordPks = featureFlagClient.boolVariation(PrintLongRecordPks.INSTANCE, new Multi(List.of( new Connection(sourceLauncherConfig.getConnectionId()), new Workspace(sourceLauncherConfig.getWorkspaceId())))); return new DefaultAirbyteSource(sourceLauncher, - getStreamFactory(sourceLauncherConfig, configuredAirbyteCatalog, SourceException.class, DefaultAirbyteSource.CONTAINER_LOG_MDC_BUILDER, - new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration( - failTooLongRecords, - printLongRecordPks)), + getStreamFactory(sourceLauncherConfig, configuredAirbyteCatalog, DefaultAirbyteSource.CONTAINER_LOG_MDC_BUILDER, + new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(printLongRecordPks)), heartbeatMonitor, getProtocolSerializer(sourceLauncherConfig), featureFlags, @@ -144,9 +134,8 @@ public AirbyteDestination createAirbyteDestination(final IntegrationLauncherConf return new DefaultAirbyteDestination(destinationLauncher, getStreamFactory(destinationLauncherConfig, configuredAirbyteCatalog, - DestinationException.class, DefaultAirbyteDestination.CONTAINER_LOG_MDC_BUILDER, - new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false)), + new 
VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false)), new VersionedAirbyteMessageBufferedWriterFactory(serDeProvider, migratorFactory, destinationLauncherConfig.getProtocolVersion(), Optional.of(configuredAirbyteCatalog)), getProtocolSerializer(destinationLauncherConfig), @@ -160,11 +149,10 @@ private VersionedProtocolSerializer getProtocolSerializer(final IntegrationLaunc private AirbyteStreamFactory getStreamFactory(final IntegrationLauncherConfig launcherConfig, final ConfiguredAirbyteCatalog configuredAirbyteCatalog, - final Class exceptionClass, final MdcScope.Builder mdcScopeBuilder, final VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration invalidLineFailureConfiguration) { return new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, launcherConfig.getProtocolVersion(), - Optional.of(launcherConfig.getConnectionId()), Optional.of(configuredAirbyteCatalog), mdcScopeBuilder, Optional.of(exceptionClass), + Optional.of(launcherConfig.getConnectionId()), Optional.of(configuredAirbyteCatalog), mdcScopeBuilder, invalidLineFailureConfiguration, gsonPksExtractor); } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java index d0995cbb3ba..9f26047aaaa 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java @@ -7,6 +7,7 @@ import static io.airbyte.config.helpers.LogClientSingleton.fullLogPath; import io.airbyte.api.client.AirbyteApiClient; +import io.airbyte.api.client.WorkloadApiClient; import io.airbyte.api.client.invoker.generated.ApiException; import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; import io.airbyte.api.client.model.generated.Geography; @@ -30,7 +31,6 @@ import io.airbyte.workers.workload.JobOutputDocStore; import io.airbyte.workers.workload.WorkloadIdGenerator; import io.airbyte.workers.workload.exception.DocStoreAccessException; -import io.airbyte.workload.api.client.generated.WorkloadApi; import io.airbyte.workload.api.client.model.generated.Workload; import io.airbyte.workload.api.client.model.generated.WorkloadCancelRequest; import io.airbyte.workload.api.client.model.generated.WorkloadCreateRequest; @@ -58,7 +58,7 @@ */ public class WorkloadApiWorker implements Worker { - private static final int HTTP_CONFLICT_CODE = 409; + private static final int HTTP_CONFLICT_CODE = HttpStatus.CONFLICT.getCode(); private static final String DESTINATION = "destination"; private static final String SOURCE = "source"; @@ -66,7 +66,7 @@ public class WorkloadApiWorker implements Worker TERMINAL_STATUSES = Set.of(WorkloadStatus.CANCELLED, WorkloadStatus.FAILURE, WorkloadStatus.SUCCESS); private final JobOutputDocStore jobOutputDocStore; private final AirbyteApiClient apiClient; - private final WorkloadApi workloadApi; + private final WorkloadApiClient workloadApiClient; private final WorkloadIdGenerator workloadIdGenerator; private final ReplicationActivityInput input; private final FeatureFlagClient featureFlagClient; @@ -75,13 +75,13 @@ public class WorkloadApiWorker implements Worker output; output = fetchReplicationOutput(workloadId, (location) -> { @@ -230,7 +230,7 @@ private Context getFeatureFlagContext() { private void createWorkload(final WorkloadCreateRequest workloadCreateRequest) { try { - workloadApi.workloadCreate(workloadCreateRequest); + 
workloadApiClient.getWorkloadApi().workloadCreate(workloadCreateRequest); } catch (final ClientException e) { /* * The Workload API returns a 304 response when the request to execute the workload has already been @@ -250,7 +250,7 @@ private void createWorkload(final WorkloadCreateRequest workloadCreateRequest) { private Workload getWorkload(final String workloadId) { try { - return workloadApi.workloadGet(workloadId); + return workloadApiClient.getWorkloadApi().workloadGet(workloadId); } catch (final IOException e) { throw new RuntimeException(e); } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/AirbyteMessageUtils.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/AirbyteMessageUtils.java index d7cf84d7f78..fea45080944 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/AirbyteMessageUtils.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/AirbyteMessageUtils.java @@ -244,6 +244,22 @@ public static AirbyteMessage createStatusTraceMessage(final StreamDescriptor str .withTrace(airbyteTraceMessage); } + public static AirbyteMessage createStreamStatusTraceMessageWithType(final StreamDescriptor stream, + final AirbyteStreamStatusTraceMessage.AirbyteStreamStatus status) { + final AirbyteStreamStatusTraceMessage airbyteStreamStatusTraceMessage = new AirbyteStreamStatusTraceMessage() + .withStatus(status) + .withStreamDescriptor(stream); + + final AirbyteTraceMessage airbyteTraceMessage = new AirbyteTraceMessage() + .withEmittedAt(null) + .withType(AirbyteTraceMessage.Type.STREAM_STATUS) + .withStreamStatus(airbyteStreamStatusTraceMessage); + + return new AirbyteMessage() + .withType(Type.TRACE) + .withTrace(airbyteTraceMessage); + } + public static AirbyteMessage createAnalyticsTraceMessage(final String type, final String value) { final AirbyteAnalyticsTraceMessage airbyteAnalyticsTraceMessage = new AirbyteAnalyticsTraceMessage() .withType(type) diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java index e3a8bbce704..9f1d8ccde55 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java @@ -62,9 +62,7 @@ public static ImmutablePair createSyncConfig(fi .withSourceId(replicationInput.getSourceId()) .withDestinationId(replicationInput.getDestinationId()) .withDestinationConfiguration(replicationInput.getDestinationConfiguration()) - .withCatalog(replicationInput.getCatalog()) .withSourceConfiguration(replicationInput.getSourceConfiguration()) - .withState(replicationInput.getState()) .withOperationSequence(replicationInput.getOperationSequence()) .withWorkspaceId(replicationInput.getWorkspaceId()) .withConnectionContext(new ConnectionContext().withOrganizationId(organizationId))); diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/HelperBeanFactory.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/HelperBeanFactory.kt new file mode 100644 index 00000000000..419505fb6cf --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/HelperBeanFactory.kt @@ -0,0 +1,13 @@ +package io.airbyte.workers.config + +import io.micronaut.context.annotation.Factory +import jakarta.inject.Singleton +import java.time.Clock + +@Factory +class HelperBeanFactory { 
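+  // Exposes a UTC Clock as an injectable bean; StreamStatusCompletionTracker takes a Clock in its
+  // constructor (see ReplicationWorkerFactory above), presumably so tests can pin a fixed clock.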
+ @Singleton + fun getClock(): Clock { + return Clock.systemUTC() + } +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/WorkloadApiClientFactory.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/WorkloadApiClientFactory.kt deleted file mode 100644 index 234bfc17481..00000000000 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/WorkloadApiClientFactory.kt +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.config - -import dev.failsafe.RetryPolicy -import io.airbyte.api.client.WorkloadApiClient -import io.airbyte.commons.auth.AuthenticationInterceptor -import io.airbyte.workload.api.client.generated.WorkloadApi -import io.github.oshai.kotlinlogging.KotlinLogging -import io.micrometer.core.instrument.MeterRegistry -import io.micronaut.context.annotation.Factory -import io.micronaut.context.annotation.Value -import jakarta.inject.Singleton -import okhttp3.HttpUrl -import okhttp3.OkHttpClient -import okhttp3.Response -import org.openapitools.client.infrastructure.ClientException -import org.openapitools.client.infrastructure.ServerException -import java.io.IOException -import java.time.Duration -import java.util.Optional - -private val logger = KotlinLogging.logger {} - -@Factory -class WorkloadApiClientFactory { - @Singleton - fun workloadApiClient( - @Value("\${airbyte.workload-api.base-path}") workloadApiBasePath: String, - @Value("\${airbyte.workload-api.connect-timeout-seconds}") connectTimeoutSeconds: Long, - @Value("\${airbyte.workload-api.read-timeout-seconds}") readTimeoutSeconds: Long, - @Value("\${airbyte.workload-api.retries.delay-seconds}") retryDelaySeconds: Long, - @Value("\${airbyte.workload-api.retries.max}") maxRetries: Int, - authenticationInterceptor: AuthenticationInterceptor, - meterRegistry: Optional, - ): WorkloadApi { - val builder: OkHttpClient.Builder = OkHttpClient.Builder() - builder.addInterceptor(authenticationInterceptor) - builder.readTimeout(Duration.ofSeconds(readTimeoutSeconds)) - builder.connectTimeout(Duration.ofSeconds(connectTimeoutSeconds)) - - val okHttpClient: OkHttpClient = builder.build() - val metricTags = arrayOf("max-retries", maxRetries.toString()) - - val retryPolicy: RetryPolicy = - RetryPolicy.builder() - .handle( - listOf( - IllegalStateException::class.java, - IOException::class.java, - UnsupportedOperationException::class.java, - ClientException::class.java, - ServerException::class.java, - ), - ) - // TODO move these metrics into a centralized metric registery as part of the MetricClient refactor/cleanup - .onAbort { l -> - logger.warn { "Attempt aborted. Attempt count ${l.attemptCount}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.abort", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .onFailure { l -> - logger.error(l.exception) { "Failed to call $workloadApiBasePath. Last response: ${l.result}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.failure", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .onRetry { l -> - logger.warn { "Retry attempt ${l.attemptCount} of $maxRetries. 
Last response: ${l.lastResult}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.retry", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "url", "method", l.lastResult.request.method), - *getUrlTags(l.lastResult.request.url), - ).increment() - } - } - .onRetriesExceeded { l -> - logger.error(l.exception) { "Retry attempts exceeded." } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.retries_exceeded", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .onSuccess { l -> - logger.debug { "Successfully called ${l.result.request.url}. Response: ${l.result}, isRetry: ${l.isRetry}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.success", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .withDelay(Duration.ofSeconds(retryDelaySeconds)) - .withMaxRetries(maxRetries) - .build() - - return WorkloadApiClient(workloadApiBasePath, retryPolicy, okHttpClient).workloadApi - } - - private fun getUrlTags(httpUrl: HttpUrl): Array { - val last = httpUrl.pathSegments.last() - if (last.contains("[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}".toRegex())) { - return arrayOf("url", httpUrl.toString().removeSuffix(last), "workload-id", last) - } else { - return arrayOf("url", httpUrl.toString()) - } - } -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/context/ReplicationContext.java b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/context/ReplicationContext.kt similarity index 52% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/context/ReplicationContext.java rename to airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/context/ReplicationContext.kt index e302c303869..c91afa685a6 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/context/ReplicationContext.java +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/context/ReplicationContext.kt @@ -1,10 +1,6 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.context; +package io.airbyte.workers.context -import java.util.UUID; +import java.util.UUID /** * Context of a Replication. @@ -20,13 +16,19 @@ * @param workspaceId The workspace ID associated with the sync. * @param sourceImage The name and version of the source image. * @param destinationImage The name and version of the destination image. 
+ * @param sourceDefinitionId The source definition ID associated with the sync. + * @param destinationDefinitionId The destination definition ID associated with the sync. */ -public record ReplicationContext(boolean isReset, - UUID connectionId, - UUID sourceId, - UUID destinationId, - Long jobId, - Integer attempt, - UUID workspaceId, - String sourceImage, - String destinationImage) {} +data class ReplicationContext( + val isReset: Boolean, + val connectionId: UUID, + val sourceId: UUID, + val destinationId: UUID, + val jobId: Long, + val attempt: Int, + val workspaceId: UUID, + val sourceImage: String, + val destinationImage: String, + val sourceDefinitionId: UUID, + val destinationDefinitionId: UUID, +) diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/ReplicationWorkerHelper.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/ReplicationWorkerHelper.kt index df641e5226b..e0592c5f8f8 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/ReplicationWorkerHelper.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/ReplicationWorkerHelper.kt @@ -7,6 +7,11 @@ package io.airbyte.workers.general import com.fasterxml.jackson.core.JsonProcessingException import com.fasterxml.jackson.databind.ObjectMapper import com.google.common.annotations.VisibleForTesting +import io.airbyte.api.client.WorkloadApiClient +import io.airbyte.api.client.generated.DestinationApi +import io.airbyte.api.client.generated.SourceApi +import io.airbyte.api.client.model.generated.DestinationIdRequestBody +import io.airbyte.api.client.model.generated.SourceIdRequestBody import io.airbyte.api.client.model.generated.StreamStatusIncompleteRunCause import io.airbyte.commons.concurrency.VoidCallable import io.airbyte.commons.converters.ThreadedTimeTracker @@ -30,6 +35,7 @@ import io.airbyte.protocol.models.AirbyteMessage.Type import io.airbyte.protocol.models.AirbyteStateMessage import io.airbyte.protocol.models.AirbyteStateStats import io.airbyte.protocol.models.AirbyteTraceMessage +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog import io.airbyte.protocol.models.StreamDescriptor import io.airbyte.workers.WorkerUtils import io.airbyte.workers.context.ReplicationContext @@ -37,6 +43,7 @@ import io.airbyte.workers.context.ReplicationFeatureFlags import io.airbyte.workers.exception.WorkloadHeartbeatException import io.airbyte.workers.helper.AirbyteMessageDataExtractor import io.airbyte.workers.helper.FailureHelper +import io.airbyte.workers.helper.StreamStatusCompletionTracker import io.airbyte.workers.internal.AirbyteDestination import io.airbyte.workers.internal.AirbyteMapper import io.airbyte.workers.internal.AirbyteSource @@ -54,7 +61,6 @@ import io.airbyte.workers.internal.exception.DestinationException import io.airbyte.workers.internal.exception.SourceException import io.airbyte.workers.internal.syncpersistence.SyncPersistence import io.airbyte.workers.models.StateWithId.attachIdToStateMessageFromSource -import io.airbyte.workload.api.client.generated.WorkloadApi import io.airbyte.workload.api.client.model.generated.WorkloadHeartbeatRequest import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.http.HttpStatus @@ -65,6 +71,7 @@ import java.time.Duration import java.time.Instant import java.util.Collections import java.util.Optional +import java.util.UUID import java.util.concurrent.atomic.AtomicBoolean import io.airbyte.workload.api.client.generated.infrastructure.ClientException as GeneratedClientException
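The expanded `ReplicationContext` above now carries the source and destination definition IDs alongside the existing sync coordinates. As a minimal sketch (not part of the diff), constructing it might look like this; every ID and image tag below is a hypothetical placeholder:

```kotlin
import java.util.UUID

// Hypothetical construction of the expanded ReplicationContext; all IDs are
// random placeholders and the image tags are example connector images.
val ctx = ReplicationContext(
    isReset = false,
    connectionId = UUID.randomUUID(),
    sourceId = UUID.randomUUID(),
    destinationId = UUID.randomUUID(),
    jobId = 42L,
    attempt = 1,
    workspaceId = UUID.randomUUID(),
    sourceImage = "airbyte/source-faker:1.0.0",
    destinationImage = "airbyte/destination-dev-null:1.0.0",
    sourceDefinitionId = UUID.randomUUID(),
    destinationDefinitionId = UUID.randomUUID(),
)
```

The two definition IDs let feature-flag evaluation key off the connector definitions, which is what `StreamStatusCompletionTracker.startTracking` relies on further down.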
@@ -79,10 +86,13 @@ class ReplicationWorkerHelper( private val replicationAirbyteMessageEventPublishingHelper: ReplicationAirbyteMessageEventPublishingHelper, private val timeTracker: ThreadedTimeTracker, private val onReplicationRunning: VoidCallable, - private val workloadApi: WorkloadApi, + private val workloadApiClient: WorkloadApiClient, private val workloadEnabled: Boolean, private val analyticsMessageTracker: AnalyticsMessageTracker, private val workloadId: Optional, + private val sourceApi: SourceApi, + private val destinationApi: DestinationApi, + private val streamStatusCompletionTracker: StreamStatusCompletionTracker, ) { private val metricClient = MetricClientFactory.getMetricClient() private val metricAttrs: MutableList = mutableListOf() @@ -136,7 +146,7 @@ class ReplicationWorkerHelper( throw RuntimeException("workloadId should always be present") } logger.info { "Sending workload heartbeat" } - workloadApi.workloadHeartbeat( + workloadApiClient.workloadApi.workloadHeartbeat( WorkloadHeartbeatRequest(workloadId.get()), ) lastSuccessfulHeartbeat = Instant.now() @@ -169,6 +179,7 @@ class ReplicationWorkerHelper( ctx: ReplicationContext, replicationFeatureFlags: ReplicationFeatureFlags, jobRoot: Path, + configuredAirbyteCatalog: ConfiguredAirbyteCatalog, ) { timeTracker.trackReplicationStartTime() @@ -183,6 +194,7 @@ class ReplicationWorkerHelper( } ApmTraceUtils.addTagsToTrace(ctx.connectionId, ctx.attempt.toLong(), ctx.jobId.toString(), jobRoot) + streamStatusCompletionTracker.startTracking(configuredAirbyteCatalog, ctx) } fun startDestination( @@ -288,6 +300,10 @@ class ReplicationWorkerHelper( internalProcessMessageFromDestination(message) } + fun getStreamStatusToSend(exitValue: Int): List { + return streamStatusCompletionTracker.finalize(exitValue, mapper) + } + @JvmOverloads @Throws(JsonProcessingException::class) fun getReplicationOutput(performanceMetrics: PerformanceMetrics? 
= null): ReplicationOutput { @@ -423,13 +439,21 @@ class ReplicationWorkerHelper( return attachIdToStateMessageFromSource(sourceRawMessage) .let { internalProcessMessageFromSource(it) } .let { mapper.mapMessage(it) } - .let { Optional.of(it) } + .let { Optional.ofNullable(it) } } fun isWorkerV2TestEnabled(): Boolean { return workloadEnabled } + fun getSourceDefinitionIdForSourceId(sourceId: UUID): UUID { + return sourceApi.getSource(SourceIdRequestBody().sourceId(sourceId)).sourceDefinitionId + } + + fun getDestinationDefinitionIdForDestinationId(destinationId: UUID): UUID { + return destinationApi.getDestination(DestinationIdRequestBody().destinationId(destinationId)).destinationDefinitionId + } + private fun getTotalStats( timeTracker: ThreadedTimeTracker, hasReplicationCompleted: Boolean, diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTracker.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTracker.kt new file mode 100644 index 00000000000..9ab99be332f --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTracker.kt @@ -0,0 +1,97 @@ +package io.airbyte.workers.helper + +import io.airbyte.featureflag.ActivateRefreshes +import io.airbyte.featureflag.Connection +import io.airbyte.featureflag.DestinationDefinition +import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.Multi +import io.airbyte.featureflag.SourceDefinition +import io.airbyte.featureflag.Workspace +import io.airbyte.protocol.models.AirbyteMessage +import io.airbyte.protocol.models.AirbyteStreamStatusTraceMessage +import io.airbyte.protocol.models.AirbyteTraceMessage +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.StreamDescriptor +import io.airbyte.workers.context.ReplicationContext +import io.airbyte.workers.exception.WorkerException +import io.airbyte.workers.internal.AirbyteMapper +import jakarta.inject.Singleton +import java.time.Clock + +@Singleton +class StreamStatusCompletionTracker( + private val featureFlagClient: FeatureFlagClient, + private val clock: Clock, +) { + private val hasCompletedStatus = mutableMapOf() + private var shouldEmitStreamStatus = false + + open fun startTracking( + configuredAirbyteCatalog: ConfiguredAirbyteCatalog, + replicationContext: ReplicationContext, + ) { + shouldEmitStreamStatus = + featureFlagClient.boolVariation( + ActivateRefreshes, + Multi( + listOf( + Workspace(replicationContext.workspaceId), + Connection(replicationContext.connectionId), + SourceDefinition(replicationContext.sourceDefinitionId), + DestinationDefinition(replicationContext.destinationDefinitionId), + ), + ), + ) + + if (shouldEmitStreamStatus) { + configuredAirbyteCatalog.streams.forEach { stream -> + hasCompletedStatus[StreamDescriptor().withName(stream.stream.name).withNamespace(stream.stream.namespace)] = false + } + } + } + + open fun track(streamStatus: AirbyteStreamStatusTraceMessage) { + if (shouldEmitStreamStatus && streamStatus.status == AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE) { + hasCompletedStatus[streamStatus.streamDescriptor] ?: run { + throw WorkerException("A stream status has been detected for a stream not present in the catalog") + } + hasCompletedStatus[streamStatus.streamDescriptor] = true + } + } + + open fun finalize( + exitCode: Int, + namespacingMapper: AirbyteMapper, + ): List { + if (!shouldEmitStreamStatus) { + return listOf() + } + return if (0 == 
exitCode) { + streamDescriptorsToCompleteStatusMessage(hasCompletedStatus.keys, namespacingMapper) + } else { + streamDescriptorsToCompleteStatusMessage(hasCompletedStatus.filter { it.value }.keys, namespacingMapper) + } + } + + private fun streamDescriptorsToCompleteStatusMessage( + streamDescriptors: Set, + namespacingMapper: AirbyteMapper, + ): List { + return streamDescriptors.map { + namespacingMapper.mapMessage( + AirbyteMessage() + .withType(AirbyteMessage.Type.TRACE) + .withTrace( + AirbyteTraceMessage() + .withType(AirbyteTraceMessage.Type.STREAM_STATUS) + .withEmittedAt(clock.millis().toDouble()) + .withStreamStatus( + AirbyteStreamStatusTraceMessage() + .withStatus(AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE) + .withStreamDescriptor(it), + ), + ), + ) + } + } +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/Mapper.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/Mapper.kt index d01d5fc542e..14849a378f6 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/Mapper.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/Mapper.kt @@ -6,6 +6,7 @@ import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType import io.airbyte.protocol.models.AirbyteMessage import io.airbyte.protocol.models.AirbyteMessage.Type import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType +import io.airbyte.protocol.models.AirbyteTraceMessage import io.airbyte.protocol.models.ConfiguredAirbyteCatalog import io.github.oshai.kotlinlogging.KotlinLogging @@ -89,6 +90,15 @@ class NamespacingMapper streamDescriptor.namespace = destinationNamespace streamDescriptor.name = destinationStreamName } + Type.TRACE -> + with(message.trace) { + if (this.type != AirbyteTraceMessage.Type.STREAM_STATUS) { + return@with + } + val streamDescriptor = this.streamStatus.streamDescriptor + streamDescriptor.name = transformStreamName(streamDescriptor.name, streamPrefix) + streamDescriptor.namespace = transformNamespace(streamDescriptor.namespace) + } else -> Unit } diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTracker.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTracker.kt index 829d211a599..40873cf68dd 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTracker.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTracker.kt @@ -7,6 +7,7 @@ import io.airbyte.config.SyncStats import io.airbyte.featureflag.Connection import io.airbyte.featureflag.EmitStateStatsToSegment import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.LogsForStripeChecksumDebugging import io.airbyte.featureflag.Multi import io.airbyte.featureflag.Workspace import io.airbyte.metrics.lib.MetricAttribute @@ -57,6 +58,11 @@ class ParallelStreamStatsTracker( featureFlagClient.boolVariation(EmitStateStatsToSegment, connectionContext) } + private val logsForStripeChecksumDebugging: Boolean by lazy { + val connectionContext = Multi(listOf(Connection(connectionId), Workspace(workspaceId))) + featureFlagClient.boolVariation(LogsForStripeChecksumDebugging, connectionContext) + } + @Volatile private var hasEstimatesErrors = false @@ -552,6 +558,7 @@ class ParallelStreamStatsTracker( return StreamStatsTracker( nameNamespacePair = pair, metricClient = metricClient, + 
logsForStripeChecksumDebugging = logsForStripeChecksumDebugging, ).also { streamTrackers[pair] = it } } } diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/StatsTracker.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/StatsTracker.kt index 60577b8342e..3f09f6d3bbc 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/StatsTracker.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/StatsTracker.kt @@ -94,6 +94,7 @@ private val logger = KotlinLogging.logger { } class StreamStatsTracker( val nameNamespacePair: AirbyteStreamNameNamespacePair, private val metricClient: MetricClient, + private val logsForStripeChecksumDebugging: Boolean, ) { val streamStats = StreamStatsCounters() private val stateIds = ConcurrentHashMap.newKeySet() @@ -101,6 +102,7 @@ class StreamStatsTracker( private var emittedStats = EmittedStatsCounters() private var previousEmittedStats = EmittedStatsCounters() private var previousStateMessageReceivedAt: LocalDateTime? = null + private var alreadyLogged: Boolean = false /** * Bookkeeping for when a record message is read. @@ -129,6 +131,14 @@ class StreamStatsTracker( emittedRecordsCount.incrementAndGet() emittedBytesCount.addAndGet(estimatedBytesSize) } + + if (logsForStripeChecksumDebugging && !alreadyLogged && stateIds.size > 0) { + logger.info { + "Received records for the stream ${nameNamespacePair.namespace}:${nameNamespacePair.name} " + + "after receiving a state message" + } + alreadyLogged = true + } } /** @@ -249,6 +259,14 @@ class StreamStatsTracker( } } + if (logsForStripeChecksumDebugging) { + logger.info { + "Received state message back from destination for the stream " + + "${nameNamespacePair.namespace}:${nameNamespacePair.name}, " + + "committed record count is ${streamStats.committedRecordsCount}, total records at this point is ${streamStats.emittedRecordsCount}" + } + } + // Updating state checkpointing metrics stagedStats?.receivedTime?.until(currentTime, ChronoUnit.SECONDS)?.let { durationBetweenStateEmittedAndCommitted -> streamStats.maxSecondsBetweenStateEmittedAndCommitted.accumulate(durationBetweenStateEmittedAndCommitted) diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/syncpersistence/SyncPersistence.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/syncpersistence/SyncPersistence.kt index 793700353d3..a51ef2e00b9 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/syncpersistence/SyncPersistence.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/syncpersistence/SyncPersistence.kt @@ -4,17 +4,12 @@ import datadog.trace.api.Trace import io.airbyte.api.client.AirbyteApiClient import io.airbyte.api.client.generated.AttemptApi import io.airbyte.api.client.generated.StateApi -import io.airbyte.api.client.invoker.generated.ApiException import io.airbyte.api.client.model.generated.AttemptStats import io.airbyte.api.client.model.generated.AttemptStreamStats -import io.airbyte.api.client.model.generated.ConnectionIdRequestBody import io.airbyte.api.client.model.generated.ConnectionState import io.airbyte.api.client.model.generated.ConnectionStateCreateOrUpdate -import io.airbyte.api.client.model.generated.ConnectionStateType import io.airbyte.api.client.model.generated.SaveStatsRequestBody import io.airbyte.commons.converters.StateConverter -import io.airbyte.config.StateType -import
io.airbyte.config.StateWrapper import io.airbyte.config.SyncStats import io.airbyte.config.helpers.StateMessageHelper import io.airbyte.metrics.lib.MetricAttribute @@ -25,7 +20,6 @@ import io.airbyte.metrics.lib.OssMetricsRegistry import io.airbyte.protocol.models.AirbyteEstimateTraceMessage import io.airbyte.protocol.models.AirbyteRecordMessage import io.airbyte.protocol.models.AirbyteStateMessage -import io.airbyte.protocol.models.CatalogHelpers import io.airbyte.protocol.models.ConfiguredAirbyteCatalog import io.airbyte.workers.internal.bookkeeping.SyncStatsTracker import io.airbyte.workers.internal.bookkeeping.getPerStreamStats @@ -83,7 +77,6 @@ class SyncPersistenceImpl ) : SyncPersistence, SyncStatsTracker by syncStatsTracker { private var stateBuffer = stateAggregatorFactory.create() private var stateFlushFuture: ScheduledFuture<*>? = null - private var onlyFlushAtTheEnd = false private var isReceivingStats = false private var stateToFlush: StateAggregator? = null private var statsToPersist: SaveStatsRequestBody? = null @@ -130,30 +123,11 @@ class SyncPersistenceImpl metricClient.count(OssMetricsRegistry.STATE_BUFFERING, 1) stateBuffer.ingest(stateMessage) - startBackgroundFlushStateTask(connectionId, stateMessage) + startBackgroundFlushStateTask(connectionId) } - private fun startBackgroundFlushStateTask( - connectionId: UUID, - stateMessage: AirbyteStateMessage, - ) { - if (stateFlushFuture != null || onlyFlushAtTheEnd) { - return - } - - // Fetch the current persisted state to see if it is a state migration. - // In case of a state migration, we only flush at the end of the sync to avoid dropping states in - // case of a sync failure - val currentPersistedState: ConnectionState? = - try { - stateApi.getState(ConnectionIdRequestBody().connectionId(connectionId)) - } catch (e: ApiException) { - logger.warn(e) { "Failed to check current state for connectionId $connectionId, it will be retried next time we see a state" } - return - } - if (isMigration(currentPersistedState, stateMessage) && stateMessage.type == AirbyteStateMessage.AirbyteStateType.STREAM) { - logger.info { "State type migration from LEGACY to STREAM detected, all states will be persisted at the end of the sync" } - onlyFlushAtTheEnd = true + private fun startBackgroundFlushStateTask(connectionId: UUID) { + if (stateFlushFuture != null) { return } @@ -220,9 +194,6 @@ class SyncPersistenceImpl if (hasStatesToFlush()) { // we still have data to flush prepareDataForFlush() - if (onlyFlushAtTheEnd) { - validateStreamMigration() - } try { retryWithJitterThrows("Flush States from SyncPersistenceImpl") { doFlushState() @@ -333,16 +304,6 @@ class SyncPersistenceImpl metricClient.count(OssMetricsRegistry.STATE_COMMIT_ATTEMPT_SUCCESSFUL, 1) } - private fun isMigration( - currentPersistedState: ConnectionState?, - stateMessage: AirbyteStateMessage, - ): Boolean { - return ( - !isStateEmpty(currentPersistedState) && currentPersistedState?.stateType == ConnectionStateType.LEGACY && - stateMessage.type != AirbyteStateMessage.AirbyteStateType.LEGACY - ) - } - private fun doFlushStats() { if (!hasStatsToFlush()) { return @@ -364,17 +325,6 @@ class SyncPersistenceImpl private fun hasStatsToFlush(): Boolean = isReceivingStats && statsToPersist != null - private fun validateStreamMigration() { - val state = stateToFlush?.getAggregated() ?: return - - StateMessageHelper.getTypedState(state.state) - .getOrNull() - ?.takeIf { it.stateType == StateType.STREAM } - ?.let { - validateStreamStates(it, catalog) - } - } - /** * Wraps 
RetryWithJitterThrows for testing. * @@ -454,29 +404,3 @@ private fun MetricClient.emitFailedStatsCloseMetrics(connectionId: UUID?) { val attribute: MetricAttribute? = connectionId?.let { MetricAttribute(MetricTags.CONNECTION_ID, it.toString()) } count(OssMetricsRegistry.STATS_COMMIT_NOT_ATTEMPTED, 1, attribute) } - -/** - * Validate that the LEGACY -> STREAM migration is correct - * - * During the migration, we will lose any previous stream state that isn't in the new state. To - * avoid a potential loss of state, we ensure that all the incremental streams are present in the - * new state. - * - * @param state the new state we want to persist - * @param configuredCatalog the configured catalog of the connection of state - */ -fun validateStreamStates( - state: StateWrapper, - configuredCatalog: ConfiguredAirbyteCatalog, -) { - val stateStreamDescriptors = state.stateMessages.map { it.stream.streamDescriptor }.toList() - - CatalogHelpers.extractIncrementalStreamDescriptors(configuredCatalog) - .find { !stateStreamDescriptors.contains(it) } - ?.let { - throw IllegalStateException( - "Job ran during migration from Legacy State to Per Stream State. One of the streams that did not have state is: " + - "(namespace: ${it.namespace}, name: ${it.name}). Job must be retried in order to properly store state.", - ) - } -} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/payload/ActivityPayloadStorageClient.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/payload/ActivityPayloadStorageClient.kt deleted file mode 100644 index 0c9d628e7fe..00000000000 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/payload/ActivityPayloadStorageClient.kt +++ /dev/null @@ -1,57 +0,0 @@ -package io.airbyte.workers.payload - -import io.airbyte.commons.json.JsonSerde -import io.airbyte.metrics.lib.MetricClient -import io.airbyte.metrics.lib.OssMetricsRegistry -import io.airbyte.workers.storage.StorageClient - -/** - * Writes and reads activity payloads to and from the configured object store. - * Currently just handles JSON serialization, but can be updated as necessary. - * */ -class ActivityPayloadStorageClient( - private val storageClientRaw: StorageClient, - private val jsonSerde: JsonSerde, - private val metricClient: MetricClient, -) { - /** - * It reads the object from a location determined by the given [uri] and unmarshals it from JSON. - * Any Exceptions thrown by the raw object storage client or json deserializer will be forwarded to the caller. - * - * @return the unmarshalled object on a hit and null on a miss. - */ - inline fun readJSON(uri: ActivityPayloadURI): T? { - return readJSON(uri, T::class.java) - } - - /** - * It reads the object from a location determined by the given [uri] and unmarshals it from JSON to [target] class. - * Any Exceptions thrown by the raw object storage client or json deserializer will be forwarded to the caller. - * - * @return the unmarshalled object on a hit and null on a miss. - */ - fun readJSON( - uri: ActivityPayloadURI, - target: Class, - ): T? { - metricClient.count(OssMetricsRegistry.ACTIVITY_PAYLOAD_READ_FROM_DOC_STORE, 1) - - return storageClientRaw.read(uri.id) - ?.let { jsonSerde.deserialize(it, target) } - } - - /** - * It marshals the given object to JSON and writes it to object storage at a location determined by the given [uri]. - * Any Exceptions thrown by the raw object storage client or json serializer will be forwarded to the caller. 
- * - * @return Unit - */ - fun writeJSON( - uri: ActivityPayloadURI, - payload: T, - ) { - metricClient.count(OssMetricsRegistry.ACTIVITY_PAYLOAD_WRITTEN_TO_DOC_STORE, 1) - - return storageClientRaw.write(uri.id, jsonSerde.serialize(payload)) - } -} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/payload/ActivityPayloadURI.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/payload/ActivityPayloadURI.kt deleted file mode 100644 index b90fe4709fb..00000000000 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/payload/ActivityPayloadURI.kt +++ /dev/null @@ -1,21 +0,0 @@ -package io.airbyte.workers.payload - -import java.util.UUID - -enum class ActivityPayloadURIVersion { - V1, -} - -class ActivityPayloadURI( - val id: String, - val version: String = ActivityPayloadURIVersion.V1.name, -) { - fun v1( - connectionId: UUID, - jobId: Long, - attemptNumber: Int, - payloadName: String, - ): ActivityPayloadURI { - return ActivityPayloadURI("${connectionId}_${jobId}_${attemptNumber}_$payloadName", ActivityPayloadURIVersion.V1.name) - } -} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadStorageClient.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadStorageClient.kt new file mode 100644 index 00000000000..0d761a76d7f --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadStorageClient.kt @@ -0,0 +1,124 @@ +package io.airbyte.workers.storage.activities + +import io.airbyte.commons.json.JsonSerde +import io.airbyte.metrics.lib.ApmTraceUtils +import io.airbyte.metrics.lib.MetricAttribute +import io.airbyte.metrics.lib.MetricClient +import io.airbyte.metrics.lib.MetricTags +import io.airbyte.metrics.lib.OssMetricsRegistry +import io.airbyte.workers.storage.StorageClient +import io.github.oshai.kotlinlogging.KotlinLogging + +private val logger = KotlinLogging.logger {} + +/** + * Writes and reads activity payloads to and from the configured object store. + * Currently just handles JSON serialization, but can be updated as necessary. + * */ +class ActivityPayloadStorageClient( + private val storageClientRaw: StorageClient, + private val jsonSerde: JsonSerde, + private val metricClient: MetricClient, +) { + /** + * It reads the object from the location described by the given [uri] and unmarshals it from JSON. + * Any Exceptions thrown by the raw object storage client or json deserializer will be forwarded to the caller. + * + * @return the unmarshalled object on a hit and null on a miss. + */ + inline fun readJSON(uri: ActivityPayloadURI): T? { + return readJSON(uri, T::class.java) + } + + /** + * It reads the object from the location described by the given [uri] and unmarshals it from JSON to [target] class. + * Any Exceptions thrown by the raw object storage client or json deserializer will be forwarded to the caller. + * + * @return the unmarshalled object on a hit and null on a miss. + */ + fun readJSON( + uri: ActivityPayloadURI, + target: Class, + ): T? { + metricClient.count(OssMetricsRegistry.ACTIVITY_PAYLOAD_READ_FROM_DOC_STORE, 1) + + return storageClientRaw.read(uri.id) + ?.let { jsonSerde.deserialize(it, target) } + } + + /** + * It marshals the given object to JSON and writes it to object storage at a location determined by the given [uri]. + * Any Exceptions thrown by the raw object storage client or json serializer will be forwarded to the caller. 
+ * + * @return Unit + */ + fun writeJSON( + uri: ActivityPayloadURI, + payload: T, + ) { + metricClient.count(OssMetricsRegistry.ACTIVITY_PAYLOAD_WRITTEN_TO_DOC_STORE, 1) + + return storageClientRaw.write(uri.id, jsonSerde.serialize(payload)) + } + + /** + * It reads the object from the location described by the given [uri] and unmarshals it from JSON to [target] class + * and compares it to [expected], recording a metric based on the result. + * + * Any Exceptions thrown by the raw object storage client or json serializer will be forwarded to the caller. + * + * @return the object passed for comparison + */ + fun validateOutput( + uri: ActivityPayloadURI?, + target: Class, + expected: T, + comparator: Comparator, + attrs: List, + ): T { + if (uri == null) { + val baseAttrs = attrs + MetricAttribute(MetricTags.URI_NULL, true.toString()) + metricClient.count(OssMetricsRegistry.PAYLOAD_FAILURE_READ, 1, *baseAttrs.toTypedArray()) + + return expected + } + + ApmTraceUtils.addTagsToTrace(mapOf(Pair(MetricTags.URI_ID, uri.id), Pair(MetricTags.URI_VERSION, uri.version))) + + val baseAttrs = + attrs + + listOf( + MetricAttribute(MetricTags.URI_NULL, false.toString()), + MetricAttribute(MetricTags.URI_ID, uri.id), + MetricAttribute(MetricTags.URI_VERSION, uri.version), + MetricAttribute(MetricTags.PAYLOAD_NAME, target.name), + ) + + val remote: T? + try { + remote = readJSON(uri, target) + } catch (e: Exception) { + logger.error { e } + + ApmTraceUtils.addExceptionToTrace(e) + val attrsWithException = + baseAttrs + MetricAttribute(MetricTags.FAILURE_CAUSE, e.javaClass.simpleName) + + metricClient.count(OssMetricsRegistry.PAYLOAD_FAILURE_READ, 1, *attrsWithException.toTypedArray()) + + return expected + } + + val match = comparator.compare(expected, remote) == 0 + val miss = remote == null + + val attrsWithMatch = + baseAttrs + + MetricAttribute(MetricTags.IS_MATCH, match.toString()) + + MetricAttribute(MetricTags.IS_MISS, miss.toString()) + + metricClient.count(OssMetricsRegistry.PAYLOAD_VALIDATION_RESULT, 1, *attrsWithMatch.toTypedArray()) + + return expected + } +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadURI.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadURI.kt new file mode 100644 index 00000000000..c0890c9ecba --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadURI.kt @@ -0,0 +1,43 @@ +package io.airbyte.workers.storage.activities + +import java.util.UUID +import io.airbyte.config.ActivityPayloadURI as OpenApi + +enum class ActivityPayloadURIVersion { + V1, +} + +class ActivityPayloadURI( + val id: String, + val version: String = ActivityPayloadURIVersion.V1.name, +) { + companion object Factory { + @JvmStatic + fun v1( + connectionId: UUID, + jobId: Long, + attemptNumber: Int, + payloadName: String, + ): ActivityPayloadURI { + return ActivityPayloadURI("${connectionId}_${jobId}_${attemptNumber}_$payloadName", ActivityPayloadURIVersion.V1.name) + } + + @JvmStatic + fun fromOpenApi(dto: OpenApi?): ActivityPayloadURI?
{ + if (dto == null || dto.version == null || dto.id == null) { + return null + } + + return ActivityPayloadURI( + version = dto.version, + id = dto.id, + ) + } + } + + fun toOpenApi(): OpenApi { + return OpenApi() + .withId(id) + .withVersion(version) + } +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/NaiveEqualityComparator.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/NaiveEqualityComparator.kt new file mode 100644 index 00000000000..10d8d4e18f7 --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/NaiveEqualityComparator.kt @@ -0,0 +1,8 @@ +package io.airbyte.workers.storage.activities + +class NaiveEqualityComparator : Comparator { + override fun compare( + o1: T?, + o2: T?, + ): Int = if (o1 == o2) 0 else 1 +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/OutputStorageClient.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/OutputStorageClient.kt new file mode 100644 index 00000000000..c432c9a8ede --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/OutputStorageClient.kt @@ -0,0 +1,86 @@ +package io.airbyte.workers.storage.activities + +import io.airbyte.metrics.lib.ApmTraceUtils +import io.airbyte.metrics.lib.MetricAttribute +import io.airbyte.metrics.lib.MetricClient +import io.airbyte.metrics.lib.MetricTags +import io.airbyte.metrics.lib.OssMetricsRegistry +import io.github.oshai.kotlinlogging.KotlinLogging +import java.util.UUID +import io.airbyte.config.ActivityPayloadURI as OpenApiURI + +private val logger = KotlinLogging.logger {} + +/** + * Client for writing per-attempt outputs to object storage. This is for outputs that are not directly + * operationalized against, but are useful for debugging and troubleshooting purposes. + */ +class OutputStorageClient + @JvmOverloads + constructor( + private val storageClient: ActivityPayloadStorageClient, + private val metricClient: MetricClient, + private val payloadName: String, + private val target: Class, + private val comparator: Comparator = NaiveEqualityComparator(), + ) { + /** + * Persists an object to storage, keyed by connection, job and attempt number. + */ + fun persist( + obj: T?, + connectionId: UUID, + jobId: Long, + attemptNumber: Int, + metricAttributes: Array, + ): OpenApiURI? { + if (obj == null) return null + + val uri = ActivityPayloadURI.v1(connectionId, jobId, attemptNumber, payloadName) + + try { + storageClient.writeJSON(uri, obj) + } catch (e: Exception) { + val attrs = + listOf(*metricAttributes) + + listOf( + MetricAttribute(MetricTags.URI_ID, uri.id), + MetricAttribute(MetricTags.URI_VERSION, uri.version), + MetricAttribute(MetricTags.FAILURE_CAUSE, e.javaClass.simpleName), + MetricAttribute(MetricTags.PAYLOAD_NAME, payloadName), + ) + + ApmTraceUtils.addExceptionToTrace(e) + ApmTraceUtils.addTagsToTrace(attrs) + + logger.error { "Failure writing $payloadName to object storage." } + logger.error { "Message: ${e.message}" } + logger.error { "Stack Trace: ${e.stackTrace}" } + + metricClient.count(OssMetricsRegistry.PAYLOAD_FAILURE_WRITE, 1, *attrs.toTypedArray()) + } + + return uri.toOpenApi() + } + + /** + * Queries object storage based on the provided uri. Emits a metric indicating whether it's a match.
+ */ + fun validate( + expected: T?, + uri: OpenApiURI, + attrs: List, + ) { + if (expected == null) return + + val domainUri = ActivityPayloadURI.fromOpenApi(uri) ?: return + + storageClient.validateOutput( + domainUri, + target, + expected, + comparator, + attrs, + ) + } + } diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/temporal/FailureConverter.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/temporal/FailureConverter.kt new file mode 100644 index 00000000000..f1e3bd1975e --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/temporal/FailureConverter.kt @@ -0,0 +1,67 @@ +package io.airbyte.workers.temporal + +import io.airbyte.commons.temporal.utils.ActivityFailureClassifier + import io.airbyte.config.ActorType +import io.airbyte.config.FailureReason +import org.apache.commons.lang3.exception.ExceptionUtils +import org.slf4j.LoggerFactory +import java.lang.String +import kotlin.time.Duration +import kotlin.time.toKotlinDuration + +class FailureConverter { + @JvmOverloads + fun getFailureReason( + commandName: String, + actorType: ActorType, + e: Exception, + timeout: java.time.Duration? = null, + ): FailureReason = getFailureReason(commandName, actorType, e, timeout?.toKotlinDuration()) + + fun getFailureReason( + commandName: String, + actorType: ActorType, + e: Exception, + timeout: Duration?, + ): FailureReason { + val failureReason = + FailureReason() + .withFailureOrigin(if (actorType == ActorType.SOURCE) FailureReason.FailureOrigin.SOURCE else FailureReason.FailureOrigin.DESTINATION) + .withStacktrace(ExceptionUtils.getStackTrace(e)) + val classifiedExc = ActivityFailureClassifier.classifyException(e) + LoggerFactory.getLogger("test").error("exception classified as $classifiedExc") + when (classifiedExc) { + ActivityFailureClassifier.TemporalFailureReason.HEARTBEAT -> + failureReason + .withFailureOrigin(FailureReason.FailureOrigin.AIRBYTE_PLATFORM) + .withFailureType(FailureReason.FailureType.SYSTEM_ERROR) + .withExternalMessage("$commandName connection failed because of an internal error.") + .withInternalMessage("$commandName pod failed to heartbeat, verify resources and health of the worker/check pods.") + + ActivityFailureClassifier.TemporalFailureReason.SCHEDULER_OVERLOADED -> + failureReason + .withFailureOrigin(FailureReason.FailureOrigin.AIRBYTE_PLATFORM) + .withFailureType(FailureReason.FailureType.TRANSIENT_ERROR) + .withExternalMessage("Airbyte Platform is experiencing a higher than usual load, please try again later.") + .withInternalMessage("$commandName wasn't able to start within the expected time, verify scheduler and worker load.") + + ActivityFailureClassifier.TemporalFailureReason.OPERATION_TIMEOUT -> + failureReason + .withExternalMessage("$commandName took too long.") + .withInternalMessage("$commandName exceeded the timeout${timeout?.let { " of ${it.inWholeMinutes} minutes" }.orEmpty()}.") + + ActivityFailureClassifier.TemporalFailureReason.UNKNOWN, ActivityFailureClassifier.TemporalFailureReason.NOT_A_TIMEOUT -> + failureReason + .withFailureOrigin(FailureReason.FailureOrigin.AIRBYTE_PLATFORM) + .withExternalMessage("$commandName failed because of an internal error.") + .withInternalMessage("$commandName failed because of an internal error.") + + else -> + failureReason + .withFailureOrigin(FailureReason.FailureOrigin.AIRBYTE_PLATFORM) + .withExternalMessage("$commandName failed because of an internal error.") + .withInternalMessage("$commandName failed because of an internal error.") + } + return
failureReason + } +} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/BufferedReplicationWorkerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/BufferedReplicationWorkerTest.java index 9ee0246aecd..eb588f0836e 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/BufferedReplicationWorkerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/BufferedReplicationWorkerTest.java @@ -28,8 +28,9 @@ class BufferedReplicationWorkerTest extends ReplicationWorkerTest { ReplicationWorker getDefaultReplicationWorker(final boolean fieldSelectionEnabled) { final var fieldSelector = new FieldSelector(recordSchemaValidator, workerMetricReporter, fieldSelectionEnabled, false); replicationWorkerHelper = spy(new ReplicationWorkerHelper(airbyteMessageDataExtractor, fieldSelector, mapper, messageTracker, syncPersistence, - replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), onReplicationRunning, workloadApi, false, analyticsMessageTracker, - Optional.empty())); + replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), onReplicationRunning, workloadApiClient, false, + analyticsMessageTracker, + Optional.empty(), sourceApi, destinationApi, streamStatusCompletionTracker)); return new BufferedReplicationWorker( JOB_ID, JOB_ATTEMPT, @@ -42,7 +43,8 @@ replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), onRep replicationWorkerHelper, destinationTimeoutMonitor, getQueueType(), - OptionalInt.of(1)); + OptionalInt.of(1), + streamStatusCompletionTracker); } public BufferedReplicationWorkerType getQueueType() { diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultReplicationWorkerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultReplicationWorkerTest.java index fb0438445d5..f14f1b57e96 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultReplicationWorkerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultReplicationWorkerTest.java @@ -21,8 +21,9 @@ class DefaultReplicationWorkerTest extends ReplicationWorkerTest { ReplicationWorker getDefaultReplicationWorker(final boolean fieldSelectionEnabled) { final var fieldSelector = new FieldSelector(recordSchemaValidator, workerMetricReporter, fieldSelectionEnabled, false); replicationWorkerHelper = spy(new ReplicationWorkerHelper(airbyteMessageDataExtractor, fieldSelector, mapper, messageTracker, syncPersistence, - replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), onReplicationRunning, workloadApi, false, analyticsMessageTracker, - Optional.empty())); + replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), onReplicationRunning, workloadApiClient, false, + analyticsMessageTracker, + Optional.empty(), sourceApi, destinationApi, streamStatusCompletionTracker)); return new DefaultReplicationWorker( JOB_ID, JOB_ATTEMPT, @@ -33,7 +34,8 @@ replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), onRep heartbeatTimeoutChaperone, replicationFeatureFlagReader, replicationWorkerHelper, - destinationTimeoutMonitor); + destinationTimeoutMonitor, + streamStatusCompletionTracker); } // DefaultReplicationWorkerTests. 
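The helper and worker tests that follow exercise the stream-status completion flow end to end. As a rough sketch of how the pieces introduced above cooperate during a sync (the wiring below is illustrative scaffolding, not code from this change; only the three tracker calls mirror the diff):

```kotlin
import io.airbyte.protocol.models.AirbyteMessage
import io.airbyte.protocol.models.ConfiguredAirbyteCatalog
import io.airbyte.workers.context.ReplicationContext
import io.airbyte.workers.helper.StreamStatusCompletionTracker
import io.airbyte.workers.internal.AirbyteDestination
import io.airbyte.workers.internal.AirbyteMapper

// Illustrative driver for the tracker lifecycle; this function and its
// parameters are hypothetical, standing in for the replication worker.
fun emitFinalStreamStatuses(
    tracker: StreamStatusCompletionTracker,
    catalog: ConfiguredAirbyteCatalog,
    ctx: ReplicationContext,
    mapper: AirbyteMapper,
    destination: AirbyteDestination,
    sourceExitCode: Int,
) {
    // Snapshot the catalog streams when the ActivateRefreshes flag is on.
    tracker.startTracking(catalog, ctx)
    // During replication, each stream-status TRACE message from the source is
    // passed to tracker.track(...) so COMPLETE streams are remembered.
    // On exit code 0 every tracked stream gets a COMPLETE status; otherwise
    // only the streams the source actually completed.
    tracker.finalize(sourceExitCode, mapper).forEach { message: AirbyteMessage ->
        destination.accept(message)
    }
}
```

This is the behavior the tests below assert: `startTracking`, then `track` per source status, then `finalize(0, mapper)`.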
diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java index 4c52d03824b..fbb4cbb8166 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java @@ -17,6 +17,9 @@ import static org.mockito.Mockito.when; import com.fasterxml.jackson.core.JsonProcessingException; +import io.airbyte.api.client.WorkloadApiClient; +import io.airbyte.api.client.generated.DestinationApi; +import io.airbyte.api.client.generated.SourceApi; import io.airbyte.commons.concurrency.VoidCallable; import io.airbyte.commons.converters.ThreadedTimeTracker; import io.airbyte.persistence.job.models.ReplicationInput; @@ -29,6 +32,7 @@ import io.airbyte.workers.context.ReplicationContext; import io.airbyte.workers.context.ReplicationFeatureFlags; import io.airbyte.workers.helper.AirbyteMessageDataExtractor; +import io.airbyte.workers.helper.StreamStatusCompletionTracker; import io.airbyte.workers.internal.AirbyteDestination; import io.airbyte.workers.internal.AirbyteMapper; import io.airbyte.workers.internal.AirbyteSource; @@ -54,6 +58,8 @@ class ReplicationWorkerHelperTest { private AirbyteMessageTracker messageTracker; private SyncPersistence syncPersistence; private AnalyticsMessageTracker analyticsMessageTracker; + private StreamStatusCompletionTracker streamStatusCompletionTracker; + private WorkloadApiClient workloadApiClient; @BeforeEach void setUp() { @@ -62,7 +68,10 @@ void setUp() { syncPersistence = mock(SyncPersistence.class); messageTracker = mock(AirbyteMessageTracker.class); analyticsMessageTracker = mock(AnalyticsMessageTracker.class); + streamStatusCompletionTracker = mock(StreamStatusCompletionTracker.class); + workloadApiClient = mock(WorkloadApiClient.class); when(messageTracker.getSyncStatsTracker()).thenReturn(syncStatsTracker); + when(workloadApiClient.getWorkloadApi()).thenReturn(mock(WorkloadApi.class)); replicationWorkerHelper = spy(new ReplicationWorkerHelper( mock(AirbyteMessageDataExtractor.class), mock(FieldSelector.class), @@ -72,20 +81,27 @@ void setUp() { mock(ReplicationAirbyteMessageEventPublishingHelper.class), mock(ThreadedTimeTracker.class), mock(VoidCallable.class), - mock(WorkloadApi.class), + workloadApiClient, false, analyticsMessageTracker, - Optional.empty())); + Optional.empty(), + mock(SourceApi.class), + mock(DestinationApi.class), + streamStatusCompletionTracker)); } @Test void testGetReplicationOutput() throws JsonProcessingException { // Need to pass in a replication context + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withAdditionalProperty("test", "test"); + final ReplicationContext replicationContext = new ReplicationContext(true, UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID(), 0L, + 1, UUID.randomUUID(), SOURCE_IMAGE, DESTINATION_IMAGE, UUID.randomUUID(), UUID.randomUUID()); replicationWorkerHelper.initialize( - new ReplicationContext(true, UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID(), 0L, - 1, UUID.randomUUID(), SOURCE_IMAGE, DESTINATION_IMAGE), + replicationContext, mock(ReplicationFeatureFlags.class), - mock(Path.class)); + mock(Path.class), + catalog); + verify(streamStatusCompletionTracker).startTracking(catalog, replicationContext); // Need to have a configured catalog for getReplicationOutput 
replicationWorkerHelper.startDestination( mock(AirbyteDestination.class), @@ -106,12 +122,13 @@ void testGetReplicationOutput() throws JsonProcessingException { void testAnalyticsMessageHandling() { final ReplicationContext context = new ReplicationContext(true, UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID(), 0L, - 1, UUID.randomUUID(), SOURCE_IMAGE, DESTINATION_IMAGE); + 1, UUID.randomUUID(), SOURCE_IMAGE, DESTINATION_IMAGE, UUID.randomUUID(), UUID.randomUUID()); // Need to pass in a replication context replicationWorkerHelper.initialize( context, mock(ReplicationFeatureFlags.class), - mock(Path.class)); + mock(Path.class), + mock(ConfiguredAirbyteCatalog.class)); // Need to have a configured catalog for getReplicationOutput replicationWorkerHelper.startDestination( mock(AirbyteDestination.class), diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java index 2f0b31ababf..6cbc5449fcf 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java @@ -31,6 +31,11 @@ import static org.mockito.Mockito.when; import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.api.client.WorkloadApiClient; +import io.airbyte.api.client.generated.DestinationApi; +import io.airbyte.api.client.generated.SourceApi; +import io.airbyte.api.client.model.generated.DestinationRead; +import io.airbyte.api.client.model.generated.SourceRead; import io.airbyte.api.client.model.generated.StreamStatusIncompleteRunCause; import io.airbyte.commons.concurrency.VoidCallable; import io.airbyte.commons.converters.ConnectorConfigUpdater; @@ -61,6 +66,7 @@ import io.airbyte.protocol.models.AirbyteLogMessage.Level; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; +import io.airbyte.protocol.models.AirbyteStreamStatusTraceMessage; import io.airbyte.protocol.models.AirbyteTraceMessage; import io.airbyte.protocol.models.Config; import io.airbyte.protocol.models.StreamDescriptor; @@ -73,6 +79,7 @@ import io.airbyte.workers.exception.WorkerException; import io.airbyte.workers.helper.AirbyteMessageDataExtractor; import io.airbyte.workers.helper.FailureHelper; +import io.airbyte.workers.helper.StreamStatusCompletionTracker; import io.airbyte.workers.internal.AirbyteDestination; import io.airbyte.workers.internal.AirbyteSource; import io.airbyte.workers.internal.AnalyticsMessageTracker; @@ -143,12 +150,15 @@ abstract class ReplicationWorkerTest { protected static final AirbyteMessage STATE_MESSAGE = AirbyteMessageUtils.createStateMessage(STREAM_NAME, "checkpoint", "1"); protected static final AirbyteTraceMessage ERROR_TRACE_MESSAGE = AirbyteMessageUtils.createErrorTraceMessage("a connector error occurred", Double.valueOf(123)); + protected static final Config CONNECTOR_CONFIG = new Config().withAdditionalProperty("my_key", "my_new_value"); protected static final AirbyteMessage CONFIG_MESSAGE = AirbyteMessageUtils.createConfigControlMessage(CONNECTOR_CONFIG, 1D); protected static final String STREAM1 = "stream1"; protected static final String NAMESPACE = "namespace"; protected static final String INDUCED_EXCEPTION = "induced exception"; + protected static final UUID SOURCE_DEFINITION_ID = UUID.randomUUID(); + protected static final UUID DESTINATION_DEFINITION_ID = 
UUID.randomUUID(); protected Path jobRoot; protected SimpleAirbyteSource sourceStub; @@ -175,8 +185,12 @@ abstract class ReplicationWorkerTest { protected ReplicationWorkerHelper replicationWorkerHelper; protected WorkloadApi workloadApi; + protected WorkloadApiClient workloadApiClient; protected AnalyticsMessageTracker analyticsMessageTracker; + protected SourceApi sourceApi; + protected DestinationApi destinationApi; + protected StreamStatusCompletionTracker streamStatusCompletionTracker; ReplicationWorker getDefaultReplicationWorker() { return getDefaultReplicationWorker(false); @@ -221,9 +235,17 @@ void setup() throws Exception { destinationTimeoutMonitor = mock(DestinationTimeoutMonitor.class); replicationAirbyteMessageEventPublishingHelper = mock(ReplicationAirbyteMessageEventPublishingHelper.class); workloadApi = mock(WorkloadApi.class); + workloadApiClient = mock(WorkloadApiClient.class); + when(workloadApiClient.getWorkloadApi()).thenReturn(workloadApi); analyticsMessageTracker = mock(AnalyticsMessageTracker.class); + sourceApi = mock(SourceApi.class); + when(sourceApi.getSource(any())).thenReturn(new SourceRead().sourceDefinitionId(SOURCE_DEFINITION_ID)); + destinationApi = mock(DestinationApi.class); + when(destinationApi.getDestination(any())).thenReturn(new DestinationRead().destinationDefinitionId(DESTINATION_DEFINITION_ID)); + streamStatusCompletionTracker = mock(StreamStatusCompletionTracker.class); + when(messageTracker.getSyncStatsTracker()).thenReturn(syncStatsTracker); when(mapper.mapCatalog(destinationConfig.getCatalog())).thenReturn(destinationConfig.getCatalog()); @@ -532,22 +554,25 @@ void testReplicationRunnableSourceUpdateConfig() throws Exception { verify(replicationAirbyteMessageEventPublishingHelper).publishStatusEvent(new ReplicationAirbyteMessageEvent(AirbyteMessageOrigin.SOURCE, CONFIG_MESSAGE, new ReplicationContext(false, replicationInput.getConnectionId(), replicationInput.getSourceId(), replicationInput.getDestinationId(), - Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, DESTINATION_IMAGE))); + Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, DESTINATION_IMAGE, SOURCE_DEFINITION_ID, + DESTINATION_DEFINITION_ID))); } @Test void testSourceConfigPersistError() throws Exception { sourceStub.setMessages(CONFIG_MESSAGE); - final String persistErrorMessage = "there was a problem persisting the new config"; doThrow(new RuntimeException(persistErrorMessage)) .when(replicationAirbyteMessageEventPublishingHelper) .publishStatusEvent(new ReplicationAirbyteMessageEvent(AirbyteMessageOrigin.SOURCE, CONFIG_MESSAGE, new ReplicationContext(false, replicationInput.getConnectionId(), replicationInput.getSourceId(), replicationInput.getDestinationId(), - Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, DESTINATION_IMAGE))); + Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, DESTINATION_IMAGE, SOURCE_DEFINITION_ID, + DESTINATION_DEFINITION_ID))); final ReplicationWorker worker = getDefaultReplicationWorker(); + doReturn(SOURCE_DEFINITION_ID).when(replicationWorkerHelper).getSourceDefinitionIdForSourceId(replicationInput.getSourceId()); + doReturn(DESTINATION_DEFINITION_ID).when(replicationWorkerHelper).getDestinationDefinitionIdForDestinationId(replicationInput.getDestinationId()); final ReplicationOutput output = worker.run(replicationInput, jobRoot); assertEquals(ReplicationStatus.FAILED, 
output.getReplicationAttemptSummary().getStatus()); @@ -566,7 +591,8 @@ void testReplicationRunnableDestinationUpdateConfig() throws Exception { verify(replicationAirbyteMessageEventPublishingHelper).publishStatusEvent(new ReplicationAirbyteMessageEvent(AirbyteMessageOrigin.DESTINATION, CONFIG_MESSAGE, new ReplicationContext(false, replicationInput.getConnectionId(), replicationInput.getSourceId(), replicationInput.getDestinationId(), - Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, DESTINATION_IMAGE))); + Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, DESTINATION_IMAGE, SOURCE_DEFINITION_ID, + DESTINATION_DEFINITION_ID))); } @Test @@ -580,7 +606,8 @@ void testDestinationConfigPersistError() throws Exception { .publishStatusEvent(new ReplicationAirbyteMessageEvent(AirbyteMessageOrigin.DESTINATION, CONFIG_MESSAGE, new ReplicationContext(false, replicationInput.getConnectionId(), replicationInput.getSourceId(), replicationInput.getDestinationId(), - Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, DESTINATION_IMAGE))); + Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, DESTINATION_IMAGE, SOURCE_DEFINITION_ID, + DESTINATION_DEFINITION_ID))); final ReplicationWorker worker = getDefaultReplicationWorker(); @@ -1180,6 +1207,35 @@ void testCallHeartbeat() throws WorkerException { verify(replicationWorkerHelper).getWorkloadStatusHeartbeat(any()); } + @Test + void testStreamStatusCompletionTracking() throws Exception { + sourceStub.setMessages(RECORD_MESSAGE1); + + final ReplicationWorker worker = getDefaultReplicationWorker(); + + worker.run(replicationInput, jobRoot); + + verify(streamStatusCompletionTracker).startTracking(any(), any()); + + verify(streamStatusCompletionTracker).finalize(0, mapper); + } + + @Test + void testStreamStatusCompletionTrackingTrackSourceMessage() throws Exception { + + AirbyteMessage streamStatus = AirbyteMessageUtils.createStatusTraceMessage(new StreamDescriptor().withName(STREAM_NAME), + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE); + sourceStub.setMessages(RECORD_MESSAGE1, streamStatus); + + final ReplicationWorker worker = getDefaultReplicationWorker(); + + worker.run(replicationInput, jobRoot); + + verify(streamStatusCompletionTracker).startTracking(any(), any()); + verify(streamStatusCompletionTracker).track(streamStatus.getTrace().getStreamStatus()); + verify(streamStatusCompletionTracker).finalize(0, mapper); + } + private ReplicationContext simpleContext(final boolean isReset) { return new ReplicationContext( isReset, @@ -1190,7 +1246,9 @@ private ReplicationContext simpleContext(final boolean isReset) { JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, - DESTINATION_IMAGE); + DESTINATION_IMAGE, + SOURCE_DEFINITION_ID, + DESTINATION_DEFINITION_ID); } } diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/BufferedReplicationWorkerPerformanceTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/BufferedReplicationWorkerPerformanceTest.java index 3c48cfab333..4cada03a333 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/BufferedReplicationWorkerPerformanceTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/BufferedReplicationWorkerPerformanceTest.java @@ -11,6 +11,7 @@ import io.airbyte.workers.general.ReplicationWorker; import 
io.airbyte.workers.general.ReplicationWorkerHelper; import io.airbyte.workers.helper.AirbyteMessageDataExtractor; +import io.airbyte.workers.helper.StreamStatusCompletionTracker; import io.airbyte.workers.internal.AirbyteDestination; import io.airbyte.workers.internal.AirbyteMapper; import io.airbyte.workers.internal.AirbyteSource; @@ -42,10 +43,11 @@ public ReplicationWorker getReplicationWorker(final String jobId, final AirbyteMessageDataExtractor airbyteMessageDataExtractor, final ReplicationAirbyteMessageEventPublishingHelper messageEventPublishingHelper, final ReplicationWorkerHelper replicationWorkerHelper, - final DestinationTimeoutMonitor destinationTimeoutMonitor) { + final DestinationTimeoutMonitor destinationTimeoutMonitor, + final StreamStatusCompletionTracker streamStatusCompletionTracker) { return new BufferedReplicationWorker(jobId, attempt, source, destination, syncPersistence, recordSchemaValidator, srcHeartbeatTimeoutChaperone, replicationFeatureFlagReader, replicationWorkerHelper, destinationTimeoutMonitor, - BufferedReplicationWorkerType.BUFFERED_WITH_LINKED_BLOCKING_QUEUE); + BufferedReplicationWorkerType.BUFFERED_WITH_LINKED_BLOCKING_QUEUE, streamStatusCompletionTracker); } public static void main(final String[] args) throws IOException, InterruptedException { diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/DefaultReplicationWorkerPerformanceTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/DefaultReplicationWorkerPerformanceTest.java index b0bb8eb88a4..a87da89def1 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/DefaultReplicationWorkerPerformanceTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/DefaultReplicationWorkerPerformanceTest.java @@ -10,6 +10,7 @@ import io.airbyte.workers.general.ReplicationWorker; import io.airbyte.workers.general.ReplicationWorkerHelper; import io.airbyte.workers.helper.AirbyteMessageDataExtractor; +import io.airbyte.workers.helper.StreamStatusCompletionTracker; import io.airbyte.workers.internal.AirbyteDestination; import io.airbyte.workers.internal.AirbyteMapper; import io.airbyte.workers.internal.AirbyteSource; @@ -41,9 +42,11 @@ public ReplicationWorker getReplicationWorker(final String jobId, final AirbyteMessageDataExtractor airbyteMessageDataExtractor, final ReplicationAirbyteMessageEventPublishingHelper messageEventPublishingHelper, final ReplicationWorkerHelper replicationWorkerHelper, - final DestinationTimeoutMonitor destinationTimeoutMonitor) { + final DestinationTimeoutMonitor destinationTimeoutMonitor, + final StreamStatusCompletionTracker streamStatusCompletionTracker) { return new DefaultReplicationWorker(jobId, attempt, source, destination, syncPersistence, recordSchemaValidator, - srcHeartbeatTimeoutChaperone, replicationFeatureFlagReader, replicationWorkerHelper, destinationTimeoutMonitor); + srcHeartbeatTimeoutChaperone, replicationFeatureFlagReader, replicationWorkerHelper, destinationTimeoutMonitor, + streamStatusCompletionTracker); } public static void main(final String[] args) throws IOException, InterruptedException { diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/ReplicationWorkerPerformanceTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/ReplicationWorkerPerformanceTest.java index 25c097353ca..96d22ebf07a 100644 --- 
diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/ReplicationWorkerPerformanceTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/ReplicationWorkerPerformanceTest.java
index 25c097353ca..96d22ebf07a 100644
--- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/ReplicationWorkerPerformanceTest.java
+++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/ReplicationWorkerPerformanceTest.java
@@ -10,6 +10,9 @@ import static org.mockito.Mockito.when;

import io.airbyte.api.client.AirbyteApiClient;
+import io.airbyte.api.client.WorkloadApiClient;
+import io.airbyte.api.client.generated.DestinationApi;
+import io.airbyte.api.client.generated.SourceApi;
import io.airbyte.commons.converters.ConnectorConfigUpdater;
import io.airbyte.commons.converters.ThreadedTimeTracker;
import io.airbyte.commons.features.EnvVariableFeatureFlags;
@@ -39,6 +42,7 @@ import io.airbyte.workers.general.ReplicationWorker;
import io.airbyte.workers.general.ReplicationWorkerHelper;
import io.airbyte.workers.helper.AirbyteMessageDataExtractor;
+import io.airbyte.workers.helper.StreamStatusCompletionTracker;
import io.airbyte.workers.internal.AirbyteDestination;
import io.airbyte.workers.internal.AirbyteMapper;
import io.airbyte.workers.internal.AirbyteSource;
@@ -90,7 +94,8 @@ public abstract ReplicationWorker getReplicationWorker(final String jobId,
                                                          final AirbyteMessageDataExtractor airbyteMessageDataExtractor,
                                                          final ReplicationAirbyteMessageEventPublishingHelper messageEventPublishingHelper,
                                                          final ReplicationWorkerHelper replicationWorkerHelper,
-                                                         final DestinationTimeoutMonitor destinationTimeoutMonitor);
+                                                         final DestinationTimeoutMonitor destinationTimeoutMonitor,
+                                                         final StreamStatusCompletionTracker streamStatusCompletionTracker);

  /**
   * Hook up the DefaultReplicationWorker to a test harness with an insanely quick Source
@@ -143,7 +148,7 @@ public void executeOneSync() throws InterruptedException {
    catalogMigrator.initialize();
    final var migratorFactory = new AirbyteProtocolVersionedMigratorFactory(msgMigrator, catalogMigrator);

-    final var versionFac = VersionedAirbyteStreamFactory.noMigrationVersionedAirbyteStreamFactory(false);
+    final var versionFac = VersionedAirbyteStreamFactory.noMigrationVersionedAirbyteStreamFactory();
    final HeartbeatMonitor heartbeatMonitor = new HeartbeatMonitor(DEFAULT_HEARTBEAT_FRESHNESS_THRESHOLD);
    final var versionedAbSource =
        new DefaultAirbyteSource(integrationLauncher, versionFac, heartbeatMonitor, migratorFactory.getProtocolSerializer(new Version("0.2.0")),
@@ -177,12 +182,15 @@ public void executeOneSync() throws InterruptedException {
    final boolean fieldSelectionEnabled = false;
    final FieldSelector fieldSelector = new FieldSelector(validator, metricReporter, fieldSelectionEnabled, false);

+    final WorkloadApiClient workloadApiClient = mock(WorkloadApiClient.class);
+    when(workloadApiClient.getWorkloadApi()).thenReturn(mock(WorkloadApi.class));
    final ReplicationWorkerHelper replicationWorkerHelper =
        new ReplicationWorkerHelper(airbyteMessageDataExtractor, fieldSelector, dstNamespaceMapper, messageTracker, syncPersistence,
-            replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), () -> {}, mock(WorkloadApi.class), false,
+            replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), () -> {}, workloadApiClient, false,
            analyticsMessageTracker,
-            Optional.empty());
+            Optional.empty(), mock(SourceApi.class), mock(DestinationApi.class), mock(StreamStatusCompletionTracker.class));
+    final StreamStatusCompletionTracker streamStatusCompletionTracker = mock(StreamStatusCompletionTracker.class);
    final var worker = getReplicationWorker("1", 0,
        versionedAbSource,
@@ -197,7 +205,8 @@ replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), () ->
        airbyteMessageDataExtractor,
        replicationAirbyteMessageEventPublishingHelper,
        replicationWorkerHelper,
-        destinationTimeoutMonitor);
+        destinationTimeoutMonitor,
+        streamStatusCompletionTracker);
    final AtomicReference output = new AtomicReference<>();
    final Thread workerThread = new Thread(() -> {
      try {
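A second change runs through this harness: `ReplicationWorkerHelper` now receives a `WorkloadApiClient` wrapper (plus `SourceApi` and `DestinationApi` handles) instead of a bare `WorkloadApi`, so tests must stub the wrapper's accessor before wiring. A minimal sketch of that stubbing; the `WorkloadApi` import path is an assumption, since only the simple name appears in the diff:

```java
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import io.airbyte.api.client.WorkloadApiClient;
import io.airbyte.workload.api.client.generated.WorkloadApi; // package assumed, not shown in the diff

final class WorkloadApiClientStubSketch {

  WorkloadApiClient stubbedClient() {
    // Code under test reaches the API through the wrapper, so the accessor must be stubbed.
    final WorkloadApiClient workloadApiClient = mock(WorkloadApiClient.class);
    when(workloadApiClient.getWorkloadApi()).thenReturn(mock(WorkloadApi.class));
    return workloadApiClient;
  }
}
```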
diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/NamespacingMapperTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/NamespacingMapperTest.java
index 925d9baadca..b637f6f0ad1 100644
--- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/NamespacingMapperTest.java
+++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/NamespacingMapperTest.java
@@ -13,10 +13,12 @@ import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType;
import io.airbyte.protocol.models.AirbyteMessage;
import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType;
+import io.airbyte.protocol.models.AirbyteStreamStatusTraceMessage;
import io.airbyte.protocol.models.CatalogHelpers;
import io.airbyte.protocol.models.ConfiguredAirbyteCatalog;
import io.airbyte.protocol.models.Field;
import io.airbyte.protocol.models.JsonSchemaType;
+import io.airbyte.protocol.models.StreamDescriptor;
import io.airbyte.workers.test_utils.AirbyteMessageUtils;
import java.util.Map;
import org.junit.jupiter.api.BeforeEach;
@@ -38,6 +40,9 @@ class NamespacingMapperTest {
      Field.of(FIELD_NAME, JsonSchemaType.STRING));
  private AirbyteMessage recordMessage;
  private AirbyteMessage stateMessage;
+  private final AirbyteMessage streamStatusMessage = AirbyteMessageUtils.createStreamStatusTraceMessageWithType(
+      new StreamDescriptor().withName(STREAM_NAME).withNamespace(INPUT_NAMESPACE),
+      AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE);
  private Map destinationToSourceNamespaceAndStreamName;

  private static AirbyteMessage createRecordMessage() {
@@ -84,6 +89,15 @@ void testSourceNamespace() {
    final AirbyteMessage actualMessage = mapper.mapMessage(recordMessage);

    assertEquals(expectedMessage, actualMessage);
+
+    final AirbyteMessage expectedStreamStatusMessage = AirbyteMessageUtils.createStreamStatusTraceMessageWithType(
+        new StreamDescriptor().withName(OUTPUT_PREFIX + STREAM_NAME),
+        AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE);
+    expectedStreamStatusMessage.getTrace().getStreamStatus().getStreamDescriptor().withNamespace(INPUT_NAMESPACE);
+
+    final AirbyteMessage actualStreamStatusMessage = mapper.mapMessage(streamStatusMessage);
+
+    assertEquals(expectedStreamStatusMessage, actualStreamStatusMessage);
  }

  @Test
@@ -106,10 +120,20 @@ void testEmptySourceNamespace() {
    assertEquals(originalMessage, recordMessage);
    originalMessage.getRecord().withNamespace(null);

+    final AirbyteMessage originalStreamStatusMessage = Jsons.clone(streamStatusMessage);
+    assertEquals(originalStreamStatusMessage, streamStatusMessage);
+    originalStreamStatusMessage.getTrace().getStreamStatus().getStreamDescriptor().withNamespace(null);
+
    final AirbyteMessage expectedMessage = AirbyteMessageUtils.createRecordMessage(OUTPUT_PREFIX + STREAM_NAME, FIELD_NAME, BLUE);
    expectedMessage.getRecord().withNamespace(null);
    final AirbyteMessage actualMessage = mapper.mapMessage(originalMessage);

+    final AirbyteMessage expectedStreamStatusMessage = AirbyteMessageUtils.createStreamStatusTraceMessageWithType(
+        new StreamDescriptor().withName(OUTPUT_PREFIX + STREAM_NAME),
+        AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE);
+    final AirbyteMessage actualStreamStatusMessage = mapper.mapMessage(originalStreamStatusMessage);
+    assertEquals(expectedStreamStatusMessage, actualStreamStatusMessage);
+
    assertEquals(expectedMessage, actualMessage);
  }

@@ -134,6 +157,12 @@ void testDestinationNamespace() {
    final AirbyteMessage expectedMessage = AirbyteMessageUtils.createRecordMessage(OUTPUT_PREFIX + STREAM_NAME, FIELD_NAME, BLUE);
    final AirbyteMessage actualMessage = mapper.mapMessage(recordMessage);
    assertEquals(expectedMessage, actualMessage);
+
+    final AirbyteMessage expectedStreamStatusMessage = AirbyteMessageUtils.createStreamStatusTraceMessageWithType(
+        new StreamDescriptor().withName(OUTPUT_PREFIX + STREAM_NAME),
+        AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE);
+    final AirbyteMessage actualStreamStatusMessage = mapper.mapMessage(streamStatusMessage);
+    assertEquals(expectedStreamStatusMessage, actualStreamStatusMessage);
  }

  @Test
@@ -162,6 +191,14 @@ void testCustomFormatWithVariableNamespace() {
    final AirbyteMessage actualMessage = mapper.mapMessage(recordMessage);

    assertEquals(expectedMessage, actualMessage);
+
+    final AirbyteMessage expectedStreamStatusMessage = AirbyteMessageUtils.createStreamStatusTraceMessageWithType(
+        new StreamDescriptor().withName(OUTPUT_PREFIX + STREAM_NAME),
+        AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE);
+    expectedStreamStatusMessage.getTrace().getStreamStatus().getStreamDescriptor().withNamespace(expectedNamespace);
+    final AirbyteMessage actualStreamStatusMessage = mapper.mapMessage(streamStatusMessage);
+
+    assertEquals(expectedStreamStatusMessage, actualStreamStatusMessage);
  }

  @Test
@@ -190,6 +227,14 @@ void testCustomFormatWithoutVariableNamespace() {
    final AirbyteMessage actualMessage = mapper.mapMessage(recordMessage);

    assertEquals(expectedMessage, actualMessage);
+
+    final AirbyteMessage expectedStreamStatusMessage = AirbyteMessageUtils.createStreamStatusTraceMessageWithType(
+        new StreamDescriptor().withName(OUTPUT_PREFIX + STREAM_NAME),
+        AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE);
+    expectedStreamStatusMessage.getTrace().getStreamStatus().getStreamDescriptor().withNamespace(expectedNamespace);
+    final AirbyteMessage actualStreamStatusMessage = mapper.mapMessage(streamStatusMessage);
+
+    assertEquals(expectedStreamStatusMessage, actualStreamStatusMessage);
  }

  @Test
@@ -220,6 +265,18 @@ void testEmptyCustomFormatWithVariableNamespace() {
    final AirbyteMessage actualMessage = mapper.mapMessage(originalMessage);

    assertEquals(expectedMessage, actualMessage);
+
+    final AirbyteMessage originalStreamStatusMessage = Jsons.clone(streamStatusMessage);
+    assertEquals(originalStreamStatusMessage, streamStatusMessage);
+    originalStreamStatusMessage.getTrace().getStreamStatus().getStreamDescriptor().withNamespace(null);
+
+    final AirbyteMessage expectedStreamStatusMessage = AirbyteMessageUtils.createStreamStatusTraceMessageWithType(
+        new StreamDescriptor().withName(OUTPUT_PREFIX + STREAM_NAME),
+        AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE);
+    expectedStreamStatusMessage.getTrace().getStreamStatus().getStreamDescriptor().withNamespace(null);
+    final AirbyteMessage actualStreamStatusMessage = mapper.mapMessage(originalStreamStatusMessage);
+
+    assertEquals(expectedStreamStatusMessage, actualStreamStatusMessage);
  }

  @Test
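The assertions added to `NamespacingMapperTest` all check the same behavior: the mapper now rewrites the `StreamDescriptor` inside a stream-status TRACE message with the same prefix and namespace rules it applies to record streams. A simplified sketch of that rule, not the real `NamespacingMapper` implementation, with the namespace-resolution logic itself elided:

```java
import io.airbyte.protocol.models.AirbyteMessage;
import io.airbyte.protocol.models.StreamDescriptor;

// Simplified: prefix the stream name and overwrite the namespace on a stream-status
// trace message, mirroring what the new test assertions expect from mapMessage().
final class StreamStatusNamespacingSketch {

  AirbyteMessage mapStreamStatus(final AirbyteMessage message, final String prefix, final String resolvedNamespace) {
    final StreamDescriptor descriptor = message.getTrace().getStreamStatus().getStreamDescriptor();
    descriptor.withName(prefix + descriptor.getName());
    descriptor.withNamespace(resolvedNamespace); // null when the resolved namespace is empty
    return message;
  }
}
```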
diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java
index d6c36e5b35f..29b3e40f38f 100644
--- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java
+++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java
@@ -5,7 +5,6 @@ package io.airbyte.workers.internal;

import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
@@ -163,23 +162,6 @@ void testFailValidation() {
    verify(logger, atLeastOnce()).error(anyString(), anyString());
  }

-  @Test
-  void testFailsSize() {
-    final AirbyteMessage record1 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "green");
-
-    final InputStream inputStream = new ByteArrayInputStream(record1.toString().getBytes(StandardCharsets.UTF_8));
-    final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
-
-    final Stream messageStream =
-        VersionedAirbyteStreamFactory
-            .noMigrationVersionedAirbyteStreamFactory(logger, new Builder(), Optional.of(RuntimeException.class), 1L,
-                new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false), gsonPksExtractor)
-            .create(bufferedReader);
-
-    verifyStreamHeader();
-    assertThrows(RuntimeException.class, () -> messageStream.toList());
-  }
-
  @ParameterizedTest
  @ValueSource(strings = {
      // Missing closing bracket.
@@ -196,10 +178,12 @@ void testMalformedRecordShouldOnlyDebugLog(final String invalidRecord) {
    verify(logger).debug(invalidRecord);
  }

-  private VersionedAirbyteStreamFactory getFactory(final boolean failTooLongMessage) {
+  private VersionedAirbyteStreamFactory getFactory() {
    return VersionedAirbyteStreamFactory
-        .noMigrationVersionedAirbyteStreamFactory(logger, new Builder(), Optional.of(RuntimeException.class), 100000L,
-            new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(failTooLongMessage, false),
+        .noMigrationVersionedAirbyteStreamFactory(
+            logger,
+            new Builder(),
+            new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false),
            gsonPksExtractor);
  }

@@ -211,13 +195,13 @@ private VersionedAirbyteStreamFactory getFactory(final boolean failTooLongMessag
  @Test
  void testToAirbyteMessageValid() {
    final String messageLine = String.format(VALID_MESSAGE_TEMPLATE, "hello");
-    Assertions.assertThat(getFactory(false).toAirbyteMessage(messageLine)).hasSize(1);
+    Assertions.assertThat(getFactory().toAirbyteMessage(messageLine)).hasSize(1);
  }

  @Test
  void testToAirbyteMessageRandomLog() {
    final String randomLog = "I should not be send on the same channel than the airbyte messages";
-    Assertions.assertThat(getFactory(false).toAirbyteMessage(randomLog))
+    Assertions.assertThat(getFactory().toAirbyteMessage(randomLog))
        .isEmpty();
    verify(logger).info(randomLog);
  }
@@ -225,7 +209,7 @@ void testToAirbyteMessageRandomLog() {
  @Test
  void testToAirbyteMessageMixedUpRecordShouldOnlyDebugLog() {
    final String messageLine = "It shouldn't be here" + String.format(VALID_MESSAGE_TEMPLATE, "hello");
-    getFactory(false).toAirbyteMessage(messageLine);
+    getFactory().toAirbyteMessage(messageLine);
    verifyBlankedRecordRecordWarning();
    verify(logger).debug(messageLine);
  }
@@ -233,7 +217,7 @@ void testToAirbyteMessageMixedUpRecordShouldOnlyDebugLog() {
  @Test
  void testToAirbyteMessageMixedUpRecordFailureDisable() {
    final String messageLine = "It shouldn't be here" + String.format(VALID_MESSAGE_TEMPLATE, "hello");
-    Assertions.assertThat(getFactory(false).toAirbyteMessage(messageLine)).isEmpty();
+    Assertions.assertThat(getFactory().toAirbyteMessage(messageLine)).isEmpty();
    verifyBlankedRecordRecordWarning();
    verify(logger).debug(messageLine);
  }
@@ -246,7 +230,7 @@ void testToAirbyteMessageVeryLongMessageDontFail() {
      longStringBuilder.append("a");
    }
    final String messageLine = String.format(VALID_MESSAGE_TEMPLATE, longStringBuilder);
-    Assertions.assertThat(getFactory(false).toAirbyteMessage(messageLine)).isNotEmpty();
+    Assertions.assertThat(getFactory().toAirbyteMessage(messageLine)).isNotEmpty();
  }

  private Stream stringToMessageStream(final String inputString) {
@@ -254,8 +238,10 @@ private Stream stringToMessageStream(final String inputString) {
    final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
    final var stream = VersionedAirbyteStreamFactory
-        .noMigrationVersionedAirbyteStreamFactory(logger, new Builder(), Optional.of(RuntimeException.class), 100000L,
-            new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false),
+        .noMigrationVersionedAirbyteStreamFactory(
+            logger,
+            new Builder(),
+            new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false),
            gsonPksExtractor)
        .create(bufferedReader);
    verifyStreamHeader();
@@ -298,8 +284,8 @@ void beforeEach() {
  void testCreate() {
    final Version initialVersion = new Version("0.1.2");
    final VersionedAirbyteStreamFactory streamFactory =
-        new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty(), Optional.empty(),
-            new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false),
+        new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty(),
+            new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false),
            gsonPksExtractor);

    final BufferedReader bufferedReader = new BufferedReader(new StringReader(""));
@@ -312,8 +298,8 @@ void testCreate() {
  void testCreateWithVersionDetection() {
    final Version initialVersion = new Version("0.0.0");
    final VersionedAirbyteStreamFactory streamFactory =
-        new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty(), Optional.empty(),
-            new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false),
+        new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty(),
+            new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false),
            gsonPksExtractor)
        .withDetectVersion(true);
@@ -329,8 +315,8 @@ void testCreateWithVersionDetection() {
  void testCreateWithVersionDetectionFallback() {
    final Version initialVersion = new Version("0.0.6");
    final VersionedAirbyteStreamFactory streamFactory =
-        new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty(), Optional.empty(),
-            new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false),
+        new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty(),
+            new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false),
            gsonPksExtractor)
        .withDetectVersion(true);
@@ -346,8 +332,8 @@ void testCreateWithVersionDetectionFallback() {
  void testCreateWithVersionDetectionWithoutSpecMessage() {
    final Version initialVersion = new Version("0.0.1");
    final VersionedAirbyteStreamFactory streamFactory =
-        new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty(), Optional.empty(),
-            new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false),
+        new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty(),
+            new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false),
            gsonPksExtractor)
        .withDetectVersion(true);
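With the size-failure path deleted, `noMigrationVersionedAirbyteStreamFactory` loses its exception-class and maximum-message-size parameters, and `InvalidLineFailureConfiguration` shrinks to a single flag. A sketch of the surviving call shape, mirroring `getFactory()` above; `logger`, `new Builder()` and `gsonPksExtractor` are the test's own fixtures, and what the remaining boolean controls is not visible in these hunks, so its meaning is left open:

```java
// Post-change construction, as exercised by the rewritten tests.
final var factory = VersionedAirbyteStreamFactory
    .noMigrationVersionedAirbyteStreamFactory(
        logger,
        new Builder(), // assumed to be the MDC-scope builder the test imports
        new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false), // single remaining flag
        gsonPksExtractor);
```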
diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/StreamStatusTrackerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/StreamStatusTrackerTest.java
index 8b967f02493..c6fd2e84bd7 100644
--- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/StreamStatusTrackerTest.java
+++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/StreamStatusTrackerTest.java
@@ -64,6 +64,8 @@ class StreamStatusTrackerTest {
  private static final UUID SOURCE_ID = UUID.randomUUID();
  private static final UUID STREAM_ID = UUID.randomUUID();
  private static final UUID WORKSPACE_ID = UUID.randomUUID();
+  private static final UUID SOURCE_DEFINITION_ID = UUID.randomUUID();
+  private static final UUID DESTINATION_DEFINITION_ID = UUID.randomUUID();
  private static final Duration TIMESTAMP = Duration.of(12345L, ChronoUnit.MILLIS);

  private AirbyteApiClient airbyteApiClient;
@@ -101,8 +103,7 @@ void testCurrentStatusNoStatus() {
  void testTrackingStartedStatus(final boolean isReset) throws ApiException {
    final AirbyteMessageOrigin airbyteMessageOrigin = AirbyteMessageOrigin.SOURCE;
    final AirbyteMessage airbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);
    final ReplicationAirbyteMessageEvent event = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, airbyteMessage, replicationContext);
    final StreamStatusCreateRequestBody expected = new StreamStatusCreateRequestBody()
        .streamName(streamDescriptor.getName())
@@ -115,7 +116,7 @@ void testTrackingStartedStatus(final boolean isReset) throws ApiException {
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());
    final StreamStatusRead streamStatusRead = new StreamStatusRead()
        .attemptNumber(ATTEMPT)
        .connectionId(CONNECTION_ID)
@@ -143,8 +144,7 @@ void testTrackingRunningStatus() throws ApiException {
    final AirbyteMessageOrigin airbyteMessageOrigin = AirbyteMessageOrigin.SOURCE;
    final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP);
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(false, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(false);
    final ReplicationAirbyteMessageEvent startedEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext);
    final ReplicationAirbyteMessageEvent runningEvent =
@@ -161,7 +161,7 @@ void testTrackingRunningStatus() throws ApiException {
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -180,8 +180,7 @@ void testTrackingCompleteSourceOnly() throws ApiException {
    final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP);
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
    final AirbyteMessage sourceCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(false, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(false);
    final ReplicationAirbyteMessageEvent startedEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext);
    final ReplicationAirbyteMessageEvent runningEvent =
@@ -189,7 +188,7 @@ void testTrackingCompleteSourceOnly() throws ApiException {
    final ReplicationAirbyteMessageEvent sourceEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, sourceCompleteAirbyteMessage, replicationContext);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -208,8 +207,7 @@ void testTrackingCompleteDestinationOnly() throws ApiException {
    final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP);
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
    final AirbyteMessage destinationCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(false, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(false);
    final ReplicationAirbyteMessageEvent startedEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext);
    final ReplicationAirbyteMessageEvent runningEvent =
@@ -217,7 +215,7 @@ void testTrackingCompleteDestinationOnly() throws ApiException {
    final ReplicationAirbyteMessageEvent destinationEvent =
        new ReplicationAirbyteMessageEvent(AirbyteMessageOrigin.DESTINATION, destinationCompleteAirbyteMessage, replicationContext);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -238,8 +236,7 @@ void testTrackingCompleteSourceAndCompleteDestination(final boolean isReset) thr
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
    final AirbyteMessage destinationCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP);
    final AirbyteMessage sourceCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);
    final ReplicationAirbyteMessageEvent startedEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext);
    final ReplicationAirbyteMessageEvent runningEvent =
@@ -260,7 +257,7 @@ void testTrackingCompleteSourceAndCompleteDestination(final boolean isReset) thr
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -286,8 +283,7 @@ void testTrackingCompleteDestinationAndCompleteSource(final boolean isReset) thr
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
    final AirbyteMessage destinationCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP);
    final AirbyteMessage sourceCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);
    final ReplicationAirbyteMessageEvent startedEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext);
    final ReplicationAirbyteMessageEvent runningEvent =
@@ -308,7 +304,7 @@ void testTrackingCompleteDestinationAndCompleteSource(final boolean isReset) thr
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -338,8 +334,7 @@ void testTrackingIncompleteSourceOnly(final boolean isReset) throws ApiException
    final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP);
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
    final AirbyteMessage sourceIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);
    final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED;
    final ReplicationAirbyteMessageEvent startedEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext);
@@ -360,7 +355,7 @@ void testTrackingIncompleteSourceOnly(final boolean isReset) throws ApiException
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -383,8 +378,7 @@ void testTrackingIncompleteDestinationOnly(final boolean isReset) throws ApiExce
    final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP);
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
    final AirbyteMessage destinationIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);
    final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED;
    final ReplicationAirbyteMessageEvent startedEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext);
@@ -406,7 +400,7 @@ void testTrackingIncompleteDestinationOnly(final boolean isReset) throws ApiExce
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -430,8 +424,7 @@ void testTrackingIncompleteSourceAndIncompleteDestination(final boolean isReset)
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
    final AirbyteMessage destinationIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP);
    final AirbyteMessage sourceIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);
    final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED;
    final ReplicationAirbyteMessageEvent startedEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext);
@@ -455,7 +448,7 @@ void testTrackingIncompleteSourceAndIncompleteDestination(final boolean isReset)
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -481,8 +474,7 @@ void testTrackingIncompleteDestinationAndIncompleteSource(final boolean isReset)
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
    final AirbyteMessage destinationIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP);
    final AirbyteMessage sourceIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);
    final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED;
    final ReplicationAirbyteMessageEvent startedEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext);
@@ -506,7 +498,7 @@ void testTrackingIncompleteDestinationAndIncompleteSource(final boolean isReset)
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -532,8 +524,7 @@ void testTrackingIncompleteSourceAndCompleteDestination(final boolean isReset) t
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
    final AirbyteMessage destinationCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP);
    final AirbyteMessage sourceIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);
    final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED;
    final ReplicationAirbyteMessageEvent startedEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext);
@@ -556,7 +547,7 @@ void testTrackingIncompleteSourceAndCompleteDestination(final boolean isReset) t
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -582,8 +573,7 @@ void testTrackingCompleteDestinationAndIncompleteSource(final boolean isReset) t
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
    final AirbyteMessage destinationCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP);
    final AirbyteMessage sourceIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);
    final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED;
    final ReplicationAirbyteMessageEvent startedEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext);
@@ -606,7 +596,7 @@ void testTrackingCompleteDestinationAndIncompleteSource(final boolean isReset) t
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -632,8 +622,7 @@ void testTrackingCompleteSourceAndIncompleteDestination(final boolean isReset) t
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
    final AirbyteMessage destinationIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP);
    final AirbyteMessage sourceCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);
    final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED;
    final ReplicationAirbyteMessageEvent startedEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext);
@@ -657,7 +646,7 @@ void testTrackingCompleteSourceAndIncompleteDestination(final boolean isReset) t
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -683,8 +672,7 @@ void testTrackingIncompleteDestinationAndCompleteSource(final boolean isReset) t
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
    final AirbyteMessage destinationIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP);
    final AirbyteMessage sourceCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);
    final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED;
    final ReplicationAirbyteMessageEvent startedEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext);
@@ -708,7 +696,7 @@ void testTrackingIncompleteDestinationAndCompleteSource(final boolean isReset) t
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -733,8 +721,7 @@ void testTrackingInternalIncomplete(final boolean isReset) throws ApiException {
    final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP);
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
    final AirbyteMessage sourceIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);
    final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED;
    final ReplicationAirbyteMessageEvent startedEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext);
@@ -755,7 +742,7 @@ void testTrackingInternalIncomplete(final boolean isReset) throws ApiException {
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -776,8 +763,7 @@ void testTrackingInternalIncomplete(final boolean isReset) throws ApiException {
  void testTrackingOutOfOrderStartedStatus() throws ApiException {
    final AirbyteMessageOrigin airbyteMessageOrigin = AirbyteMessageOrigin.SOURCE;
    final AirbyteMessage airbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(false, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(false);
    final ReplicationAirbyteMessageEvent event = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, airbyteMessage, replicationContext);
    final StreamStatusCreateRequestBody expected = new StreamStatusCreateRequestBody()
        .streamName(streamDescriptor.getName())
@@ -790,7 +776,7 @@ void testTrackingOutOfOrderStartedStatus() throws ApiException {
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -807,8 +793,7 @@ void testTrackingOutOfOrderRunningStatus() throws ApiException {
    final AirbyteMessageOrigin airbyteMessageOrigin = AirbyteMessageOrigin.SOURCE;
    final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP);
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(false, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(false);
    final ReplicationAirbyteMessageEvent startedEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext);
    final ReplicationAirbyteMessageEvent runningEvent =
@@ -825,7 +810,7 @@ void testTrackingOutOfOrderRunningStatus() throws ApiException {
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -846,14 +831,13 @@ void testTrackingOutOfOrderCompleteStatus() throws ApiException {
    final AirbyteMessageOrigin airbyteMessageOrigin = AirbyteMessageOrigin.SOURCE;
    final AirbyteMessage destinationStoppedAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP);
    final AirbyteMessage sourceStoppedAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(false, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(false);
    final ReplicationAirbyteMessageEvent destinationEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, destinationStoppedAirbyteMessage, replicationContext);
    final ReplicationAirbyteMessageEvent sourceEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, sourceStoppedAirbyteMessage, replicationContext);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    streamStatusTracker.track(sourceEvent);
    streamStatusTracker.track(destinationEvent);
@@ -868,14 +852,13 @@ void testTrackingOutOfOrderIncompleteStatus() throws ApiException {
    final AirbyteMessageOrigin airbyteMessageOrigin = AirbyteMessageOrigin.SOURCE;
    final AirbyteMessage destinationStoppedAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP);
    final AirbyteMessage sourceStoppedAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP);
-    final ReplicationContext replicationContext =
-        new ReplicationContext(false, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(false);
    final ReplicationAirbyteMessageEvent destinationEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, destinationStoppedAirbyteMessage, replicationContext);
    final ReplicationAirbyteMessageEvent sourceEvent =
        new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, sourceStoppedAirbyteMessage, replicationContext);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    streamStatusTracker.track(sourceEvent);
    streamStatusTracker.track(destinationEvent);
@@ -888,8 +871,7 @@ void testTrackingOutOfOrderIncompleteStatus() throws ApiException {
  @ParameterizedTest
  @ValueSource(booleans = {true, false})
  void testForceCompletionRunning(final boolean isReset) throws ApiException {
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);

    final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP);
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
@@ -913,7 +895,7 @@ void testForceCompletionRunning(final boolean isReset) throws ApiException {
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -929,8 +911,7 @@ void testForceCompletionRunning(final boolean isReset) throws ApiException {
  @ParameterizedTest
  @ValueSource(booleans = {true, false})
  void testForceCompletionPartiallyComplete(final boolean isReset) throws ApiException {
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);

    final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP);
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
@@ -957,7 +938,7 @@ void testForceCompletionPartiallyComplete(final boolean isReset) throws ApiExcep
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -977,8 +958,7 @@ void testForceCompletionPartiallyComplete(final boolean isReset) throws ApiExcep
  @ParameterizedTest
  @ValueSource(booleans = {true, false})
  void testForceCompletionAlreadyIncomplete(final boolean isReset) throws ApiException {
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);

    final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP);
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
@@ -1010,7 +990,7 @@ void testForceCompletionAlreadyIncomplete(final boolean isReset) throws ApiExcep
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -1028,8 +1008,7 @@ void testForceCompletionAlreadyIncomplete(final boolean isReset) throws ApiExcep
  @ParameterizedTest
  @ValueSource(booleans = {true, false})
  void testForceCompletionAlreadyComplete(final boolean isReset) throws ApiException {
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);

    final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP);
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
@@ -1059,7 +1038,7 @@ void testForceCompletionAlreadyComplete(final boolean isReset) throws ApiExcepti
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -1080,11 +1059,10 @@ void testForceCompletionDifferentConnectionId(final boolean isReset) throws ApiE
    final Integer attempt = 2;
    final Long jobId = 2L;
    final UUID connectionId = UUID.randomUUID();
-    final ReplicationContext replicationContext1 =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext1 = getDefaultContext(isReset);
    final ReplicationContext replicationContext2 =
        new ReplicationContext(isReset, connectionId, UUID.randomUUID(), UUID.randomUUID(), jobId, attempt, WORKSPACE_ID, SOURCE_IMAGE,
-            DESTINATION_IMAGE);
+            DESTINATION_IMAGE, SOURCE_DEFINITION_ID, DESTINATION_DEFINITION_ID);

    final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP);
    final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP);
@@ -1108,7 +1086,8 @@ void testForceCompletionDifferentConnectionId(final boolean isReset) throws ApiE
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext1.workspaceId(), replicationContext1.connectionId(), replicationContext1.jobId(), replicationContext1.attempt());
+        replicationContext1.getWorkspaceId(), replicationContext1.getConnectionId(), replicationContext1.getJobId(),
+        replicationContext1.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID));
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -1124,8 +1103,7 @@ void testForceCompletionDifferentConnectionId(final boolean isReset) throws ApiE
  @ParameterizedTest
  @ValueSource(booleans = {true, false})
  void testForceCompletionHandleException(final boolean isReset) throws ApiException {
-    final ReplicationContext replicationContext =
-        new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE);
+    final ReplicationContext replicationContext = getDefaultContext(isReset);

    final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP);
    final AirbyteMessage forceCompletionMessage = createAirbyteMessage(new StreamDescriptor(), COMPLETE, TIMESTAMP);
@@ -1146,7 +1124,7 @@ void testForceCompletionHandleException(final boolean isReset) throws ApiExcepti
        .transitionedAt(TIMESTAMP.toMillis())
        .workspaceId(WORKSPACE_ID);
    final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(),
-        replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt());
+        replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt());

    when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead());
    when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi);
@@ -1167,4 +1145,18 @@ private AirbyteMessage createAirbyteMessage(final StreamDescriptor streamDescrip
    return new AirbyteMessage().withType(Type.TRACE).withTrace(traceMessage);
  }

+  private ReplicationContext getDefaultContext(boolean isReset) {
+    return new ReplicationContext(isReset,
+        CONNECTION_ID,
+        DESTINATION_ID,
+        SOURCE_ID,
+        JOB_ID,
+        ATTEMPT,
+        WORKSPACE_ID,
+        SOURCE_IMAGE,
+        DESTINATION_IMAGE,
+        SOURCE_DEFINITION_ID,
+        DESTINATION_DEFINITION_ID);
+  }
+
}
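Two mechanical refactors account for nearly every hunk in this file: `ReplicationContext`'s record-style accessors become getter-style (`workspaceId()` to `getWorkspaceId()`), and each hand-rolled constructor call collapses into the new `getDefaultContext(isReset)` helper, which appends the two definition IDs. The key construction after the rename, exactly as the hunks repeat it:

```java
// StreamStatusKey construction with the renamed accessors; the previous record-style
// accessor is noted in each trailing comment.
final StreamStatusKey streamStatusKey = new StreamStatusKey(
    streamDescriptor.getName(),
    streamDescriptor.getNamespace(),
    replicationContext.getWorkspaceId(),   // was workspaceId()
    replicationContext.getConnectionId(),  // was connectionId()
    replicationContext.getJobId(),         // was jobId()
    replicationContext.getAttempt());      // was attempt()
```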
SOURCE_DEFINITION_ID, + DESTINATION_DEFINITION_ID); + } + } diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/events/AirbyteControlMessageEventListenerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/events/AirbyteControlMessageEventListenerTest.java index 466b822f140..80b5c27c8ac 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/events/AirbyteControlMessageEventListenerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/events/AirbyteControlMessageEventListenerTest.java @@ -53,7 +53,7 @@ void testDestinationControlMessage() { when(airbyteControlMessage.getType()).thenReturn(AirbyteControlMessage.Type.CONNECTOR_CONFIG); when(airbyteMessage.getType()).thenReturn(Type.CONTROL); when(airbyteMessage.getControl()).thenReturn(airbyteControlMessage); - when(replicationContext.destinationId()).thenReturn(destinationId); + when(replicationContext.getDestinationId()).thenReturn(destinationId); final ReplicationAirbyteMessageEvent replicationAirbyteMessageEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, airbyteMessage, replicationContext); @@ -77,7 +77,7 @@ void testSourceControlMessage() { when(airbyteControlMessage.getType()).thenReturn(AirbyteControlMessage.Type.CONNECTOR_CONFIG); when(airbyteMessage.getType()).thenReturn(Type.CONTROL); when(airbyteMessage.getControl()).thenReturn(airbyteControlMessage); - when(replicationContext.sourceId()).thenReturn(sourceId); + when(replicationContext.getSourceId()).thenReturn(sourceId); final ReplicationAirbyteMessageEvent replicationAirbyteMessageEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, airbyteMessage, replicationContext); diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/sync/WorkloadApiWorkerTest.kt b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/sync/WorkloadApiWorkerTest.kt index 5c9472b62c8..18a77898dcb 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/sync/WorkloadApiWorkerTest.kt +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/sync/WorkloadApiWorkerTest.kt @@ -1,6 +1,7 @@ package io.airbyte.workers.internal.sync import io.airbyte.api.client.AirbyteApiClient +import io.airbyte.api.client.WorkloadApiClient import io.airbyte.api.client.generated.ConnectionApi import io.airbyte.api.client.model.generated.ConnectionRead import io.airbyte.api.client.model.generated.Geography @@ -40,6 +41,7 @@ internal class WorkloadApiWorkerTest { private var apiClient: AirbyteApiClient = mockk() private var connectionApi: ConnectionApi = mockk() private var workloadApi: WorkloadApi = mockk() + private var workloadApiClient: WorkloadApiClient = mockk() private var featureFlagClient: FeatureFlagClient = mockk() private var jobOutputDocStore: JobOutputDocStore = mockk() private lateinit var replicationActivityInput: ReplicationActivityInput @@ -50,6 +52,7 @@ internal class WorkloadApiWorkerTest { @BeforeEach fun beforeEach() { every { apiClient.connectionApi } returns connectionApi + every { workloadApiClient.workloadApi } returns workloadApi featureFlagClient = TestClient() jobRoot = Path.of("test", "path") replicationActivityInput = ReplicationActivityInput() @@ -58,7 +61,7 @@ WorkloadApiWorker( jobOutputDocStore, apiClient, - workloadApi, + workloadApiClient, workloadIdGenerator, replicationActivityInput, featureFlagClient, @@ -70,7 +73,6 @@ 
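Taken together, the hunks above switch every ReplicationContext call site from record-style accessors (workspaceId(), jobId(), attempt()) to bean-style getters and thread two new fields, the source and destination definition IDs, through every constructor call. A minimal sketch of the reshaped context, with the field list and order inferred from the getDefaultContext helper above; the real io.airbyte.workers.context.ReplicationContext may differ in exact shape:

```kotlin
import java.util.UUID

// Sketch only: field list and order inferred from the constructor calls in the
// hunks above; not the actual class definition.
data class ReplicationContext(
  val isReset: Boolean,
  val connectionId: UUID,
  val destinationId: UUID,
  val sourceId: UUID,
  val jobId: Long,
  val attempt: Int,
  val workspaceId: UUID,
  val sourceImage: String,
  val destinationImage: String,
  val sourceDefinitionId: UUID,      // new: lets consumers build definition-scoped feature-flag contexts
  val destinationDefinitionId: UUID, // new: same purpose for the destination side
)
```

A Kotlin data class exposes these properties to Java callers as getWorkspaceId(), getJobId(), and so on, which would explain the mechanical rename from workspaceId() to getWorkspaceId() at the Java call sites in these tests.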
internal class WorkloadApiWorkerTest { val jobId = 13L val attemptNumber = 37 val workloadId = "my-workload" - val expectedDocPrefix = "testNs/orchestrator-repl-job-$jobId-attempt-$attemptNumber" val expectedOutput = ReplicationOutput() .withReplicationAttemptSummary(ReplicationAttemptSummary().withStatus(StandardSyncSummary.ReplicationStatus.COMPLETED)) @@ -93,7 +95,6 @@ internal class WorkloadApiWorkerTest { val jobId = 13L val attemptNumber = 37 val workloadId = "my-workload" - val expectedDocPrefix = "testNs/orchestrator-repl-job-$jobId-attempt-$attemptNumber" val expectedOutput = ReplicationOutput() .withReplicationAttemptSummary(ReplicationAttemptSummary().withStatus(StandardSyncSummary.ReplicationStatus.COMPLETED)) @@ -117,7 +118,6 @@ internal class WorkloadApiWorkerTest { val jobId = 313L val attemptNumber = 37 val workloadId = "my-workload" - val expectedDocPrefix = "testNs/orchestrator-repl-job-$jobId-attempt-$attemptNumber" val expectedOutput = ReplicationOutput() .withReplicationAttemptSummary(ReplicationAttemptSummary().withStatus(StandardSyncSummary.ReplicationStatus.COMPLETED)) @@ -237,7 +237,7 @@ internal class WorkloadApiWorkerTest { assertThrows { workloadApiWorker.run(replicationInput, jobRoot) } } - fun initializeReplicationInput( + private fun initializeReplicationInput( jobId: Long, attemptNumber: Int, ) { @@ -262,7 +262,7 @@ internal class WorkloadApiWorkerTest { } } - fun mockWorkload( + private fun mockWorkload( status: WorkloadStatus, terminationSource: String? = null, terminationReason: String? = null, diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/syncpersistence/SyncPersistenceImplTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/syncpersistence/SyncPersistenceImplTest.java index 670e8ea10f3..05b0a9e6131 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/syncpersistence/SyncPersistenceImplTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/syncpersistence/SyncPersistenceImplTest.java @@ -24,7 +24,6 @@ import io.airbyte.api.client.generated.AttemptApi; import io.airbyte.api.client.generated.StateApi; import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; import io.airbyte.api.client.model.generated.ConnectionState; import io.airbyte.api.client.model.generated.ConnectionStateCreateOrUpdate; import io.airbyte.api.client.model.generated.ConnectionStateType; @@ -34,12 +33,9 @@ import io.airbyte.protocol.models.AirbyteGlobalState; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStream; import io.airbyte.protocol.models.AirbyteStreamState; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConfiguredAirbyteStream; import io.airbyte.protocol.models.StreamDescriptor; -import io.airbyte.protocol.models.SyncMode; import io.airbyte.workers.internal.bookkeeping.SyncStatsTracker; import io.airbyte.workers.internal.stateaggregator.StateAggregatorFactory; import java.util.List; @@ -107,7 +103,6 @@ void afterEach() throws Exception { void testPersistHappyPath() throws ApiException { final AirbyteStateMessage stateA1 = getStreamState("A", 1); syncPersistence.persist(connectionId, stateA1); - verify(stateApi).getState(any()); verify(executorService).scheduleAtFixedRate(any(Runnable.class), eq(0L), eq(flushPeriod), eq(TimeUnit.SECONDS)); 
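In the WorkloadApiWorkerTest hunks above, the worker now receives a WorkloadApiClient wrapper rather than a bare WorkloadApi, so the test stubs the wrapper and routes its workloadApi property back to the underlying mock. A minimal, self-contained sketch of that wiring; the two classes here are placeholders, not the generated Airbyte clients:

```kotlin
import io.mockk.every
import io.mockk.mockk

// Placeholder shapes: the real WorkloadApi / WorkloadApiClient are generated
// API clients; only the wrapper relationship matters here.
class WorkloadApi {
  fun status(): String = "ok"
}

class WorkloadApiClient(val workloadApi: WorkloadApi)

fun main() {
  val workloadApi: WorkloadApi = mockk()
  val workloadApiClient: WorkloadApiClient = mockk()

  // Mirrors beforeEach() above: the worker depends on the wrapper, while
  // individual calls are still stubbed on the underlying API mock.
  every { workloadApiClient.workloadApi } returns workloadApi
  every { workloadApi.status() } returns "ok"

  println(workloadApiClient.workloadApi.status()) // prints "ok"
}
```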
clearInvocations(executorService, stateApi); @@ -395,69 +390,6 @@ void testLegacyStatesAreGettingIntoTheScheduledFlushLogic() throws Exception { assertTrue(Jsons.serialize(captor.getValue()).contains("myOtherState2")); } - @Test - void testLegacyStateMigrationToStreamAreOnlyFlushedAtTheEnd() throws Exception { - // Migration is defined by current state returned from the API is LEGACY, and we are trying to - // persist a non LEGACY state - when(stateApi.getState(new ConnectionIdRequestBody().connectionId(connectionId))) - .thenReturn(new ConnectionState().state(Jsons.deserialize("{\"state\":\"some_state\"}")).stateType(ConnectionStateType.LEGACY)); - - final AirbyteStateMessage message = getStreamState("migration1", 12); - syncPersistence.persist(connectionId, message); - verify(stateApi).getState(new ConnectionIdRequestBody().connectionId(connectionId)); - verify(executorService, never()).scheduleAtFixedRate(any(), anyLong(), anyLong(), any()); - - reset(stateApi); - - // Since we're delaying the flush, executorService should not have been called - // We also want to make sure we are not calling getState every time - final AirbyteStateMessage otherMessage = getStreamState("migration2", 10); - syncPersistence.persist(connectionId, otherMessage); - verify(stateApi, never()).getState(new ConnectionIdRequestBody().connectionId(connectionId)); - verify(executorService, never()).scheduleAtFixedRate(any(), anyLong(), anyLong(), any()); - - when(executorService.awaitTermination(anyLong(), any())).thenReturn(true); - when(catalog.getStreams()).thenReturn(List.of( - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("migration1")).withSyncMode(SyncMode.INCREMENTAL), - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("migration2")).withSyncMode(SyncMode.INCREMENTAL))); - syncPersistence.close(); - verifyStateUpdateApiCall(List.of(message, otherMessage)); - } - - @Test - void testLegacyStateMigrationToGlobalGettingIntoTheScheduledFlushLogic() throws ApiException, InterruptedException { - // Migration is defined by current state returned from the API is LEGACY, and we are trying to - // persist a non LEGACY state - when(stateApi.getState(new ConnectionIdRequestBody().connectionId(connectionId))) - .thenReturn(new ConnectionState().state(Jsons.deserialize("{\"state\":\"some_state\"}")).stateType(ConnectionStateType.LEGACY)); - - final AirbyteStateMessage message = getGlobalState(14); - syncPersistence.persist(connectionId, message); - verify(stateApi).getState(new ConnectionIdRequestBody().connectionId(connectionId)); - verify(executorService).scheduleAtFixedRate(any(), anyLong(), anyLong(), any()); - } - - @Test - void testDoNotStartThreadUntilStateCheckSucceeds() throws ApiException { - when(stateApi.getState(any())) - .thenThrow(new ApiException()) - .thenReturn(null); - - final AirbyteStateMessage s1 = getStreamState("stream 1", 9); - syncPersistence.persist(connectionId, s1); - // First getState failed, we should not have started the thread or persisted states - verify(executorService, never()).scheduleAtFixedRate(any(), anyLong(), anyLong(), any()); - verify(stateApi, never()).createOrUpdateState(any()); - - final AirbyteStateMessage s2 = getStreamState("stream 2", 19); - syncPersistence.persist(connectionId, s2); - verify(executorService).scheduleAtFixedRate(any(), anyLong(), anyLong(), any()); - - // Since the first state check failed, we should be flushing both states on the first flush - actualFlushMethod.getValue().run(); - 
verifyStateUpdateApiCall(List.of(s1, s2)); - } - @Test void testSyncStatsTrackerWrapping() { syncPersistence.updateStats(new AirbyteRecordMessage()); diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/utils/ConfigReplacerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/utils/ConfigReplacerTest.java index 9aedbe46620..ad15bb7d262 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/utils/ConfigReplacerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/utils/ConfigReplacerTest.java @@ -30,10 +30,11 @@ class ConfigReplacerTest { @SuppressWarnings("PMD.AvoidUsingHardCodedIP") void getAllowedHostsGeneralTest() throws IOException { final AllowedHosts allowedHosts = new AllowedHosts(); - final List hosts = new ArrayList(); + final List hosts = new ArrayList<>(); hosts.add("localhost"); hosts.add("static-site.com"); hosts.add("${host}"); + hosts.add("${host_with_extras}"); hosts.add("${number}"); hosts.add("${subdomain}.vendor.com"); hosts.add("${tunnel_method.tunnel_host}"); @@ -43,6 +44,7 @@ void getAllowedHostsGeneralTest() throws IOException { expected.add("localhost"); expected.add("static-site.com"); expected.add("foo.com"); + expected.add("protected-site.com"); expected.add("123"); expected.add("account.vendor.com"); expected.add("1.2.3.4"); @@ -50,6 +52,7 @@ void getAllowedHostsGeneralTest() throws IOException { final String configJson = "{\"host\": \"foo.com\", " + + "\"host_with_extras\": \"ftp://user:password@protected-site.com/some-route\", " + "\"number\": 123, " + "\"subdomain\": \"account\", " + "\"password\": \"abc123\", " @@ -57,6 +60,7 @@ void getAllowedHostsGeneralTest() throws IOException { final JsonNode config = mapper.readValue(configJson, JsonNode.class); final AllowedHosts response = replacer.getAllowedHosts(allowedHosts, config); + System.out.println(response.getHosts()); assertThat(response.getHosts()).isEqualTo(expected); } @@ -115,4 +119,14 @@ void alwaysAllowedHostsListIsImmutable() { } } + @Test + void sanitization() { + assertThat(replacer.sanitize("basic.com")).isEqualTo("basic.com"); + assertThat(replacer.sanitize("http://basic.com")).isEqualTo("basic.com"); + assertThat(replacer.sanitize("http://user@basic.com")).isEqualTo("basic.com"); + assertThat(replacer.sanitize("http://user:password@basic.com")).isEqualTo("basic.com"); + assertThat(replacer.sanitize("http://user:password@basic.com/some/path")).isEqualTo("basic.com"); + assertThat(replacer.sanitize("mongo+srv://user:password@basic.com/some/path")).isEqualTo("basic.com"); + } + } diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTrackerTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTrackerTest.kt new file mode 100644 index 00000000000..bce1d2972b6 --- /dev/null +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTrackerTest.kt @@ -0,0 +1,168 @@ +package io.airbyte.workers.helper + +import io.airbyte.featureflag.ActivateRefreshes +import io.airbyte.featureflag.Connection +import io.airbyte.featureflag.DestinationDefinition +import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.Multi +import io.airbyte.featureflag.SourceDefinition +import io.airbyte.featureflag.Workspace +import io.airbyte.protocol.models.AirbyteMessage +import io.airbyte.protocol.models.AirbyteStream +import io.airbyte.protocol.models.AirbyteStreamStatusTraceMessage +import 
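The new sanitization test above pins down what the replacer does to connection-string-like values before they reach the allowed-hosts list: scheme, credentials, port, and path are stripped so only the hostname survives (ftp://user:password@protected-site.com/some-route becomes protected-site.com). One plausible implementation, sketched with java.net.URI; the real ConfigReplacer.sanitize may be written differently:

```kotlin
import java.net.URI

// Sketch of a sanitize step satisfying the assertions in ConfigReplacerTest;
// not the actual ConfigReplacer implementation.
fun sanitize(input: String): String {
  // Bare hosts like "basic.com" have no scheme, so add a dummy one to make
  // java.net.URI parse them with a host component.
  val candidate = if ("://" in input) input else "x://$input"
  return URI(candidate).host ?: input
}

fun main() {
  listOf(
    "basic.com",
    "http://user:password@basic.com/some/path",
    "mongo+srv://user:password@basic.com/some/path",
    "ftp://user:password@protected-site.com/some-route",
  ).forEach { println(sanitize(it)) } // basic.com x3, then protected-site.com
}
```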
io.airbyte.protocol.models.AirbyteTraceMessage +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.ConfiguredAirbyteStream +import io.airbyte.protocol.models.StreamDescriptor +import io.airbyte.workers.context.ReplicationContext +import io.airbyte.workers.internal.AirbyteMapper +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import java.time.Clock +import java.util.UUID + +internal class StreamStatusCompletionTrackerTest { + private val featureFlagClient: FeatureFlagClient = mockk() + private val clock: Clock = mockk() + private val mapper: AirbyteMapper = mockk() + + private val streamStatusCompletionTracker = StreamStatusCompletionTracker(featureFlagClient, clock) + + private val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + listOf( + ConfiguredAirbyteStream().withStream(AirbyteStream().withName("name1")), + ConfiguredAirbyteStream().withStream(AirbyteStream().withName("name2").withNamespace("namespace2")), + ), + ) + + private val connectionId = UUID.randomUUID() + private val workspaceId = UUID.randomUUID() + private val sourceDefinitionId = UUID.randomUUID() + private val destinationDefinitionId = UUID.randomUUID() + private val featureFlagContext = + Multi( + listOf( + Workspace(workspaceId), + Connection(connectionId), + SourceDefinition(sourceDefinitionId), + DestinationDefinition(destinationDefinitionId), + ), + ) + private val replicationContext = + ReplicationContext( + false, + connectionId, + UUID.randomUUID(), + UUID.randomUUID(), + 0, + 0, + workspaceId, + "", + "", + sourceDefinitionId, + destinationDefinitionId, + ) + + @BeforeEach + fun init() { + every { clock.millis() } returns 1 + every { mapper.mapMessage(any()) } returnsArgument 0 + } + + @Test + fun `test that we get all the streams if the exit code is 0 and no stream status is sent`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, featureFlagContext) } returns true + + streamStatusCompletionTracker.startTracking(catalog, replicationContext) + val result = streamStatusCompletionTracker.finalize(0, mapper) + + assertEquals( + listOf( + getStreamStatusCompletedMessage("name1"), + getStreamStatusCompletedMessage("name2", "namespace2"), + ), + result, + ) + } + + @Test + fun `test that we get all the streams if the exit code is 0 and some stream status is sent`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, featureFlagContext) } returns true + + streamStatusCompletionTracker.startTracking(catalog, replicationContext) + streamStatusCompletionTracker.track(getStreamStatusCompletedMessage("name1").trace.streamStatus) + val result = streamStatusCompletionTracker.finalize(0, mapper) + + assertEquals( + listOf( + getStreamStatusCompletedMessage("name1"), + getStreamStatusCompletedMessage("name2", "namespace2"), + ), + result, + ) + } + + @Test + fun `test that we get no streams if the exit code is 1 and no stream status is sent`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, featureFlagContext) } returns true + + streamStatusCompletionTracker.startTracking(catalog, replicationContext) + val result = streamStatusCompletionTracker.finalize(1, mapper) + + assertEquals(listOf(), result) + } + + @Test + fun `test that we get the status of the streams that send a status if the exit code is 1 and no stream status is sent`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, featureFlagContext) } returns true + + streamStatusCompletionTracker.startTracking(catalog, replicationContext) + streamStatusCompletionTracker.track(getStreamStatusCompletedMessage("name1").trace.streamStatus) + val result = streamStatusCompletionTracker.finalize(1, mapper) + + assertEquals( + listOf( + getStreamStatusCompletedMessage("name1"), + ), + result, + ) + } + + @Test + fun `test that no message is sent if the flag is false`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, featureFlagContext) } returns false + + streamStatusCompletionTracker.startTracking(catalog, replicationContext) + streamStatusCompletionTracker.track(getStreamStatusCompletedMessage("name1").trace.streamStatus) + val result = streamStatusCompletionTracker.finalize(0, mapper) + + assertEquals(listOf(), result) + } + + private fun getStreamStatusCompletedMessage( + name: String, + namespace: String? = null, + ): AirbyteMessage { + return AirbyteMessage() + .withType(AirbyteMessage.Type.TRACE) + .withTrace( + AirbyteTraceMessage() + .withType(AirbyteTraceMessage.Type.STREAM_STATUS) + .withEmittedAt(1.0) + .withStreamStatus( + AirbyteStreamStatusTraceMessage() + .withStatus(AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE) + .withStreamDescriptor( + StreamDescriptor() + .withName(name) + .withNamespace(namespace), + ), + ), + ) + } +} diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/internal/AnalyticsMessageTrackerTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/internal/AnalyticsMessageTrackerTest.kt index 8e0ba931add..c7cb98f99c1 100644 --- a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/internal/AnalyticsMessageTrackerTest.kt +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/internal/AnalyticsMessageTrackerTest.kt @@ -26,6 +26,8 @@ class AnalyticsMessageTrackerTest { private lateinit var trackingClient: TrackingClient private lateinit var analyticsMessageTracker: AnalyticsMessageTracker private lateinit var ctx: ReplicationContext + private val sourceDefinitionId = UUID.randomUUID() + private val destinationDefinitionId = UUID.randomUUID() @BeforeEach fun setUp() { @@ -35,7 +37,7 @@ class AnalyticsMessageTrackerTest { ctx = ReplicationContext( false, UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID(), - 1, 1, UUID.randomUUID(), SOURCE_IMAGE, DESTINATION_IMAGE, + 1, 1, UUID.randomUUID(), SOURCE_IMAGE, DESTINATION_IMAGE, sourceDefinitionId, destinationDefinitionId, ) analyticsMessageTracker.ctx = ctx } diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/payload/ActivityPayloadStorageClientTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/payload/ActivityPayloadStorageClientTest.kt deleted file mode 100644 index fdf9ae1815a..00000000000 --- a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/payload/ActivityPayloadStorageClientTest.kt +++ /dev/null @@ -1,94 +0,0 @@ -package io.airbyte.workers.payload - -import io.airbyte.commons.json.JsonSerde -import io.airbyte.config.StandardSyncOutput -import io.airbyte.metrics.lib.MetricClient -import io.airbyte.workers.models.RefreshSchemaActivityOutput -import io.airbyte.workers.storage.StorageClient -import io.mockk.every -import io.mockk.impl.annotations.MockK -import io.mockk.junit5.MockKExtension -import io.mockk.verify -import org.junit.jupiter.api.Assertions -import org.junit.jupiter.api.BeforeEach -import org.junit.jupiter.api.Test -import org.junit.jupiter.api.extension.ExtendWith - -@ExtendWith(MockKExtension::class) -class 
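The tracker tests above fix a simple contract: with ActivateRefreshes enabled, a clean source exit (code 0) yields a COMPLETE status for every stream in the catalog, a failed exit only forwards the streams that explicitly reported COMPLETE, and a disabled flag yields nothing. The decision logic, condensed into plain Kotlin with stream names standing in for the protocol messages (a sketch, not the real StreamStatusCompletionTracker):

```kotlin
// Condensed sketch of the contract exercised above; the real tracker consumes a
// ConfiguredAirbyteCatalog and emits AirbyteMessage TRACE messages, not strings.
class CompletionSketch(private val flagEnabled: Boolean) {
  private val catalogStreams = mutableListOf<String>()
  private val completedStreams = mutableSetOf<String>()

  fun startTracking(streams: List<String>) {
    if (flagEnabled) catalogStreams += streams
  }

  fun track(stream: String) {
    if (flagEnabled) completedStreams += stream
  }

  fun finalize(exitCode: Int): List<String> =
    when {
      !flagEnabled -> emptyList()                 // flag off: emit nothing
      exitCode == 0 -> catalogStreams.toList()    // clean exit: complete the whole catalog
      else -> catalogStreams.filter { it in completedStreams } // failure: only confirmed streams
    }
}
```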
ActivityPayloadStorageClientTest { - @MockK - private lateinit var metricClient: MetricClient - - @MockK - private lateinit var storageClientRaw: StorageClient - - @MockK - private lateinit var serde: JsonSerde - - private lateinit var client: ActivityPayloadStorageClient - - @BeforeEach - fun setup() { - client = ActivityPayloadStorageClient(storageClientRaw, serde, metricClient) - - every { metricClient.count(any(), any()) } returns Unit - - every { storageClientRaw.write(any(), any()) } returns Unit - - every { storageClientRaw.read(any()) } returns "" - } - - @Test - fun `readJSON reads json and unmarshalls to specified class for a given uri`() { - val syncOutput = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") - val refreshOutput = RefreshSchemaActivityOutput() - - every { - storageClientRaw.read("sync-output") - } returns "serialized-sync-output" - - every { - serde.deserialize("serialized-sync-output", StandardSyncOutput::class.java) - } returns syncOutput - - val result1 = client.readJSON(ActivityPayloadURI("sync-output")) - - Assertions.assertEquals(syncOutput, result1) - - every { - storageClientRaw.read("refresh-output") - } returns "serialized-refresh-output" - - every { - serde.deserialize("serialized-refresh-output", RefreshSchemaActivityOutput::class.java) - } returns refreshOutput - - val result2 = client.readJSON(ActivityPayloadURI("refresh-output")) - - Assertions.assertEquals(refreshOutput, result2) - } - - @Test - fun `readJSON handles null`() { - every { - storageClientRaw.read("sync-output") - } returns null - - val result = client.readJSON(ActivityPayloadURI("sync-output")) - - Assertions.assertNull(result) - } - - @Test - fun `writeJSON serializes to json and writes to a given uri`() { - val syncOutput = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") - - every { - serde.serialize(syncOutput) - } returns "serialized-sync-output" - - client.writeJSON(ActivityPayloadURI("sync-output"), syncOutput) - - verify { storageClientRaw.write("sync-output", "serialized-sync-output") } - } -} diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadStorageClientTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadStorageClientTest.kt new file mode 100644 index 00000000000..f91871ed1aa --- /dev/null +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadStorageClientTest.kt @@ -0,0 +1,166 @@ +package io.airbyte.workers.storage.activities + +import io.airbyte.commons.json.JsonSerde +import io.airbyte.config.StandardSyncOutput +import io.airbyte.metrics.lib.MetricClient +import io.airbyte.metrics.lib.OssMetricsRegistry +import io.airbyte.workers.models.RefreshSchemaActivityOutput +import io.airbyte.workers.storage.StorageClient +import io.mockk.every +import io.mockk.impl.annotations.MockK +import io.mockk.junit5.MockKExtension +import io.mockk.verify +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.extension.ExtendWith + +@ExtendWith(MockKExtension::class) +class ActivityPayloadStorageClientTest { + @MockK + private lateinit var metricClient: MetricClient + + @MockK + private lateinit var storageClientRaw: StorageClient + + @MockK + private lateinit var serde: JsonSerde + + private lateinit var client: ActivityPayloadStorageClient + + private var comparator = NaiveEqualityComparator() + + @BeforeEach + 
fun setup() { + client = ActivityPayloadStorageClient(storageClientRaw, serde, metricClient) + + every { metricClient.count(any(), any(), *anyVararg()) } returns Unit + + every { storageClientRaw.write(any(), any()) } returns Unit + + every { storageClientRaw.read(any()) } returns "" + } + + @Test + fun `readJSON reads json and unmarshalls to specified class for a given uri`() { + val syncOutput = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") + val refreshOutput = RefreshSchemaActivityOutput() + + every { + storageClientRaw.read("sync-output") + } returns "serialized-sync-output" + + every { + serde.deserialize("serialized-sync-output", StandardSyncOutput::class.java) + } returns syncOutput + + val result1 = client.readJSON(ActivityPayloadURI("sync-output")) + + Assertions.assertEquals(syncOutput, result1) + + every { + storageClientRaw.read("refresh-output") + } returns "serialized-refresh-output" + + every { + serde.deserialize("serialized-refresh-output", RefreshSchemaActivityOutput::class.java) + } returns refreshOutput + + val result2 = client.readJSON(ActivityPayloadURI("refresh-output")) + + Assertions.assertEquals(refreshOutput, result2) + } + + @Test + fun `readJSON handles null`() { + every { + storageClientRaw.read("sync-output") + } returns null + + val result = client.readJSON(ActivityPayloadURI("sync-output")) + + Assertions.assertNull(result) + } + + @Test + fun `writeJSON serializes to json and writes to a given uri`() { + val syncOutput = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") + + every { + serde.serialize(syncOutput) + } returns "serialized-sync-output" + + client.writeJSON(ActivityPayloadURI("sync-output"), syncOutput) + + verify { storageClientRaw.write("sync-output", "serialized-sync-output") } + } + + @Test + fun `validateOutput records a result for a match`() { + val uri = ActivityPayloadURI("id", "version") + val syncOutput = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") + + every { serde.deserialize(any(), StandardSyncOutput::class.java) } returns syncOutput + + client.validateOutput(uri, StandardSyncOutput::class.java, syncOutput, comparator, listOf()) + + verify { + metricClient.count(OssMetricsRegistry.PAYLOAD_VALIDATION_RESULT, 1, *anyVararg()) + } + } + + @Test + fun `validateOutput records a result for a mismatch`() { + val uri = ActivityPayloadURI("id", "version") + val syncOutput1 = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") + val syncOutput2 = StandardSyncOutput().withAdditionalProperty("some", "unique-value-2") + + every { serde.deserialize(any(), StandardSyncOutput::class.java) } returns syncOutput2 + + client.validateOutput(uri, StandardSyncOutput::class.java, syncOutput1, comparator, listOf()) + + verify { + metricClient.count(OssMetricsRegistry.PAYLOAD_VALIDATION_RESULT, 1, *anyVararg()) + } + } + + @Test + fun `validateOutput records a result for a read miss`() { + val uri = ActivityPayloadURI("id", "version") + val syncOutput = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") + + every { storageClientRaw.read(uri.id) } returns null + + client.validateOutput(uri, StandardSyncOutput::class.java, syncOutput, comparator, listOf()) + + verify { + metricClient.count(OssMetricsRegistry.PAYLOAD_VALIDATION_RESULT, 1, *anyVararg()) + } + } + + @Test + fun `validateOutput records read failure for null uri`() { + val uri = null + val syncOutput = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") + + 
client.validateOutput(uri, StandardSyncOutput::class.java, syncOutput, comparator, listOf()) + + verify { + metricClient.count(OssMetricsRegistry.PAYLOAD_FAILURE_READ, 1, *anyVararg()) + } + } + + @Test + fun `validateOutput records read failure on client read exception`() { + val uri = ActivityPayloadURI("id", "version") + val syncOutput = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") + + every { storageClientRaw.read(uri.id) } throws RuntimeException("yikes") + + client.validateOutput(uri, StandardSyncOutput::class.java, syncOutput, comparator, listOf()) + + verify { + metricClient.count(OssMetricsRegistry.PAYLOAD_FAILURE_READ, 1, *anyVararg()) + } + } +} diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/activities/OutputStorageClientTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/activities/OutputStorageClientTest.kt new file mode 100644 index 00000000000..d540bdfe7e2 --- /dev/null +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/activities/OutputStorageClientTest.kt @@ -0,0 +1,67 @@ +package io.airbyte.workers.storage.activities + +import io.airbyte.metrics.lib.MetricClient +import io.airbyte.workers.storage.activities.OutputStorageClientTest.Fixtures.ATTEMPT_NUMBER +import io.airbyte.workers.storage.activities.OutputStorageClientTest.Fixtures.CONNECTION_ID +import io.airbyte.workers.storage.activities.OutputStorageClientTest.Fixtures.JOB_ID +import io.airbyte.workers.storage.activities.OutputStorageClientTest.Fixtures.TEST_PAYLOAD_NAME +import io.mockk.every +import io.mockk.impl.annotations.MockK +import io.mockk.junit5.MockKExtension +import io.mockk.verify +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertDoesNotThrow +import org.junit.jupiter.api.extension.ExtendWith +import java.util.UUID + +@ExtendWith(MockKExtension::class) +class OutputStorageClientTest { + @MockK + private lateinit var metricClient: MetricClient + + @MockK + private lateinit var storageClient: ActivityPayloadStorageClient + + private lateinit var client: OutputStorageClient + + class TestClass(value1: String, value2: Long) + + @BeforeEach + fun setup() { + client = OutputStorageClient(storageClient, metricClient, TEST_PAYLOAD_NAME, TestClass::class.java) + + every { metricClient.count(any(), any(), *anyVararg()) } returns Unit + } + + @Test + fun `persist writes json to storage`() { + val obj = TestClass("test", 123) + client.persist(obj, CONNECTION_ID, JOB_ID, ATTEMPT_NUMBER, arrayOf()) + + verify(exactly = 1) { storageClient.writeJSON(any(), obj) } + } + + @Test + fun `persist short circuits if input null`() { + client.persist(null, CONNECTION_ID, JOB_ID, ATTEMPT_NUMBER, arrayOf()) + + verify(exactly = 0) { storageClient.writeJSON(any(), any()) } + } + + @Test + fun `persist swallows exceptions`() { + val obj = TestClass("test", 123) + + every { storageClient.writeJSON(any(), any()) } throws Exception("bang") + + assertDoesNotThrow { client.persist(obj, CONNECTION_ID, JOB_ID, ATTEMPT_NUMBER, arrayOf()) } + } + + object Fixtures { + const val TEST_PAYLOAD_NAME = "test-payload" + val CONNECTION_ID: UUID = UUID.randomUUID() + const val JOB_ID = 9987124L + const val ATTEMPT_NUMBER = 2 + } +} diff --git a/airbyte-commons/build.gradle.kts b/airbyte-commons/build.gradle.kts index 34d5c987983..c9a7c3ca3ea 100644 --- a/airbyte-commons/build.gradle.kts +++ b/airbyte-commons/build.gradle.kts @@ -1,56 +1,58 @@ import 
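OutputStorageClientTest above pins three persist behaviors: a payload is serialized and written, a null payload short-circuits, and storage failures are swallowed (with a metric recorded) so archiving a payload can never fail the surrounding activity. A sketch of that error-isolation pattern; the id layout and hook names are illustrative, not the real implementation:

```kotlin
import java.util.UUID

// Sketch of the persist contract from OutputStorageClientTest. The storage and
// failure hooks are plain lambdas here; the real client wraps
// ActivityPayloadStorageClient and MetricClient.
class OutputStorageSketch<T : Any>(
  private val write: (id: String, payload: T) -> Unit,
  private val recordFailure: (detail: String) -> Unit,
  private val payloadName: String,
) {
  fun persist(payload: T?, connectionId: UUID, jobId: Long, attempt: Int) {
    if (payload == null) return // nothing to archive, mirroring the short-circuit test

    val id = "$connectionId/$jobId/$attempt/$payloadName" // illustrative layout
    try {
      write(id, payload)
    } catch (e: Exception) {
      // Swallow and count: payload archiving must never fail the sync itself.
      recordFailure("failed writing $payloadName: ${e.message}")
    }
  }
}
```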
de.undercouch.gradle.tasks.download.Download plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - alias(libs.plugins.de.undercouch.download) - kotlin("jvm") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + alias(libs.plugins.de.undercouch.download) + kotlin("jvm") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(libs.log4j.core) - - implementation(libs.bundles.jackson) - implementation(libs.guava) - implementation(libs.bundles.slf4j) - implementation(libs.commons.io) - implementation(libs.bundles.apache) - implementation(libs.google.cloud.storage) - implementation(libs.bundles.log4j) - implementation(libs.airbyte.protocol) - - // this dependency is an exception to the above rule because it is only used INTERNALLY to the commons library. - implementation(libs.json.path) - - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) - - testRuntimeOnly(libs.junit.jupiter.engine) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(libs.log4j.core) + + implementation(libs.bundles.jackson) + implementation(libs.guava) + implementation(libs.bundles.slf4j) + implementation(libs.commons.io) + implementation(libs.bundles.apache) + implementation(libs.google.cloud.storage) + implementation(libs.bundles.log4j) + implementation(libs.airbyte.protocol) + + // this dependency is an exception to the above rule because it is only used INTERNALLY to the commons library. + implementation(libs.json.path) + + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) + + testRuntimeOnly(libs.junit.jupiter.engine) } airbyte { - spotless { - excludes = listOf("src/main/resources/seed/specs_secrets_mask.yaml") - } + spotless { + excludes = listOf("src/main/resources/seed/specs_secrets_mask.yaml") + } } val downloadSpecSecretMask = tasks.register<Download>("downloadSpecSecretMask") { - src("https://connectors.airbyte.com/files/registries/v0/specs_secrets_mask.yaml") - dest(File(projectDir, "src/main/resources/seed/specs_secrets_mask.yaml")) - overwrite(true) - onlyIfModified(true) + src("https://connectors.airbyte.com/files/registries/v0/specs_secrets_mask.yaml") + dest(File(projectDir, "src/main/resources/seed/specs_secrets_mask.yaml")) + overwrite(true) + onlyIfModified(true) } tasks.named("processResources") { - dependsOn(downloadSpecSecretMask) + dependsOn(downloadSpecSecretMask) } tasks.named<Test>("test") { - environment(mapOf( - "Z_TESTING_PURPOSES_ONLY_1" to "value-defined", - "Z_TESTING_PURPOSES_ONLY_2" to " ", - )) + environment( + mapOf( + "Z_TESTING_PURPOSES_ONLY_1" to "value-defined", + "Z_TESTING_PURPOSES_ONLY_2" to " ", + ) + ) } diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonSerde.kt b/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonSerde.kt index 2904b36181c..f3d1c6583ec 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonSerde.kt +++ b/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonSerde.kt @@ -1,13 +1,10 @@ package io.airbyte.commons.json -import org.elasticsearch.common.inject.Singleton - /** * Serde: _Ser_ialization + _de_serialization * * Singleton wrapper around Jsons for use with DI and to allow testability via mocking. Add methods here as prudent. */ -@Singleton class JsonSerde { fun <T> serialize(obj: T): String { return Jsons.serialize(obj) diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/logging/MaskedDataInterceptor.java b/airbyte-commons/src/main/java/io/airbyte/commons/logging/MaskedDataInterceptor.java index 5039154dc47..5e748db126e 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/logging/MaskedDataInterceptor.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/logging/MaskedDataInterceptor.java @@ -44,13 +44,51 @@ public class MaskedDataInterceptor implements RewritePolicy { protected static final Logger logger = StatusLogger.getLogger(); + /** + * Regular expression pattern flag that enables case-insensitive matching. + */ + private static final String CASE_INSENSITIVE_FLAG = "(?i)"; + // This is a little circuitous, but it gets the regex syntax highlighting in intelliJ to work. private static final String DESTINATION_ERROR_PREFIX = Pattern.compile("^(?<destinationPrefix>.*destination.*\\s+>\\s+ERROR.+)").pattern(); + /** + * Regular expression replacement pattern for applying the mask to PII log messages. + */ + private static final String KNOWN_PII_LOG_MESSAGE_REPLACEMENT_PATTERN = + "${destinationPrefix}${messagePrefix}" + AirbyteSecretConstants.SECRETS_MASK; + + /** + * Delimiter used as part of the regular expression pattern for applying the mask to property + * values. + */ + private static final String PROPERTY_MATCHING_PATTERN_DELIMITER = "|"; + + /** + * Regular expression pattern prefix for applying the mask to property values. + */ + private static final String PROPERTY_MATCHING_PATTERN_PREFIX = "\"("; + + /** + * Regular expression pattern suffix for applying the mask to property values. + */ + private static final String PROPERTY_MATCHING_PATTERN_SUFFIX = ")\"\\s*:\\s*(\"(?:[^\"\\\\]|\\\\.)*\"|\\[[^]\\[]*]|\\d+)"; + + /** + * Name of the key in the mask YAML file that contains the list of maskable properties. + */ + private static final String PROPERTIES_KEY = "properties"; + + /** + * Regular expression pattern used to replace a key/value property with a masked value while + * maintaining the property key/name. + */ + private static final String REPLACEMENT_PATTERN = "\"$1\":\"" + AirbyteSecretConstants.SECRETS_MASK + "\""; + /** * The pattern used to determine if a message contains sensitive data. */ - private final Optional<String> pattern; + private final Optional<Pattern> pattern; private static final List<Pattern> KNOWN_PII_PATTERNS = List.of( Pattern.compile(DESTINATION_ERROR_PREFIX + "(?<messagePrefix>Received\\s+invalid\\s+message:)(.+)$"), @@ -92,7 +130,7 @@ public LogEvent rewrite(final LogEvent source) { */ private String applyMask(final String message) { final String piiScrubbedMessage = removeKnownPii(message); - return pattern.map(s -> piiScrubbedMessage.replaceAll(s, "\"$1\":\"" + AirbyteSecretConstants.SECRETS_MASK + "\"")) + return pattern.map(p -> p.matcher(piiScrubbedMessage).replaceAll(REPLACEMENT_PATTERN)) .orElse(piiScrubbedMessage); } @@ -105,7 +143,7 @@ private String applyMask(final String message) { private static String removeKnownPii(final String message) { return KNOWN_PII_PATTERNS.stream() .reduce(message, (msg, pattern) -> pattern.matcher(msg).replaceAll( - "${destinationPrefix}${messagePrefix}" + AirbyteSecretConstants.SECRETS_MASK), (a, b) -> a); + KNOWN_PII_LOG_MESSAGE_REPLACEMENT_PATTERN), (a, b) -> a); } /** @@ -119,7 +157,7 @@ private Set<String> getMaskableProperties(final String specMaskFile) { try { final String maskFileContents = IOUtils.toString(getClass().getResourceAsStream(specMaskFile), Charset.defaultCharset()); final Map<String, Set<String>> properties = Jsons.object(Yamls.deserialize(maskFileContents), new TypeReference<>() {}); - return properties.getOrDefault("properties", Set.of()); + return properties.getOrDefault(PROPERTIES_KEY, Set.of()); } catch (final Exception e) { logger.error("Unable to load mask data from '{}': {}.", specMaskFile, e.getMessage()); return Set.of(); @@ -132,9 +170,9 @@ private Set<String> getMaskableProperties(final String specMaskFile) { * @param specMaskFile The spec mask file. * @return The regular expression pattern used to find maskable properties. */ - private Optional<String> buildPattern(final String specMaskFile) { final Set<String> maskableProperties = getMaskableProperties(specMaskFile); - return !maskableProperties.isEmpty() ? Optional.of(generatePattern(maskableProperties)) : Optional.empty(); + private Optional<Pattern> buildPattern(final String specMaskFile) { final Set<String> maskableProperties = getMaskableProperties(specMaskFile); + return !maskableProperties.isEmpty() ? Optional.of(Pattern.compile(generatePattern(maskableProperties))) : Optional.empty(); } /** @@ -145,10 +183,10 @@ private Optional<String> buildPattern(final String specMaskFile) { */ private String generatePattern(final Set<String> properties) { final StringBuilder builder = new StringBuilder(); - builder.append("(?i)"); // case insensitive - builder.append("\"("); - builder.append(properties.stream().collect(Collectors.joining("|"))); - builder.append(")\"\\s*:\\s*(\"(?:[^\"\\\\]|\\\\.)*\"|\\[[^]\\[]*]|\\d+)"); + builder.append(CASE_INSENSITIVE_FLAG); + builder.append(PROPERTY_MATCHING_PATTERN_PREFIX); + builder.append(properties.stream().collect(Collectors.joining(PROPERTY_MATCHING_PATTERN_DELIMITER))); + builder.append(PROPERTY_MATCHING_PATTERN_SUFFIX); return builder.toString(); } diff --git a/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt b/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt index b24732d7a82..017ec4370bd 100644 --- a/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt +++ b/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt @@ -71,6 +71,8 @@ enum class EnvVar { OTEL_COLLECTOR_ENDPOINT, + PATH_TO_CONNECTORS, + PUBLISH_METRICS, REMOTE_DATAPLANE_SERVICEACCOUNTS, diff --git a/airbyte-config/config-models/build.gradle.kts b/airbyte-config/config-models/build.gradle.kts index 87d850b99d9..008e84125da 100644 --- a/airbyte-config/config-models/build.gradle.kts +++ b/airbyte-config/config-models/build.gradle.kts @@ -2,93 +2,93 @@ import org.gradle.api.tasks.testing.logging.TestLogEvent import org.jsonschema2pojo.SourceType plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - id("com.github.eirnym.js2p") - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + id("com.github.eirnym.js2p") + kotlin("jvm") + kotlin("kapt") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(libs.bundles.micronaut.annotation.processor) - - kapt(libs.bundles.micronaut.annotation.processor) - - api(libs.bundles.micronaut.annotation) - - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-commons")) - - implementation(platform(libs.fasterxml)) - implementation(libs.bundles.jackson) - implementation(libs.spotbugs.annotations) - implementation(libs.guava) - 
implementation(libs.micronaut.kotlin.extension.functions) + implementation(libs.google.cloud.storage) + implementation(libs.aws.java.sdk.s3) + implementation(libs.aws.java.sdk.sts) + implementation(libs.s3) + implementation(libs.sts) + implementation(libs.bundles.apache) + implementation(libs.airbyte.protocol) + implementation(libs.commons.io) + + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) + testImplementation(libs.bundles.micronaut.test) + + testRuntimeOnly(libs.junit.jupiter.engine) } jsonSchema2Pojo { - setSourceType(SourceType.YAMLSCHEMA.name) - setSource(files("${sourceSets["main"].output.resourcesDir}/types")) - targetDirectory = file("$buildDir/generated/src/gen/java/") + setSourceType(SourceType.YAMLSCHEMA.name) + setSource(files("${sourceSets["main"].output.resourcesDir}/types")) + targetDirectory = file("$buildDir/generated/src/gen/java/") - targetPackage = "io.airbyte.config" - useLongIntegers = true + targetPackage = "io.airbyte.config" + useLongIntegers = true - removeOldOutput = true + removeOldOutput = true - generateBuilders = true - includeConstructors = false - includeSetters = true - serializable = true + generateBuilders = true + includeConstructors = false + includeSetters = true + serializable = true } tasks.named<Test>("test") { - useJUnitPlatform { - excludeTags("log4j2-config", "logger-client") - } + useJUnitPlatform { + excludeTags("log4j2-config", "logger-client") + } } tasks.named("compileKotlin") { - dependsOn(tasks.named("generateJsonSchema2Pojo")) + dependsOn(tasks.named("generateJsonSchema2Pojo")) } tasks.register<Test>("log4j2IntegrationTest") { - useJUnitPlatform { - includeTags("log4j2-config") - } - testLogging { - events = setOf(TestLogEvent.PASSED, TestLogEvent.SKIPPED, TestLogEvent.FAILED) - } + useJUnitPlatform { + includeTags("log4j2-config") + } + testLogging { + events = setOf(TestLogEvent.PASSED, TestLogEvent.SKIPPED, TestLogEvent.FAILED) + } } tasks.register<Test>("logClientsIntegrationTest") { - useJUnitPlatform { - includeTags("logger-client") - } - testLogging { - events = setOf(TestLogEvent.PASSED, TestLogEvent.SKIPPED, TestLogEvent.FAILED) - } + useJUnitPlatform { + includeTags("logger-client") + } + testLogging { + events = setOf(TestLogEvent.PASSED, TestLogEvent.SKIPPED, TestLogEvent.FAILED) + } } afterEvaluate { - tasks.named("kaptGenerateStubsKotlin") { - dependsOn(tasks.named("generateJsonSchema2Pojo")) - } + tasks.named("kaptGenerateStubsKotlin") { + dependsOn(tasks.named("generateJsonSchema2Pojo")) + } } diff --git a/airbyte-config/config-models/src/main/resources/types/ActivityPayloadURI.yaml b/airbyte-config/config-models/src/main/resources/types/ActivityPayloadURI.yaml new file mode 100644 index 00000000000..5ef4f09ae60 --- /dev/null +++ b/airbyte-config/config-models/src/main/resources/types/ActivityPayloadURI.yaml @@ -0,0 +1,15 @@ +--- +"$schema": http://json-schema.org/draft-07/schema# +"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/ActivityPayloadURI.yaml +title: ActivityPayloadURI +description: URI struct for activity payloads +type: object +additionalProperties: true +required: + - id + - version +properties: + id: + type: string + version: + type: string diff --git a/airbyte-config/config-models/src/main/resources/types/JobConfig.yaml b/airbyte-config/config-models/src/main/resources/types/JobConfig.yaml index 8698c0792dd..79534182e84 100644 --- 
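With the constants extracted in the MaskedDataInterceptor hunks above, generatePattern() assembles a case-insensitive expression made of a quoted alternation of property names followed by a value alternation covering strings, arrays, and bare numbers, and applyMask() keeps the property name while masking its value. A worked example under an assumed property list; SECRETS_MASK stands in for AirbyteSecretConstants.SECRETS_MASK:

```kotlin
import java.util.regex.Pattern

// Worked example of the masking regex assembled by generatePattern() above.
// The property list and mask value are assumptions for illustration.
const val SECRETS_MASK = "**********"

fun main() {
  val properties = setOf("password", "api_key")
  val regex = "(?i)" +                         // CASE_INSENSITIVE_FLAG
    "\"(" +                                    // PROPERTY_MATCHING_PATTERN_PREFIX
    properties.joinToString("|") +             // joined with PROPERTY_MATCHING_PATTERN_DELIMITER
    ")\"\\s*:\\s*(\"(?:[^\"\\\\]|\\\\.)*\"|\\[[^]\\[]*]|\\d+)" // PROPERTY_MATCHING_PATTERN_SUFFIX
  val pattern = Pattern.compile(regex)

  val line = """{"host": "db.internal", "password": "hunter2", "api_key": "abc123"}"""
  // Mirrors REPLACEMENT_PATTERN: keep the property name ($1), mask the value.
  println(pattern.matcher(line).replaceAll("\"$1\":\"$SECRETS_MASK\""))
  // {"host": "db.internal", "password":"**********", "api_key":"**********"}
}
```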
a/airbyte-config/config-models/src/main/resources/types/JobConfig.yaml +++ b/airbyte-config/config-models/src/main/resources/types/JobConfig.yaml @@ -17,6 +17,7 @@ properties: - getSpec - sync - resetConnection + - refresh checkConnection: "$ref": JobCheckConnectionConfig.yaml discoverCatalog: @@ -27,3 +28,5 @@ properties: "$ref": JobSyncConfig.yaml resetConnection: "$ref": JobResetConnectionConfig.yaml + refresh: + "$ref": RefreshConfig.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/JobType.yaml b/airbyte-config/config-models/src/main/resources/types/JobType.yaml index 86df39230c2..34697834998 100644 --- a/airbyte-config/config-models/src/main/resources/types/JobType.yaml +++ b/airbyte-config/config-models/src/main/resources/types/JobType.yaml @@ -12,3 +12,4 @@ enum: - reset_connection - connection_updater - replicate + - refresh diff --git a/airbyte-config/config-models/src/main/resources/types/RefreshConfig.yaml b/airbyte-config/config-models/src/main/resources/types/RefreshConfig.yaml new file mode 100644 index 00000000000..e6cef43fa88 --- /dev/null +++ b/airbyte-config/config-models/src/main/resources/types/RefreshConfig.yaml @@ -0,0 +1,75 @@ +--- +"$schema": http://json-schema.org/draft-07/schema# +"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/JobSyncConfig.yaml +title: RefreshConfig +description: configuration of the refresh operation +type: object +additionalProperties: true +required: + - configuredAirbyteCatalog + - sourceDockerImage + - destinationDockerImage + - streamsToRefresh +properties: + streamsToRefresh: + type: array + items: + type: object + existingJavaType: io.airbyte.protocol.models.StreamDescriptor + namespaceDefinition: + "$ref": NamespaceDefinitionType.yaml + namespaceFormat: + type: string + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + description: Prefix that will be prepended to the name of each stream when it is written to the destination. + type: string + configuredAirbyteCatalog: + description: the configured airbyte catalog + type: object + existingJavaType: io.airbyte.protocol.models.ConfiguredAirbyteCatalog + sourceDockerImage: + description: Image name of the source with tag. + type: string + sourceProtocolVersion: + description: Airbyte Protocol Version of the source + type: object + existingJavaType: io.airbyte.commons.version.Version + destinationDockerImage: + description: Image name of the destination with tag. + type: string + destinationProtocolVersion: + description: Airbyte Protocol Version of the destination + type: object + existingJavaType: io.airbyte.commons.version.Version + operationSequence: + description: Sequence of configurations of operations to apply as part of the sync + type: array + items: + "$ref": StandardSyncOperation.yaml + webhookOperationConfigs: + description: The webhook operation configs belonging to this workspace. Must conform to WebhookOperationConfigs.yaml. + type: object + existingJavaType: com.fasterxml.jackson.databind.JsonNode + syncResourceRequirements: + description: Resource requirements to use for the sync + $ref: SyncResourceRequirements.yaml + isSourceCustomConnector: + description: determine if the source running image is a custom connector. + type: boolean + isDestinationCustomConnector: + description: determine if the destination running image is a custom connector. 
+ type: boolean + workspaceId: + description: The id of the workspace associated with the sync + type: string + format: uuid + sourceDefinitionVersionId: + description: The id of the source definition version used for the sync + type: string + format: uuid + destinationDefinitionVersionId: + description: The id of the destination definition version used for the sync + type: string + format: uuid diff --git a/airbyte-config/config-models/src/main/resources/types/StandardSyncInput.yaml b/airbyte-config/config-models/src/main/resources/types/StandardSyncInput.yaml index dcae92f5811..33527acf726 100644 --- a/airbyte-config/config-models/src/main/resources/types/StandardSyncInput.yaml +++ b/airbyte-config/config-models/src/main/resources/types/StandardSyncInput.yaml @@ -10,7 +10,6 @@ required: - sourceConfiguration - destinationId - destinationConfiguration - - catalog properties: namespaceDefinition: "$ref": NamespaceDefinitionType.yaml @@ -46,14 +45,6 @@ properties: description: The webhook operation configs belonging to this workspace. See webhookOperationConfigs in StandardWorkspace.yaml. type: object existingJavaType: com.fasterxml.jackson.databind.JsonNode - catalog: - description: the configured airbyte catalog - type: object - # necessary because the configuration declaration is in a separate package. - existingJavaType: io.airbyte.protocol.models.ConfiguredAirbyteCatalog - state: - description: optional state of the previous run. this object is defined per integration. - "$ref": State.yaml syncResourceRequirements: description: Resource requirements to use for the sync $ref: SyncResourceRequirements.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/StandardSyncOutput.yaml b/airbyte-config/config-models/src/main/resources/types/StandardSyncOutput.yaml index 6dc0e2ae6c7..16516c32756 100644 --- a/airbyte-config/config-models/src/main/resources/types/StandardSyncOutput.yaml +++ b/airbyte-config/config-models/src/main/resources/types/StandardSyncOutput.yaml @@ -7,8 +7,6 @@ type: object additionalProperties: true required: - standardSyncSummary - - state - - output_catalog properties: standardSyncSummary: "$ref": StandardSyncSummary.yaml @@ -16,11 +14,9 @@ properties: "$ref": NormalizationSummary.yaml webhookOperationSummary: "$ref": WebhookOperationSummary.yaml - state: - "$ref": State.yaml - output_catalog: - existingJavaType: io.airbyte.protocol.models.ConfiguredAirbyteCatalog failures: type: array items: "$ref": FailureReason.yaml + catalogUri: + "$ref": ActivityPayloadURI.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/StandardSyncSummary.yaml b/airbyte-config/config-models/src/main/resources/types/StandardSyncSummary.yaml index d71017712fc..f13f19c22c6 100644 --- a/airbyte-config/config-models/src/main/resources/types/StandardSyncSummary.yaml +++ b/airbyte-config/config-models/src/main/resources/types/StandardSyncSummary.yaml @@ -36,3 +36,5 @@ properties: "$ref": StreamSyncStats.yaml performanceMetrics: "$ref": PerformanceMetrics.yaml + streamCount: + type: integer diff --git a/airbyte-config/config-persistence/build.gradle.kts b/airbyte-config/config-persistence/build.gradle.kts index 1364d22cced..4981c623f20 100644 --- a/airbyte-config/config-persistence/build.gradle.kts +++ b/airbyte-config/config-persistence/build.gradle.kts @@ -1,70 +1,70 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - `java-test-fixtures` - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.lib") + 
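The RefreshConfig.yaml schema above is compiled by jsonschema2pojo into an io.airbyte.config.RefreshConfig POJO (the config-models build script sets generateBuilders = true, which produces chainable withX setters). A sketch of what constructing the new refresh job config might look like from Kotlin; the stream names and connector images are illustrative only:

```kotlin
import io.airbyte.config.RefreshConfig
import io.airbyte.protocol.models.ConfiguredAirbyteCatalog
import io.airbyte.protocol.models.StreamDescriptor

// Assumes the withX builder methods that jsonschema2pojo generates for the
// schema above; all concrete values here are made up for illustration.
fun buildRefreshConfig(catalog: ConfiguredAirbyteCatalog): RefreshConfig =
  RefreshConfig()
    .withStreamsToRefresh(
      listOf(StreamDescriptor().withName("users").withNamespace("public")),
    )
    .withConfiguredAirbyteCatalog(catalog)
    .withSourceDockerImage("airbyte/source-postgres:dev")
    .withDestinationDockerImage("airbyte/destination-snowflake:dev")
```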
id("io.airbyte.gradle.publish") + `java-test-fixtures` + kotlin("jvm") + kotlin("kapt") } configurations.all { - exclude(group = "io.micronaut.flyway") + exclude(group = "io.micronaut.flyway") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(libs.bundles.micronaut.annotation.processor) - kapt(platform(libs.micronaut.platform)) - kapt(libs.bundles.micronaut.annotation.processor) + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) - api(libs.bundles.micronaut.annotation) + api(libs.bundles.micronaut.annotation) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-protocol")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:specs")) - implementation(project(":airbyte-data")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-db:jooq")) - implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-json-validation")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(libs.bundles.apache) - implementation(libs.google.cloud.storage) - implementation(libs.commons.io) - implementation(libs.jackson.databind) - implementation(libs.bundles.micronaut.data.jdbc) - implementation(libs.bundles.micronaut.kotlin) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-protocol")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:specs")) + implementation(project(":airbyte-data")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-db:jooq")) + implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-json-validation")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(libs.bundles.apache) + implementation(libs.google.cloud.storage) + implementation(libs.commons.io) + implementation(libs.jackson.databind) + implementation(libs.bundles.micronaut.data.jdbc) + implementation(libs.bundles.micronaut.kotlin) - testImplementation(libs.hamcrest.all) - testImplementation(libs.platform.testcontainers.postgresql) - testImplementation(libs.bundles.flyway) - testImplementation(libs.mockito.inline) - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.mockk) + testImplementation(libs.hamcrest.all) + testImplementation(libs.platform.testcontainers.postgresql) + testImplementation(libs.bundles.flyway) + testImplementation(libs.mockito.inline) + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockk) - kaptTest(platform(libs.micronaut.platform)) - kaptTest(libs.bundles.micronaut.test.annotation.processor) + kaptTest(platform(libs.micronaut.platform)) + kaptTest(libs.bundles.micronaut.test.annotation.processor) - 
testRuntimeOnly(libs.junit.jupiter.engine)
+  testRuntimeOnly(libs.junit.jupiter.engine)

-    integrationTestImplementation(project(":airbyte-config:config-persistence"))
+  integrationTestImplementation(project(":airbyte-config:config-persistence"))

-    testFixturesApi(libs.jackson.databind)
-    testFixturesApi(libs.guava)
-    testFixturesApi(project(":airbyte-json-validation"))
-    testFixturesApi(project(":airbyte-commons"))
-    testFixturesApi(project(":airbyte-config:config-models"))
-    testFixturesApi(project(":airbyte-config:config-secrets"))
-    testFixturesApi(libs.airbyte.protocol)
-    testFixturesApi(libs.lombok)
-    testFixturesAnnotationProcessor(libs.lombok)
+  testFixturesApi(libs.jackson.databind)
+  testFixturesApi(libs.guava)
+  testFixturesApi(project(":airbyte-json-validation"))
+  testFixturesApi(project(":airbyte-commons"))
+  testFixturesApi(project(":airbyte-config:config-models"))
+  testFixturesApi(project(":airbyte-config:config-secrets"))
+  testFixturesApi(libs.airbyte.protocol)
+  testFixturesApi(libs.lombok)
+  testFixturesAnnotationProcessor(libs.lombok)
 }
 
 // The DuplicatesStrategy will be required while this module is a mixture of Kotlin and Java _with_ Lombok dependencies.
diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/PermissionPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/PermissionPersistence.java
index a8f0a45105d..61e86618e11 100644
--- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/PermissionPersistence.java
+++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/PermissionPersistence.java
@@ -8,7 +8,6 @@
 import static io.airbyte.db.instance.configs.jooq.generated.Tables.PERMISSION;
 import static io.airbyte.db.instance.configs.jooq.generated.Tables.USER;
 import static org.jooq.impl.DSL.asterisk;
-import static org.jooq.impl.DSL.field;
 import static org.jooq.impl.DSL.select;
 
 import io.airbyte.commons.enums.Enums;
@@ -19,8 +18,6 @@
 import io.airbyte.db.Database;
 import io.airbyte.db.ExceptionWrappingDatabase;
 import java.io.IOException;
-import java.sql.SQLException;
-import java.time.OffsetDateTime;
 import java.util.List;
 import java.util.Optional;
 import java.util.UUID;
@@ -28,7 +25,6 @@
 import org.jooq.DSLContext;
 import org.jooq.Record;
 import org.jooq.Result;
-import org.jooq.impl.DSL;
 
 /**
  * Permission Persistence.
@@ -42,93 +38,10 @@ public class PermissionPersistence {
 
   private final ExceptionWrappingDatabase database;
 
-  public static final String PRIMARY_KEY = "id";
-
   public PermissionPersistence(final Database database) {
     this.database = new ExceptionWrappingDatabase(database);
   }
 
-  /**
-   * Create or update Permission.
-   *
-   * @param permission permission to write into database.
-   * @throws IOException in case of a db error.
- */ - public void writePermission(final Permission permission) throws IOException { - final io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType permissionType = - PermissionPersistenceHelper.convertConfigPermissionTypeToJooqPermissionType(permission.getPermissionType()); - - database.transaction(ctx -> { - final OffsetDateTime timestamp = OffsetDateTime.now(); - final boolean isExistingConfig = ctx.fetchExists(select() - .from(PERMISSION) - .where(PERMISSION.ID.eq(permission.getPermissionId()))); - - if (isExistingConfig) { - updatePermission(ctx, permission, timestamp); - } else { - ctx.insertInto(PERMISSION) - .set(PERMISSION.ID, permission.getPermissionId()) - .set(PERMISSION.PERMISSION_TYPE, permissionType) - .set(PERMISSION.USER_ID, permission.getUserId()) - .set(PERMISSION.WORKSPACE_ID, permission.getWorkspaceId()) - .set(PERMISSION.ORGANIZATION_ID, permission.getOrganizationId()) - .set(PERMISSION.CREATED_AT, timestamp) - .set(PERMISSION.UPDATED_AT, timestamp) - .execute(); - } - return null; - }); - } - - private void updatePermission(final DSLContext transactionCtx, final Permission updatedPermission, final OffsetDateTime timestamp) - throws SQLException { - - final Permission priorPermission; - try { - priorPermission = getPermission(updatedPermission.getPermissionId()).orElseThrow(); - } catch (final IOException e) { - throw new SQLException(e); - } - - final io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType priorPermissionType = - PermissionPersistenceHelper.convertConfigPermissionTypeToJooqPermissionType(priorPermission.getPermissionType()); - - final io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType newPermissionType = - PermissionPersistenceHelper.convertConfigPermissionTypeToJooqPermissionType(updatedPermission.getPermissionType()); - - transactionCtx.update(PERMISSION) - .set(PERMISSION.PERMISSION_TYPE, newPermissionType) - .set(PERMISSION.WORKSPACE_ID, updatedPermission.getWorkspaceId()) - .set(PERMISSION.ORGANIZATION_ID, updatedPermission.getOrganizationId()) - .set(PERMISSION.USER_ID, updatedPermission.getUserId()) - .set(PERMISSION.UPDATED_AT, timestamp) - .where(PERMISSION.ID.eq(updatedPermission.getPermissionId())) - .execute(); - - // if the updated permission started off as an organization admin, check to see if the org still - // has an admin after the update is applied. if not, roll back the transaction. - final boolean wasOrganizationAdminUpdate = - priorPermissionType.equals(io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType.organization_admin); - - // use priorPermission instead of updatedPermission in case the organization ID changed in the - // update. - if (wasOrganizationAdminUpdate && countOrganizationAdmins(transactionCtx, priorPermission.getOrganizationId()) < 1) { - // trigger a transaction rollback - throw new SQLOperationNotAllowedException( - "Preventing update that would have removed the last OrganizationAdmin from organization " + updatedPermission.getOrganizationId()); - } - } - - private int countOrganizationAdmins(final DSLContext ctx, final UUID organizationId) { - // fetch the count of permission records with type OrganizationAdmin and in the indicated - // organizationId - return ctx.fetchCount(select() - .from(PERMISSION) - .where(PERMISSION.PERMISSION_TYPE.eq(io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType.organization_admin)) - .and(PERMISSION.ORGANIZATION_ID.eq(organizationId))); - } - /** * Get a permission by permission Id. 
   *
@@ -193,38 +106,6 @@ private Permission createPermissionFromRecord(final Record record) {
         .withOrganizationId(record.get(PERMISSION.ORGANIZATION_ID));
   }
 
-  /**
-   * Delete Permissions by id.
-   *
-   *
-   */
-  public boolean deletePermissionById(final UUID permissionId) throws IOException {
-    return database.transaction(ctx -> {
-      final Permission deletedPermission;
-      try {
-        deletedPermission = getPermission(permissionId).orElseThrow();
-      } catch (final IOException e) {
-        throw new SQLException(e);
-      }
-      final int modifiedCount = ctx.deleteFrom(PERMISSION).where(field(DSL.name(PRIMARY_KEY)).eq(permissionId)).execute();
-
-      // return early if nothing was deleted
-      if (modifiedCount == 0) {
-        return false;
-      }
-
-      // check if this deletion removed the last OrganizationAdmin from the organization
-      final boolean wasOrganizationAdminDeletion = deletedPermission.getPermissionType().equals(PermissionType.ORGANIZATION_ADMIN);
-      if (wasOrganizationAdminDeletion && countOrganizationAdmins(ctx, deletedPermission.getOrganizationId()) < 1) {
-        // trigger a rollback by throwing an exception
-        throw new SQLOperationNotAllowedException(
-            "Rolling back delete that would have removed the last OrganizationAdmin from organization " + deletedPermission.getOrganizationId());
-      }
-
-      return modifiedCount > 0;
-    });
-  }
-
   /**
    * List all users with permissions to the workspace. Note it does not take organization info into
    * account.
@@ -238,10 +119,6 @@ public List<UserPermission> listUsersInWorkspace(final UUID workspaceId) throws
     return this.database.query(ctx -> listPermissionsForWorkspace(ctx, workspaceId));
   }
 
-  public List<UserPermission> listUserPermissionsGrantingWorkspaceAccess(final UUID workspaceId) throws IOException {
-    return this.database.query(ctx -> listPermissionsForWorkspace(ctx, workspaceId));
-  }
-
   public List<UserPermission> listInstanceAdminUsers() throws IOException {
     return this.database.query(ctx -> listInstanceAdminPermissions(ctx));
   }
diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/RefreshJobStateUpdater.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/RefreshJobStateUpdater.java
new file mode 100644
index 00000000000..75c3d3918ed
--- /dev/null
+++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/RefreshJobStateUpdater.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.config.persistence;
+
+import io.airbyte.config.StateType;
+import io.airbyte.config.StateWrapper;
+import io.airbyte.config.persistence.domain.StreamRefresh;
+import io.airbyte.protocol.models.AirbyteGlobalState;
+import io.airbyte.protocol.models.AirbyteStateMessage;
+import io.airbyte.protocol.models.AirbyteStreamState;
+import io.airbyte.protocol.models.StreamDescriptor;
+import jakarta.inject.Singleton;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import java.util.UUID;
+import java.util.stream.Collectors;
+
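+/**
+ * Prunes the persisted connection state when streams are refreshed: state entries for the
+ * refreshed streams are dropped so they re-sync from scratch, while the state of all other
+ * streams is carried over, and the resulting state is persisted for the connection.
+ */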
+@Singleton
+public class RefreshJobStateUpdater {
+
+  private final StatePersistence statePersistence;
+
+  public RefreshJobStateUpdater(final StatePersistence statePersistence) {
+    this.statePersistence = statePersistence;
+  }
+
+  public void updateStateWrapperForRefresh(final UUID connectionId, final StateWrapper currentState, final List<StreamRefresh> streamsToRefresh)
+      throws IOException {
+    final StateWrapper updatedState = new StateWrapper();
+    final Set<StreamDescriptor> streamDescriptorsToRefresh = streamsToRefresh
+        .stream()
+        .map(c -> new StreamDescriptor().withName(c.getStreamName()).withNamespace(c.getStreamNamespace()))
+        .collect(Collectors.toSet());
+
+    switch (currentState.getStateType()) {
+      case GLOBAL -> {
+        final List<AirbyteStreamState> streamStatesToRetain = new ArrayList<>();
+        final AirbyteStateMessage currentGlobalStateMessage = currentState.getGlobal();
+        final List<AirbyteStreamState> currentStreamStates = currentGlobalStateMessage.getGlobal().getStreamStates();
+        for (final AirbyteStreamState streamState : currentStreamStates) {
+          final StreamDescriptor streamDescriptor = streamState.getStreamDescriptor();
+          if (!streamDescriptorsToRefresh.contains(streamDescriptor)) {
+            streamStatesToRetain.add(streamState);
+          }
+        }
+        updatedState.setStateType(StateType.GLOBAL);
+        updatedState.setGlobal(new AirbyteStateMessage()
+            .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL)
+            .withGlobal(new AirbyteGlobalState()
+                .withSharedState(streamStatesToRetain.isEmpty() ? null : currentGlobalStateMessage.getGlobal().getSharedState())
+                .withStreamStates(streamStatesToRetain)));
+
+      }
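+      // The STREAM case applies the same pruning per stream: e.g. with streams {A, B} where
+      // only A is refreshed, A's state message is dropped and B's is kept as-is.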
+      case STREAM -> {
+        final List<AirbyteStateMessage> streamStatesToRetain = new ArrayList<>();
+        for (final AirbyteStateMessage stateMessage : currentState.getStateMessages()) {
+          final StreamDescriptor streamDescriptor = stateMessage.getStream().getStreamDescriptor();
+          if (!streamDescriptorsToRefresh.contains(streamDescriptor)) {
+            streamStatesToRetain.add(stateMessage);
+          }
+        }
+        updatedState.setStateType(StateType.STREAM);
+        updatedState.setStateMessages(streamStatesToRetain);
+      }
+      default -> updatedState.setStateType(StateType.LEGACY);
+    }
+    statePersistence.updateOrCreateState(connectionId, updatedState);
+  }
+
+}
diff --git a/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/StreamGenerationRepository.kt b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/StreamGenerationRepository.kt
new file mode 100644
index 00000000000..b40b1b6cc75
--- /dev/null
+++ b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/StreamGenerationRepository.kt
@@ -0,0 +1,26 @@
+package io.airbyte.config.persistence
+
+import io.airbyte.config.persistence.domain.Generation
+import io.airbyte.config.persistence.domain.StreamGeneration
+import io.micronaut.data.annotation.Query
+import io.micronaut.data.jdbc.annotation.JdbcRepository
+import io.micronaut.data.model.query.builder.sql.Dialect
+import io.micronaut.data.repository.PageableRepository
+import java.util.UUID
+
+@JdbcRepository(dialect = Dialect.POSTGRES, dataSource = "config")
+interface StreamGenerationRepository : PageableRepository<StreamGeneration, UUID> {
+  fun findByConnectionId(connectionId: UUID): List<StreamGeneration>
+
+  fun deleteByConnectionId(connectionId: UUID)
+
+  @Query(
+    value = """
+      SELECT stream_name, stream_namespace, MAX(generation_id) as generation_id
+      FROM stream_generation
+      WHERE connection_id = :connectionId
+      GROUP BY (stream_name, stream_namespace)
+    """,
+  )
+  fun getMaxGenerationOfStreamsForConnectionId(connectionId: UUID): List<Generation>
+}
diff --git a/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/StreamRefreshesRepository.kt b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/StreamRefreshesRepository.kt
index 4ffbe8fa8b0..de4051b9236 100644
--- a/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/StreamRefreshesRepository.kt
+++ b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/StreamRefreshesRepository.kt
@@ -1,15 +1,31 @@
 package io.airbyte.config.persistence
 
 import io.airbyte.config.persistence.domain.StreamRefresh
-import io.airbyte.config.persistence.domain.StreamRefreshPK
+import io.micronaut.data.annotation.Query
 import io.micronaut.data.jdbc.annotation.JdbcRepository
 import io.micronaut.data.model.query.builder.sql.Dialect
 import io.micronaut.data.repository.PageableRepository
 import java.util.UUID
 
-@JdbcRepository(dialect = Dialect.POSTGRES)
-interface StreamRefreshesRepository : PageableRepository<StreamRefresh, StreamRefreshPK> {
-  fun findByPkConnectionId(connectionId: UUID): List<StreamRefresh>
+@JdbcRepository(dialect = Dialect.POSTGRES, dataSource = "config")
+interface StreamRefreshesRepository : PageableRepository<StreamRefresh, UUID> {
+  fun findByConnectionId(connectionId: UUID): List<StreamRefresh>
 
-  fun deleteByPkConnectionId(connectionId: UUID)
+  fun deleteByConnectionId(connectionId: UUID)
+
+  @Query(
+    value = """
+      DELETE FROM stream_refreshes
+      WHERE connection_id = :connectionId
+      AND stream_name = :streamName
+      AND
((:streamNamespace) IS NULL OR stream_namespace = :streamNamespace) + """, + ) + fun deleteByConnectionIdAndStreamNameAndStreamNamespace( + connectionId: UUID, + streamName: String, + streamNamespace: String?, + ) + + fun existsByConnectionId(connectionId: UUID): Boolean } diff --git a/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/domain/StreamGeneration.kt b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/domain/StreamGeneration.kt new file mode 100644 index 00000000000..489c2c0b0c2 --- /dev/null +++ b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/domain/StreamGeneration.kt @@ -0,0 +1,51 @@ +package io.airbyte.config.persistence.domain + +import io.micronaut.core.annotation.NonNull +import io.micronaut.core.annotation.Nullable +import io.micronaut.data.annotation.DateCreated +import io.micronaut.data.annotation.DateUpdated +import io.micronaut.data.annotation.Id +import io.micronaut.data.annotation.MappedEntity +import jakarta.persistence.Column +import java.time.OffsetDateTime +import java.util.UUID + +@MappedEntity("stream_generation") +data class StreamGeneration( + @field:Id + @NonNull + var id: UUID? = UUID.randomUUID(), + @Column(name = "connection_id") + @NonNull + var connectionId: UUID, + @Column(name = "stream_name") + @NonNull + var streamName: String, + @Column(name = "stream_namespace") + @Nullable + var streamNamespace: String? = null, + @Column(name = "generation_id") + @NonNull + var generationId: Long, + @Column(name = "start_job_id") + @NonNull + var startJobId: Long, + @Column(name = "created_at") + @DateCreated + var createdAt: OffsetDateTime? = null, + @Column(name = "updated_at") + @DateUpdated + var updatedAt: OffsetDateTime? = null, +) + +@MappedEntity +data class Generation( + @Column(name = "stream_name") + @NonNull + val streamName: String, + @Column(name = "stream_namespace") + @Nullable + val streamNamespace: String? = null, + @Column(name = "generation_id") + val generationId: Long, +) diff --git a/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/domain/StreamRefresh.kt b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/domain/StreamRefresh.kt index 37bd67a685b..56a9684af0a 100644 --- a/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/domain/StreamRefresh.kt +++ b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/domain/StreamRefresh.kt @@ -1,29 +1,29 @@ package io.airbyte.config.persistence.domain +import io.micronaut.core.annotation.NonNull import io.micronaut.core.annotation.Nullable import io.micronaut.data.annotation.DateCreated -import io.micronaut.data.annotation.Embeddable -import io.micronaut.data.annotation.EmbeddedId +import io.micronaut.data.annotation.Id import io.micronaut.data.annotation.MappedEntity import jakarta.persistence.Column import java.time.OffsetDateTime import java.util.UUID -@Embeddable -data class StreamRefreshPK( +@MappedEntity("stream_refreshes") +data class StreamRefresh( + @field:Id + @NonNull + var id: UUID? = UUID.randomUUID(), @Column(name = "connection_id") - val connectionId: UUID, + @NonNull + var connectionId: UUID, @Column(name = "stream_name") - val streamName: String, + @NonNull + var streamName: String, @Column(name = "stream_namespace") @Nullable - val streamNamespace: String? 
= null,
-)
-
-@MappedEntity("stream_refreshes")
-data class StreamRefresh(
-  @EmbeddedId
-  val pk: StreamRefreshPK,
+  var streamNamespace: String? = null,
+  @Column(name = "created_at")
   @DateCreated
   var createdAt: OffsetDateTime? = null,
 )
diff --git a/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/helper/GenerationBumper.kt b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/helper/GenerationBumper.kt
new file mode 100644
index 00000000000..491217b06f2
--- /dev/null
+++ b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/helper/GenerationBumper.kt
@@ -0,0 +1,70 @@
+package io.airbyte.config.persistence.helper
+
+import io.airbyte.config.persistence.StreamGenerationRepository
+import io.airbyte.config.persistence.domain.Generation
+import io.airbyte.config.persistence.domain.StreamGeneration
+import io.airbyte.config.persistence.domain.StreamRefresh
+import io.airbyte.protocol.models.StreamDescriptor
+import jakarta.inject.Singleton
+import java.util.UUID
+
+@Singleton
+class GenerationBumper(val streamGenerationRepository: StreamGenerationRepository) {
+  /**
+   * Increases the generation of each stream being refreshed.
+   * For each stream being refreshed, it fetches the current maximum generation and then creates a new entry in the
+   * generation table with the generation bumped.
+   * @param connectionId - the connection whose stream generations are being increased
+   * @param jobId - the current jobId
+   * @param streamRefreshes - list of the streams being refreshed
+   */
+  fun updateGenerationForStreams(
+    connectionId: UUID,
+    jobId: Long,
+    streamRefreshes: List<StreamRefresh>,
+  ) {
+    val streamDescriptors: Set<StreamDescriptor> =
+      streamRefreshes
+        .map { StreamDescriptor().withName(it.streamName).withNamespace(it.streamNamespace) }.toHashSet()
+
+    val currentMaxGeneration: List<Generation> = streamGenerationRepository.getMaxGenerationOfStreamsForConnectionId(connectionId)
+
+    val streamDescriptorWithoutAGeneration =
+      streamDescriptors.filter {
+        val missingInGeneration =
+          currentMaxGeneration.find { generation: Generation ->
+            generation.streamName == it.name && generation.streamNamespace == it.namespace
+          } == null
+        missingInGeneration
+      }
+
+    val newGenerations =
+      streamDescriptorWithoutAGeneration.map {
+        Generation(
+          streamName = it.name,
+          streamNamespace = it.namespace,
+          generationId = 0L,
+        )
+      }
+
+    val generationToUpdate: List<Generation> =
+      currentMaxGeneration.filter {
+        val streamDescriptor = StreamDescriptor().withName(it.streamName).withNamespace(it.streamNamespace)
+        streamDescriptors.contains(streamDescriptor)
+      } + newGenerations
+
+    val updatedStreamGeneration =
+      generationToUpdate.map {
+        StreamGeneration(
+          id = UUID.randomUUID(),
+          connectionId = connectionId,
+          streamName = it.streamName,
+          streamNamespace = it.streamNamespace,
+          generationId = it.generationId + 1,
+          startJobId = jobId,
+        )
+      }
+
+    streamGenerationRepository.saveAll(updatedStreamGeneration)
+  }
+}
diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/OrganizationPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/OrganizationPersistenceTest.java
index b870c8845a7..14da7b8444f 100644
--- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/OrganizationPersistenceTest.java
+++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/OrganizationPersistenceTest.java
@@ -38,7 +38,6 @@ class OrganizationPersistenceTest extends
BaseConfigDatabaseTest { private OrganizationPersistence organizationPersistence; private UserPersistence userPersistence; - private PermissionPersistence permissionPersistence; private WorkspaceService workspaceService; private TestClient featureFlagClient; private SecretsRepositoryReader secretsRepositoryReader; @@ -47,7 +46,6 @@ class OrganizationPersistenceTest extends BaseConfigDatabaseTest { @BeforeEach void beforeEach() throws Exception { - permissionPersistence = new PermissionPersistence(database); userPersistence = new UserPersistence(database); organizationPersistence = new OrganizationPersistence(database); featureFlagClient = new TestClient(); @@ -206,7 +204,7 @@ void testListOrganizationsByUserId(final Boolean withKeywordSearch, final Boolea .withPba(false) .withOrgLevelBilling(false)); // grant user an admin access to org 1 - permissionPersistence.writePermission(new Permission() + BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withOrganizationId(orgId1) .withUserId(userId) @@ -221,7 +219,7 @@ void testListOrganizationsByUserId(final Boolean withKeywordSearch, final Boolea .withPba(false) .withOrgLevelBilling(false)); // grant user an editor access to org 2 - permissionPersistence.writePermission(new Permission() + BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withOrganizationId(orgId2) .withUserId(userId) @@ -236,7 +234,7 @@ void testListOrganizationsByUserId(final Boolean withKeywordSearch, final Boolea .withPba(false) .withOrgLevelBilling(false)); // grant user a read access to org 3 - permissionPersistence.writePermission(new Permission() + BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withOrganizationId(orgId3) .withUserId(userId) diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java index ea6a07ce926..c3168c0b50d 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java @@ -28,15 +28,11 @@ import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.TestClient; import io.airbyte.test.utils.BaseConfigDatabaseTest; -import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.util.List; import java.util.Optional; -import java.util.UUID; -import org.jooq.exception.DataAccessException; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; class PermissionPersistenceTest extends BaseConfigDatabaseTest { @@ -52,7 +48,7 @@ void beforeEach() throws Exception { setupTestData(); } - private void setupTestData() throws IOException, JsonValidationException { + private void setupTestData() throws Exception { final UserPersistence userPersistence = new UserPersistence(database); final FeatureFlagClient featureFlagClient = mock(TestClient.class); final SecretsRepositoryReader secretsRepositoryReader = mock(SecretsRepositoryReader.class); @@ -106,7 +102,7 @@ private void setupTestData() throws IOException, JsonValidationException { // write permission table for (final Permission permission : MockData.permissions()) { - 
permissionPersistence.writePermission(permission);
+      BaseConfigDatabaseTest.writePermission(permission);
     }
   }
 
@@ -149,12 +145,6 @@ void listPermissionByWorkspaceTest() throws IOException {
     Assertions.assertEquals(2, permissions.size());
   }
 
-  @Test
-  void deletePermissionByIdTest() throws IOException {
-    permissionPersistence.deletePermissionById(MockData.PERMISSION_ID_4);
-    Assertions.assertEquals(Optional.empty(), permissionPersistence.getPermission(MockData.PERMISSION_ID_4));
-  }
-
   @Test
   void listUsersInOrganizationTest() throws IOException {
     final List<UserPermission> userPermissions = permissionPersistence.listUsersInOrganization(MockData.ORGANIZATION_ID_1);
@@ -230,227 +220,4 @@ void isAuthUserInstanceAdmin() throws IOException {
     Assertions.assertFalse(permissionPersistence.isAuthUserInstanceAdmin(user2.getAuthUserId()));
   }
 
-  @Nested
-  class WritePermission {
-
-    @Test
-    void createNewPermission() throws IOException {
-      final Permission permission = new Permission()
-          .withPermissionId(UUID.randomUUID())
-          .withOrganizationId(MockData.ORGANIZATION_ID_1)
-          .withPermissionType(PermissionType.ORGANIZATION_ADMIN)
-          .withUserId(MockData.CREATOR_USER_ID_1);
-
-      Assertions.assertDoesNotThrow(() -> permissionPersistence.writePermission(permission));
-      Assertions.assertEquals(permission, permissionPersistence.getPermission(permission.getPermissionId()).orElseThrow());
-    }
-
-    @Test
-    void createPermissionExceptionTest() {
-      // writing permissions against Permission table constraint should throw db exception.
-
-      // invalid permission 1: permission type cannot be null
-      final Permission invalidPermission1 = new Permission()
-          .withPermissionId(UUID.randomUUID())
-          .withUserId(UUID.randomUUID())
-          .withOrganizationId(UUID.randomUUID())
-          .withPermissionType(null);
-
-      // invalid permission 2: for workspace level permission, org id should be null and workspace id
-      // cannot be null
-      final Permission invalidPermission2 = new Permission()
-          .withPermissionId(UUID.randomUUID())
-          .withUserId(UUID.randomUUID())
-          .withOrganizationId(UUID.randomUUID())
-          .withPermissionType(PermissionType.WORKSPACE_OWNER);
-
-      // invalid permission 3: for organization level permission, org id cannot be null and workspace id
-      // should be null
-      final Permission invalidPermission3 = new Permission()
-          .withPermissionId(UUID.randomUUID())
-          .withUserId(UUID.randomUUID())
-          .withWorkspaceId(UUID.randomUUID())
-          .withPermissionType(PermissionType.ORGANIZATION_MEMBER);
-
-      Assertions.assertThrows(DataAccessException.class, () -> permissionPersistence.writePermission(invalidPermission1));
-      Assertions.assertThrows(DataAccessException.class, () -> permissionPersistence.writePermission(invalidPermission2));
-      Assertions.assertThrows(DataAccessException.class, () -> permissionPersistence.writePermission(invalidPermission3));
-    }
-
-  }
-
-  /**
-   * Note that while the Persistence layer allows updates to ID fields, the API layer does not. Since
-   * blocking such updates is an explicit API-level concern, our persistence layer tests cover updates
-   * to ID fields.
- */ - @Nested - class UpdatePermission { - - final Permission instanceAdminPermission = MockData.permission1; - final Permission workspaceAdminPermission = MockData.permission4; - final Permission organizationReaderPermission = MockData.permission7; - - @Test - void updateWorkspacePermission() throws IOException { - final Permission update = workspaceAdminPermission - .withPermissionType(PermissionType.WORKSPACE_READER) // change to a different workspace-level permission type - .withWorkspaceId(MockData.WORKSPACE_ID_2) // change to a different workspace ID - .withUserId(MockData.CREATOR_USER_ID_1); // change to a different user ID - - Assertions.assertDoesNotThrow(() -> permissionPersistence.writePermission(update)); - final Permission updated = permissionPersistence.getPermission(update.getPermissionId()).orElseThrow(); - - Assertions.assertEquals(update, updated); - } - - @Test - void updateOrganizationPermission() throws IOException { - final Permission update = organizationReaderPermission - .withPermissionType(PermissionType.ORGANIZATION_EDITOR) // change to a different organization-level permission type - .withOrganizationId(MockData.ORGANIZATION_ID_3) // change to a different organization ID - .withUserId(MockData.CREATOR_USER_ID_1); // change to a different user ID - - Assertions.assertDoesNotThrow(() -> permissionPersistence.writePermission(update)); - final Permission updated = permissionPersistence.getPermission(update.getPermissionId()).orElseThrow(); - - Assertions.assertEquals(update, updated); - } - - @Test - void updateInstanceAdminPermission() throws IOException { - final Permission update = instanceAdminPermission - .withUserId(MockData.CREATOR_USER_ID_2); // change to a different user ID - - Assertions.assertDoesNotThrow(() -> permissionPersistence.writePermission(update)); - final Permission updated = permissionPersistence.getPermission(update.getPermissionId()).orElseThrow(); - - Assertions.assertEquals(instanceAdminPermission.getPermissionId(), updated.getPermissionId()); - Assertions.assertEquals(PermissionType.INSTANCE_ADMIN, updated.getPermissionType()); - Assertions.assertEquals(MockData.CREATOR_USER_ID_2, updated.getUserId()); - } - - @Test - void shouldNotUpdateInstanceAdminPermissionTypeToOthers() { - final Permission update = new Permission() - .withPermissionId(instanceAdminPermission.getPermissionId()) - .withPermissionType(PermissionType.ORGANIZATION_EDITOR); // another permission type - Assertions.assertThrows(DataAccessException.class, () -> permissionPersistence.writePermission(update)); - } - - @Test - void shouldNotUpdateWorkspaceLevelPermissionTypeToOrganizationLevelPermissions() { - final Permission update = new Permission() - .withPermissionId(workspaceAdminPermission.getPermissionId()) - .withPermissionType(PermissionType.ORGANIZATION_EDITOR); // org level permission type - Assertions.assertThrows(DataAccessException.class, () -> permissionPersistence.writePermission(update)); - } - - @Test - void shouldNotUpdateOrganizationLevelPermissionTypeToWorkspaceLevelPermissions() { - final Permission update = new Permission() - .withPermissionId(organizationReaderPermission.getPermissionId()) - .withPermissionType(PermissionType.WORKSPACE_ADMIN); // workspace level permission type - Assertions.assertThrows(DataAccessException.class, () -> permissionPersistence.writePermission(update)); - } - - } - - @Nested - class SpecializedCases { - - @Test - void cannotDeleteLastOrganizationAdmin() throws IOException { - final Permission orgAdmin1 = new Permission() - 
.withPermissionId(UUID.randomUUID()) - .withOrganizationId(MockData.ORGANIZATION_ID_2) - .withPermissionType(PermissionType.ORGANIZATION_ADMIN) - .withUserId(MockData.CREATOR_USER_ID_1); - final Permission orgAdmin2 = new Permission() - .withPermissionId(UUID.randomUUID()) - .withOrganizationId(MockData.ORGANIZATION_ID_2) - .withPermissionType(PermissionType.ORGANIZATION_ADMIN) - .withUserId(MockData.CREATOR_USER_ID_2); - - permissionPersistence.writePermission(orgAdmin1); - permissionPersistence.writePermission(orgAdmin2); - - Assertions.assertDoesNotThrow(() -> permissionPersistence.deletePermissionById(orgAdmin1.getPermissionId())); - final DataAccessException thrown = - Assertions.assertThrows(DataAccessException.class, () -> permissionPersistence.deletePermissionById(orgAdmin2.getPermissionId())); - - Assertions.assertInstanceOf(SQLOperationNotAllowedException.class, thrown.getCause()); - - // make sure the last org-admin permission is still present in the DB - Assertions.assertEquals(orgAdmin2, permissionPersistence.getPermission(orgAdmin2.getPermissionId()).orElseThrow()); - } - - @Test - void cannotDemoteLastOrganizationAdmin() throws IOException { - final Permission orgAdmin1 = new Permission() - .withPermissionId(UUID.randomUUID()) - .withOrganizationId(MockData.ORGANIZATION_ID_2) - .withPermissionType(PermissionType.ORGANIZATION_ADMIN) - .withUserId(MockData.CREATOR_USER_ID_1); - final Permission orgAdmin2 = new Permission() - .withPermissionId(UUID.randomUUID()) - .withOrganizationId(MockData.ORGANIZATION_ID_2) - .withPermissionType(PermissionType.ORGANIZATION_ADMIN) - .withUserId(MockData.CREATOR_USER_ID_2); - - permissionPersistence.writePermission(orgAdmin1); - permissionPersistence.writePermission(orgAdmin2); - - Assertions.assertDoesNotThrow(() -> permissionPersistence.writePermission(orgAdmin1.withPermissionType(PermissionType.ORGANIZATION_EDITOR))); - - final Permission demotionUpdate = orgAdmin2 - .withPermissionId(orgAdmin2.getPermissionId()) - .withPermissionType(PermissionType.ORGANIZATION_EDITOR); - - final DataAccessException thrown = Assertions.assertThrows(DataAccessException.class, - () -> permissionPersistence.writePermission(demotionUpdate)); - - Assertions.assertInstanceOf(SQLOperationNotAllowedException.class, thrown.getCause()); - - // make sure the last org-admin is still an org-admin, ie the update did not persist - Assertions.assertEquals( - PermissionType.ORGANIZATION_ADMIN, - permissionPersistence.getPermission(orgAdmin2.getPermissionId()).orElseThrow().getPermissionType()); - } - - @Test - void cannotChangeLastOrganizationAdminToADifferentOrg() throws IOException { - final Permission orgAdmin1 = new Permission() - .withPermissionId(UUID.randomUUID()) - .withOrganizationId(MockData.ORGANIZATION_ID_2) - .withPermissionType(PermissionType.ORGANIZATION_ADMIN) - .withUserId(MockData.CREATOR_USER_ID_1); - final Permission orgAdmin2 = new Permission() - .withPermissionId(UUID.randomUUID()) - .withOrganizationId(MockData.ORGANIZATION_ID_2) - .withPermissionType(PermissionType.ORGANIZATION_ADMIN) - .withUserId(MockData.CREATOR_USER_ID_2); - - permissionPersistence.writePermission(orgAdmin1); - permissionPersistence.writePermission(orgAdmin2); - - Assertions.assertDoesNotThrow(() -> permissionPersistence.writePermission(orgAdmin1.withPermissionType(PermissionType.ORGANIZATION_EDITOR))); - - final Permission demotionUpdate = orgAdmin2 - .withPermissionId(orgAdmin2.getPermissionId()) - .withOrganizationId(MockData.ORGANIZATION_ID_3); - - final DataAccessException 
thrown = Assertions.assertThrows(DataAccessException.class, - () -> permissionPersistence.writePermission(demotionUpdate)); - - Assertions.assertInstanceOf(SQLOperationNotAllowedException.class, thrown.getCause()); - - // make sure the last org-admin is still in the original org, ie the update did not persist - Assertions.assertEquals( - MockData.ORGANIZATION_ID_2, - permissionPersistence.getPermission(orgAdmin2.getPermissionId()).orElseThrow().getOrganizationId()); - } - - } - } diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/RefreshJobStateUpdaterTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/RefreshJobStateUpdaterTest.java new file mode 100644 index 00000000000..154053c45a3 --- /dev/null +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/RefreshJobStateUpdaterTest.java @@ -0,0 +1,136 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.config.persistence; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.StateType; +import io.airbyte.config.StateWrapper; +import io.airbyte.config.persistence.domain.StreamRefresh; +import io.airbyte.protocol.models.AirbyteGlobalState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.StreamDescriptor; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.UUID; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class RefreshJobStateUpdaterTest { + + private StatePersistence statePersistence; + private RefreshJobStateUpdater refreshJobStateUpdater; + + @BeforeEach + public void init() { + statePersistence = mock(StatePersistence.class); + refreshJobStateUpdater = new RefreshJobStateUpdater(statePersistence); + } + + @Test + public void streamStateTest() throws IOException { + final UUID connectionId = UUID.randomUUID(); + final String streamToRefresh = "name"; + final String streamToNotRefresh = "stream-not-refresh"; + final String streamNamespace = "namespace"; + final AirbyteStateMessage stateMessageFromRefreshStream = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToRefresh).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor", 1)))); + + final AirbyteStateMessage stateMessageFromNonRefreshStream = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToNotRefresh).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor-2", 2)))); + + final StateWrapper stateWrapper = new StateWrapper().withStateType(StateType.STREAM) + .withStateMessages(Arrays.asList(stateMessageFromRefreshStream, stateMessageFromNonRefreshStream)); + + refreshJobStateUpdater.updateStateWrapperForRefresh(connectionId, stateWrapper, + List.of(new StreamRefresh(UUID.randomUUID(), connectionId, streamToRefresh, streamNamespace, null))); + final StateWrapper expected = + new 
StateWrapper().withStateType(StateType.STREAM).withStateMessages(Collections.singletonList(stateMessageFromNonRefreshStream)); + verify(statePersistence).updateOrCreateState(connectionId, expected); + } + + @Test + public void globalStateTest() throws IOException { + final UUID connectionId = UUID.randomUUID(); + final String streamToRefresh = "name"; + final String streamToNotRefresh = "stream-not-refresh"; + final String streamNamespace = "namespace"; + final AirbyteStreamState stateMessageFromRefreshStream = new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToRefresh).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor", 1))); + + final AirbyteStreamState stateMessageFromNonRefreshStream = new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToNotRefresh).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor-2", 2))); + + final JsonNode sharedState = Jsons.jsonNode(ImmutableMap.of("shared-state", 5)); + final AirbyteStateMessage existingStateMessage = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState().withSharedState(sharedState) + .withStreamStates(Arrays.asList(stateMessageFromRefreshStream, stateMessageFromNonRefreshStream))); + + final StateWrapper stateWrapper = new StateWrapper().withStateType(StateType.GLOBAL).withGlobal(existingStateMessage); + + final AirbyteStateMessage expectedStateMessage = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal( + new AirbyteGlobalState().withSharedState(sharedState).withStreamStates(Collections.singletonList(stateMessageFromNonRefreshStream))); + + refreshJobStateUpdater.updateStateWrapperForRefresh(connectionId, stateWrapper, + List.of(new StreamRefresh(UUID.randomUUID(), connectionId, streamToRefresh, streamNamespace, null))); + + final StateWrapper expected = new StateWrapper().withStateType(StateType.GLOBAL).withGlobal(expectedStateMessage); + verify(statePersistence).updateOrCreateState(connectionId, expected); + } + + @Test + public void fullGlobalState() throws IOException { + final UUID connectionId = UUID.randomUUID(); + final String streamToRefresh = "name"; + final String streamToRefresh2 = "stream-refresh2"; + final String streamNamespace = "namespace"; + + final AirbyteStreamState stateMessageFromRefreshStream = new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToRefresh).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor", 1))); + + final AirbyteStreamState stateMessageFromNonRefreshStream = new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToRefresh2).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor-2", 2))); + + final JsonNode sharedState = Jsons.jsonNode(ImmutableMap.of("shared-state", 5)); + + final AirbyteStateMessage existingStateMessage = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState().withSharedState(sharedState) + .withStreamStates(Arrays.asList(stateMessageFromRefreshStream, stateMessageFromNonRefreshStream))); + + final StateWrapper stateWrapper = new StateWrapper().withStateType(StateType.GLOBAL).withGlobal(existingStateMessage); + + refreshJobStateUpdater.updateStateWrapperForRefresh(connectionId, stateWrapper, + List.of(new 
StreamRefresh(UUID.randomUUID(), connectionId, streamToRefresh, streamNamespace, null), + new StreamRefresh(UUID.randomUUID(), connectionId, streamToRefresh2, streamNamespace, null))); + final AirbyteStateMessage expectedStateMessage = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState().withSharedState(null).withStreamStates(Collections.emptyList())); + + final StateWrapper expected = new StateWrapper().withStateType(StateType.GLOBAL).withGlobal(expectedStateMessage); + verify(statePersistence).updateOrCreateState(connectionId, expected); + } + +} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java index dc485719c41..c9c59e0cc76 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java @@ -288,7 +288,6 @@ class UserAccessTests { void setup() throws IOException, JsonValidationException, SQLException { truncateAllTables(); - final PermissionPersistence permissionPersistence = new PermissionPersistence(database); final OrganizationPersistence organizationPersistence = new OrganizationPersistence(database); organizationPersistence.createOrganization(ORG); @@ -303,7 +302,7 @@ void setup() throws IOException, JsonValidationException, SQLException { for (final Permission permission : List.of(ORG_MEMBER_USER_PERMISSION, ORG_READER_PERMISSION, WORKSPACE_2_READER_PERMISSION, WORKSPACE_3_READER_PERMISSION, BOTH_USER_WORKSPACE_PERMISSION, BOTH_USER_ORGANIZATION_PERMISSION)) { - permissionPersistence.writePermission(permission); + BaseConfigDatabaseTest.writePermission(permission); } } diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java index e62556886b6..2bd01503104 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java @@ -36,7 +36,6 @@ import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; -import io.airbyte.data.services.WorkspaceService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; @@ -73,9 +72,7 @@ class WorkspacePersistenceTest extends BaseConfigDatabaseTest { private ConfigRepository configRepository; private WorkspacePersistence workspacePersistence; - private PermissionPersistence permissionPersistence; private UserPersistence userPersistence; - private WorkspaceService workspaceService; private FeatureFlagClient featureFlagClient; private SecretsRepositoryReader secretsRepositoryReader; private SecretsRepositoryWriter secretsRepositoryWriter; @@ -87,8 +84,6 @@ void setup() throws Exception { secretsRepositoryReader = mock(SecretsRepositoryReader.class); secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); secretPersistenceConfigService = 
mock(SecretPersistenceConfigService.class); - workspaceService = spy(new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, - secretPersistenceConfigService)); final ConnectionService connectionService = mock(ConnectionService.class); final ScopedConfigurationService scopedConfigurationService = mock(ScopedConfigurationService.class); @@ -126,7 +121,6 @@ void setup() throws Exception { secretsRepositoryWriter, secretPersistenceConfigService))); workspacePersistence = new WorkspacePersistence(database); - permissionPersistence = new PermissionPersistence(database); userPersistence = new UserPersistence(database); final OrganizationPersistence organizationPersistence = new OrganizationPersistence(database); @@ -447,14 +441,14 @@ void testListWorkspacesByUserIdWithKeywordWithPagination() throws Exception { configRepository.writeStandardWorkspaceNoSecrets(workspace2); // create a workspace permission for workspace 1 - permissionPersistence.writePermission(new Permission() + BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withWorkspaceId(workspaceId1) .withUserId(userId) .withPermissionType(PermissionType.WORKSPACE_OWNER)); // create an org permission that should grant access to workspace 2 and 3 - permissionPersistence.writePermission(new Permission() + BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withOrganizationId(MockData.ORGANIZATION_ID_2) .withUserId(userId) @@ -510,14 +504,14 @@ void testListWorkspacesByUserIdWithoutKeywordWithoutPagination() throws Exceptio configRepository.writeStandardWorkspaceNoSecrets(workspace3); // create a workspace-level permission for workspace 1 - permissionPersistence.writePermission(new Permission() + BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withWorkspaceId(workspace1Id) .withUserId(userId) .withPermissionType(PermissionType.WORKSPACE_READER)); // create an org-level permission that should grant access to workspace 2 - permissionPersistence.writePermission(new Permission() + BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withOrganizationId(MockData.ORGANIZATION_ID_2) .withUserId(userId) @@ -525,7 +519,7 @@ void testListWorkspacesByUserIdWithoutKeywordWithoutPagination() throws Exceptio // create an org-member permission that should NOT grant access to workspace 3, because // org-member is too low of a permission to grant read-access to workspaces in the org. 
- permissionPersistence.writePermission(new Permission() + BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withOrganizationId(MockData.ORGANIZATION_ID_3) .withUserId(userId) diff --git a/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/RepositoryTestSetup.kt b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/RepositoryTestSetup.kt new file mode 100644 index 00000000000..e5bf540a567 --- /dev/null +++ b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/RepositoryTestSetup.kt @@ -0,0 +1,213 @@ +package io.airbyte.config.persistence + +import io.airbyte.config.ActorDefinitionVersion +import io.airbyte.config.DestinationConnection +import io.airbyte.config.Geography +import io.airbyte.config.SourceConnection +import io.airbyte.config.StandardDestinationDefinition +import io.airbyte.config.StandardSourceDefinition +import io.airbyte.config.StandardSync +import io.airbyte.config.StandardWorkspace +import io.airbyte.config.SupportLevel +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater +import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl +import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl +import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl +import io.airbyte.data.services.impls.jooq.WorkspaceServiceJooqImpl +import io.airbyte.db.factory.DSLContextFactory +import io.airbyte.db.instance.test.TestDatabaseProviders +import io.micronaut.context.ApplicationContext +import io.micronaut.context.env.PropertySource +import io.micronaut.data.connection.jdbc.advice.DelegatingDataSource +import io.mockk.every +import io.mockk.mockk +import org.jooq.DSLContext +import org.jooq.SQLDialect +import org.junit.jupiter.api.AfterAll +import org.junit.jupiter.api.BeforeAll +import org.testcontainers.containers.PostgreSQLContainer +import java.util.UUID +import javax.sql.DataSource + +open class RepositoryTestSetup { + companion object { + val connectionId1 = UUID.randomUUID() + val connectionId2 = UUID.randomUUID() + private lateinit var context: ApplicationContext + private lateinit var jooqDslContext: DSLContext + + // we run against an actual database to ensure micronaut data and jooq properly integrate + private val container: PostgreSQLContainer<*> = + PostgreSQLContainer("postgres:13-alpine") + .withDatabaseName("airbyte") + .withUsername("docker") + .withPassword("docker") + + @BeforeAll + @JvmStatic + fun setup() { + container.start() + // set the micronaut datasource properties to match our container we started up + context = + ApplicationContext.run( + PropertySource.of( + "test", + mapOf( + "datasources.config.driverClassName" to "org.postgresql.Driver", + "datasources.config.db-type" to "postgres", + "datasources.config.dialect" to "POSTGRES", + "datasources.config.url" to container.jdbcUrl, + "datasources.config.username" to container.username, + "datasources.config.password" to container.password, + ), + ), + ) + + // removes micronaut transactional wrapper that doesn't play nice with our non-micronaut factories + val dataSource = (context.getBean(DataSource::class.java) as DelegatingDataSource).targetDataSource + jooqDslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES) + val databaseProviders = TestDatabaseProviders(dataSource, jooqDslContext) + + // this line is what runs the migrations + val database = databaseProviders.createNewConfigsDatabase() + + val workspaceId = 
UUID.randomUUID() + val workspaceService = + WorkspaceServiceJooqImpl( + database, + mockk(), + mockk(), + mockk(), + mockk(), + ) + + workspaceService.writeStandardWorkspaceNoSecrets( + StandardWorkspace() + .withWorkspaceId(workspaceId) + .withDefaultGeography(Geography.US) + .withName("") + .withSlug("") + .withInitialSetupComplete(true), + ) + + val actorDefinitionUpdate: ActorDefinitionVersionUpdater = mockk() + + every { actorDefinitionUpdate.updateSourceDefaultVersion(any(), any(), any()) } returns Unit + every { actorDefinitionUpdate.updateDestinationDefaultVersion(any(), any(), any()) } returns Unit + + val sourceJooq = + SourceServiceJooqImpl( + database, + mockk(), + mockk(), + mockk(), + mockk(), + mockk(), + actorDefinitionUpdate, + ) + + val sourceDefinitionId = UUID.randomUUID() + val sourceDefinitionVersionId = UUID.randomUUID() + + sourceJooq.writeConnectorMetadata( + StandardSourceDefinition() + .withSourceDefinitionId(sourceDefinitionId) + .withName("sourceDef"), + ActorDefinitionVersion() + .withVersionId(sourceDefinitionVersionId) + .withActorDefinitionId(sourceDefinitionId) + .withDockerRepository("") + .withDockerImageTag("") + .withSupportState(ActorDefinitionVersion.SupportState.SUPPORTED) + .withSupportLevel(SupportLevel.CERTIFIED), + listOf(), + ) + + val actorDefinitionService = + ActorDefinitionServiceJooqImpl( + database, + ) + actorDefinitionService.updateActorDefinitionDefaultVersionId(sourceDefinitionId, sourceDefinitionVersionId) + + val sourceId = UUID.randomUUID() + sourceJooq.writeSourceConnectionNoSecrets( + SourceConnection() + .withSourceId(sourceId) + .withName("source") + .withSourceDefinitionId(sourceDefinitionId) + .withDefaultVersionId(sourceDefinitionVersionId) + .withWorkspaceId(workspaceId), + ) + + val destinationService = + DestinationServiceJooqImpl( + database, + mockk(), + mockk(), + mockk(), + mockk(), + mockk(), + actorDefinitionUpdate, + ) + + val destinationDefinitionId = UUID.randomUUID() + val destinationDefinitionVersionId = UUID.randomUUID() + destinationService.writeConnectorMetadata( + StandardDestinationDefinition() + .withDestinationDefinitionId(destinationDefinitionId) + .withName("sourceDef"), + ActorDefinitionVersion() + .withVersionId(destinationDefinitionVersionId) + .withActorDefinitionId(destinationDefinitionId) + .withDockerRepository("") + .withDockerImageTag("") + .withSupportState(ActorDefinitionVersion.SupportState.SUPPORTED) + .withSupportLevel(SupportLevel.CERTIFIED), + listOf(), + ) + + actorDefinitionService.updateActorDefinitionDefaultVersionId(destinationDefinitionId, destinationDefinitionVersionId) + + val destinationId = UUID.randomUUID() + destinationService.writeDestinationConnectionNoSecrets( + DestinationConnection() + .withDestinationId(destinationId) + .withName("destination") + .withDestinationDefinitionId(destinationDefinitionId) + .withDefaultVersionId(destinationDefinitionVersionId) + .withWorkspaceId(workspaceId), + ) + + val connectionRepo = StandardSyncPersistence(database) + connectionRepo.writeStandardSync( + StandardSync() + .withConnectionId(connectionId1) + .withGeography(Geography.US) + .withSourceId(sourceId) + .withDestinationId(destinationId) + .withName("not null") + .withBreakingChange(true), + ) + + connectionRepo.writeStandardSync( + StandardSync() + .withConnectionId(connectionId2) + .withGeography(Geography.US) + .withSourceId(sourceId) + .withDestinationId(destinationId) + .withName("not null") + .withBreakingChange(true), + ) + } + + @AfterAll + @JvmStatic + fun dbDown() { + 
container.close()
+    }
+  }
+
+  fun <T> getRepository(clazz: Class<T>): T {
+    return context.getBean(clazz)
+  }
+}
diff --git a/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/StreamGenerationRepositoryTest.kt b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/StreamGenerationRepositoryTest.kt
new file mode 100644
index 00000000000..62ac76a1209
--- /dev/null
+++ b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/StreamGenerationRepositoryTest.kt
@@ -0,0 +1,120 @@
+package io.airbyte.config.persistence
+
+import io.airbyte.config.persistence.domain.Generation
+import io.airbyte.config.persistence.domain.StreamGeneration
+import io.micronaut.context.env.Environment
+import io.micronaut.test.extensions.junit5.annotation.MicronautTest
+import org.assertj.core.api.Assertions.assertThat
+import org.junit.jupiter.api.AfterEach
+import org.junit.jupiter.api.Assertions.assertEquals
+import org.junit.jupiter.api.Assertions.assertTrue
+import org.junit.jupiter.api.Test
+
+@MicronautTest(environments = [Environment.TEST])
+class StreamGenerationRepositoryTest : RepositoryTestSetup() {
+  @AfterEach
+  fun cleanDb() {
+    getRepository(StreamGenerationRepository::class.java).deleteAll()
+  }
+
+  @Test
+  fun `test db insertion`() {
+    val streamGeneration =
+      StreamGeneration(
+        connectionId = connectionId1,
+        streamName = "sname",
+        streamNamespace = "snamespace",
+        generationId = 0,
+        startJobId = 0,
+      )
+
+    getRepository(StreamGenerationRepository::class.java).save(streamGeneration)
+
+    assertEquals(1, getRepository(StreamGenerationRepository::class.java).findByConnectionId(streamGeneration.connectionId).size)
+  }
+
+  @Test
+  fun `find by connection id and stream name`() {
+    val streamGeneration =
+      StreamGeneration(
+        connectionId = connectionId1,
+        streamName = "sname1",
+        streamNamespace = "snamespace1",
+        generationId = 0,
+        startJobId = 0,
+      )
+
+    getRepository(StreamGenerationRepository::class.java).save(streamGeneration)
+
+    val streamGeneration2 =
+      StreamGeneration(
+        connectionId = connectionId1,
+        streamName = "sname2",
+        streamNamespace = "snamespace2",
+        generationId = 1,
+        startJobId = 1,
+      )
+
+    getRepository(StreamGenerationRepository::class.java).save(streamGeneration2)
+
+    val streamGeneration3 =
+      StreamGeneration(
+        connectionId = connectionId2,
+        streamName = "sname3",
+        generationId = 2,
+        startJobId = 2,
+      )
+
+    getRepository(StreamGenerationRepository::class.java).save(streamGeneration3)
+
+    val streamGenerationForConnectionIds = getRepository(StreamGenerationRepository::class.java).findByConnectionId(connectionId1)
+    assertEquals(2, streamGenerationForConnectionIds.size)
+
+    val maxGenerationOfStreamsByConnectionId1 =
+      getRepository(
+        StreamGenerationRepository::class.java,
+      ).getMaxGenerationOfStreamsForConnectionId(connectionId1)
+    val expectedRecord1 = Generation("sname1", "snamespace1", 0)
+    val expectedRecord2 = Generation("sname2", "snamespace2", 1)
+    assertEquals(2, maxGenerationOfStreamsByConnectionId1.size)
+    assertThat(maxGenerationOfStreamsByConnectionId1).containsExactlyInAnyOrder(expectedRecord1, expectedRecord2)
+
+    val maxGenerationOfStreamsByConnectionId2 =
+      getRepository(
+        StreamGenerationRepository::class.java,
+      ).getMaxGenerationOfStreamsForConnectionId(connectionId2)
+    assertEquals(1, maxGenerationOfStreamsByConnectionId2.size)
+    val expectedRecord3 = Generation(streamName = "sname3", generationId = 2)
+
assertThat(maxGenerationOfStreamsByConnectionId2).containsExactlyInAnyOrder(expectedRecord3) + } + + @Test + fun `delete by connection id`() { + val streamGeneration = + StreamGeneration( + connectionId = connectionId1, + streamName = "sname1", + streamNamespace = "snamespace1", + generationId = 0, + startJobId = 0, + ) + + getRepository(StreamGenerationRepository::class.java).save(streamGeneration) + + val streamGeneration2 = + StreamGeneration( + connectionId = connectionId2, + streamName = "sname2", + streamNamespace = "sname2", + generationId = 1, + startJobId = 1, + ) + + getRepository(StreamGenerationRepository::class.java).save(streamGeneration2) + + getRepository(StreamGenerationRepository::class.java).deleteByConnectionId(streamGeneration.connectionId) + + assertTrue(getRepository(StreamGenerationRepository::class.java).findByConnectionId(streamGeneration.connectionId).isEmpty()) + assertTrue(getRepository(StreamGenerationRepository::class.java).findByConnectionId(streamGeneration2.connectionId).isNotEmpty()) + } +} diff --git a/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/StreamRefreshesRepositoryTest.kt b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/StreamRefreshesRepositoryTest.kt index b26aa7d7cdb..fbb6f7cd0d5 100644 --- a/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/StreamRefreshesRepositoryTest.kt +++ b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/StreamRefreshesRepositoryTest.kt @@ -1,309 +1,140 @@ package io.airbyte.config.persistence -import io.airbyte.config.ActorDefinitionVersion -import io.airbyte.config.DestinationConnection -import io.airbyte.config.Geography -import io.airbyte.config.SourceConnection -import io.airbyte.config.StandardDestinationDefinition -import io.airbyte.config.StandardSourceDefinition -import io.airbyte.config.StandardSync -import io.airbyte.config.StandardWorkspace -import io.airbyte.config.SupportLevel import io.airbyte.config.persistence.domain.StreamRefresh -import io.airbyte.config.persistence.domain.StreamRefreshPK -import io.airbyte.data.helpers.ActorDefinitionVersionUpdater -import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl -import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl -import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl -import io.airbyte.data.services.impls.jooq.WorkspaceServiceJooqImpl -import io.airbyte.db.factory.DSLContextFactory -import io.airbyte.db.instance.test.TestDatabaseProviders -import io.micronaut.context.ApplicationContext import io.micronaut.context.env.Environment -import io.micronaut.context.env.PropertySource -import io.micronaut.data.connection.jdbc.advice.DelegatingDataSource import io.micronaut.test.extensions.junit5.annotation.MicronautTest -import io.mockk.every -import io.mockk.mockk -import org.jooq.DSLContext -import org.jooq.SQLDialect -import org.junit.jupiter.api.AfterAll import org.junit.jupiter.api.AfterEach import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Assertions.assertTrue -import org.junit.jupiter.api.BeforeAll import org.junit.jupiter.api.Test -import org.testcontainers.containers.PostgreSQLContainer -import java.util.UUID -import javax.sql.DataSource @MicronautTest(environments = [Environment.TEST]) -class StreamRefreshesRepositoryTest { - companion object { - private val connectionId1 = UUID.randomUUID() - private val connectionId2 = UUID.randomUUID() - private 
lateinit var context: ApplicationContext - lateinit var streamRefreshesRepository: StreamRefreshesRepository - private lateinit var jooqDslContext: DSLContext - - // we run against an actual database to ensure micronaut data and jooq properly integrate - private val container: PostgreSQLContainer<*> = - PostgreSQLContainer("postgres:13-alpine") - .withDatabaseName("airbyte") - .withUsername("docker") - .withPassword("docker") - - @BeforeAll - @JvmStatic - fun setup() { - container.start() - // set the micronaut datasource properties to match our container we started up - context = - ApplicationContext.run( - PropertySource.of( - "test", - mapOf( - "datasources.default.driverClassName" to "org.postgresql.Driver", - "datasources.default.db-type" to "postgres", - "datasources.default.dialect" to "POSTGRES", - "datasources.default.url" to container.jdbcUrl, - "datasources.default.username" to container.username, - "datasources.default.password" to container.password, - ), - ), - ) - - // removes micronaut transactional wrapper that doesn't play nice with our non-micronaut factories - val dataSource = (context.getBean(DataSource::class.java) as DelegatingDataSource).targetDataSource - jooqDslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES) - val databaseProviders = TestDatabaseProviders(dataSource, jooqDslContext) - - // this line is what runs the migrations - val database = databaseProviders.createNewConfigsDatabase() - streamRefreshesRepository = context.getBean(StreamRefreshesRepository::class.java) - - val workspaceId = UUID.randomUUID() - val workspaceService = - WorkspaceServiceJooqImpl( - database, - mockk(), - mockk(), - mockk(), - mockk(), - ) - - workspaceService.writeStandardWorkspaceNoSecrets( - StandardWorkspace() - .withWorkspaceId(workspaceId) - .withDefaultGeography(Geography.US) - .withName("") - .withSlug("") - .withInitialSetupComplete(true), - ) - - val actorDefinitionUpdate: ActorDefinitionVersionUpdater = mockk() - - every { actorDefinitionUpdate.updateSourceDefaultVersion(any(), any(), any()) } returns Unit - every { actorDefinitionUpdate.updateDestinationDefaultVersion(any(), any(), any()) } returns Unit - - val sourceJooq = - SourceServiceJooqImpl( - database, - mockk(), - mockk(), - mockk(), - mockk(), - mockk(), - actorDefinitionUpdate, - ) - - val sourceDefinitionId = UUID.randomUUID() - val sourceDefinitionVersionId = UUID.randomUUID() - - sourceJooq.writeConnectorMetadata( - StandardSourceDefinition() - .withSourceDefinitionId(sourceDefinitionId) - .withName("sourceDef"), - ActorDefinitionVersion() - .withVersionId(sourceDefinitionVersionId) - .withActorDefinitionId(sourceDefinitionId) - .withDockerRepository("") - .withDockerImageTag("") - .withSupportState(ActorDefinitionVersion.SupportState.SUPPORTED) - .withSupportLevel(SupportLevel.CERTIFIED), - listOf(), - ) - - val actorDefinitionService = - ActorDefinitionServiceJooqImpl( - database, - ) - actorDefinitionService.updateActorDefinitionDefaultVersionId(sourceDefinitionId, sourceDefinitionVersionId) - - val sourceId = UUID.randomUUID() - sourceJooq.writeSourceConnectionNoSecrets( - SourceConnection() - .withSourceId(sourceId) - .withName("source") - .withSourceDefinitionId(sourceDefinitionId) - .withDefaultVersionId(sourceDefinitionVersionId) - .withWorkspaceId(workspaceId), - ) - - val destinationService = - DestinationServiceJooqImpl( - database, - mockk(), - mockk(), - mockk(), - mockk(), - mockk(), - actorDefinitionUpdate, - ) - - val destinationDefinitionId = UUID.randomUUID() - val 
destinationDefinitionVersionId = UUID.randomUUID() - destinationService.writeConnectorMetadata( - StandardDestinationDefinition() - .withDestinationDefinitionId(destinationDefinitionId) - .withName("sourceDef"), - ActorDefinitionVersion() - .withVersionId(destinationDefinitionVersionId) - .withActorDefinitionId(destinationDefinitionId) - .withDockerRepository("") - .withDockerImageTag("") - .withSupportState(ActorDefinitionVersion.SupportState.SUPPORTED) - .withSupportLevel(SupportLevel.CERTIFIED), - listOf(), - ) - - actorDefinitionService.updateActorDefinitionDefaultVersionId(destinationDefinitionId, destinationDefinitionVersionId) - - val destinationId = UUID.randomUUID() - destinationService.writeDestinationConnectionNoSecrets( - DestinationConnection() - .withDestinationId(destinationId) - .withName("destination") - .withDestinationDefinitionId(destinationDefinitionId) - .withDefaultVersionId(destinationDefinitionVersionId) - .withWorkspaceId(workspaceId), - ) - - val connectionRepo = StandardSyncPersistence(database) - connectionRepo.writeStandardSync( - StandardSync() - .withConnectionId(connectionId1) - .withGeography(Geography.US) - .withSourceId(sourceId) - .withDestinationId(destinationId) - .withName("not null") - .withBreakingChange(true), - ) - - connectionRepo.writeStandardSync( - StandardSync() - .withConnectionId(connectionId2) - .withGeography(Geography.US) - .withSourceId(sourceId) - .withDestinationId(destinationId) - .withName("not null") - .withBreakingChange(true), - ) - } - - @AfterAll - @JvmStatic - fun dbDown() { - container.close() - } - } - +class StreamRefreshesRepositoryTest : RepositoryTestSetup() { @AfterEach fun cleanDb() { - streamRefreshesRepository.deleteAll() + getRepository(StreamRefreshesRepository::class.java).deleteAll() } @Test fun `test db insertion`() { val streamRefresh = StreamRefresh( - StreamRefreshPK( - connectionId = connectionId1, - streamName = "sname", - streamNamespace = "snamespace", - ), + connectionId = connectionId1, + streamName = "sname", + streamNamespace = "snamespace", ) - streamRefreshesRepository.save(streamRefresh) - - assertTrue(streamRefreshesRepository.existsById(streamRefresh.pk)) + getRepository(StreamRefreshesRepository::class.java).save(streamRefresh) + assertTrue(getRepository(StreamRefreshesRepository::class.java).existsByConnectionId(streamRefresh.connectionId)) } @Test fun `find by connection id`() { val streamRefresh1 = StreamRefresh( - StreamRefreshPK( - connectionId = connectionId1, - streamName = "sname1", - streamNamespace = "snamespace1", - ), + connectionId = connectionId1, + streamName = "sname1", + streamNamespace = "snamespace1", ) - streamRefreshesRepository.save(streamRefresh1) + getRepository(StreamRefreshesRepository::class.java).save(streamRefresh1) val streamRefresh2 = StreamRefresh( - StreamRefreshPK( - connectionId = connectionId1, - streamName = "sname2", - streamNamespace = "snamespace2", - ), + connectionId = connectionId1, + streamName = "sname2", + streamNamespace = "snamespace2", ) - streamRefreshesRepository.save(streamRefresh2) + getRepository(StreamRefreshesRepository::class.java).save(streamRefresh2) val streamRefresh3 = StreamRefresh( - StreamRefreshPK( - connectionId = connectionId2, - streamName = "sname3", - streamNamespace = "snamespace3", - ), + connectionId = connectionId2, + streamName = "sname3", + streamNamespace = "snamespace3", ) - streamRefreshesRepository.save(streamRefresh3) + getRepository(StreamRefreshesRepository::class.java).save(streamRefresh3) - assertEquals(2, 
streamRefreshesRepository.findByPkConnectionId(connectionId1).size) + assertEquals(2, getRepository(StreamRefreshesRepository::class.java).findByConnectionId(connectionId1).size) } @Test fun `delete by connection id`() { val streamRefresh1 = StreamRefresh( - StreamRefreshPK( - connectionId = connectionId1, - streamName = "sname1", - streamNamespace = "snamespace1", - ), + connectionId = connectionId1, + streamName = "sname1", + streamNamespace = "snamespace1", ) - streamRefreshesRepository.save(streamRefresh1) + getRepository(StreamRefreshesRepository::class.java).save(streamRefresh1) val streamRefresh2 = StreamRefresh( - StreamRefreshPK( - connectionId = connectionId2, - streamName = "sname2", - streamNamespace = "snamespace2", - ), + connectionId = connectionId2, + streamName = "sname2", + streamNamespace = "snamespace2", ) - streamRefreshesRepository.save(streamRefresh2) + getRepository(StreamRefreshesRepository::class.java).save(streamRefresh2) - streamRefreshesRepository.deleteByPkConnectionId(streamRefresh1.pk.connectionId) + getRepository(StreamRefreshesRepository::class.java).deleteByConnectionId(streamRefresh1.connectionId) - assertTrue(streamRefreshesRepository.findById(streamRefresh1.pk).isEmpty) - assertTrue(streamRefreshesRepository.findById(streamRefresh2.pk).isPresent) + assertTrue(getRepository(StreamRefreshesRepository::class.java).findByConnectionId(streamRefresh1.connectionId).isEmpty()) + assertTrue(getRepository(StreamRefreshesRepository::class.java).findByConnectionId(streamRefresh2.connectionId).isNotEmpty()) + } + + @Test + fun `delete by connection id and stream name and namespace`() { + val streamRefresh1 = + StreamRefresh( + connectionId = connectionId1, + streamName = "sname1", + streamNamespace = "snamespace1", + ) + + val streamRefresh2 = + StreamRefresh( + connectionId = connectionId1, + streamName = "sname2", + streamNamespace = "snamespace2", + ) + + val streamRefresh3 = + StreamRefresh( + connectionId = connectionId1, + streamName = "sname3", + ) + + getRepository(StreamRefreshesRepository::class.java).saveAll(listOf(streamRefresh1, streamRefresh2, streamRefresh3)) + + getRepository( + StreamRefreshesRepository::class.java, + ).deleteByConnectionIdAndStreamNameAndStreamNamespace(connectionId1, streamRefresh3.streamName, streamRefresh3.streamNamespace) + val refreshes: List<StreamRefresh> = getRepository(StreamRefreshesRepository::class.java).findByConnectionId(connectionId1) + assertEquals(2, refreshes.size) + refreshes.forEach { + assertEquals(connectionId1, it.connectionId) + if (streamRefresh1.streamName.equals(it.streamName)) { + assertEquals(streamRefresh1.streamNamespace, it.streamNamespace) + } else if (streamRefresh2.streamName.equals(it.streamName)) { + assertEquals(streamRefresh2.streamNamespace, it.streamNamespace) + } else { + throw RuntimeException("Unknown stream name " + it.streamName) + } + } + + getRepository( + StreamRefreshesRepository::class.java, + ).deleteByConnectionIdAndStreamNameAndStreamNamespace(connectionId1, streamRefresh2.streamName, streamRefresh2.streamNamespace) + val refreshes2: List<StreamRefresh> = getRepository(StreamRefreshesRepository::class.java).findByConnectionId(connectionId1) + assertEquals(1, refreshes2.size) + refreshes2.forEach { + assertEquals(connectionId1, it.connectionId) + assertEquals(streamRefresh1.streamName, it.streamName) + assertEquals(streamRefresh1.streamNamespace, it.streamNamespace) + } } } diff --git a/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/helper/GenerationBumperTest.kt
b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/helper/GenerationBumperTest.kt new file mode 100644 index 00000000000..468afa5600e --- /dev/null +++ b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/helper/GenerationBumperTest.kt @@ -0,0 +1,91 @@ +package io.airbyte.config.persistence.helper + +import io.airbyte.config.persistence.StreamGenerationRepository +import io.airbyte.config.persistence.domain.Generation +import io.airbyte.config.persistence.domain.StreamGeneration +import io.airbyte.config.persistence.domain.StreamRefresh +import io.mockk.every +import io.mockk.mockk +import io.mockk.slot +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import java.util.UUID + +class GenerationBumperTest { + val streamGenerationRepository: StreamGenerationRepository = mockk() + + val generationBumper = GenerationBumper(streamGenerationRepository) + + val connectionId = UUID.randomUUID() + val jobId = 456L + + val generations = + listOf( + Generation( + streamName = "name1", + streamNamespace = "namespace1", + generationId = 42, + ), + Generation( + streamName = "name2", + streamNamespace = "namespace2", + generationId = 42, + ), + ) + + @Test + fun `increase the generation properly`() { + every { streamGenerationRepository.getMaxGenerationOfStreamsForConnectionId(connectionId) } returns generations + val generationSlot = slot<List<StreamGeneration>>() + every { streamGenerationRepository.saveAll(capture(generationSlot)) } returns listOf() + + generationBumper.updateGenerationForStreams( + connectionId, + jobId, + listOf( + StreamRefresh( + connectionId = connectionId, + streamName = "name1", + streamNamespace = "namespace1", + ), + ), + ) + + val capturedStreamGenerations = generationSlot.captured + assertEquals(1, capturedStreamGenerations.size) + + val streamGeneration = capturedStreamGenerations[0] + assertEquals("name1", streamGeneration.streamName) + assertEquals("namespace1", streamGeneration.streamNamespace) + assertEquals(43, streamGeneration.generationId) + assertEquals(jobId, streamGeneration.startJobId) + } + + @Test + fun `increase the generation properly if generation is missing`() { + every { streamGenerationRepository.getMaxGenerationOfStreamsForConnectionId(connectionId) } returns generations + val generationSlot = slot<List<StreamGeneration>>() + every { streamGenerationRepository.saveAll(capture(generationSlot)) } returns listOf() + + generationBumper.updateGenerationForStreams( + connectionId, + jobId, + listOf( + StreamRefresh( + connectionId = connectionId, + streamName = "name3", + streamNamespace = "namespace3", + ), + ), + ) + + val capturedStreamGenerations = generationSlot.captured + assertEquals(1, capturedStreamGenerations.size) + + val streamGeneration = capturedStreamGenerations[0] + assertEquals("name3", streamGeneration.streamName) + assertEquals("namespace3", streamGeneration.streamNamespace) + assertEquals(1L, streamGeneration.generationId) + assertEquals(jobId, streamGeneration.startJobId) + } +} diff --git a/airbyte-config/config-secrets/build.gradle.kts b/airbyte-config/config-secrets/build.gradle.kts index 9c6ad784c90..51fab71214c 100644 --- a/airbyte-config/config-secrets/build.gradle.kts +++ b/airbyte-config/config-secrets/build.gradle.kts @@ -1,58 +1,58 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - `java-test-fixtures` - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + `java-test-fixtures` + kotlin("jvm") +
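Stepping back to the GenerationBumperTest above: the two cases pin down the bumping rule, namely that an existing maximum generation is incremented (42 becomes 43) and that a stream with no prior generation starts at 1. Below is a hedged sketch of logic consistent with those expectations; the shipped `GenerationBumper` may differ in any detail the tests do not constrain.

```kotlin
import io.airbyte.config.persistence.StreamGenerationRepository
import io.airbyte.config.persistence.domain.StreamGeneration
import io.airbyte.config.persistence.domain.StreamRefresh
import java.util.UUID

class GenerationBumper(private val streamGenerationRepository: StreamGenerationRepository) {
  fun updateGenerationForStreams(
    connectionId: UUID,
    jobId: Long,
    streamRefreshes: List<StreamRefresh>,
  ) {
    // Current maximum generation per stream, keyed by name and namespace.
    val maxGenerations =
      streamGenerationRepository.getMaxGenerationOfStreamsForConnectionId(connectionId)
        .associate { (it.streamName to it.streamNamespace) to it.generationId }

    val bumped =
      streamRefreshes.map { refresh ->
        // Streams with no recorded generation default to 0, so their first bump yields 1.
        val current = maxGenerations[refresh.streamName to refresh.streamNamespace] ?: 0L
        StreamGeneration(
          connectionId = connectionId,
          streamName = refresh.streamName,
          streamNamespace = refresh.streamNamespace,
          generationId = current + 1,
          startJobId = jobId,
        )
      }
    streamGenerationRepository.saveAll(bumped)
  }
}
```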
kotlin("kapt") } dependencies { - kapt(platform(libs.micronaut.platform)) - kapt(libs.bundles.micronaut.annotation.processor) + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) - api(libs.bundles.micronaut.annotation) - api(libs.bundles.micronaut.kotlin) - api(libs.kotlin.logging) - api(libs.slf4j.api) - api(libs.bundles.log4j) - api(libs.micronaut.jackson.databind) - api(libs.google.cloud.storage) - api(libs.micronaut.jooq) - api(libs.guava) - api(libs.bundles.secret.hydration) - api(libs.airbyte.protocol) - api(libs.jakarta.transaction.api) - api(libs.micronaut.data.tx) - api(libs.aws.java.sdk.sts) - api(project(":airbyte-commons")) + api(libs.bundles.micronaut.annotation) + api(libs.bundles.micronaut.kotlin) + api(libs.kotlin.logging) + api(libs.slf4j.api) + api(libs.bundles.log4j) + api(libs.micronaut.jackson.databind) + api(libs.google.cloud.storage) + api(libs.micronaut.jooq) + api(libs.guava) + api(libs.bundles.secret.hydration) + api(libs.airbyte.protocol) + api(libs.jakarta.transaction.api) + api(libs.micronaut.data.tx) + api(libs.aws.java.sdk.sts) + api(project(":airbyte-commons")) - /* - * Marked as "implementation" to avoid leaking these dependencies to services - * that only use the retrieval side of the secret infrastructure. The services - * that do need these dependencies will already have them declared, as they will - * need to define singletons from these modules in order for everything work. - */ - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-json-validation")) + /* + * Marked as "implementation" to avoid leaking these dependencies to services + * that only use the retrieval side of the secret infrastructure. The services + * that do need these dependencies will already have them declared, as they will + * need to define singletons from these modules in order for everything work. + */ + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-json-validation")) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.mockk) - testImplementation(libs.kotlin.test.runner.junit5) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.airbyte.protocol) - testImplementation(libs.apache.commons.lang) - testImplementation(libs.testcontainers.vault) - testImplementation(testFixtures(project(":airbyte-config:config-persistence"))) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockk) + testImplementation(libs.kotlin.test.runner.junit5) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.airbyte.protocol) + testImplementation(libs.apache.commons.lang) + testImplementation(libs.testcontainers.vault) + testImplementation(testFixtures(project(":airbyte-config:config-persistence"))) } // This is a workaround related to kaptBuild errors. It seems to be because there are no tests in cloud-airbyte-api-server. 
// TODO: this should be removed when we move to kotlin 1.9.20 // TODO: we should write tests afterEvaluate { - tasks.named("kaptGenerateStubsTestKotlin") { - enabled = false - } + tasks.named("kaptGenerateStubsTestKotlin") { + enabled = false + } } \ No newline at end of file diff --git a/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsHelpers.kt b/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsHelpers.kt index c3a371b247b..580000c81f6 100644 --- a/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsHelpers.kt +++ b/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsHelpers.kt @@ -496,52 +496,6 @@ return SecretCoordinate(coordinateBase, version) } - /** - * This method takes in the key (JSON key or HMAC key) of a workspace service account as a secret - * and generates a co-ordinate for the secret so that the secret can be written in secret - * persistence at the generated co-ordinate. - * - * @param newSecret The JSON key or HMAC key value - * @param secretReader To read the value from secret persistence for comparison with the new value - * @param workspaceId of the service account - * @param uuidSupplier provided to allow a test case to produce known UUIDs in order for easy * - * fixture creation. - * @param oldSecretCoordinate a nullable full coordinate (base+version) retrieved from the * - * previous config - * @param keyType HMAC or JSON key - * @return a coordinate (versioned reference to where the secret is stored in the persistence) - */ - fun convertServiceAccountCredsToSecret( - newSecret: String, - secretReader: ReadOnlySecretPersistence, - workspaceId: UUID, - uuidSupplier: Supplier<UUID>, - oldSecretCoordinate: JsonNode?, - keyType: String, - ): SecretCoordinateToPayload { - val oldSecretFullCoordinate = - if (oldSecretCoordinate != null && oldSecretCoordinate.has(COORDINATE_FIELD)) oldSecretCoordinate[COORDINATE_FIELD].asText() else null - val coordinateForStagingConfig: SecretCoordinate = - getSecretCoordinate( - "service_account_" + keyType + "_", - newSecret, - secretReader, - workspaceId, - uuidSupplier, - oldSecretFullCoordinate, - ) - return SecretCoordinateToPayload( - coordinateForStagingConfig, - newSecret, - Jsons.jsonNode<Map<String, String>>( - java.util.Map.of( - COORDINATE_FIELD, - coordinateForStagingConfig.fullCoordinate, - ), - ), - ) - } - /** * Takes in the secret coordinate in form of a JSON and fetches the secret from the store. * diff --git a/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsRepositoryWriter.kt b/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsRepositoryWriter.kt index 372b8fc4ba4..f1f3f3b48c5 100644 --- a/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsRepositoryWriter.kt +++ b/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsRepositoryWriter.kt @@ -13,11 +13,15 @@ import io.airbyte.validation.json.JsonValidationException import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.context.annotation.Requires import jakarta.inject.Singleton +import java.time.Duration +import java.time.Instant import java.util.Optional import java.util.UUID private val logger = KotlinLogging.logger {} +private val EPHEMERAL_SECRET_LIFE_DURATION = Duration.ofHours(2) + /** * This class takes secrets as arguments but never returns secrets as return values (even the ones * that are passed in as arguments).
It is responsible for writing connector secrets to the correct @@ -178,6 +182,8 @@ open class SecretsRepositoryWriter( * Takes in a connector configuration with secrets. Saves the secrets and returns the configuration * object with the secrets removed and replaced with pointers to the environment secret persistence. * + * This method is intended for ephemeral secrets, hence the lack of workspace. + * * @param fullConfig full config * @param spec connector specification * @return partial config @@ -186,13 +192,21 @@ open class SecretsRepositoryWriter( fullConfig: JsonNode, spec: ConnectorSpecification, ): JsonNode { - return splitSecretConfig(NO_WORKSPACE, fullConfig, spec, secretPersistence) + return splitSecretConfig( + NO_WORKSPACE, + fullConfig, + spec, + secretPersistence, + Instant.now().plus(EPHEMERAL_SECRET_LIFE_DURATION), + ) } /** * Takes in a connector configuration with secrets. Saves the secrets and returns the configuration * object with the secrets removed and replaced with pointers to the provided runtime secret persistence. * + * This method is intended for ephemeral secrets, hence the lack of workspace. + * * @param fullConfig full config * @param spec connector specification * @param runtimeSecretPersistence runtime secret persistence @@ -203,7 +217,13 @@ open class SecretsRepositoryWriter( spec: ConnectorSpecification, runtimeSecretPersistence: RuntimeSecretPersistence, ): JsonNode { - return splitSecretConfig(NO_WORKSPACE, fullConfig, spec, runtimeSecretPersistence) + return splitSecretConfig( + NO_WORKSPACE, + fullConfig, + spec, + runtimeSecretPersistence, + Instant.now().plus(EPHEMERAL_SECRET_LIFE_DURATION), + ) } private fun splitSecretConfig( @@ -211,6 +231,7 @@ open class SecretsRepositoryWriter( fullConfig: JsonNode, spec: ConnectorSpecification, secretPersistence: SecretPersistence, + expireTime: Instant? 
= null, ): JsonNode { val splitSecretConfig: SplitSecretConfig = SecretsHelpers.splitConfig( @@ -219,8 +240,9 @@ spec.connectionSpecification, secretPersistence, ) + // Write each secret, attaching the expiry when one is provided. splitSecretConfig.getCoordinateToPayload().forEach { (coordinate: SecretCoordinate, payload: String) -> - secretPersistence.write(coordinate, payload) + secretPersistence.writeWithExpiry(coordinate, payload, expireTime) } return splitSecretConfig.partialConfig } diff --git a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/GoogleSecretManagerPersistence.kt b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/GoogleSecretManagerPersistence.kt index ebe822fd8eb..60bb62312f7 100644 --- a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/GoogleSecretManagerPersistence.kt +++ b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/GoogleSecretManagerPersistence.kt @@ -7,6 +7,7 @@ package io.airbyte.config.secrets.persistence import com.google.api.gax.core.FixedCredentialsProvider import com.google.api.gax.rpc.NotFoundException import com.google.auth.oauth2.ServiceAccountCredentials +import com.google.cloud.Timestamp import com.google.cloud.secretmanager.v1.ProjectName import com.google.cloud.secretmanager.v1.Replication import com.google.cloud.secretmanager.v1.Secret @@ -24,6 +25,7 @@ import jakarta.inject.Named import jakarta.inject.Singleton import java.io.ByteArrayInputStream import java.nio.charset.StandardCharsets +import java.time.Instant private val logger = KotlinLogging.logger {} @@ -52,7 +54,7 @@ return response.payload.data.toStringUtf8() } } catch (e: NotFoundException) { - logger.warn(e) { "Unable to locate secret for coordinate ${coordinate.fullCoordinate}." } + logger.warn { "Unable to locate secret for coordinate ${coordinate.fullCoordinate}." } return "" } catch (e: Exception) { logger.error(e) { "Unable to read secret for coordinate ${coordinate.fullCoordinate}.
" } @@ -64,15 +66,7 @@ class GoogleSecretManagerPersistence( coordinate: SecretCoordinate, payload: String, ) { - googleSecretManagerServiceClient.createClient().use { client -> - if (read(coordinate).isEmpty()) { - val secretBuilder = Secret.newBuilder().setReplication(replicationPolicy) - client.createSecret(ProjectName.of(gcpProjectId), coordinate.fullCoordinate, secretBuilder.build()) - } - val name = SecretName.of(gcpProjectId, coordinate.fullCoordinate) - val secretPayload = SecretPayload.newBuilder().setData(ByteString.copyFromUtf8(payload)).build() - client.addSecretVersion(name, secretPayload) - } + writeWithExpiry(coordinate, payload) } companion object { @@ -88,6 +82,29 @@ class GoogleSecretManagerPersistence( .setAutomatic(Replication.Automatic.newBuilder().build()) .build() } + + override fun writeWithExpiry( + coordinate: SecretCoordinate, + payload: String, + expiry: Instant?, + ) { + googleSecretManagerServiceClient.createClient().use { client -> + if (read(coordinate).isEmpty()) { + val secretBuilder = Secret.newBuilder().setReplication(replicationPolicy) + + expiry?.let { + val expireTime = com.google.protobuf.Timestamp.newBuilder().setSeconds(it.epochSecond).build() + secretBuilder.setExpireTime(expireTime) + } + + client.createSecret(ProjectName.of(gcpProjectId), coordinate.fullCoordinate, secretBuilder.build()) + } + + val name = SecretName.of(gcpProjectId, coordinate.fullCoordinate) + val secretPayload = SecretPayload.newBuilder().setData(ByteString.copyFromUtf8(payload)).build() + client.addSecretVersion(name, secretPayload) + } + } } @Singleton diff --git a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/SecretPersistence.kt b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/SecretPersistence.kt index 55d00074252..c5f3f3719e5 100644 --- a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/SecretPersistence.kt +++ b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/SecretPersistence.kt @@ -5,6 +5,7 @@ package io.airbyte.config.secrets.persistence import io.airbyte.config.secrets.SecretCoordinate +import java.time.Instant /** * Provides a read-only interface to a backing secrets store similar to [SecretPersistence]. @@ -36,4 +37,13 @@ interface SecretPersistence : ReadOnlySecretPersistence { coordinate: SecretCoordinate, payload: String, ) + + fun writeWithExpiry( + coordinate: SecretCoordinate, + payload: String, + expiry: Instant? = null, + ) { + // Default implementation does not support expiry. 
+ write(coordinate, payload) + } } diff --git a/airbyte-config/config-secrets/src/test/kotlin/secrets/persistence/GoogleSecretManagerPersistenceTest.kt b/airbyte-config/config-secrets/src/test/kotlin/secrets/persistence/GoogleSecretManagerPersistenceTest.kt index 462fff0a76e..81cec8d7296 100644 --- a/airbyte-config/config-secrets/src/test/kotlin/secrets/persistence/GoogleSecretManagerPersistenceTest.kt +++ b/airbyte-config/config-secrets/src/test/kotlin/secrets/persistence/GoogleSecretManagerPersistenceTest.kt @@ -16,12 +16,15 @@ import com.google.cloud.secretmanager.v1.SecretVersion import com.google.cloud.secretmanager.v1.SecretVersionName import com.google.protobuf.ByteString import io.airbyte.config.secrets.SecretCoordinate +import io.airbyte.config.secrets.persistence.GoogleSecretManagerPersistence.Companion.replicationPolicy import io.grpc.Status import io.mockk.every import io.mockk.mockk import io.mockk.verify import org.junit.jupiter.api.Assertions import org.junit.jupiter.api.Test +import java.time.Duration +import java.time.Instant class GoogleSecretManagerPersistenceTest { @Test @@ -102,6 +105,43 @@ class GoogleSecretManagerPersistenceTest { verify { mockGoogleClient.addSecretVersion(any(), any()) } } + @Test + fun `test writing a secret with expiry via the client creates the secret with expiry`() { + val secret = "secret value" + val projectId = "test" + val coordinate = SecretCoordinate.fromFullCoordinate("secret_coordinate_v1") + val mockClient: GoogleSecretManagerServiceClient = mockk() + val mockGoogleClient: SecretManagerServiceClient = mockk() + val mockResponse: AccessSecretVersionResponse = mockk() + val mockPayload: SecretPayload = mockk() + val persistence = GoogleSecretManagerPersistence(projectId, mockClient) + + every { mockPayload.data } returns ByteString.copyFromUtf8(secret) + every { mockResponse.payload } returns mockPayload + every { mockGoogleClient.accessSecretVersion(ofType(SecretVersionName::class)) } throws + NotFoundException( + NullPointerException("test"), + GrpcStatusCode.of( + Status.Code.NOT_FOUND, + ), + false, + ) + every { mockGoogleClient.createSecret(any(), any(), any()) } returns mockk() + every { mockGoogleClient.addSecretVersion(any(), any()) } returns mockk() + every { mockGoogleClient.close() } returns Unit + every { mockClient.createClient() } returns mockGoogleClient + + val expiry = Instant.now().plus(Duration.ofMinutes(1)) + persistence.writeWithExpiry(coordinate, secret, expiry) + + val sb = + Secret.newBuilder().setReplication( + replicationPolicy, + ).setExpireTime(com.google.protobuf.Timestamp.newBuilder().setSeconds(expiry.epochSecond).build()).build() + verify { mockGoogleClient.createSecret(ProjectName.of("test"), coordinate.fullCoordinate, sb) } + verify { mockGoogleClient.addSecretVersion(any(), any()) } + } + @Test fun `test writing a secret via the client updates an existing secret`() { val secret = "secret value" diff --git a/airbyte-config/init/readme.md b/airbyte-config/init/README.md similarity index 100% rename from airbyte-config/init/readme.md rename to airbyte-config/init/README.md diff --git a/airbyte-config/init/build.gradle.kts b/airbyte-config/init/build.gradle.kts index d7db99f93d5..1c1dd12b3a5 100644 --- a/airbyte-config/init/build.gradle.kts +++ b/airbyte-config/init/build.gradle.kts @@ -1,51 +1,51 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.docker") + 
id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(libs.bundles.micronaut.annotation.processor) - api(libs.bundles.micronaut.annotation) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(libs.bundles.micronaut.annotation.processor) + api(libs.bundles.micronaut.annotation) - implementation(project(":airbyte-commons")) - implementation("commons-cli:commons-cli:1.4") - implementation(project(":airbyte-config:specs")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-data")) - implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-notification")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(project(":airbyte-persistence:job-persistence")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-json-validation")) - implementation(libs.guava) + implementation(project(":airbyte-commons")) + implementation("commons-cli:commons-cli:1.4") + implementation(project(":airbyte-config:specs")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-data")) + implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-notification")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(project(":airbyte-persistence:job-persistence")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-json-validation")) + implementation(libs.guava) - testImplementation(project(":airbyte-test-utils")) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(project(":airbyte-test-utils")) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) } airbyte { - docker { - imageName = "init" - } + docker { + imageName = "init" + } } val copyScripts = tasks.register("copyScripts") { - from("scripts") - into("build/airbyte/docker/bin/scripts") + from("scripts") + into("build/airbyte/docker/bin/scripts") } tasks.named("dockerBuildImage") { - dependsOn(copyScripts) + dependsOn(copyScripts) } tasks.processResources { - from("${project.rootDir}/airbyte-connector-builder-resources") + from("${project.rootDir}/airbyte-connector-builder-resources") } diff --git a/airbyte-config/specs/build.gradle.kts b/airbyte-config/specs/build.gradle.kts index 57bd49c27cb..beecd09b264 100644 --- a/airbyte-config/specs/build.gradle.kts +++ b/airbyte-config/specs/build.gradle.kts @@ -1,54 +1,54 @@ import de.undercouch.gradle.tasks.download.Download plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - id("de.undercouch.download") version "5.4.0" + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + id("de.undercouch.download") version "5.4.0" } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(libs.bundles.micronaut.annotation.processor) - - api(libs.bundles.micronaut.annotation) - - implementation(project(":airbyte-commons")) - 
implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-json-validation")) - - implementation(libs.commons.cli) - implementation(libs.commons.io) - implementation(platform(libs.fasterxml)) - implementation(libs.bundles.jackson) - implementation(libs.google.cloud.storage) - implementation(libs.micronaut.cache.caffeine) - implementation(libs.airbyte.protocol) - implementation(libs.okhttp) - - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.mockwebserver) - testImplementation(libs.junit.pioneer) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(libs.bundles.micronaut.annotation.processor) + + api(libs.bundles.micronaut.annotation) + + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-json-validation")) + + implementation(libs.commons.cli) + implementation(libs.commons.io) + implementation(platform(libs.fasterxml)) + implementation(libs.bundles.jackson) + implementation(libs.google.cloud.storage) + implementation(libs.micronaut.cache.caffeine) + implementation(libs.airbyte.protocol) + implementation(libs.okhttp) + + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.mockwebserver) + testImplementation(libs.junit.pioneer) } airbyte { - spotless { - excludes = listOf( - "src/main/resources/seed/oss_registry.json", - "src/main/resources/seed/local_oss_registry.json", - ) - } + spotless { + excludes = listOf( + "src/main/resources/seed/oss_registry.json", + "src/main/resources/seed/local_oss_registry.json", + ) + } } val downloadConnectorRegistry = tasks.register("downloadConnectorRegistry") { - src("https://connectors.airbyte.com/files/registries/v0/oss_registry.json") - dest(File(projectDir, "src/main/resources/seed/local_oss_registry.json")) - overwrite(true) - onlyIfModified(true) + src("https://connectors.airbyte.com/files/registries/v0/oss_registry.json") + dest(File(projectDir, "src/main/resources/seed/local_oss_registry.json")) + overwrite(true) + onlyIfModified(true) } tasks.processResources { - dependsOn(downloadConnectorRegistry) + dependsOn(downloadConnectorRegistry) } diff --git a/airbyte-connector-builder-resources/CDK_VERSION b/airbyte-connector-builder-resources/CDK_VERSION index b88fccfdcb8..89266660890 100644 --- a/airbyte-connector-builder-resources/CDK_VERSION +++ b/airbyte-connector-builder-resources/CDK_VERSION @@ -1 +1 @@ -0.78.1 +0.79.1 diff --git a/airbyte-connector-builder-server/Dockerfile b/airbyte-connector-builder-server/Dockerfile index 36872d8e885..6467bc79be8 100644 --- a/airbyte-connector-builder-server/Dockerfile +++ b/airbyte-connector-builder-server/Dockerfile @@ -2,7 +2,7 @@ ARG JAVA_PYTHON_BASE_IMAGE_VERSION=2.1.0 FROM airbyte/airbyte-base-java-python-image:${JAVA_PYTHON_BASE_IMAGE_VERSION} AS connector-builder-server # Set up CDK requirements -ARG CDK_VERSION=0.78.1 +ARG CDK_VERSION=0.79.1 ENV CDK_PYTHON=${PYENV_ROOT}/versions/${PYTHON_VERSION}/bin/python ENV CDK_ENTRYPOINT ${PYENV_ROOT}/versions/${PYTHON_VERSION}/lib/python3.9/site-packages/airbyte_cdk/connector_builder/main.py # Set up CDK diff --git a/airbyte-connector-builder-server/build.gradle.kts b/airbyte-connector-builder-server/build.gradle.kts index 23613e4d2a6..1cfe65ef841 100644 --- 
a/airbyte-connector-builder-server/build.gradle.kts +++ b/airbyte-connector-builder-server/build.gradle.kts @@ -1,160 +1,166 @@ import com.bmuschko.gradle.docker.tasks.image.DockerBuildImage -import java.util.Properties import org.openapitools.generator.gradle.plugin.tasks.GenerateTask +import java.util.Properties plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.docker") - id("org.openapi.generator") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.docker") + id("org.openapi.generator") + id("io.airbyte.gradle.publish") } dependencies { - // Micronaut dependencies) - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) - - implementation(libs.jackson.datatype) - implementation("com.googlecode.json-simple:json-simple:1.1.1") - - // Cloud service dependencies. These are not strictly necessary yet, but likely needed for any full-fledged cloud service) - implementation(libs.bundles.datadog) - // implementation(libs.bundles.temporal uncomment this when we start using temporal to invoke connector commands) - implementation(libs.sentry.java) - implementation(libs.guava) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.bundles.micronaut.cache) - implementation(libs.micronaut.http) - implementation(libs.micronaut.security) - implementation(libs.jakarta.annotation.api) - implementation(libs.jakarta.ws.rs.api) - - implementation(project(":airbyte-commons")) - - // OpenAPI code generation(dependencies) - implementation(libs.swagger.annotations) - - // Internal dependencies) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-protocol")) - implementation(project(":airbyte-commons-server")) - implementation(project(":airbyte-commons-worker")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-config:init")) - implementation(project(":airbyte-metrics:metrics-lib")) - - implementation(libs.airbyte.protocol) - - runtimeOnly(libs.snakeyaml) - - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - - testImplementation(libs.junit.pioneer) + // Micronaut dependencies) + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) + + implementation(libs.jackson.datatype) + implementation("com.googlecode.json-simple:json-simple:1.1.1") + + // Cloud service dependencies. 
These are not strictly necessary yet, but likely needed for any full-fledged cloud service) + implementation(libs.bundles.datadog) + // implementation(libs.bundles.temporal uncomment this when we start using temporal to invoke connector commands) + implementation(libs.sentry.java) + implementation(libs.guava) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.micronaut.cache) + implementation(libs.micronaut.http) + implementation(libs.micronaut.security) + implementation(libs.jakarta.annotation.api) + implementation(libs.jakarta.ws.rs.api) + + implementation(project(":airbyte-commons")) + + // OpenAPI code generation(dependencies) + implementation(libs.swagger.annotations) + + // Internal dependencies) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-protocol")) + implementation(project(":airbyte-commons-server")) + implementation(project(":airbyte-commons-worker")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-config:init")) + implementation(project(":airbyte-metrics:metrics-lib")) + + implementation(libs.airbyte.protocol) + + runtimeOnly(libs.snakeyaml) + + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + + testImplementation(libs.junit.pioneer) } val env = Properties().apply { - load(rootProject.file(".env.dev").inputStream()) + load(rootProject.file(".env.dev").inputStream()) } airbyte { - application { - mainClass = "io.airbyte.connector_builder.MicronautConnectorBuilderServerRunner" - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - @Suppress("UNCHECKED_CAST") - localEnvVars.putAll(env.toMap() as Map<String, String>) - localEnvVars.putAll(mapOf( - "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: ""), + application { + mainClass = "io.airbyte.connector_builder.MicronautConnectorBuilderServerRunner" + defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + @Suppress("UNCHECKED_CAST") + localEnvVars.putAll(env.toMap() as Map<String, String>) + localEnvVars.putAll( + mapOf( + "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: ""), "AIRBYTE_VERSION" to env["VERSION"].toString(), // path to CDK virtual environment) - "CDK_PYTHON" to (System.getenv("CDK_PYTHON") ?: ""), + "CDK_PYTHON" to (System.getenv("CDK_PYTHON") ?: ""), // path to CDK connector builder's main.py) "CDK_ENTRYPOINT" to (System.getenv("CDK_ENTRYPOINT") ?: ""), - )) - } - docker { - imageName = "connector-builder-server" - } + ) + ) + } + docker { + imageName = "connector-builder-server" + } } val generateOpenApiServer = tasks.register<GenerateTask>("generateOpenApiServer") { - val specFile = "$projectDir/src/main/openapi/openapi.yaml" - inputs.file(specFile) - inputSpec = specFile - outputDir = "$buildDir/generated/api/server" - - generatorName = "jaxrs-spec" - apiPackage = "io.airbyte.connector_builder.api.generated" - invokerPackage = "io.airbyte.connector_builder.api.invoker.generated" - modelPackage = "io.airbyte.connector_builder.api.model.generated" - - schemaMappings.putAll(mapOf( - "ConnectorConfig" to "com.fasterxml.jackson.databind.JsonNode", - "ConnectorManifest" to "com.fasterxml.jackson.databind.JsonNode", - )) - - // Our spec does not have nullable, but if it changes, this would be a gotcha that we would want to avoid) - configOptions.putAll(mapOf( - "dateLibrary" to "java8", -
"generatePom" to "false", - "interfaceOnly" to "true", - /*) - JAX-RS generator does not respect nullable properties defined in the OpenApi Spec. - It means that if a field is not nullable but not set it is still returning a null value for this field in the serialized json. - The below Jackson annotation(is made to only(keep non null values in serialized json. - We are not yet using nullable=true properties in our OpenApi so this is a valid(workaround at the moment to circumvent the default JAX-RS behavior described above. - Feel free to read the conversation(on https://github.com/airbytehq/airbyte/pull/13370 for more details. - */ - "additionalModelTypeAnnotations" to "\n@com.fasterxml.jackson.annotation.JsonInclude(com.fasterxml.jackson.annotation.JsonInclude.Include.NON_NULL)", - )) - - doLast { - updateToJakartaApi(file("${outputDir.get()}/src/gen/java/${apiPackage.get().replace(".", "/")}")) - updateToJakartaApi(file("${outputDir.get()}/src/gen/java/${modelPackage.get().replace(".", "/")}")) - } + val specFile = "$projectDir/src/main/openapi/openapi.yaml" + inputs.file(specFile) + inputSpec = specFile + outputDir = "$buildDir/generated/api/server" + + generatorName = "jaxrs-spec" + apiPackage = "io.airbyte.connector_builder.api.generated" + invokerPackage = "io.airbyte.connector_builder.api.invoker.generated" + modelPackage = "io.airbyte.connector_builder.api.model.generated" + + schemaMappings.putAll( + mapOf( + "ConnectorConfig" to "com.fasterxml.jackson.databind.JsonNode", + "ConnectorManifest" to "com.fasterxml.jackson.databind.JsonNode", + ) + ) + + // Our spec does not have nullable, but if it changes, this would be a gotcha that we would want to avoid) + configOptions.putAll( + mapOf( + "dateLibrary" to "java8", + "generatePom" to "false", + "interfaceOnly" to "true", + /*) + JAX-RS generator does not respect nullable properties defined in the OpenApi Spec. + It means that if a field is not nullable but not set it is still returning a null value for this field in the serialized json. + The below Jackson annotation(is made to only(keep non null values in serialized json. + We are not yet using nullable=true properties in our OpenApi so this is a valid(workaround at the moment to circumvent the default JAX-RS behavior described above. + Feel free to read the conversation(on https://github.com/airbytehq/airbyte/pull/13370 for more details. 
+ */ + "additionalModelTypeAnnotations" to "\n@com.fasterxml.jackson.annotation.JsonInclude(com.fasterxml.jackson.annotation.JsonInclude.Include.NON_NULL)", + ) + ) + + doLast { + updateToJakartaApi(file("${outputDir.get()}/src/gen/java/${apiPackage.get().replace(".", "/")}")) + updateToJakartaApi(file("${outputDir.get()}/src/gen/java/${modelPackage.get().replace(".", "/")}")) + } } tasks.named("compileJava") { - dependsOn(generateOpenApiServer) + dependsOn(generateOpenApiServer) } //// Ensures that the generated models are compiled during the build step so they are available for use at runtime) sourceSets { - main { - java { - srcDirs("$buildDir/generated/api/server/src/gen/java") - } - resources { - srcDir("$projectDir/src/main/openapi/") - } + main { + java { + srcDirs("$buildDir/generated/api/server/src/gen/java") + } + resources { + srcDir("$projectDir/src/main/openapi/") } + } } val copyPythonDeps = tasks.register("copyPythonDependencies") { - from("$projectDir/requirements.txt") - into("$buildDir/airbyte/docker/") + from("$projectDir/requirements.txt") + into("$buildDir/airbyte/docker/") } // tasks.named("dockerBuildImage") { - // Set build args - // Current CDK version(used by the Connector Builder and workers running Connector Builder connectors - val cdkVersion: String = File(project.projectDir.parentFile, "airbyte-connector-builder-resources/CDK_VERSION").readText().trim() - buildArgs.put("CDK_VERSION", cdkVersion) + // Set build args + // Current CDK version(used by the Connector Builder and workers running Connector Builder connectors + val cdkVersion: String = File(project.projectDir.parentFile, "airbyte-connector-builder-resources/CDK_VERSION").readText().trim() + buildArgs.put("CDK_VERSION", cdkVersion) - dependsOn(copyPythonDeps, generateOpenApiServer) + dependsOn(copyPythonDeps, generateOpenApiServer) } -private fun updateToJakartaApi(srcDir:File) { - srcDir.walk().forEach { file -> - if(file.isFile) { - var contents = file.readText() - contents = contents.replace("javax.ws.rs", "jakarta.ws.rs") - .replace("javax.validation", "jakarta.validation") - .replace("javax.annotation", "jakarta.annotation") - file.writeText(contents) - } +private fun updateToJakartaApi(srcDir: File) { + srcDir.walk().forEach { file -> + if (file.isFile) { + var contents = file.readText() + contents = contents.replace("javax.ws.rs", "jakarta.ws.rs") + .replace("javax.validation", "jakarta.validation") + .replace("javax.annotation", "jakarta.annotation") + file.writeText(contents) } + } } diff --git a/airbyte-connector-builder-server/requirements.in b/airbyte-connector-builder-server/requirements.in index 61d8e0d4f4b..da168ba5d88 100644 --- a/airbyte-connector-builder-server/requirements.in +++ b/airbyte-connector-builder-server/requirements.in @@ -1 +1 @@ -airbyte-cdk==0.78.1 +airbyte-cdk==0.79.1 diff --git a/airbyte-connector-builder-server/requirements.txt b/airbyte-connector-builder-server/requirements.txt index 3d0fe2cd6ac..acf7199b26a 100644 --- a/airbyte-connector-builder-server/requirements.txt +++ b/airbyte-connector-builder-server/requirements.txt @@ -4,7 +4,7 @@ # # pip-compile # -airbyte-cdk==0.78.1 +airbyte-cdk==0.79.1 # via -r requirements.in airbyte-protocol-models==0.5.1 # via airbyte-cdk @@ -49,7 +49,7 @@ pendulum==2.1.2 # via airbyte-cdk platformdirs==4.2.0 # via requests-cache -pydantic==1.10.14 +pydantic==1.10.15 # via # airbyte-cdk # airbyte-protocol-models @@ -77,7 +77,7 @@ six==1.16.0 # jsonschema # python-dateutil # url-normalize -typing-extensions==4.10.0 
+typing-extensions==4.11.0 # via # cattrs # pydantic diff --git a/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/config/ApplicationBeanFactory.java b/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/config/ApplicationBeanFactory.java index f1f9f373909..bcb21696518 100644 --- a/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/config/ApplicationBeanFactory.java +++ b/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/config/ApplicationBeanFactory.java @@ -48,7 +48,7 @@ public SynchronousCdkCommandRunner synchronousPythonCdkCommandRunner() { return new SynchronousPythonCdkCommandRunner( new AirbyteFileWriterImpl(), // This should eventually be constructed via DI. - VersionedAirbyteStreamFactory.noMigrationVersionedAirbyteStreamFactory(true), + VersionedAirbyteStreamFactory.noMigrationVersionedAirbyteStreamFactory(), this.getPython(), this.getCdkEntrypoint(), this.getPythonPath()); diff --git a/airbyte-connector-builder-server/src/main/openapi/openapi.yaml b/airbyte-connector-builder-server/src/main/openapi/openapi.yaml index 9ac26451acc..bc85e0a6ce4 100644 --- a/airbyte-connector-builder-server/src/main/openapi/openapi.yaml +++ b/airbyte-connector-builder-server/src/main/openapi/openapi.yaml @@ -218,9 +218,6 @@ components: url: type: string description: URL that the request was sent to - parameters: - type: object - description: The request parameters that were set on the HTTP request, if any body: type: string description: The body of the HTTP request, if present diff --git a/airbyte-connector-builder-server/src/main/resources/application.yml b/airbyte-connector-builder-server/src/main/resources/application.yml index 651bab096ca..14df703e3c7 100644 --- a/airbyte-connector-builder-server/src/main/resources/application.yml +++ b/airbyte-connector-builder-server/src/main/resources/application.yml @@ -33,6 +33,7 @@ micronaut: enabled: ${HTTP_ACCESS_LOG_ENABLED:true} aggregator: max-content-length: 52428800 # 50MB + max-header-size: ${NETTY_MAX_HEADER_SIZE:32768} endpoints: v1/manifest_template: enable: true diff --git a/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/controllers/ConnectorBuilderControllerIntegrationTest.java b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/controllers/ConnectorBuilderControllerIntegrationTest.java index 6522a8a94bb..4df64ab7723 100644 --- a/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/controllers/ConnectorBuilderControllerIntegrationTest.java +++ b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/controllers/ConnectorBuilderControllerIntegrationTest.java @@ -69,7 +69,7 @@ class ConnectorBuilderControllerIntegrationTest { void setup() { this.healthHandler = mock(HealthHandler.class); this.writer = new MockAirbyteFileWriterImpl(); - this.streamFactory = VersionedAirbyteStreamFactory.noMigrationVersionedAirbyteStreamFactory(false); + this.streamFactory = VersionedAirbyteStreamFactory.noMigrationVersionedAirbyteStreamFactory(); } @BeforeAll diff --git a/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/fixtures/RecordStreamRead.json b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/fixtures/RecordStreamRead.json index 1d53e62a5a4..5b2a57a8808 100644 --- a/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/fixtures/RecordStreamRead.json +++ 
b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/fixtures/RecordStreamRead.json @@ -10,8 +10,7 @@ { "records": [], "request": { - "url": "https://api.courier.com/messages", - "parameters": { "page_size": ["1"] }, + "url": "https://api.courier.com/messages?page_size=1", "body": null, "headers": { "User-Agent": "python-requests/2.28.2", diff --git a/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/requester/AirbyteCdkRequesterImplTest.java b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/requester/AirbyteCdkRequesterImplTest.java index d029e50d5d5..43eae71e48b 100644 --- a/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/requester/AirbyteCdkRequesterImplTest.java +++ b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/requester/AirbyteCdkRequesterImplTest.java @@ -64,7 +64,7 @@ ArgumentCaptor testReadStreamSuccess(final Integer recordLimit, final In + "\"2023-11-01T00:00:00+00:00\", \"listItem\": \"item\"}, \"state\": {\"airbyte\": \"state\"}}, {\"pages\": []}]," + "\"inferred_schema\": {\"schema\": 1}, \"latest_config_update\": { \"config_key\": \"config_value\"}," + "\"auxiliary_requests\": [{\"title\": \"Refresh token\",\"description\": \"Obtains access token\",\"request\": {\"url\": " - + "\"https://a-url.com/oauth2/v1/tokens/bearer\",\"parameters\": null,\"headers\": {\"Content-Type\": " + + "\"https://a-url.com/oauth2/v1/tokens/bearer\",\"headers\": {\"Content-Type\": " + "\"application/x-www-form-urlencoded\"},\"http_method\": \"POST\",\"body\": \"a_request_body\"},\"response\": {\"status\": 200," + "\"body\": \"a_response_body\",\"headers\": {\"Date\": \"Tue, 11 Jul 2023 16:28:10 GMT\"}}}]}"); final ArgumentCaptor configCaptor = ArgumentCaptor.forClass(String.class); diff --git a/airbyte-connector-sidecar/Dockerfile b/airbyte-connector-sidecar/Dockerfile index 0f0a284d136..d645421e2a7 100644 --- a/airbyte-connector-sidecar/Dockerfile +++ b/airbyte-connector-sidecar/Dockerfile @@ -1,4 +1,9 @@ -ARG JAVA_WORKER_BASE_IMAGE_VERSION=2.1.0 +ARG JAVA_WORKER_BASE_IMAGE_VERSION=2.2.0 + +FROM scratch as builder +WORKDIR /app +ADD airbyte-app.tar /app + FROM airbyte/airbyte-base-java-worker-image:${JAVA_WORKER_BASE_IMAGE_VERSION} ARG DOCKER_BUILD_ARCH=amd64 @@ -9,12 +14,8 @@ ARG VERSION=dev ENV APPLICATION airbyte-connector-sidecar ENV VERSION=${VERSION} -USER root -COPY WellKnownTypes.json /app - -# Move connector-sidecar app -ADD airbyte-app.tar /app -RUN chown -R airbyte:airbyte /app +COPY --chown=airbyte:airbyte WellKnownTypes.json /app +COPY --chown=airbyte:airbyte --from=builder /app /app USER airbyte:airbyte # wait for upstream dependencies to become available before starting server diff --git a/airbyte-connector-sidecar/build.gradle.kts b/airbyte-connector-sidecar/build.gradle.kts index 1ba2240e423..1830f46de07 100644 --- a/airbyte-connector-sidecar/build.gradle.kts +++ b/airbyte-connector-sidecar/build.gradle.kts @@ -6,144 +6,146 @@ import java.util.Properties import java.util.zip.ZipFile plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.publish") - id("io.airbyte.gradle.docker") - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.docker") + kotlin("jvm") + kotlin("kapt") } buildscript { - repositories { - mavenCentral() - } - dependencies { - // necessary to convert the well_know_types from yaml to json - val jacksonVersion = 
libs.versions.fasterxml.version.get()
-        classpath("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:$jacksonVersion")
-        classpath("com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion")
-    }
+  repositories {
+    mavenCentral()
+  }
+  dependencies {
+    // necessary to convert the well_known_types from yaml to json
+    val jacksonVersion = libs.versions.fasterxml.version.get()
+    classpath("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:$jacksonVersion")
+    classpath("com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion")
+  }
 }

 val airbyteProtocol by configurations.creating

 configurations.all {
-    resolutionStrategy {
-        // Ensure that the versions defined in deps.toml are used
-        // instead of versions from transitive dependencies.
-        // Force to avoid an updated version brought in transitively from Micronaut 3.8+
-        // that is incompatible with our current Helm setup.
-        force (libs.s3, libs.aws.java.sdk.s3)
-    }
+  resolutionStrategy {
+    // Ensure that the versions defined in deps.toml are used
+    // instead of versions from transitive dependencies.
+    // Force to avoid an updated version brought in transitively from Micronaut 3.8+
+    // that is incompatible with our current Helm setup.
+    force(libs.s3, libs.aws.java.sdk.s3)
+  }
 }

 configurations.all {
-    exclude(group = "io.micronaut", module = "micronaut-http-server-netty")
-    exclude(group = "io.micronaut.openapi")
-    exclude(group = "io.micronaut.flyway")
-    exclude(group = "io.micronaut.sql")
+  exclude(group = "io.micronaut", module = "micronaut-http-server-netty")
+  exclude(group = "io.micronaut.openapi")
+  exclude(group = "io.micronaut.flyway")
+  exclude(group = "io.micronaut.sql")
 }

 dependencies {
-    kapt(platform(libs.micronaut.platform))
-    kapt(libs.bundles.micronaut.annotation.processor)
-
-    implementation(platform(libs.micronaut.platform))
-    implementation(libs.bundles.log4j)
-    implementation(libs.bundles.micronaut.light)
-    implementation(libs.google.cloud.storage)
-    implementation(libs.java.jwt)
-    implementation(libs.kotlin.logging)
-    implementation(libs.micronaut.jackson.databind)
-    implementation(libs.slf4j.api)
-
-    implementation(project(":airbyte-api"))
-    implementation(project(":airbyte-commons"))
-    implementation(project(":airbyte-commons-converters"))
-    implementation(project(":airbyte-commons-protocol"))
-    implementation(project(":airbyte-commons-temporal"))
-    implementation(project(":airbyte-commons-worker"))
-    implementation(project(":airbyte-config:config-models"))
-    implementation(project(":airbyte-metrics:metrics-lib")) // necessary for doc store
-    implementation(project(":airbyte-worker-models"))
-    implementation(libs.airbyte.protocol)
-
-    runtimeOnly(libs.snakeyaml)
-    runtimeOnly(libs.kotlin.reflect)
-    runtimeOnly(libs.appender.log4j2)
-    runtimeOnly(libs.bundles.bouncycastle) // cryptography package
-
-    kaptTest(platform(libs.micronaut.platform))
-    kaptTest(libs.bundles.micronaut.annotation.processor)
-    kaptTest(libs.bundles.micronaut.test.annotation.processor)
-
-    testImplementation(libs.bundles.micronaut.test)
-    testImplementation(libs.mockk)
-    testImplementation(libs.kotlin.test.runner.junit5)
-    testImplementation(libs.bundles.junit)
-    testImplementation(libs.assertj.core)
-    testImplementation(libs.airbyte.protocol)
-    testImplementation(libs.apache.commons.lang)
-
-    airbyteProtocol(libs.airbyte.protocol) {
-        isTransitive = false
-    }
+  kapt(platform(libs.micronaut.platform))
+  kapt(libs.bundles.micronaut.annotation.processor)
+
+  implementation(platform(libs.micronaut.platform))
+  
implementation(libs.bundles.log4j) + implementation(libs.bundles.micronaut.light) + implementation(libs.google.cloud.storage) + implementation(libs.java.jwt) + implementation(libs.kotlin.logging) + implementation(libs.micronaut.jackson.databind) + implementation(libs.slf4j.api) + + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-converters")) + implementation(project(":airbyte-commons-protocol")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-commons-worker")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-metrics:metrics-lib")) // necessary for doc store + implementation(project(":airbyte-worker-models")) + implementation(libs.airbyte.protocol) + + runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.kotlin.reflect) + runtimeOnly(libs.appender.log4j2) + runtimeOnly(libs.bundles.bouncycastle) // cryptography package + + kaptTest(platform(libs.micronaut.platform)) + kaptTest(libs.bundles.micronaut.annotation.processor) + kaptTest(libs.bundles.micronaut.test.annotation.processor) + + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockk) + testImplementation(libs.kotlin.test.runner.junit5) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.airbyte.protocol) + testImplementation(libs.apache.commons.lang) + + airbyteProtocol(libs.airbyte.protocol) { + isTransitive = false + } } val env = Properties().apply { - load(rootProject.file(".env.dev").inputStream()) + load(rootProject.file(".env.dev").inputStream()) } airbyte { - application { - mainClass.set("io.airbyte.connectorSidecar.ApplicationKt") - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - @Suppress("UNCHECKED_CAST") - localEnvVars.putAll(env.toMutableMap() as Map) - localEnvVars.putAll(mapOf( - "AIRBYTE_VERSION" to env["VERSION"].toString(), - "DATA_PLANE_ID" to "local", - "MICRONAUT_ENVIRONMENTS" to "test" - )) - } - docker { - imageName.set("connector-sidecar") - } + application { + mainClass.set("io.airbyte.connectorSidecar.ApplicationKt") + defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + @Suppress("UNCHECKED_CAST") + localEnvVars.putAll(env.toMutableMap() as Map) + localEnvVars.putAll( + mapOf( + "AIRBYTE_VERSION" to env["VERSION"].toString(), + "DATA_PLANE_ID" to "local", + "MICRONAUT_ENVIRONMENTS" to "test" + ) + ) + } + docker { + imageName.set("connector-sidecar") + } } // Duplicated from :airbyte-worker, eventually, this should be handled in :airbyte-protocol) val generateWellKnownTypes = tasks.register("generateWellKnownTypes") { - inputs.files(airbyteProtocol) // declaring inputs) - val targetFile = project.file("build/airbyte/docker/WellKnownTypes.json") - outputs.file(targetFile) // declaring outputs) - - doLast { - val wellKnownTypesYamlPath = "airbyte_protocol/well_known_types.yaml" - airbyteProtocol.files.forEach { - val zip = ZipFile(it) - val entry = zip.getEntry(wellKnownTypesYamlPath) - - val wellKnownTypesYaml = zip.getInputStream(entry).bufferedReader().use { reader -> reader.readText() } - val rawJson = yamlToJson(wellKnownTypesYaml) - targetFile.getParentFile().mkdirs() - targetFile.writeText(rawJson) - } + inputs.files(airbyteProtocol) // declaring inputs) + val targetFile = project.file("build/airbyte/docker/WellKnownTypes.json") + outputs.file(targetFile) // declaring outputs) + + doLast { + 
val wellKnownTypesYamlPath = "airbyte_protocol/well_known_types.yaml" + airbyteProtocol.files.forEach { + val zip = ZipFile(it) + val entry = zip.getEntry(wellKnownTypesYamlPath) + + val wellKnownTypesYaml = zip.getInputStream(entry).bufferedReader().use { reader -> reader.readText() } + val rawJson = yamlToJson(wellKnownTypesYaml) + targetFile.getParentFile().mkdirs() + targetFile.writeText(rawJson) } + } } tasks.named("dockerBuildImage") { - dependsOn(generateWellKnownTypes) + dependsOn(generateWellKnownTypes) } fun yamlToJson(rawYaml: String): String { - val mappedYaml: Any = YAMLMapper().registerKotlinModule().readValue(rawYaml) - return ObjectMapper().registerKotlinModule().writeValueAsString(mappedYaml) + val mappedYaml: Any = YAMLMapper().registerKotlinModule().readValue(rawYaml) + return ObjectMapper().registerKotlinModule().writeValueAsString(mappedYaml) } // This is a workaround related to kaptBuild errors. It seems to be because there are no tests in cloud-airbyte-api-server. // TODO: this should be removed when we move to kotlin 1.9.20 // TODO: we should write tests afterEvaluate { - tasks.named("kaptGenerateStubsTestKotlin") { - enabled = false - } + tasks.named("kaptGenerateStubsTestKotlin") { + enabled = false + } } diff --git a/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorMessageProcessor.kt b/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorMessageProcessor.kt index 399b8eac463..c5bef6d646a 100644 --- a/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorMessageProcessor.kt +++ b/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorMessageProcessor.kt @@ -92,7 +92,7 @@ class ConnectorMessageProcessor( val errorMessage: String = String.format("Lost connection to the connector") throw WorkerException(errorMessage, e) } catch (e: Exception) { - throw WorkerException("Unexpected error while getting checking connection.", e) + throw WorkerException("Unexpected error performing $operationType.", e) } } @@ -155,7 +155,7 @@ class ConnectorMessageProcessor( .withMessage(result.connectionStatus.message) jobOutput.checkConnection = output } else if (failureReason.isEmpty) { - throw WorkerException("Error checking connection status: no status nor failure reason were outputted") + throw WorkerException("Error checking connection status: no status nor failure reason provided") } OperationType.DISCOVER -> @@ -165,7 +165,7 @@ class ConnectorMessageProcessor( .writeDiscoverCatalogResult(buildSourceDiscoverSchemaWriteRequestBody(input.discoveryInput, result.catalog)) jobOutput.discoverCatalogId = apiResult.catalogId } else if (failureReason.isEmpty) { - throw WorkerException("Error checking connection status: no status nor failure reason were outputted") + throw WorkerException("Error discovering catalog: no failure reason provided") } OperationType.SPEC -> diff --git a/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorWatcher.kt b/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorWatcher.kt index d18875bbb5b..0855275229f 100644 --- a/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorWatcher.kt +++ b/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorWatcher.kt @@ -2,6 +2,7 @@ package io.airbyte.connectorSidecar import com.google.common.annotations.VisibleForTesting import com.google.common.base.Stopwatch +import io.airbyte.api.client.WorkloadApiClient import 
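The `generateWellKnownTypes` task above extracts `well_known_types.yaml` from the protocol jar and converts it to JSON; as the comment notes, the task is duplicated from `:airbyte-worker` and should eventually live in `:airbyte-protocol`. The conversion itself is a plain Jackson round-trip. A self-contained version, assuming `jackson-dataformat-yaml` and `jackson-module-kotlin` on the classpath (the same coordinates the buildscript declares):

```kotlin
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.yaml.YAMLMapper
import com.fasterxml.jackson.module.kotlin.readValue
import com.fasterxml.jackson.module.kotlin.registerKotlinModule

// Parse arbitrary YAML into an untyped tree, then serialize that tree as JSON.
fun yamlToJson(rawYaml: String): String {
    val mapped: Any = YAMLMapper().registerKotlinModule().readValue(rawYaml)
    return ObjectMapper().registerKotlinModule().writeValueAsString(mapped)
}

fun main() {
    println(yamlToJson("name: test\nvalues:\n  - 1\n  - 2"))
    // -> {"name":"test","values":[1,2]}
}
```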
io.airbyte.commons.json.Jsons import io.airbyte.commons.protocol.AirbyteMessageSerDeProvider import io.airbyte.commons.protocol.AirbyteProtocolVersionedMigratorFactory @@ -21,7 +22,6 @@ import io.airbyte.workers.internal.VersionedAirbyteStreamFactory.InvalidLineFail import io.airbyte.workers.models.SidecarInput import io.airbyte.workers.sync.OrchestratorConstants import io.airbyte.workers.workload.JobOutputDocStore -import io.airbyte.workload.api.client.generated.WorkloadApi import io.airbyte.workload.api.client.model.generated.WorkloadFailureRequest import io.airbyte.workload.api.client.model.generated.WorkloadSuccessRequest import io.github.oshai.kotlinlogging.KotlinLogging @@ -47,7 +47,7 @@ class ConnectorWatcher( val serDeProvider: AirbyteMessageSerDeProvider, val airbyteProtocolVersionedMigratorFactory: AirbyteProtocolVersionedMigratorFactory, val gsonPksExtractor: GsonPksExtractor, - val workloadApi: WorkloadApi, + val workloadApiClient: WorkloadApiClient, val jobOutputDocStore: JobOutputDocStore, ) { fun run() { @@ -61,7 +61,13 @@ class ConnectorWatcher( while (!areNeededFilesPresent()) { Thread.sleep(100) if (fileTimeoutReach(stopwatch)) { - failWorkload(workloadId, null) + logger.warn { "Failed to find output files from connector within timeout $fileTimeoutMinutes. Is the connector still running?" } + val failureReason = + FailureReason() + .withFailureOrigin(FailureReason.FailureOrigin.UNKNOWN) + .withExternalMessage("Failed to find output files from connector within timeout $fileTimeoutMinutes.") + + failWorkload(workloadId, failureReason) exitFileNotFound() // The return is needed for the test return @@ -112,7 +118,7 @@ class ConnectorWatcher( } } jobOutputDocStore.write(workloadId, connectorOutput) - workloadApi.workloadSuccess(WorkloadSuccessRequest(workloadId)) + workloadApiClient.workloadApi.workloadSuccess(WorkloadSuccessRequest(workloadId)) } catch (e: Exception) { logger.error(e) { "Error performing operation: ${e.javaClass.name}" } @@ -153,24 +159,26 @@ class ConnectorWatcher( }, Optional.empty(), Optional.empty(), - Optional.empty>(), - InvalidLineFailureConfiguration(false, false), + InvalidLineFailureConfiguration(false), gsonPksExtractor, ) } @VisibleForTesting fun exitProperly() { + logger.info { "Deliberately exiting process with code 0." } exitProcess(0) } @VisibleForTesting fun exitInternalError() { + logger.info { "Deliberately exiting process with code 1." } exitProcess(1) } @VisibleForTesting fun exitFileNotFound() { + logger.info { "Deliberately exiting process with code 2." } exitProcess(2) } @@ -239,8 +247,9 @@ class ConnectorWatcher( workloadId: String, failureReason: FailureReason?, ) { + logger.info { "Failing workload $workloadId." 
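A refactor running through this patch replaces direct use of the generated `WorkloadApi` with an injected `WorkloadApiClient` wrapper from `io.airbyte.api.client`, so call sites become `workloadApiClient.workloadApi.workloadSuccess(...)`. The wrapper's definition is not part of this diff; a minimal sketch of the shape the call sites imply, with a stand-in API interface:

```kotlin
// Stand-in for the generated client; only the calls used in this patch are modeled.
interface WorkloadApi {
    fun workloadSuccess(workloadId: String)

    fun workloadFailure(workloadId: String, origin: String? = null, message: String? = null)
}

// Thin wrapper owning the generated API, so construction (auth, retries, metrics)
// can live in one injectable place instead of per-service factories.
class WorkloadApiClient(val workloadApi: WorkloadApi)

fun reportOutcome(client: WorkloadApiClient, workloadId: String, succeeded: Boolean) {
    if (succeeded) {
        client.workloadApi.workloadSuccess(workloadId)
    } else {
        client.workloadApi.workloadFailure(workloadId, "UNKNOWN", "connector output never appeared")
    }
}

fun main() {
    val fake = object : WorkloadApi {
        override fun workloadSuccess(workloadId: String) = println("success: $workloadId")

        override fun workloadFailure(workloadId: String, origin: String?, message: String?) =
            println("failure: $workloadId ($origin: $message)")
    }
    reportOutcome(WorkloadApiClient(fake), "workload-123", succeeded = false)
}
```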
} if (failureReason != null) { - workloadApi.workloadFailure( + workloadApiClient.workloadApi.workloadFailure( WorkloadFailureRequest( workloadId, failureReason.failureOrigin.value(), @@ -248,7 +257,7 @@ class ConnectorWatcher( ), ) } else { - workloadApi.workloadFailure(WorkloadFailureRequest(workloadId)) + workloadApiClient.workloadApi.workloadFailure(WorkloadFailureRequest(workloadId)) } } } diff --git a/airbyte-connector-sidecar/src/main/resources/application.yml b/airbyte-connector-sidecar/src/main/resources/application.yml index ea07c578d5f..767d69a1c5a 100644 --- a/airbyte-connector-sidecar/src/main/resources/application.yml +++ b/airbyte-connector-sidecar/src/main/resources/application.yml @@ -49,6 +49,8 @@ airbyte: credentials-path: ${DATA_PLANE_SERVICE_ACCOUNT_CREDENTIALS_PATH:} email: ${DATA_PLANE_SERVICE_ACCOUNT_EMAIL:} sidecar: + # Can we bump this value? Does it need to be configured per operation? + # Should we pass it in from the launcher? file-timeout-minutes: ${SIDECAR_FILE_TIMEOUT_MINUTES:9} workload-api: base-path: ${WORKLOAD_API_HOST:} diff --git a/airbyte-connector-sidecar/src/test/kotlin/io/airbyte/connectorSidecar/ConnectorWatchTest.kt b/airbyte-connector-sidecar/src/test/kotlin/io/airbyte/connectorSidecar/ConnectorWatchTest.kt index 204b58bcee7..2c495917a79 100644 --- a/airbyte-connector-sidecar/src/test/kotlin/io/airbyte/connectorSidecar/ConnectorWatchTest.kt +++ b/airbyte-connector-sidecar/src/test/kotlin/io/airbyte/connectorSidecar/ConnectorWatchTest.kt @@ -1,5 +1,6 @@ package io.airbyte.connectorSidecar +import io.airbyte.api.client.WorkloadApiClient import io.airbyte.commons.protocol.AirbyteMessageSerDeProvider import io.airbyte.commons.protocol.AirbyteProtocolVersionedMigratorFactory import io.airbyte.config.ActorType @@ -50,6 +51,9 @@ class ConnectorWatchTest { @MockK private lateinit var workloadApi: WorkloadApi + @MockK + private lateinit var workloadApiClient: WorkloadApiClient + @MockK private lateinit var jobOutputDocStore: JobOutputDocStore @@ -66,6 +70,8 @@ class ConnectorWatchTest { @BeforeEach fun init() { + every { workloadApiClient.workloadApi } returns workloadApi + connectorWatcher = spyk( ConnectorWatcher( @@ -76,7 +82,7 @@ class ConnectorWatchTest { serDeProvider, airbyteProtocolVersionedMigratorFactory, gsonPksExtractor, - workloadApi, + workloadApiClient, jobOutputDocStore, ), ) @@ -192,12 +198,12 @@ class ConnectorWatchTest { every { connectorWatcher.exitFileNotFound() } returns Unit - every { workloadApi.workloadFailure(WorkloadFailureRequest(workloadId)) } returns Unit + every { workloadApi.workloadFailure(any()) } returns Unit connectorWatcher.run() verifyOrder { - workloadApi.workloadFailure(WorkloadFailureRequest(workloadId)) + workloadApi.workloadFailure(any()) connectorWatcher.exitFileNotFound() } } diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 765b6e9a765..7321567c686 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -1,4 +1,9 @@ -ARG JAVA_WORKER_BASE_IMAGE_VERSION=2.1.0 +ARG JAVA_WORKER_BASE_IMAGE_VERSION=2.2.0 + +FROM scratch as builder +WORKDIR /app +ADD airbyte-app.tar /app + FROM airbyte/airbyte-base-java-worker-image:${JAVA_WORKER_BASE_IMAGE_VERSION} # Don't change this manually. 
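The `ConnectorWatcher` change above upgrades the file-timeout path from a bare `failWorkload(workloadId, null)` to an explicit `FailureReason`, and the `application.yml` hunk keeps the default at nine minutes (`SIDECAR_FILE_TIMEOUT_MINUTES:9`) while questioning whether it should be configurable per operation. The watch loop itself is a simple poll-until-deadline; a simplified sketch, where the 100 ms polling interval mirrors the diff and the file path is illustrative:

```kotlin
import java.io.File
import java.time.Duration
import java.time.Instant

// Poll until the predicate is satisfied or the deadline passes, sleeping 100 ms
// between checks as the watcher does. Returns false on timeout.
fun waitForFiles(present: () -> Boolean, timeout: Duration): Boolean {
    val deadline = Instant.now().plus(timeout)
    while (!present()) {
        if (Instant.now().isAfter(deadline)) return false
        Thread.sleep(100)
    }
    return true
}

fun main() {
    // Illustrative output path and the nine-minute default from application.yml.
    val found = waitForFiles({ File("/tmp/connector-output.json").exists() }, Duration.ofMinutes(9))
    println(if (found) "output found" else "timed out waiting for connector output")
}
```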
Bump version expects to make moves based on this string @@ -8,13 +13,8 @@ ENV APPLICATION airbyte-container-orchestrator ENV VERSION=${VERSION} WORKDIR /app - -USER root -COPY WellKnownTypes.json /app - -# Move orchestrator app -ADD airbyte-app.tar /app -RUN chown -R airbyte:airbyte /app +COPY --chown=airbyte:airbyte WellKnownTypes.json /app +COPY --chown=airbyte:airbyte --from=builder /app /app USER airbyte:airbyte # wait for upstream dependencies to become available before starting server diff --git a/airbyte-container-orchestrator/build.gradle.kts b/airbyte-container-orchestrator/build.gradle.kts index b09d9fb1d76..17ce6488b62 100644 --- a/airbyte-container-orchestrator/build.gradle.kts +++ b/airbyte-container-orchestrator/build.gradle.kts @@ -6,123 +6,123 @@ import com.fasterxml.jackson.module.kotlin.registerKotlinModule import java.util.zip.ZipFile buildscript { - repositories { - mavenCentral() - } - dependencies { - // necessary to convert the well_know_types from yaml to json - val jacksonVersion = libs.versions.fasterxml.version.get() - classpath("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:$jacksonVersion") - classpath("com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion") - } + repositories { + mavenCentral() + } + dependencies { + // necessary to convert the well_know_types from yaml to json + val jacksonVersion = libs.versions.fasterxml.version.get() + classpath("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:$jacksonVersion") + classpath("com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion") + } } plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") + kotlin("jvm") + kotlin("kapt") } val airbyteProtocol by configurations.creating configurations.all { - resolutionStrategy { - // Ensure that the versions defined in deps.toml are used) - // instead of versions from transitive dependencies) - // Force to avoid(updated version brought in transitively from Micronaut 3.8+) - // that is incompatible with our current Helm setup) - force (libs.s3, libs.aws.java.sdk.s3) - } + resolutionStrategy { + // Ensure that the versions defined in deps.toml are used) + // instead of versions from transitive dependencies) + // Force to avoid(updated version brought in transitively from Micronaut 3.8+) + // that is incompatible with our current Helm setup) + force(libs.s3, libs.aws.java.sdk.s3) + } } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) - - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.bundles.micronaut.cache) - implementation(libs.bundles.micronaut.metrics) - implementation(libs.guava) - implementation(libs.s3) - implementation(libs.aws.java.sdk.s3) - implementation(libs.sts) - implementation(libs.kubernetes.client) - implementation(libs.bundles.datadog) - implementation(libs.bundles.log4j) - - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-commons-converters")) - implementation(project(":airbyte-commons-protocol")) - 
implementation(project(":airbyte-commons-micronaut")) - implementation(project(":airbyte-commons-micronaut-security")) - implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-commons-with-dependencies")) - implementation(project(":airbyte-commons-worker")) - implementation(project(":airbyte-config:init")) - implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-json-validation")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(project(":airbyte-worker-models")) - - runtimeOnly(libs.snakeyaml) - - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.bundles.mockito.inline) - testImplementation(libs.bundles.bouncycastle) - testImplementation(libs.postgresql) - testImplementation(libs.platform.testcontainers) - testImplementation(libs.platform.testcontainers.postgresql) - - airbyteProtocol(libs.airbyte.protocol) { - isTransitive = false - } + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) + + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.micronaut.cache) + implementation(libs.bundles.micronaut.metrics) + implementation(libs.guava) + implementation(libs.s3) + implementation(libs.aws.java.sdk.s3) + implementation(libs.sts) + implementation(libs.kubernetes.client) + implementation(libs.bundles.datadog) + implementation(libs.bundles.log4j) + + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-commons-converters")) + implementation(project(":airbyte-commons-protocol")) + implementation(project(":airbyte-commons-micronaut")) + implementation(project(":airbyte-commons-micronaut-security")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-commons-with-dependencies")) + implementation(project(":airbyte-commons-worker")) + implementation(project(":airbyte-config:init")) + implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-json-validation")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(project(":airbyte-worker-models")) + + runtimeOnly(libs.snakeyaml) + + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.bundles.mockito.inline) + testImplementation(libs.bundles.bouncycastle) + testImplementation(libs.postgresql) + testImplementation(libs.platform.testcontainers) + testImplementation(libs.platform.testcontainers.postgresql) + + airbyteProtocol(libs.airbyte.protocol) { + isTransitive = false + } } airbyte { - application { - mainClass = "io.airbyte.container_orchestrator.Application" - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - } - docker { - imageName = "container-orchestrator" - } + application { + mainClass = "io.airbyte.container_orchestrator.Application" + defaultJvmArgs = 
listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + } + docker { + imageName = "container-orchestrator" + } } // Duplicated from :airbyte-worker, eventually, this should be handled in :airbyte-protocol) val generateWellKnownTypes = tasks.register("generateWellKnownTypes") { - inputs.files(airbyteProtocol) // declaring inputs) - val targetFile = project.file("build/airbyte/docker/WellKnownTypes.json") - outputs.file(targetFile) // declaring outputs) - - doLast { - val wellKnownTypesYamlPath = "airbyte_protocol/well_known_types.yaml" - airbyteProtocol.files.forEach { - val zip = ZipFile(it) - val entry = zip.getEntry(wellKnownTypesYamlPath) - - val wellKnownTypesYaml = zip.getInputStream(entry).bufferedReader().use { reader -> reader.readText() } - val rawJson = yamlToJson(wellKnownTypesYaml) - targetFile.getParentFile().mkdirs() - targetFile.writeText(rawJson) - } + inputs.files(airbyteProtocol) // declaring inputs) + val targetFile = project.file("build/airbyte/docker/WellKnownTypes.json") + outputs.file(targetFile) // declaring outputs) + + doLast { + val wellKnownTypesYamlPath = "airbyte_protocol/well_known_types.yaml" + airbyteProtocol.files.forEach { + val zip = ZipFile(it) + val entry = zip.getEntry(wellKnownTypesYamlPath) + + val wellKnownTypesYaml = zip.getInputStream(entry).bufferedReader().use { reader -> reader.readText() } + val rawJson = yamlToJson(wellKnownTypesYaml) + targetFile.getParentFile().mkdirs() + targetFile.writeText(rawJson) } + } } tasks.named("dockerBuildImage") { - dependsOn(generateWellKnownTypes) + dependsOn(generateWellKnownTypes) } fun yamlToJson(rawYaml: String): String { - val mappedYaml: Any = YAMLMapper().registerKotlinModule().readValue(rawYaml) - return ObjectMapper().registerKotlinModule().writeValueAsString(mappedYaml) + val mappedYaml: Any = YAMLMapper().registerKotlinModule().readValue(rawYaml) + return ObjectMapper().registerKotlinModule().writeValueAsString(mappedYaml) } diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactory.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactory.java index 6130ed8a2ab..9cab558d6da 100644 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactory.java +++ b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactory.java @@ -4,6 +4,7 @@ package io.airbyte.container_orchestrator.config; +import io.airbyte.api.client.WorkloadApiClient; import io.airbyte.commons.envvar.EnvVar; import io.airbyte.commons.features.EnvVariableFeatureFlags; import io.airbyte.commons.features.FeatureFlags; @@ -36,7 +37,6 @@ import io.airbyte.workers.sync.ReplicationLauncherWorker; import io.airbyte.workers.workload.JobOutputDocStore; import io.airbyte.workers.workload.WorkloadIdGenerator; -import io.airbyte.workload.api.client.generated.WorkloadApi; import io.fabric8.kubernetes.client.DefaultKubernetesClient; import io.micronaut.context.annotation.Factory; import io.micronaut.context.annotation.Prototype; @@ -120,13 +120,13 @@ JobOrchestrator jobOrchestrator( final JobRunConfig jobRunConfig, final ReplicationWorkerFactory replicationWorkerFactory, final AsyncStateManager asyncStateManager, - final WorkloadApi workloadApi, + final WorkloadApiClient workloadApiClient, final WorkloadIdGenerator workloadIdGenerator, @Value("${airbyte.workload.enabled}") final boolean 
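The `jobOrchestrator` factory above now receives a `WorkloadApiClient` instead of the raw `WorkloadApi`, and the switch it performs (shown just below) hands that client only to the replication branch. A condensed Kotlin sketch of the dispatch, with class bodies reduced to their names:

```kotlin
interface JobOrchestrator {
    val orchestratorName: String
}

class ReplicationJobOrchestrator(private val workloadApiClient: Any) : JobOrchestrator {
    override val orchestratorName = "Replication"
}

class NoOpOrchestrator : JobOrchestrator {
    override val orchestratorName = "NO_OP"
}

// Mirrors the factory switch: only the replication branch needs the workload
// client, and unknown application names fail fast.
fun jobOrchestrator(application: String, workloadApiClient: Any): JobOrchestrator =
    when (application) {
        "REPLICATION" -> ReplicationJobOrchestrator(workloadApiClient)
        "NO_OP" -> NoOpOrchestrator()
        else -> throw IllegalArgumentException("Could not find job orchestrator for: $application")
    }

fun main() {
    println(jobOrchestrator("REPLICATION", Any()).orchestratorName) // Replication
}
```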
workloadEnabled, final JobOutputDocStore jobOutputDocStore) { return switch (application) { case ReplicationLauncherWorker.REPLICATION -> new ReplicationJobOrchestrator(configDir, envConfigs, jobRunConfig, - replicationWorkerFactory, asyncStateManager, workloadApi, workloadIdGenerator, workloadEnabled, jobOutputDocStore); + replicationWorkerFactory, asyncStateManager, workloadApiClient, workloadIdGenerator, workloadEnabled, jobOutputDocStore); case NormalizationLauncherWorker.NORMALIZATION -> new NormalizationJobOrchestrator(envConfigs, processFactory, jobRunConfig, asyncStateManager); case DbtLauncherWorker.DBT -> new DbtJobOrchestrator(envConfigs, workerConfigsProvider, processFactory, jobRunConfig, asyncStateManager); case AsyncOrchestratorPodProcess.NO_OP -> new NoOpOrchestrator(); diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestrator.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestrator.java index 0558dd3d396..2018ad4ac24 100644 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestrator.java +++ b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestrator.java @@ -11,6 +11,7 @@ import com.google.common.annotations.VisibleForTesting; import datadog.trace.api.Trace; +import io.airbyte.api.client.WorkloadApiClient; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.temporal.TemporalUtils; import io.airbyte.config.Configs; @@ -31,7 +32,6 @@ import io.airbyte.workers.sync.ReplicationLauncherWorker; import io.airbyte.workers.workload.JobOutputDocStore; import io.airbyte.workers.workload.WorkloadIdGenerator; -import io.airbyte.workload.api.client.generated.WorkloadApi; import io.airbyte.workload.api.client.model.generated.WorkloadCancelRequest; import io.airbyte.workload.api.client.model.generated.WorkloadFailureRequest; import io.airbyte.workload.api.client.model.generated.WorkloadSuccessRequest; @@ -55,7 +55,7 @@ public class ReplicationJobOrchestrator implements JobOrchestrator failureReason) throws IOException { if (failureReason.isPresent()) { - workloadApi.workloadFailure(new WorkloadFailureRequest(workloadId, + workloadApiClient.getWorkloadApi().workloadFailure(new WorkloadFailureRequest(workloadId, failureReason.get().getFailureOrigin().value(), failureReason.get().getExternalMessage())); } else { - workloadApi.workloadFailure(new WorkloadFailureRequest(workloadId, null, null)); + workloadApiClient.getWorkloadApi().workloadFailure(new WorkloadFailureRequest(workloadId, null, null)); } } private void succeedWorkload(final String workloadId) throws IOException { - workloadApi.workloadSuccess(new WorkloadSuccessRequest(workloadId)); + workloadApiClient.getWorkloadApi().workloadSuccess(new WorkloadSuccessRequest(workloadId)); } private void markJobRunning() { diff --git a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryTest.java b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryTest.java index 08166c624ef..a7adaea2408 100644 --- a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryTest.java +++ 
b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryTest.java @@ -10,6 +10,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; +import io.airbyte.api.client.WorkloadApiClient; import io.airbyte.commons.envvar.EnvVar; import io.airbyte.commons.features.FeatureFlags; import io.airbyte.commons.workers.config.WorkerConfigs; @@ -29,7 +30,6 @@ import io.airbyte.workers.sync.ReplicationLauncherWorker; import io.airbyte.workers.workload.JobOutputDocStore; import io.airbyte.workers.workload.WorkloadIdGenerator; -import io.airbyte.workload.api.client.generated.WorkloadApi; import io.micronaut.context.annotation.Bean; import io.micronaut.context.annotation.Replaces; import io.micronaut.context.env.Environment; @@ -69,7 +69,7 @@ class ContainerOrchestratorFactoryTest { JobRunConfig jobRunConfig; @Inject - WorkloadApi workloadApi; + WorkloadApiClient workloadApiClient; @Inject ReplicationWorkerFactory replicationWorkerFactory; @@ -124,29 +124,29 @@ void jobOrchestrator() { final var repl = factory.jobOrchestrator( ReplicationLauncherWorker.REPLICATION, configDir, envConfigs, processFactory, workerConfigsProvider, jobRunConfig, replicationWorkerFactory, - asyncStateManager, workloadApi, new WorkloadIdGenerator(), false, jobOutputDocStore); + asyncStateManager, workloadApiClient, new WorkloadIdGenerator(), false, jobOutputDocStore); assertEquals("Replication", repl.getOrchestratorName()); final var norm = factory.jobOrchestrator( NormalizationLauncherWorker.NORMALIZATION, configDir, envConfigs, processFactory, workerConfigsProvider, jobRunConfig, replicationWorkerFactory, - asyncStateManager, workloadApi, new WorkloadIdGenerator(), false, jobOutputDocStore); + asyncStateManager, workloadApiClient, new WorkloadIdGenerator(), false, jobOutputDocStore); assertEquals("Normalization", norm.getOrchestratorName()); final var dbt = factory.jobOrchestrator( DbtLauncherWorker.DBT, configDir, envConfigs, processFactory, workerConfigsProvider, jobRunConfig, - replicationWorkerFactory, asyncStateManager, workloadApi, new WorkloadIdGenerator(), false, jobOutputDocStore); + replicationWorkerFactory, asyncStateManager, workloadApiClient, new WorkloadIdGenerator(), false, jobOutputDocStore); assertEquals("DBT Transformation", dbt.getOrchestratorName()); final var noop = factory.jobOrchestrator( AsyncOrchestratorPodProcess.NO_OP, configDir, envConfigs, processFactory, workerConfigsProvider, jobRunConfig, replicationWorkerFactory, - asyncStateManager, workloadApi, new WorkloadIdGenerator(), false, jobOutputDocStore); + asyncStateManager, workloadApiClient, new WorkloadIdGenerator(), false, jobOutputDocStore); assertEquals("NO_OP", noop.getOrchestratorName()); var caught = false; try { factory.jobOrchestrator("does not exist", configDir, envConfigs, processFactory, workerConfigsProvider, jobRunConfig, replicationWorkerFactory, - asyncStateManager, workloadApi, new WorkloadIdGenerator(), false, jobOutputDocStore); + asyncStateManager, workloadApiClient, new WorkloadIdGenerator(), false, jobOutputDocStore); } catch (final Exception e) { caught = true; } diff --git a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestratorTest.java b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestratorTest.java index cddb7d66f6f..7de9a8b04dc 100644 --- 
a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestratorTest.java +++ b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestratorTest.java @@ -13,6 +13,7 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import io.airbyte.api.client.WorkloadApiClient; import io.airbyte.config.Configs; import io.airbyte.config.ReplicationAttemptSummary; import io.airbyte.config.ReplicationOutput; @@ -43,6 +44,7 @@ class ReplicationJobOrchestratorTest { private ReplicationWorkerFactory replicationWorkerFactory; private WorkloadApi workloadApi; + private WorkloadApiClient workloadApiClient; private WorkloadIdGenerator workloadIdGenerator; private ReplicationWorker replicationWorker; @@ -50,8 +52,11 @@ class ReplicationJobOrchestratorTest { void setUp() { replicationWorkerFactory = mock(ReplicationWorkerFactory.class); workloadApi = mock(WorkloadApi.class); + workloadApiClient = mock(WorkloadApiClient.class); workloadIdGenerator = mock(WorkloadIdGenerator.class); replicationWorker = mock(ReplicationWorker.class); + + when(workloadApiClient.getWorkloadApi()).thenReturn(workloadApi); } @Test @@ -69,7 +74,7 @@ void testRunWithWorkloadEnabledRunCancelled() throws Exception { jobRunConfig, replicationWorkerFactory, mock(AsyncStateManager.class), - workloadApi, + workloadApiClient, workloadIdGenerator, true, mock(JobOutputDocStore.class)); @@ -99,7 +104,7 @@ void testRunWithWorkloadEnabledRunCompleted() throws Exception { jobRunConfig, replicationWorkerFactory, mock(AsyncStateManager.class), - workloadApi, + workloadApiClient, workloadIdGenerator, true, mock(JobOutputDocStore.class)); @@ -128,7 +133,7 @@ void testRunWithWorkloadEnabledRunFailed() throws Exception { jobRunConfig, replicationWorkerFactory, mock(AsyncStateManager.class), - workloadApi, + workloadApiClient, workloadIdGenerator, true, mock(JobOutputDocStore.class)); @@ -157,7 +162,7 @@ void testRunWithWorkloadEnabledRunThrowsException() throws Exception { jobRunConfig, replicationWorkerFactory, mock(AsyncStateManager.class), - workloadApi, + workloadApiClient, workloadIdGenerator, true, mock(JobOutputDocStore.class)); diff --git a/airbyte-cron/Dockerfile b/airbyte-cron/Dockerfile index b83e567150e..2fbf270cbd6 100644 --- a/airbyte-cron/Dockerfile +++ b/airbyte-cron/Dockerfile @@ -1,11 +1,12 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.1.0 -FROM ${JDK_IMAGE} +ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.2.1 +FROM scratch as builder WORKDIR /app - -USER root ADD airbyte-app.tar /app -RUN chown -R airbyte:airbyte /app + +FROM ${JDK_IMAGE} +WORKDIR /app +COPY --chown=airbyte:airbyte --from=builder /app /app USER airbyte:airbyte ENTRYPOINT ["/bin/bash", "-c", "airbyte-app/bin/airbyte-cron"] diff --git a/airbyte-cron/src/main/java/io/airbyte/cron/config/ApiBeanFactory.kt b/airbyte-cron/src/main/java/io/airbyte/cron/config/ApiBeanFactory.kt deleted file mode 100644 index 868bb998c97..00000000000 --- a/airbyte-cron/src/main/java/io/airbyte/cron/config/ApiBeanFactory.kt +++ /dev/null @@ -1,134 +0,0 @@ -package io.airbyte.cron.config - -import dev.failsafe.RetryPolicy -import io.airbyte.api.client.WorkloadApiClient -import io.airbyte.commons.auth.AuthenticationInterceptor -import io.airbyte.commons.temporal.config.WorkerMode -import io.airbyte.workload.api.client.generated.WorkloadApi -import io.github.oshai.kotlinlogging.KotlinLogging -import io.micrometer.core.instrument.MeterRegistry -import 
io.micronaut.context.annotation.Factory -import io.micronaut.context.annotation.Value -import io.micronaut.context.env.Environment -import jakarta.inject.Named -import jakarta.inject.Singleton -import okhttp3.HttpUrl -import okhttp3.OkHttpClient -import okhttp3.Response -import org.openapitools.client.infrastructure.ClientException -import org.openapitools.client.infrastructure.ServerException -import java.io.IOException -import java.time.Duration -import java.util.Optional - -private val logger = KotlinLogging.logger {} - -@Factory -class ApiBeanFactory { - @Singleton - fun workloadApiClient( - @Value("\${airbyte.workload-api.base-path}") workloadApiBasePath: String, - @Value("\${airbyte.workload-api.connect-timeout-seconds}") connectTimeoutSeconds: Long, - @Value("\${airbyte.workload-api.read-timeout-seconds}") readTimeoutSeconds: Long, - @Value("\${airbyte.workload-api.retries.delay-seconds}") retryDelaySeconds: Long, - @Value("\${airbyte.workload-api.retries.max}") maxRetries: Int, - authenticationInterceptor: AuthenticationInterceptor, - meterRegistry: Optional, - ): WorkloadApi { - val builder: OkHttpClient.Builder = OkHttpClient.Builder() - builder.addInterceptor(authenticationInterceptor) - builder.readTimeout(Duration.ofSeconds(readTimeoutSeconds)) - builder.connectTimeout(Duration.ofSeconds(connectTimeoutSeconds)) - - val okHttpClient: OkHttpClient = builder.build() - val metricTags = arrayOf("max-retries", maxRetries.toString()) - - val retryPolicy: RetryPolicy = - RetryPolicy.builder() - .handle( - listOf( - IllegalStateException::class.java, - IOException::class.java, - UnsupportedOperationException::class.java, - ClientException::class.java, - ServerException::class.java, - ), - ) - // TODO move these metrics into a centralized metric registery as part of the MetricClient refactor/cleanup - .onAbort { l -> - logger.warn { "Attempt aborted. Attempt count ${l.attemptCount}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.abort", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .onFailure { l -> - logger.error(l.exception) { "Failed to call ${l.result.request.url}. Last response: ${l.result}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.failure", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .onRetry { l -> - logger.warn { "Retry attempt ${l.attemptCount} of $maxRetries. Last response: ${l.lastResult}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.retry", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.lastResult.request.method), - *getUrlTags(l.lastResult.request.url), - ).increment() - } - } - .onRetriesExceeded { l -> - logger.error(l.exception) { "Retry attempts exceeded." } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.retries_exceeded", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .onSuccess { l -> - logger.debug { "Successfully called ${l.result.request.url}. 
Response: ${l.result}, isRetry: ${l.isRetry}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.success", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .withDelay(Duration.ofSeconds(retryDelaySeconds)) - .withMaxRetries(maxRetries) - .build() - - return WorkloadApiClient(workloadApiBasePath, retryPolicy, okHttpClient).workloadApi - } - - @Singleton - @Named("internalApiScheme") - fun internalApiScheme(environment: Environment): String { - return if (environment.activeNames.contains(WorkerMode.CONTROL_PLANE)) "http" else "https" - } - - private fun getUrlTags(httpUrl: HttpUrl): Array { - val last = httpUrl.pathSegments.last() - if (last.contains("[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}".toRegex())) { - return arrayOf("url", httpUrl.toString().removeSuffix(last), "workload-id", last) - } else { - return arrayOf("url", httpUrl.toString()) - } - } -} diff --git a/airbyte-cron/src/main/java/io/airbyte/cron/jobs/WorkloadMonitor.kt b/airbyte-cron/src/main/java/io/airbyte/cron/jobs/WorkloadMonitor.kt index a40b2731f93..ddcb3d86309 100644 --- a/airbyte-cron/src/main/java/io/airbyte/cron/jobs/WorkloadMonitor.kt +++ b/airbyte-cron/src/main/java/io/airbyte/cron/jobs/WorkloadMonitor.kt @@ -1,14 +1,13 @@ package io.airbyte.cron.jobs import datadog.trace.api.Trace -import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.api.client.WorkloadApiClient import io.airbyte.metrics.annotations.Instrument import io.airbyte.metrics.annotations.Tag import io.airbyte.metrics.lib.MetricAttribute import io.airbyte.metrics.lib.MetricClient import io.airbyte.metrics.lib.MetricTags import io.airbyte.metrics.lib.OssMetricsRegistry -import io.airbyte.workload.api.client.generated.WorkloadApi import io.airbyte.workload.api.client.model.generated.ExpiredDeadlineWorkloadListRequest import io.airbyte.workload.api.client.model.generated.LongRunningWorkloadRequest import io.airbyte.workload.api.client.model.generated.Workload @@ -32,10 +31,9 @@ private val logger = KotlinLogging.logger { } value = "true", ) open class WorkloadMonitor( - private val workloadApi: WorkloadApi, + private val workloadApiClient: WorkloadApiClient, @Property(name = "airbyte.workload.monitor.non-sync-workload-timeout") private val nonSyncWorkloadTimeout: Duration, @Property(name = "airbyte.workload.monitor.sync-workload-timeout") private val syncWorkloadTimeout: Duration, - private val featureFlagClient: FeatureFlagClient, private val metricClient: MetricClient, private val timeProvider: (ZoneId) -> OffsetDateTime = OffsetDateTime::now, ) { @@ -59,7 +57,7 @@ open class WorkloadMonitor( logger.info { "Checking for not started workloads." } val oldestStartedTime = timeProvider(ZoneOffset.UTC) val notStartedWorkloads = - workloadApi.workloadListWithExpiredDeadline( + workloadApiClient.workloadApi.workloadListWithExpiredDeadline( ExpiredDeadlineWorkloadListRequest( oldestStartedTime, status = listOf(WorkloadStatus.CLAIMED), @@ -80,7 +78,7 @@ open class WorkloadMonitor( logger.info { "Checking for not claimed workloads." 
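The deleted `ApiBeanFactory` above hand-built the workload API client: an OkHttp client plus a Failsafe `RetryPolicy` instrumented with per-outcome meter counters; presumably the shared `WorkloadApiClient` now owns that wiring. Stripped of the metrics, the policy reduces to a few lines. A sketch using the same Failsafe 3.x calls as the deleted code, with delay and retry count standing in for the `airbyte.workload-api.retries.*` settings:

```kotlin
import dev.failsafe.Failsafe
import dev.failsafe.RetryPolicy
import java.io.IOException
import java.time.Duration

// Fixed-delay retry over exception types the deleted factory handled,
// with a log line per retry instead of the meter-registry counters.
fun <T> retryPolicy(delaySeconds: Long, maxRetries: Int): RetryPolicy<T> =
    RetryPolicy.builder<T>()
        .handle(listOf(IllegalStateException::class.java, IOException::class.java))
        .onRetry { event -> println("Retry attempt ${event.attemptCount} of $maxRetries") }
        .withDelay(Duration.ofSeconds(delaySeconds))
        .withMaxRetries(maxRetries)
        .build()

fun main() {
    val policy = retryPolicy<String>(delaySeconds = 2, maxRetries = 5)
    println(Failsafe.with(policy).get { "ok" })
}
```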
} val oldestClaimTime = timeProvider(ZoneOffset.UTC) val notClaimedWorkloads = - workloadApi.workloadListWithExpiredDeadline( + workloadApiClient.workloadApi.workloadListWithExpiredDeadline( ExpiredDeadlineWorkloadListRequest( oldestClaimTime, status = listOf(WorkloadStatus.PENDING), @@ -102,7 +100,7 @@ open class WorkloadMonitor( logger.info { "Checking for non heartbeating workloads." } val oldestHeartbeatTime = timeProvider(ZoneOffset.UTC) val nonHeartbeatingWorkloads = - workloadApi.workloadListWithExpiredDeadline( + workloadApiClient.workloadApi.workloadListWithExpiredDeadline( ExpiredDeadlineWorkloadListRequest( oldestHeartbeatTime, status = listOf(WorkloadStatus.RUNNING, WorkloadStatus.LAUNCHED), @@ -123,7 +121,7 @@ open class WorkloadMonitor( open fun cancelRunningForTooLongNonSyncWorkloads() { logger.info { "Checking for workloads running for too long with timeout value $nonSyncWorkloadTimeout" } val nonHeartbeatingWorkloads = - workloadApi.workloadListOldNonSync( + workloadApiClient.workloadApi.workloadListOldNonSync( LongRunningWorkloadRequest( createdBefore = timeProvider(ZoneOffset.UTC).minus(nonSyncWorkloadTimeout), ), @@ -143,7 +141,7 @@ open class WorkloadMonitor( open fun cancelRunningForTooLongSyncWorkloads() { logger.info { "Checking for sync workloads running for too long with timeout value $syncWorkloadTimeout" } val nonHeartbeatingWorkloads = - workloadApi.workloadListOldSync( + workloadApiClient.workloadApi.workloadListOldSync( LongRunningWorkloadRequest( createdBefore = timeProvider(ZoneOffset.UTC).minus(syncWorkloadTimeout), ), @@ -161,7 +159,7 @@ open class WorkloadMonitor( var status = "fail" try { logger.info { "Cancelling workload ${it.id}, reason: $reason" } - workloadApi.workloadFailure(WorkloadFailureRequest(workloadId = it.id, reason = reason, source = source)) + workloadApiClient.workloadApi.workloadFailure(WorkloadFailureRequest(workloadId = it.id, reason = reason, source = source)) status = "ok" } catch (e: Exception) { logger.warn(e) { "Failed to cancel workload ${it.id}" } diff --git a/airbyte-cron/src/test/kotlin/io/airbyte/cron/jobs/WorkloadMonitorTest.kt b/airbyte-cron/src/test/kotlin/io/airbyte/cron/jobs/WorkloadMonitorTest.kt index 9a51fa7c4a9..5dd2fc3616f 100644 --- a/airbyte-cron/src/test/kotlin/io/airbyte/cron/jobs/WorkloadMonitorTest.kt +++ b/airbyte-cron/src/test/kotlin/io/airbyte/cron/jobs/WorkloadMonitorTest.kt @@ -1,7 +1,6 @@ package io.airbyte.cron.jobs -import io.airbyte.featureflag.FeatureFlagClient -import io.airbyte.featureflag.TestClient +import io.airbyte.api.client.WorkloadApiClient import io.airbyte.metrics.lib.MetricAttribute import io.airbyte.metrics.lib.MetricClient import io.airbyte.metrics.lib.MetricTags @@ -32,8 +31,8 @@ class WorkloadMonitorTest { lateinit var currentTime: OffsetDateTime lateinit var metricClient: MetricClient lateinit var workloadApi: WorkloadApi + lateinit var workloadApiClient: WorkloadApiClient lateinit var workloadMonitor: WorkloadMonitor - lateinit var featureFlagClient: FeatureFlagClient @BeforeEach fun beforeEach() { @@ -41,14 +40,14 @@ class WorkloadMonitorTest { mockk().also { every { it.count(any(), any(), *anyVararg()) } returns Unit } - featureFlagClient = TestClient(emptyMap()) workloadApi = mockk() + workloadApiClient = mockk() + every { workloadApiClient.workloadApi } returns workloadApi workloadMonitor = WorkloadMonitor( - workloadApi = workloadApi, + workloadApiClient = workloadApiClient, nonSyncWorkloadTimeout = nonSyncTimeout, syncWorkloadTimeout = syncTimeout, - featureFlagClient = 
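Each `WorkloadMonitor` sweep above has the same shape: list workloads whose deadline expired while in a given status, then fail each one through the client, logging per-workload errors rather than aborting the sweep. A reduced sketch with stand-in types (the real request and status models live in `io.airbyte.workload.api.client.model.generated`):

```kotlin
import java.time.OffsetDateTime
import java.time.ZoneOffset

data class Workload(val id: String)

// Stand-in for workloadApiClient.workloadApi, reduced to the two calls a sweep needs.
interface WorkloadApi {
    fun workloadListWithExpiredDeadline(deadline: OffsetDateTime, status: List<String>): List<Workload>

    fun workloadFailure(workloadId: String, reason: String, source: String)
}

// One sweep: anything still CLAIMED past its deadline gets failed, and a failure
// to cancel one workload does not abort the rest of the sweep.
fun cancelNotStartedWorkloads(api: WorkloadApi) {
    val expired = api.workloadListWithExpiredDeadline(OffsetDateTime.now(ZoneOffset.UTC), listOf("CLAIMED"))
    for (workload in expired) {
        try {
            api.workloadFailure(workload.id, "Not started within deadline", "workload-monitor")
        } catch (e: Exception) {
            println("Failed to cancel workload ${workload.id}: ${e.message}")
        }
    }
}

fun main() {
    val fake = object : WorkloadApi {
        override fun workloadListWithExpiredDeadline(deadline: OffsetDateTime, status: List<String>) =
            listOf(Workload("wl-1"))

        override fun workloadFailure(workloadId: String, reason: String, source: String) =
            println("failing $workloadId: $reason")
    }
    cancelNotStartedWorkloads(fake)
}
```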
featureFlagClient, metricClient = metricClient, timeProvider = { _: ZoneId -> currentTime }, ) diff --git a/airbyte-data/build.gradle.kts b/airbyte-data/build.gradle.kts index 539912933ac..b1a8d31f515 100644 --- a/airbyte-data/build.gradle.kts +++ b/airbyte-data/build.gradle.kts @@ -1,60 +1,60 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - id("org.jetbrains.kotlin.jvm") - id("org.jetbrains.kotlin.kapt") - `java-test-fixtures` + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + id("org.jetbrains.kotlin.jvm") + id("org.jetbrains.kotlin.kapt") + `java-test-fixtures` } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - - api(libs.bundles.micronaut.annotation) - - kapt(platform(libs.micronaut.platform)) - kapt(libs.bundles.micronaut.annotation.processor) - - kaptTest(platform(libs.micronaut.platform)) - kaptTest(libs.bundles.micronaut.test.annotation.processor) - - implementation(libs.bundles.apache) - implementation(libs.bundles.jackson) - implementation(libs.bundles.micronaut.data.jdbc) - implementation(libs.guava) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-auth")) - implementation(project(":airbyte-commons-protocol")) - implementation(project(":airbyte-commons-license")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-secrets")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-db:jooq")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-featureflag")) - implementation(libs.airbyte.protocol) - // For Keycloak Application Management - implementation(libs.bundles.keycloak.client) - - testCompileOnly(libs.lombok) - testAnnotationProcessor(libs.lombok) - - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.postgresql) - testImplementation(libs.platform.testcontainers.postgresql) - testImplementation(libs.mockk) - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.bundles.junit) - - // TODO: flip this import - MockData should live in airbyte-data's testFixtures - // and be imported in this manner by config-persistence - // We can move the BaseConfigDatasets to airbyte-data's testFixtures as well. 
- testImplementation(testFixtures(project(":airbyte-config:config-persistence"))) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + + api(libs.bundles.micronaut.annotation) + + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) + + kaptTest(platform(libs.micronaut.platform)) + kaptTest(libs.bundles.micronaut.test.annotation.processor) + + implementation(libs.bundles.apache) + implementation(libs.bundles.jackson) + implementation(libs.bundles.micronaut.data.jdbc) + implementation(libs.guava) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-auth")) + implementation(project(":airbyte-commons-protocol")) + implementation(project(":airbyte-commons-license")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-secrets")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-db:jooq")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-featureflag")) + implementation(libs.airbyte.protocol) + // For Keycloak Application Management + implementation(libs.bundles.keycloak.client) + + testCompileOnly(libs.lombok) + testAnnotationProcessor(libs.lombok) + + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.postgresql) + testImplementation(libs.platform.testcontainers.postgresql) + testImplementation(libs.mockk) + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.bundles.junit) + + // TODO: flip this import - MockData should live in airbyte-data's testFixtures + // and be imported in this manner by config-persistence + // We can move the BaseConfigDatasets to airbyte-data's testFixtures as well. + testImplementation(testFixtures(project(":airbyte-config:config-persistence"))) } // Even though Kotlin is excluded on Spotbugs, this project // still runs into spotbug issues. Working theory is that // generated code is being picked up. Disable as a short-term fix. tasks.named("spotbugsMain") { - enabled = false + enabled = false } diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java index 31e96fe5d26..660e8ee7201 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java @@ -112,22 +112,22 @@ public Application createApplication(final User user, final String name) { /** * List all Applications for a user. * - * @param userId The user to list Applications for. + * @param user The user to list Applications for. * @return The list of Applications for the user. 
*/
  @Override
-  public List<Application> listApplicationsByUser(final User userId) {
-    final var users = keycloakAdminClient
+  public List<Application> listApplicationsByUser(final User user) {
+    final var clientUsers = keycloakAdminClient
         .realm(keycloakConfiguration.getClientRealm())
         .users()
-        .searchByAttributes(USER_ID + ":" + userId.getAuthUserId());
+        .searchByAttributes(USER_ID + ":" + user.getAuthUserId());

     final var existingClient = new ArrayList<Application>();
-    for (final var user : users) {
+    for (final var clientUser : clientUsers) {
       final var client = keycloakAdminClient
           .realm(keycloakConfiguration.getClientRealm())
           .clients()
-          .findByClientId(user.getAttributes().get(CLIENT_ID).getFirst())
+          .findByClientId(clientUser.getAttributes().get(CLIENT_ID).getFirst())
           .stream()
           .findFirst();

@@ -159,13 +159,10 @@ public Optional<Application> deleteApplication(final User user, final String app
       return Optional.empty();
     }

-    // Get the user_id attribute from the client
-    final var userId = client.get().getAttributes().getOrDefault(USER_ID, null);
-    if (userId == null) {
-      throw new BadRequestException("Client does not have a user_id attribute");
-    }
+    final var userApplications = listApplicationsByUser(user);

-    if (!userId.equals(String.valueOf(user.getAuthUserId()))) {
+    // Only allow the user to delete their own Applications.
+    if (userApplications.stream().noneMatch(application -> application.getClientId().equals(applicationId))) {
       throw new BadRequestException("You do not have permission to delete this Application");
     }

diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/PermissionRepository.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/PermissionRepository.kt
index 547a94b45e6..570d2d405e9 100644
--- a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/PermissionRepository.kt
+++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/PermissionRepository.kt
@@ -11,4 +11,12 @@ import java.util.UUID
  * NOTE: eventually this will fully replace the PermissionPersistence class.
  */
 @JdbcRepository(dialect = Dialect.POSTGRES, dataSource = "config")
-interface PermissionRepository : PageableRepository<Permission, UUID>
+interface PermissionRepository : PageableRepository<Permission, UUID> {
+  fun findByIdIn(permissionIds: List<UUID>): List<Permission>
+
+  fun findByUserId(userId: UUID): List<Permission>
+
+  fun findByOrganizationId(organizationId: UUID): List<Permission>
+
+  fun deleteByIdIn(permissionIds: List<UUID>)
+}
diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/PermissionService.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/PermissionService.kt
new file mode 100644
index 00000000000..8ec67ce41f3
--- /dev/null
+++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/PermissionService.kt
@@ -0,0 +1,48 @@
+package io.airbyte.data.services
+
+import io.airbyte.config.Permission
+import java.util.UUID
+
+/**
+ * A service that manages permissions.
+ */
+interface PermissionService {
+  /**
+   * Get all permissions for a given user.
+   */
+  fun getPermissionsForUser(userId: UUID): List<Permission>
+
+  /**
+   * Delete a permission by its unique id.
+   */
+  @Throws(RemoveLastOrgAdminPermissionException::class)
+  fun deletePermission(permissionId: UUID)
+
+  /**
+   * Delete a list of permissions by their unique ids.
+   */
+  @Throws(RemoveLastOrgAdminPermissionException::class)
+  fun deletePermissions(permissionIds: List<UUID>)
+
+  /**
+   * Create a permission.
diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/PermissionService.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/PermissionService.kt
new file mode 100644
index 00000000000..8ec67ce41f3
--- /dev/null
+++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/PermissionService.kt
@@ -0,0 +1,48 @@
+package io.airbyte.data.services
+
+import io.airbyte.config.Permission
+import java.util.UUID
+
+/**
+ * A service that manages permissions.
+ */
+interface PermissionService {
+  /**
+   * Get all permissions for a given user.
+   */
+  fun getPermissionsForUser(userId: UUID): List<Permission>
+
+  /**
+   * Delete a permission by its unique id.
+   */
+  @Throws(RemoveLastOrgAdminPermissionException::class)
+  fun deletePermission(permissionId: UUID)
+
+  /**
+   * Delete a list of permissions by their unique ids.
+   */
+  @Throws(RemoveLastOrgAdminPermissionException::class)
+  fun deletePermissions(permissionIds: List<UUID>)
+
+  /**
+   * Create a permission.
+   */
+  @Throws(PermissionRedundantException::class)
+  fun createPermission(permission: Permission): Permission
+
+  /**
+   * Update a permission.
+   */
+  @Throws(RemoveLastOrgAdminPermissionException::class)
+  fun updatePermission(permission: Permission)
+}
+
+/**
+ * Exception thrown when an operation on a permission cannot be performed because it is redundant.
+ */
+class PermissionRedundantException(message: String) : Exception(message)
+
+/**
+ * Exception thrown when attempting an operation on a permission that would result in an organization without any org-admin.
+ */
+class RemoveLastOrgAdminPermissionException(message: String) : Exception(message)
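Taken together, the contract is: redundant grants are rejected at create time, and destructive operations refuse to strand an organization without an admin. A sketch of how a caller might consume it (hypothetical helper, not part of this diff; the `withX` fluent setters are assumed from the generated `io.airbyte.config.Permission` model):

```kotlin
import io.airbyte.config.Permission
import io.airbyte.data.services.PermissionRedundantException
import io.airbyte.data.services.PermissionService
import java.util.UUID

// Hypothetical caller: grant a workspace-level role, treating "already covered
// by an org-level role" as a no-op rather than an error.
fun grantWorkspaceEditor(
  permissionService: PermissionService,
  userId: UUID,
  workspaceId: UUID,
): Permission? =
  try {
    permissionService.createPermission(
      Permission()
        .withPermissionId(UUID.randomUUID())
        .withUserId(userId)
        .withWorkspaceId(workspaceId)
        .withPermissionType(Permission.PermissionType.WORKSPACE_EDITOR),
    )
  } catch (e: PermissionRedundantException) {
    null // an org-level permission already implies this workspace role
  }
```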
Preventing creation.", + ) + } + + // remove any permissions that would be made redundant by adding in the new permission + deletePermissionsMadeRedundantByPermission(permission, existingUserPermissions) + + return permissionRepository.save(permission.toEntity()).toConfigModel() + } + + @Transactional("config") + override fun updatePermission(permission: Permission) { + // throw early if the update would remove the last org admin + throwIfUpdateWouldRemoveLastOrgAdmin(permission) + + val otherPermissionsForUser = getPermissionsForUser(permission.userId).filter { it.permissionId != permission.permissionId }.toSet() + + // remove the permission being updated if it is now redundant. + if (isRedundantWorkspacePermission(permission, otherPermissionsForUser)) { + permissionRepository.deleteById(permission.permissionId) + return + } + + // remove any permissions that would be made redundant by adding in the newly-updated permission + deletePermissionsMadeRedundantByPermission(permission, otherPermissionsForUser) + + permissionRepository.update(permission.toEntity()).toConfigModel() + } + + private fun deletePermissionsMadeRedundantByPermission( + permission: Permission, + otherPermissions: Set, + ) { + otherPermissions.filter { isRedundantWorkspacePermission(it, otherPermissions - it + permission) } + .map { it.permissionId } + .takeIf { it.isNotEmpty() } + ?.let { permissionRepository.deleteByIdIn(it) } + } + + private fun throwIfDeletingLastOrgAdmin(permissionIdsToDelete: List) { + // get all org admin permissions being deleted, if any + val deletedOrgAdminPermissions = + permissionRepository.findByIdIn(permissionIdsToDelete).filter { + it.permissionType == PermissionType.organization_admin + } + + // group deleted org admin permission IDs by organization ID + val orgIdToDeletedOrgAdminPermissionIds = deletedOrgAdminPermissions.groupBy({ it.organizationId!! }, { it.id!! 
+
+  private fun deletePermissionsMadeRedundantByPermission(
+    permission: Permission,
+    otherPermissions: Set<Permission>,
+  ) {
+    otherPermissions.filter { isRedundantWorkspacePermission(it, otherPermissions - it + permission) }
+      .map { it.permissionId }
+      .takeIf { it.isNotEmpty() }
+      ?.let { permissionRepository.deleteByIdIn(it) }
+  }
+
+  private fun throwIfDeletingLastOrgAdmin(permissionIdsToDelete: List<UUID>) {
+    // get all org admin permissions being deleted, if any
+    val deletedOrgAdminPermissions =
+      permissionRepository.findByIdIn(permissionIdsToDelete).filter {
+        it.permissionType == PermissionType.organization_admin
+      }
+
+    // group deleted org admin permission IDs by organization ID
+    val orgIdToDeletedOrgAdminPermissionIds = deletedOrgAdminPermissions.groupBy({ it.organizationId!! }, { it.id!! })
+
+    // for each group, make sure the last org-admin isn't being deleted
+    orgIdToDeletedOrgAdminPermissionIds.forEach { (orgId, deletedOrgAdminIds) ->
+      throwIfDeletingLastOrgAdminForOrg(orgId, deletedOrgAdminIds.toSet())
+    }
+  }
+
+  private fun throwIfDeletingLastOrgAdminForOrg(
+    orgId: UUID,
+    deletedOrgAdminPermissionIds: Set<UUID>,
+  ) {
+    // get all other permissions for the organization that are not being deleted
+    val otherOrgPermissions = permissionRepository.findByOrganizationId(orgId).filter { it.id !in deletedOrgAdminPermissionIds }
+
+    // if there are no other org-admin permissions remaining in the org, throw an exception
+    if (otherOrgPermissions.none { it.permissionType == PermissionType.organization_admin }) {
+      throw RemoveLastOrgAdminPermissionException("Cannot delete the last admin in Organization $orgId.")
+    }
+  }
+
+  private fun throwIfUpdateWouldRemoveLastOrgAdmin(updatedPermission: Permission) {
+    // return early if the permission is not for an organization
+    val orgId = updatedPermission.organizationId ?: return
+
+    // get the current state of the permission in the database
+    val priorPermission =
+      permissionRepository.findById(updatedPermission.permissionId)
+        .orElseThrow { ConfigNotFoundException(ConfigSchema.PERMISSION, "Permission not found: ${updatedPermission.permissionId}") }
+
+    // return early if the permission was not an org admin prior to the update
+    if (priorPermission.permissionType != PermissionType.organization_admin) {
+      return
+    }
+
+    // get all other permissions for the organization
+    val otherOrgPermissions = permissionRepository.findByOrganizationId(orgId).filter { it.id != updatedPermission.permissionId }
+
+    // if the permission being updated is the last org admin, throw an exception
+    if (otherOrgPermissions.none { it.permissionType == PermissionType.organization_admin }) {
+      throw RemoveLastOrgAdminPermissionException("Cannot demote the last admin in Organization $orgId.")
+    }
+  }
+
+  private fun isRedundantWorkspacePermission(
+    permission: Permission,
+    existingUserPermissions: Set<Permission>,
+  ): Boolean {
+    // only workspace permissions can be redundant
+    val workspaceId = permission.workspaceId ?: return false
+
+    // if the workspace is not in an organization, it cannot have redundant permissions
+    val orgIdForWorkspace = workspaceService.getOrganizationIdFromWorkspaceId(workspaceId).orElse(null) ?: return false
+
+    // if the user has no org-level permission, the workspace permission cannot be redundant
+    val existingOrgPermission = existingUserPermissions.find { it.organizationId == orgIdForWorkspace } ?: return false
+
+    // if the new permission is less than or equal to the existing org-level permission, it is redundant
+    return getAuthority(permission.permissionType) <= getAuthority(existingOrgPermission.permissionType)
+  }
+
+  private fun getAuthority(permissionType: Permission.PermissionType): Int {
+    return when (permissionType) {
+      Permission.PermissionType.INSTANCE_ADMIN -> throw IllegalArgumentException("INSTANCE_ADMIN permissions are not supported")
+      Permission.PermissionType.ORGANIZATION_ADMIN -> OrganizationAuthRole.ORGANIZATION_ADMIN.authority
+      Permission.PermissionType.ORGANIZATION_EDITOR -> OrganizationAuthRole.ORGANIZATION_EDITOR.authority
+      Permission.PermissionType.ORGANIZATION_READER -> OrganizationAuthRole.ORGANIZATION_READER.authority
+      Permission.PermissionType.ORGANIZATION_MEMBER -> OrganizationAuthRole.ORGANIZATION_MEMBER.authority
+      Permission.PermissionType.WORKSPACE_OWNER ->
WorkspaceAuthRole.WORKSPACE_ADMIN.authority + Permission.PermissionType.WORKSPACE_ADMIN -> WorkspaceAuthRole.WORKSPACE_ADMIN.authority + Permission.PermissionType.WORKSPACE_EDITOR -> WorkspaceAuthRole.WORKSPACE_EDITOR.authority + Permission.PermissionType.WORKSPACE_READER -> WorkspaceAuthRole.WORKSPACE_READER.authority + } + } +} diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/PermissionTypeMapper.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/PermissionTypeMapper.kt index a00a3abe24b..0f40bfe3978 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/PermissionTypeMapper.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/PermissionTypeMapper.kt @@ -12,12 +12,13 @@ fun EntityPermissionType.toConfigModel(): ModelPermissionType { EntityPermissionType.organization_editor -> ModelPermissionType.ORGANIZATION_EDITOR EntityPermissionType.organization_reader -> ModelPermissionType.ORGANIZATION_READER EntityPermissionType.organization_member -> ModelPermissionType.ORGANIZATION_MEMBER - else -> throw IllegalArgumentException("Unexpected permission type: $this") + EntityPermissionType.instance_admin -> ModelPermissionType.INSTANCE_ADMIN } } fun ModelPermissionType.toEntity(): EntityPermissionType { return when (this) { + ModelPermissionType.WORKSPACE_OWNER -> EntityPermissionType.workspace_admin ModelPermissionType.WORKSPACE_ADMIN -> EntityPermissionType.workspace_admin ModelPermissionType.WORKSPACE_EDITOR -> EntityPermissionType.workspace_editor ModelPermissionType.WORKSPACE_READER -> EntityPermissionType.workspace_reader @@ -25,6 +26,6 @@ fun ModelPermissionType.toEntity(): EntityPermissionType { ModelPermissionType.ORGANIZATION_EDITOR -> EntityPermissionType.organization_editor ModelPermissionType.ORGANIZATION_READER -> EntityPermissionType.organization_reader ModelPermissionType.ORGANIZATION_MEMBER -> EntityPermissionType.organization_member - else -> throw IllegalArgumentException("Unexpected permission type: $this") + ModelPermissionType.INSTANCE_ADMIN -> EntityPermissionType.instance_admin } } diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/AbstractConfigRepositoryTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/AbstractConfigRepositoryTest.kt index 7c2c1f043f4..4654789a0d5 100644 --- a/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/AbstractConfigRepositoryTest.kt +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/AbstractConfigRepositoryTest.kt @@ -39,6 +39,10 @@ abstract class AbstractConfigRepositoryTest>( fun setupBase() { container.start() + // occasionally, the container is not yet accepting connections even though start() has returned. + // this createConnection() call will block until the container is ready to accept connections. 
+    container.createConnection("").use { }
+
     // set the micronaut datasource properties to match our container we started up
     context =
       ApplicationContext.run(
diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/PermissionRepositoryTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/PermissionRepositoryTest.kt
index 3fbe72171c0..31c5e9d0375 100644
--- a/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/PermissionRepositoryTest.kt
+++ b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/PermissionRepositoryTest.kt
@@ -9,6 +9,7 @@ import org.junit.jupiter.api.Assertions.assertEquals
 import org.junit.jupiter.api.Assertions.assertNotNull
 import org.junit.jupiter.api.Assertions.assertNull
 import org.junit.jupiter.api.BeforeAll
+import org.junit.jupiter.api.BeforeEach
 import org.junit.jupiter.api.Test
 import java.util.UUID
@@ -18,12 +19,18 @@ internal class PermissionRepositoryTest : AbstractConfigRepositoryTest {
+      assertThrows<RemoveLastOrgAdminPermissionException> { permissionService.deletePermission(permId) }
+
+      verify { permissionRepository.findByIdIn(listOf(permId)) }
+      verify { permissionRepository.findByOrganizationId(orgId) }
+      verify(exactly = 0) { permissionRepository.deleteById(any()) }
+      confirmVerified(permissionRepository)
+    }
+  }
+
+  @Nested
+  inner class DeletePermissions {
+    @Test
+    fun `deletePermissions should delete from repository when not deleting the last org admin`() {
+      val permId1 = UUID.randomUUID()
+      val permId2 = UUID.randomUUID()
+      val orgId = UUID.randomUUID()
+
+      val permissionToDelete1 =
+        Permission().apply {
+          permissionId = permId1
+          userId = testUserId
+          organizationId = orgId
+          permissionType = PermissionType.ORGANIZATION_ADMIN
+        }
+
+      val permissionToDelete2 =
+        Permission().apply {
+          permissionId = permId2
+          userId = testUserId
+          organizationId = orgId
+          permissionType = PermissionType.ORGANIZATION_EDITOR
+        }
+
+      every { permissionRepository.findByIdIn(listOf(permId1, permId2)) } returns
+        listOf(
+          permissionToDelete1.toEntity(),
+          permissionToDelete2.toEntity(),
+        )
+
+      every { permissionRepository.findByOrganizationId(orgId) } returns
+        listOf(
+          permissionToDelete1,
+          permissionToDelete2,
+          Permission().apply {
+            permissionId = UUID.randomUUID()
+            userId = UUID.randomUUID()
+            organizationId = orgId
+            permissionType = PermissionType.ORGANIZATION_ADMIN // another org admin exists for a different user, so don't throw
+          },
+        ).map { it.toEntity() }
+
+      every { permissionRepository.deleteByIdIn(listOf(permId1, permId2)) } just Runs
+
+      permissionService.deletePermissions(listOf(permId1, permId2))
+
+      verify { permissionRepository.findByIdIn(listOf(permId1, permId2)) }
+      verify { permissionRepository.findByOrganizationId(orgId) }
+      verify { permissionRepository.deleteByIdIn(listOf(permId1, permId2)) }
+      confirmVerified(permissionRepository)
+    }
+
+    @Test
+    fun `deletePermissions should throw when deleting the last org admin`() {
+      val permId1 = UUID.randomUUID()
+      val permId2 = UUID.randomUUID()
+      val orgId1 = UUID.randomUUID()
+      val orgId2 = UUID.randomUUID()
+
+      val permissionToDelete1 =
+        Permission().apply {
+          permissionId = permId1
+          userId = testUserId
+          organizationId = orgId1
+          permissionType = PermissionType.ORGANIZATION_ADMIN // not the last admin in org 1
+        }
+
+      val permissionToDelete2 =
+        Permission().apply {
+          permissionId = permId2
+          userId = testUserId
+          organizationId = orgId2
+          permissionType = PermissionType.ORGANIZATION_ADMIN // is the last admin in org 2, should throw
+        }
+
+      every { permissionRepository.findByIdIn(listOf(permId1, permId2)) } returns
+        listOf(
+          permissionToDelete1.toEntity(),
+          permissionToDelete2.toEntity(),
+        )
+
+      every { permissionRepository.findByOrganizationId(orgId1) } returns
+        listOf(
+          permissionToDelete1,
+          Permission().apply {
+            permissionId = UUID.randomUUID()
+            userId = UUID.randomUUID()
+            organizationId = orgId1
+            permissionType = PermissionType.ORGANIZATION_ADMIN // another admin exists in org 1, so this doesn't cause the throw
+          },
+        ).map { it.toEntity() }
+
+      every { permissionRepository.findByOrganizationId(orgId2) } returns
+        listOf(
+          permissionToDelete2,
+          Permission().apply {
+            permissionId = UUID.randomUUID()
+            userId = UUID.randomUUID()
+            organizationId = orgId2
+            permissionType = PermissionType.ORGANIZATION_EDITOR // only other perm in org 2 is editor, so this causes a throw
+          },
+        ).map { it.toEntity() }
+
+      assertThrows<RemoveLastOrgAdminPermissionException> { permissionService.deletePermissions(listOf(permId1, permId2)) }
+
+      verify { permissionRepository.findByIdIn(listOf(permId1, permId2)) }
+      verify { permissionRepository.findByOrganizationId(orgId1) }
+      verify { permissionRepository.findByOrganizationId(orgId2) }
+      verify(exactly = 0) { permissionRepository.deleteByIdIn(any()) }
+      confirmVerified(permissionRepository)
+    }
+  }
+
+  @Nested
+  inner class CreatePermission {
+    @Test
+    fun `createPermission should save permission when no redundant permissions exist`() {
+      val existingOrgPermission =
+        Permission().apply {
+          permissionId = UUID.randomUUID()
+          userId = testUserId
+          organizationId = UUID.randomUUID()
+          permissionType = PermissionType.ORGANIZATION_EDITOR
+        }
+      val existingPermissionDifferentOrg =
+        Permission().apply {
+          permissionId = UUID.randomUUID()
+          userId = testUserId
+          organizationId = UUID.randomUUID()
+          permissionType = PermissionType.ORGANIZATION_ADMIN // different org than new permission
+        }
+      val newPermission =
+        Permission().apply {
+          permissionId = UUID.randomUUID()
+          userId = testUserId
+          workspaceId = UUID.randomUUID()
+          permissionType = PermissionType.WORKSPACE_ADMIN // higher than existing org permission, not redundant
+        }
+
+      every { permissionRepository.findByUserId(testUserId) } returns
+        listOf(
+          existingOrgPermission.toEntity(),
+          existingPermissionDifferentOrg.toEntity(),
+        )
+      every { workspaceService.getOrganizationIdFromWorkspaceId(newPermission.workspaceId) } returns
+        Optional.of(
+          existingOrgPermission.organizationId,
+        )
+      every { permissionRepository.save(newPermission.toEntity()) } returns newPermission.toEntity()
+
+      val result = permissionService.createPermission(newPermission)
+
+      assertEquals(result, newPermission)
+
+      verify { permissionRepository.findByUserId(testUserId) }
+      verify(exactly = 1) { permissionRepository.save(newPermission.toEntity()) }
+      confirmVerified(permissionRepository)
+    }
+
+    @Test
+    fun `createPermission should throw when redundant permission is detected`() {
+      val existingOrgPermission =
+        Permission().apply {
+          permissionId = UUID.randomUUID()
+          userId = testUserId
+          organizationId = UUID.randomUUID()
+          permissionType = PermissionType.ORGANIZATION_ADMIN
+        }
+      val newPermission =
+        Permission().apply {
+          permissionId = UUID.randomUUID()
+          userId = testUserId
+          workspaceId = UUID.randomUUID()
+          permissionType = PermissionType.WORKSPACE_ADMIN // equal to existing org permission, redundant
+        }
+
+      // new permission is for a workspace that belongs to the existing permission's org
+      every { workspaceService.getOrganizationIdFromWorkspaceId(newPermission.workspaceId) } returns
+        Optional.of(
+          existingOrgPermission.organizationId,
+        )
+      every { permissionRepository.findByUserId(testUserId) } returns listOf(existingOrgPermission.toEntity())
+
+      assertThrows<PermissionRedundantException> { permissionService.createPermission(newPermission) }
+
+      // nothing saved or deleted
+      verify(exactly = 0) { permissionRepository.save(any()) }
+      verify(exactly = 0) { permissionRepository.deleteById(any()) }
+    }
+
+    @Test
+    fun `createPermission should work for instance admin permissions`() {
+      val newPermission =
+        Permission().apply {
+          permissionId = UUID.randomUUID()
+          userId = testUserId
+          permissionType = PermissionType.INSTANCE_ADMIN
+        }
+
+      every { permissionRepository.findByUserId(testUserId) } returns emptyList()
+      every { permissionRepository.save(newPermission.toEntity()) } returns newPermission.toEntity()
+
+      val result = permissionService.createPermission(newPermission)
+
+      assertEquals(result, newPermission)
+
+      verify { permissionRepository.findByUserId(testUserId) }
+      verify(exactly = 1) { permissionRepository.save(newPermission.toEntity()) }
+      confirmVerified(permissionRepository)
+    }
+  }
+
+  @Nested
+  inner class UpdatePermission {
+    @Nested
+    inner class UpdateWorkspacePermission {
+      @Test
+      fun `updatePermission should update workspace permission when not redundant`() {
+        val existingOrgPermission =
+          Permission().apply {
+            permissionId = UUID.randomUUID()
+            userId = testUserId
+            organizationId = UUID.randomUUID()
+            permissionType = PermissionType.ORGANIZATION_READER // lower than updated permission, so nothing redundant
+          }
+        val existingPermissionDifferentOrg =
+          Permission().apply {
+            permissionId = UUID.randomUUID()
+            userId = testUserId
+            organizationId = UUID.randomUUID()
+            permissionType = PermissionType.ORGANIZATION_ADMIN // different org than new permission, so nothing redundant
+          }
+        val workspacePermissionPreUpdate =
+          Permission().apply {
+            permissionId = UUID.randomUUID()
+            userId = testUserId
+            workspaceId = UUID.randomUUID()
+            permissionType = PermissionType.WORKSPACE_ADMIN
+          }
+        val updatedWorkspacePermission =
+          Permission().apply {
+            permissionId = workspacePermissionPreUpdate.permissionId
+            userId = workspacePermissionPreUpdate.userId
+            workspaceId = workspacePermissionPreUpdate.workspaceId
+            permissionType = PermissionType.WORKSPACE_EDITOR // update from admin to editor
+          }
+
+        every { permissionRepository.findByUserId(testUserId) } returns
+          listOf(
+            existingOrgPermission.toEntity(),
+            existingPermissionDifferentOrg.toEntity(),
+            workspacePermissionPreUpdate.toEntity(),
+          )
+        every { workspaceService.getOrganizationIdFromWorkspaceId(workspacePermissionPreUpdate.workspaceId) } returns
+          Optional.of(
+            existingOrgPermission.organizationId,
+          )
+        every { permissionRepository.update(updatedWorkspacePermission.toEntity()) } returns updatedWorkspacePermission.toEntity()
+
+        permissionService.updatePermission(updatedWorkspacePermission)
+
+        verify { permissionRepository.findByUserId(testUserId) }
+        verify(exactly = 1) { permissionRepository.update(updatedWorkspacePermission.toEntity()) }
+        confirmVerified(permissionRepository)
+      }
+
+      @Test
+      fun `updatePermission should delete updated workspace permission when made redundant`() {
+        val existingOrgPermission =
+          Permission().apply {
+            permissionId = UUID.randomUUID()
+            userId = testUserId
+            organizationId = UUID.randomUUID()
+            permissionType = PermissionType.ORGANIZATION_EDITOR // higher than updated permission, so update becomes redundant
+          }
+        val workspacePermissionPreUpdate =
+          Permission().apply {
+            permissionId = UUID.randomUUID()
+            userId = testUserId
+            workspaceId = UUID.randomUUID()
+            permissionType = PermissionType.WORKSPACE_ADMIN
+          }
+        val workspacePermissionUpdated =
+          Permission().apply {
+            permissionId = workspacePermissionPreUpdate.permissionId
+            userId = workspacePermissionPreUpdate.userId
+            workspaceId = workspacePermissionPreUpdate.workspaceId
+            permissionType = PermissionType.WORKSPACE_READER // update from admin to reader, permission is now redundant
+          }
+
+        every { permissionRepository.findByUserId(testUserId) } returns
+          listOf(
+            existingOrgPermission.toEntity(),
+            workspacePermissionPreUpdate.toEntity(),
+          )
+        every { workspaceService.getOrganizationIdFromWorkspaceId(workspacePermissionPreUpdate.workspaceId) } returns
+          Optional.of(
+            existingOrgPermission.organizationId,
+          )
+        every { permissionRepository.update(workspacePermissionUpdated.toEntity()) } returns workspacePermissionUpdated.toEntity()
+        every { permissionRepository.deleteById(workspacePermissionUpdated.permissionId) } just Runs
+
+        permissionService.updatePermission(workspacePermissionUpdated)
+
+        verify { permissionRepository.findByUserId(testUserId) }
+        verify(exactly = 0) { permissionRepository.update(any()) } // no update because deleted instead
+        verify(exactly = 1) { permissionRepository.deleteById(workspacePermissionUpdated.permissionId) }
+        confirmVerified(permissionRepository)
+      }
+    }
+
+    @Nested
+    inner class UpdateOrgPermission {
+      @Test
+      fun `updatePermission should delete any workspace permissions that are made redundant by updating an org permission`() {
+        val existingWorkspacePermission =
+          Permission().apply {
+            permissionId = UUID.randomUUID()
+            userId = testUserId
+            workspaceId = UUID.randomUUID()
+            permissionType = PermissionType.WORKSPACE_ADMIN // will be made redundant by updated org permission
+          }
+        val orgPermissionPreUpdate =
+          Permission().apply {
+            permissionId = UUID.randomUUID()
+            userId = testUserId
+            organizationId = UUID.randomUUID()
+            permissionType = PermissionType.ORGANIZATION_READER
+          }
+        val updatedOrgPermission =
+          Permission().apply {
+            permissionId = orgPermissionPreUpdate.permissionId
+            userId = orgPermissionPreUpdate.userId
+            organizationId = orgPermissionPreUpdate.organizationId
+            permissionType = PermissionType.ORGANIZATION_ADMIN // update from org reader to admin
+          }
+
+        every { permissionRepository.findById(orgPermissionPreUpdate.permissionId) } returns Optional.of(orgPermissionPreUpdate.toEntity())
+        every { permissionRepository.findByUserId(testUserId) } returns
+          listOf(
+            existingWorkspacePermission.toEntity(),
+            orgPermissionPreUpdate.toEntity(),
+          )
+        every { workspaceService.getOrganizationIdFromWorkspaceId(existingWorkspacePermission.workspaceId) } returns
+          Optional.of(
+            orgPermissionPreUpdate.organizationId,
+          )
+        every { permissionRepository.update(updatedOrgPermission.toEntity()) } returns updatedOrgPermission.toEntity()
+        every { permissionRepository.deleteByIdIn(listOf(existingWorkspacePermission.permissionId)) } just Runs
+
+        permissionService.updatePermission(updatedOrgPermission)
+
+        verify { permissionRepository.findById(orgPermissionPreUpdate.permissionId) }
+        verify { permissionRepository.findByUserId(testUserId) }
+        verify(exactly = 1) { permissionRepository.update(updatedOrgPermission.toEntity()) }
+        verify(exactly = 1) { permissionRepository.deleteByIdIn(listOf(existingWorkspacePermission.permissionId)) }
+        confirmVerified(permissionRepository)
+      }
+
+      @Test
+      fun `updatePermission should throw if demoting the last org admin`() {
+        val orgId = UUID.randomUUID()
+
+        val existingOtherOrgPermission =
+          Permission().apply {
+            permissionId = UUID.randomUUID()
+            userId = testUserId
+            organizationId = orgId
+            permissionType = PermissionType.ORGANIZATION_EDITOR // other org permission is not admin
+          }
+        val orgPermissionPreUpdate =
+          Permission().apply {
+            permissionId = UUID.randomUUID()
+            userId = testUserId
+            organizationId = orgId
+            permissionType = PermissionType.ORGANIZATION_ADMIN
+          }
+        val orgPermissionUpdated =
+          Permission().apply {
+            permissionId = orgPermissionPreUpdate.permissionId
+            userId = orgPermissionPreUpdate.userId
+            organizationId = orgPermissionPreUpdate.organizationId
+            permissionType =
+              PermissionType.ORGANIZATION_EDITOR // org permission update is from admin to editor, throws because it's the last admin
+          }
+
+        every { permissionRepository.findById(orgPermissionPreUpdate.permissionId) } returns Optional.of(orgPermissionPreUpdate.toEntity())
+        every { permissionRepository.findByOrganizationId(orgId) } returns
+          listOf(
+            existingOtherOrgPermission.toEntity(),
+            orgPermissionPreUpdate.toEntity(),
+          )
+
+        assertThrows<RemoveLastOrgAdminPermissionException> { permissionService.updatePermission(orgPermissionUpdated) }
+
+        verify { permissionRepository.findById(orgPermissionPreUpdate.permissionId) }
+        verify { permissionRepository.findByOrganizationId(orgId) }
+        verify(exactly = 0) { permissionRepository.update(any()) }
+        confirmVerified(permissionRepository)
+      }
+
+      @Test
+      fun `updatePermission should allow org admin demotion if another org admin exists`() {
+        val orgId = UUID.randomUUID()
+
+        val existingOtherOrgPermission =
+          Permission().apply {
+            permissionId = UUID.randomUUID()
+            userId = testUserId
+            organizationId = orgId
+            permissionType = PermissionType.ORGANIZATION_ADMIN // other org permission is admin
+          }
+        val orgPermissionPreUpdate =
+          Permission().apply {
+            permissionId = UUID.randomUUID()
+            userId = testUserId
+            organizationId = orgId
+            permissionType = PermissionType.ORGANIZATION_ADMIN
+          }
+        val orgPermissionUpdated =
+          Permission().apply {
+            permissionId = orgPermissionPreUpdate.permissionId
+            userId = orgPermissionPreUpdate.userId
+            organizationId = orgPermissionPreUpdate.organizationId
+            permissionType = PermissionType.ORGANIZATION_EDITOR // org permission update is from admin to editor
+          }
+
+        every { permissionRepository.findByUserId(testUserId) } returns
+          listOf(
+            existingOtherOrgPermission.toEntity(),
+            orgPermissionPreUpdate.toEntity(),
+          )
+        every { permissionRepository.findById(orgPermissionPreUpdate.permissionId) } returns
+          Optional.of(
+            orgPermissionPreUpdate.toEntity(),
+          )
+        every { permissionRepository.findByOrganizationId(orgId) } returns
+          listOf(
+            existingOtherOrgPermission.toEntity(),
+            orgPermissionPreUpdate.toEntity(),
+          )
+        every { permissionRepository.update(orgPermissionUpdated.toEntity()) } returns orgPermissionUpdated.toEntity()
+
+        permissionService.updatePermission(orgPermissionUpdated)
+
+        verify { permissionRepository.findByUserId(testUserId) }
+        verify { permissionRepository.findById(orgPermissionPreUpdate.permissionId) }
+        verify { permissionRepository.findByOrganizationId(orgId) }
+        verify(exactly = 1) { permissionRepository.update(orgPermissionUpdated.toEntity()) }
+        confirmVerified(permissionRepository)
+      }
+    }
+  }
+}
diff --git a/airbyte-db/db-lib/build.gradle.kts b/airbyte-db/db-lib/build.gradle.kts
index 537eb5708f1..d69e9fb976f 100644
--- a/airbyte-db/db-lib/build.gradle.kts
+++ b/airbyte-db/db-lib/build.gradle.kts
@@ -1,115 +1,115 @@
 plugins {
-    id("io.airbyte.gradle.jvm.lib")
-    id("io.airbyte.gradle.docker")
-    id("io.airbyte.gradle.publish")
+
id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") } // Add a configuration(for our migrations(tasks defined below to encapsulate their dependencies) val migrations by configurations.creating { - extendsFrom(configurations.getByName("implementation")) + extendsFrom(configurations.getByName("implementation")) } configurations.all { - exclude(group = "io.micronaut.flyway") - resolutionStrategy { - force (libs.platform.testcontainers.postgresql) - } + exclude(group = "io.micronaut.flyway") + resolutionStrategy { + force(libs.platform.testcontainers.postgresql) + } } airbyte { - docker { - imageName = "db" - } + docker { + imageName = "db" + } } dependencies { - api(libs.hikaricp) - api(libs.jooq.meta) - api(libs.jooq) - api(libs.postgresql) - - implementation(project(":airbyte-commons")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-config:config-models")) - implementation(libs.bundles.flyway) - implementation(libs.guava) - implementation(platform(libs.fasterxml)) - implementation(libs.bundles.jackson) - implementation(libs.commons.io) - - migrations(libs.platform.testcontainers.postgresql) - migrations(sourceSets["main"].output) - - // Mark as compile Only to avoid leaking transitively to connectors) - compileOnly(libs.platform.testcontainers.postgresql) - - // These are required because gradle might be using lower version of Jna from other) - // library transitive dependency. Can be removed if we can figure out which library is the cause.) - // Refer: https://github.com/testcontainers/testcontainers-java/issues/3834#issuecomment-825409079) - implementation(libs.jna) - implementation(libs.jna.platform) - - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.apache.commons.lang) - testImplementation(libs.platform.testcontainers.postgresql) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - - testImplementation(libs.junit.pioneer) - testImplementation(libs.json.assert) + api(libs.hikaricp) + api(libs.jooq.meta) + api(libs.jooq) + api(libs.postgresql) + + implementation(project(":airbyte-commons")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-config:config-models")) + implementation(libs.bundles.flyway) + implementation(libs.guava) + implementation(platform(libs.fasterxml)) + implementation(libs.bundles.jackson) + implementation(libs.commons.io) + + migrations(libs.platform.testcontainers.postgresql) + migrations(sourceSets["main"].output) + + // Mark as compile Only to avoid leaking transitively to connectors) + compileOnly(libs.platform.testcontainers.postgresql) + + // These are required because gradle might be using lower version of Jna from other) + // library transitive dependency. Can be removed if we can figure out which library is the cause.) 
+ // Refer: https://github.com/testcontainers/testcontainers-java/issues/3834#issuecomment-825409079) + implementation(libs.jna) + implementation(libs.jna.platform) + + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.apache.commons.lang) + testImplementation(libs.platform.testcontainers.postgresql) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + + testImplementation(libs.junit.pioneer) + testImplementation(libs.json.assert) } tasks.register("newConfigsMigration") { - mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" - classpath = files(migrations.files) - args = listOf("configs", "create") - dependsOn(tasks.named("classes")) + mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" + classpath = files(migrations.files) + args = listOf("configs", "create") + dependsOn(tasks.named("classes")) } tasks.register("runConfigsMigration") { - mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" - classpath = files(migrations.files) - args = listOf("configs", "migrate") - dependsOn(tasks.named("classes")) + mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" + classpath = files(migrations.files) + args = listOf("configs", "migrate") + dependsOn(tasks.named("classes")) } tasks.register("dumpConfigsSchema") { - mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" - classpath = files(migrations.files) - args = listOf("configs", "dump_schema") - dependsOn(tasks.named("classes")) + mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" + classpath = files(migrations.files) + args = listOf("configs", "dump_schema") + dependsOn(tasks.named("classes")) } tasks.register("newJobsMigration") { - mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" - classpath = files(migrations.files) - args = listOf("jobs", "create") - dependsOn(tasks.named("classes")) + mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" + classpath = files(migrations.files) + args = listOf("jobs", "create") + dependsOn(tasks.named("classes")) } tasks.register("runJobsMigration") { - mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" - classpath = files(migrations.files) - args = listOf( "jobs", "migrate") - dependsOn(tasks.named("classes")) + mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" + classpath = files(migrations.files) + args = listOf("jobs", "migrate") + dependsOn(tasks.named("classes")) } tasks.register("dumpJobsSchema") { - mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" - classpath = files(migrations.files) - args = listOf("jobs", "dump_schema") - dependsOn(tasks.named("classes")) + mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" + classpath = files(migrations.files) + args = listOf("jobs", "dump_schema") + dependsOn(tasks.named("classes")) } val copyInitSql = tasks.register("copyInitSql") { - from("src/main/resources") { - include("init.sql") - } - into("build/airbyte/docker/bin") + from("src/main/resources") { + include("init.sql") + } + into("build/airbyte/docker/bin") } tasks.named("dockerBuildImage") { - dependsOn(copyInitSql) + dependsOn(copyInitSql) } diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_55_1_002__AddGenerationTable.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_55_1_002__AddGenerationTable.java new file mode 100644 index 
00000000000..87eefadbe1a
--- /dev/null
+++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_55_1_002__AddGenerationTable.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.db.instance.configs.migrations;
+
+import static org.jooq.impl.DSL.currentOffsetDateTime;
+import static org.jooq.impl.DSL.foreignKey;
+import static org.jooq.impl.DSL.primaryKey;
+
+import java.time.OffsetDateTime;
+import java.util.UUID;
+import org.flywaydb.core.api.migration.BaseJavaMigration;
+import org.flywaydb.core.api.migration.Context;
+import org.jooq.DSLContext;
+import org.jooq.Field;
+import org.jooq.impl.DSL;
+import org.jooq.impl.SQLDataType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class V0_55_1_002__AddGenerationTable extends BaseJavaMigration {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(V0_55_1_002__AddGenerationTable.class);
+
+  static final String STREAM_GENERATION_TABLE_NAME = "stream_generation";
+
+  @Override
+  public void migrate(final Context context) throws Exception {
+    LOGGER.info("Running migration: {}", this.getClass().getSimpleName());
+
+    // Warning: please do not use any jOOQ generated code to write a migration.
+    // As database schema changes, the generated jOOQ code can be deprecated. So
+    // old migration may not compile if there is any generated code.
+    final DSLContext ctx = DSL.using(context.getConnection());
+    createGenerationTable(ctx);
+  }
+
+  static void createGenerationTable(final DSLContext ctx) {
+    final Field<UUID> id = DSL.field("id", SQLDataType.UUID.nullable(false));
+    final Field<UUID> connectionId = DSL.field("connection_id", SQLDataType.UUID.nullable(false));
+    final Field<String> streamName = DSL.field("stream_name", SQLDataType.VARCHAR.nullable(false));
+    final Field<String> streamNamespace = DSL.field("stream_namespace", SQLDataType.VARCHAR.nullable(true));
+    final Field<Long> generationId = DSL.field("generation_id", SQLDataType.BIGINT.nullable(false));
+    final Field<Long> startJobId = DSL.field("start_job_id", SQLDataType.BIGINT.nullable(false));
+    final Field<OffsetDateTime> createdAt =
+        DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime()));
+    final Field<OffsetDateTime> updatedAt =
+        DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime()));
+
+    ctx.createTable(STREAM_GENERATION_TABLE_NAME)
+        .columns(id,
+            connectionId,
+            streamName,
+            streamNamespace,
+            generationId,
+            startJobId,
+            createdAt,
+            updatedAt)
+        .constraints(
+            primaryKey(id),
+            foreignKey(connectionId).references("connection", "id").onDeleteCascade())
+        .execute();
+
+    final String indexCreationQuery = String.format("CREATE INDEX ON %s USING btree (%s, %s, %s DESC)",
+        STREAM_GENERATION_TABLE_NAME, connectionId.getName(), streamName.getName(), generationId.getName());
+    final String indexCreationQuery2 = String.format("CREATE INDEX ON %s USING btree (%s, %s, %s, %s DESC)",
+        STREAM_GENERATION_TABLE_NAME, connectionId.getName(), streamName.getName(), streamNamespace.getName(), generationId.getName());
+    ctx.execute(indexCreationQuery);
+    ctx.execute(indexCreationQuery2);
+  }
+
+}
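The `generation_id DESC` ordering in both indexes suggests the common read path is "latest generation for a stream". A sketch of such a lookup with plain jOOQ (illustrative only, written in Kotlin like the other sketches here; this accessor is not part of the diff):

```kotlin
import java.util.UUID
import org.jooq.DSLContext
import org.jooq.impl.DSL.field
import org.jooq.impl.DSL.table

// Fetch the most recent generation id recorded for one stream of a connection.
// The (connection_id, stream_name, generation_id DESC) index above can satisfy
// this with a single index probe.
fun latestGenerationId(ctx: DSLContext, connectionId: UUID, streamName: String): Long? =
  ctx.select(field("generation_id", Long::class.javaObjectType))
    .from(table("stream_generation"))
    .where(field("connection_id", UUID::class.java).eq(connectionId))
    .and(field("stream_name", String::class.java).eq(streamName))
    .orderBy(field("generation_id", Long::class.javaObjectType).desc())
    .limit(1)
    .fetchOne(field("generation_id", Long::class.javaObjectType))
```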
diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_55_1_003__EditRefreshTable.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_55_1_003__EditRefreshTable.java
new file mode 100644
index 00000000000..fbb5f641800
--- /dev/null
+++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_55_1_003__EditRefreshTable.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.db.instance.configs.migrations;
+
+import static org.jooq.impl.DSL.currentOffsetDateTime;
+import static org.jooq.impl.DSL.foreignKey;
+import static org.jooq.impl.DSL.primaryKey;
+
+import java.time.OffsetDateTime;
+import java.util.UUID;
+import org.flywaydb.core.api.migration.BaseJavaMigration;
+import org.flywaydb.core.api.migration.Context;
+import org.jooq.DSLContext;
+import org.jooq.Field;
+import org.jooq.impl.DSL;
+import org.jooq.impl.SQLDataType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class V0_55_1_003__EditRefreshTable extends BaseJavaMigration {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(V0_55_1_003__EditRefreshTable.class);
+
+  static final String STREAM_REFRESHES_TABLE = "stream_refreshes";
+
+  private static final Field<UUID> connectionId = DSL.field("connection_id", SQLDataType.UUID.nullable(false));
+  private static final Field<String> streamName = DSL.field("stream_name", SQLDataType.VARCHAR.nullable(false));
+  private static final Field<String> streamNamespace = DSL.field("stream_namespace", SQLDataType.VARCHAR.nullable(true));
+
+  private static final Field<OffsetDateTime> createdAtField =
+      DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime()));
+
+  @Override
+  public void migrate(final Context context) throws Exception {
+    LOGGER.info("Running migration: {}", this.getClass().getSimpleName());
+
+    // Warning: please do not use any jOOQ generated code to write a migration.
+    // As database schema changes, the generated jOOQ code can be deprecated. So
+    // old migration may not compile if there is any generated code.
+    final DSLContext ctx = DSL.using(context.getConnection());
+    editRefreshTable(ctx);
+  }
+
+  static void editRefreshTable(final DSLContext ctx) {
+    ctx.truncate(STREAM_REFRESHES_TABLE).execute();
+    ctx.dropTable(STREAM_REFRESHES_TABLE).execute();
+
+    final Field<UUID> id = DSL.field("id", SQLDataType.UUID.nullable(false));
+    ctx.createTable(STREAM_REFRESHES_TABLE)
+        .columns(id,
+            connectionId,
+            streamName,
+            streamNamespace,
+            createdAtField)
+        .constraints(
+            primaryKey(id),
+            foreignKey(connectionId).references("connection", "id").onDeleteCascade())
+        .execute();
+
+    final String indexCreationQuery = String.format("CREATE INDEX ON %s USING btree (%s)",
+        STREAM_REFRESHES_TABLE, connectionId.getName());
+    final String indexCreationQuery2 = String.format("CREATE INDEX ON %s USING btree (%s, %s)",
+        STREAM_REFRESHES_TABLE, connectionId.getName(), streamName.getName());
+    final String indexCreationQuery3 = String.format("CREATE INDEX ON %s USING btree (%s, %s, %s)",
+        STREAM_REFRESHES_TABLE, connectionId.getName(), streamName.getName(), streamNamespace.getName());
+    ctx.execute(indexCreationQuery);
+    ctx.execute(indexCreationQuery2);
+    ctx.execute(indexCreationQuery3);
+  }
+
+}
diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_57_2_001__AddRefreshJobType.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_57_2_001__AddRefreshJobType.java
new file mode 100644
index 00000000000..707f9bc0c65
--- /dev/null
+++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_57_2_001__AddRefreshJobType.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */ + +package io.airbyte.db.instance.jobs.migrations; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_57_2_001__AddRefreshJobType extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_57_2_001__AddRefreshJobType.class); + + @Override + public void migrate(Context context) throws Exception { + final DSLContext ctx = DSL.using(context.getConnection()); + ctx.alterType("job_config_type").addValue("refresh").execute(); + } + +} diff --git a/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt b/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt index e34e930588b..2fad95ffcc4 100644 --- a/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt +++ b/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt @@ -321,12 +321,24 @@ create table "public"."state" ( constraint "state_pkey" primary key ("id", "connection_id"), constraint "state__connection_id__stream_name__namespace__uq" unique ("connection_id", "stream_name", "namespace") ); +create table "public"."stream_generation" ( + "id" uuid not null, + "connection_id" uuid not null, + "stream_name" varchar(2147483647) not null, + "stream_namespace" varchar(2147483647), + "generation_id" bigint not null, + "start_job_id" bigint not null, + "created_at" timestamp(6) with time zone not null default current_timestamp, + "updated_at" timestamp(6) with time zone not null default current_timestamp, + constraint "stream_generation_pkey" primary key ("id") +); create table "public"."stream_refreshes" ( + "id" uuid not null, "connection_id" uuid not null, "stream_name" varchar(2147483647) not null, - "stream_namespace" varchar(2147483647) not null, + "stream_namespace" varchar(2147483647), "created_at" timestamp(6) with time zone not null default current_timestamp, - constraint "stream_refreshes_pkey" primary key ("connection_id", "stream_name", "stream_namespace") + constraint "stream_refreshes_pkey" primary key ("id") ); create table "public"."stream_reset" ( "id" uuid not null, @@ -449,7 +461,11 @@ create index "permission_workspace_id_idx" on "public"."permission"("workspace_i create index "connection_idx" on "public"."schema_management"("connection_id" asc); create index "sso_config_keycloak_realm_idx" on "public"."sso_config"("keycloak_realm" asc); create index "sso_config_organization_id_idx" on "public"."sso_config"("organization_id" asc); +create index "stream_generation_connection_id_stream_name_generation_id_idx" on "public"."stream_generation"("connection_id" asc, "stream_name" asc, "generation_id" desc); +create index "stream_generation_connection_id_stream_name_stream_namespac_idx" on "public"."stream_generation"("connection_id" asc, "stream_name" asc, "stream_namespace" asc, "generation_id" desc); create index "stream_refreshes_connection_id_idx" on "public"."stream_refreshes"("connection_id" asc); +create index "stream_refreshes_connection_id_stream_name_idx" on "public"."stream_refreshes"("connection_id" asc, "stream_name" asc); +create index "stream_refreshes_connection_id_stream_name_stream_namespace_idx" on "public"."stream_refreshes"("connection_id" asc, "stream_name" asc, "stream_namespace" asc); create index "connection_id_stream_name_namespace_idx" on "public"."stream_reset"("connection_id" asc, "stream_name" asc, "stream_namespace" 
asc); create index "user_auth_provider_auth_user_id_idx" on "public"."user"("auth_provider" asc, "auth_user_id" asc); create index "user_email_idx" on "public"."user"("email" asc); @@ -494,6 +510,7 @@ alter table "public"."permission" add constraint "permission_workspace_id_fkey" alter table "public"."schema_management" add constraint "schema_management_connection_id_fkey" foreign key ("connection_id") references "public"."connection" ("id"); alter table "public"."sso_config" add constraint "sso_config_organization_id_fkey" foreign key ("organization_id") references "public"."organization" ("id"); alter table "public"."state" add constraint "state_connection_id_fkey" foreign key ("connection_id") references "public"."connection" ("id"); +alter table "public"."stream_generation" add constraint "stream_generation_connection_id_fkey" foreign key ("connection_id") references "public"."connection" ("id"); alter table "public"."stream_refreshes" add constraint "stream_refreshes_connection_id_fkey" foreign key ("connection_id") references "public"."connection" ("id"); alter table "public"."user" add constraint "user_default_workspace_id_fkey" foreign key ("default_workspace_id") references "public"."workspace" ("id"); alter table "public"."user_invitation" add constraint "user_invitation_accepted_by_user_id_fkey" foreign key ("accepted_by_user_id") references "public"."user" ("id"); diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state_test.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state_test.java index 3bce25bf50f..a4e0079099f 100644 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state_test.java +++ b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state_test.java @@ -11,10 +11,7 @@ import static io.airbyte.db.instance.configs.migrations.V0_30_22_001__Store_last_sync_state.COLUMN_UPDATED_AT; import static io.airbyte.db.instance.configs.migrations.V0_30_22_001__Store_last_sync_state.TABLE_AIRBYTE_CONFIGS; import static io.airbyte.db.instance.configs.migrations.V0_30_22_001__Store_last_sync_state.getStandardSyncState; -import static org.jooq.impl.DSL.field; -import static org.jooq.impl.DSL.table; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -22,9 +19,6 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.ConfigSchema; import io.airbyte.config.Configs; -import io.airbyte.config.JobOutput; -import io.airbyte.config.JobOutput.OutputType; -import io.airbyte.config.StandardSyncOutput; import io.airbyte.config.StandardSyncState; import io.airbyte.config.State; import io.airbyte.db.Database; @@ -33,20 +27,13 @@ import io.airbyte.db.instance.jobs.JobsDatabaseTestProvider; import jakarta.annotation.Nullable; import java.io.IOException; -import java.sql.Connection; import java.sql.SQLException; import java.time.OffsetDateTime; import java.util.Collections; import java.util.Set; import java.util.UUID; import java.util.concurrent.TimeUnit; -import org.flywaydb.core.api.configuration.Configuration; -import org.flywaydb.core.api.migration.Context; import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.JSONB; 
-import org.jooq.Table; -import org.jooq.impl.SQLDataType; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.Order; @@ -58,27 +45,11 @@ @TestMethodOrder(MethodOrderer.OrderAnnotation.class) class V0_30_22_001__Store_last_sync_state_test extends AbstractConfigsDatabaseTest { - private static final OffsetDateTime TIMESTAMP = OffsetDateTime.now(); - - private static final Table JOBS_TABLE = table("jobs"); - private static final Field JOB_ID_FIELD = field("id", SQLDataType.BIGINT); - private static final Field JOB_SCOPE_FIELD = field("scope", SQLDataType.VARCHAR); - private static final Field JOB_CREATED_AT_FIELD = field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE); - - private static final Table ATTEMPTS_TABLE = table("attempts"); - private static final Field ATTEMPT_ID_FIELD = field("id", SQLDataType.BIGINT); - private static final Field ATTEMPT_JOB_ID_FIELD = field("job_id", SQLDataType.BIGINT); - private static final Field ATTEMPT_NUMBER_FIELD = field("attempt_number", SQLDataType.INTEGER); - private static final Field ATTEMPT_OUTPUT_FIELD = field("output", SQLDataType.JSONB); - private static final Field ATTEMPT_CREATED_AT_FIELD = field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE); - - private static final UUID CONNECTION_1_ID = UUID.randomUUID(); private static final UUID CONNECTION_2_ID = UUID.randomUUID(); private static final UUID CONNECTION_3_ID = UUID.randomUUID(); private static final State CONNECTION_2_STATE = Jsons.deserialize("{ \"state\": { \"cursor\": 2222 } }", State.class); private static final State CONNECTION_3_STATE = Jsons.deserialize("{ \"state\": { \"cursor\": 3333 } }", State.class); - private static final State CONNECTION_OLD_STATE = Jsons.deserialize("{ \"state\": { \"cursor\": -1 } }", State.class); private static final StandardSyncState STD_CONNECTION_STATE_2 = getStandardSyncState(CONNECTION_2_ID, CONNECTION_2_STATE); private static final StandardSyncState STD_CONNECTION_STATE_3 = getStandardSyncState(CONNECTION_3_ID, CONNECTION_3_STATE); @@ -108,43 +79,6 @@ void testGetJobsDatabase() { .getJobsDatabase(configs.getDatabaseUser(), configs.getDatabasePassword(), configs.getDatabaseUrl()).isPresent()); } - @Test - @Order(20) - void testGetStandardSyncStates() throws Exception { - jobDatabase.query(ctx -> { - // Connection 1 has 1 job, no attempt. - // This is to test that connection without no state is not returned. - createJob(ctx, CONNECTION_1_ID, 30); - - // Connection 2 has two jobs, each has one attempt. - // This is to test that only the state from the latest job is returned. - final long job21 = createJob(ctx, CONNECTION_2_ID, 10); - final long job22 = createJob(ctx, CONNECTION_2_ID, 20); - assertNotEquals(job21, job22); - createAttempt(ctx, job21, 1, createAttemptOutput(CONNECTION_OLD_STATE), 11); - createAttempt(ctx, job22, 1, createAttemptOutput(CONNECTION_2_STATE), 21); - - // Connection 3 has two jobs. - // The first job has multiple attempts. Its third attempt has the latest state. - // The second job has two attempts with no state. - // This is to test that only the state from the latest attempt is returned. 
- final long job31 = createJob(ctx, CONNECTION_3_ID, 5); - final long job32 = createJob(ctx, CONNECTION_3_ID, 15); - assertNotEquals(job31, job32); - createAttempt(ctx, job31, 1, createAttemptOutput(CONNECTION_OLD_STATE), 6); - createAttempt(ctx, job31, 2, null, 7); - createAttempt(ctx, job31, 3, createAttemptOutput(CONNECTION_3_STATE), 8); - createAttempt(ctx, job31, 4, null, 9); - createAttempt(ctx, job31, 5, null, 10); - createAttempt(ctx, job32, 1, null, 20); - createAttempt(ctx, job32, 2, null, 25); - - assertEquals(STD_CONNECTION_STATES, V0_30_22_001__Store_last_sync_state.getStandardSyncStates(jobDatabase)); - - return null; - }); - } - @Test @Order(30) void testCopyData() throws SQLException { @@ -175,100 +109,6 @@ void testCopyData() throws SQLException { }); } - /** - * Clear the table and test the migration end-to-end. - */ - @Test - @Order(40) - void testMigration() throws Exception { - jobDatabase.query(ctx -> ctx.deleteFrom(TABLE_AIRBYTE_CONFIGS) - .where(COLUMN_CONFIG_TYPE.eq(ConfigSchema.STANDARD_SYNC_STATE.name())) - .execute()); - - final var migration = new V0_30_22_001__Store_last_sync_state(); - // this context is a flyway class - final Context context = new Context() { - - @Override - public Configuration getConfiguration() { - final Configuration configuration = mock(Configuration.class); - when(configuration.getUser()).thenReturn(container.getUsername()); - when(configuration.getPassword()).thenReturn(container.getPassword()); - when(configuration.getUrl()).thenReturn(container.getJdbcUrl()); - return configuration; - } - - @Override - public Connection getConnection() { - try { - return dataSource.getConnection(); - } catch (final SQLException e) { - throw new RuntimeException(e); - } - } - - }; - migration.migrate(context); - jobDatabase.query(ctx -> { - checkSyncStates(ctx, STD_CONNECTION_STATES, null); - return null; - }); - } - - /** - * Create a job record whose scope equals to the passed in connection id, and return the job id. - * - * @param creationOffset Set the creation timestamp to {@code TIMESTAMP} + this passed in offset. - */ - private static long createJob(final DSLContext ctx, final UUID connectionId, final long creationOffset) { - final int insertCount = ctx.insertInto(JOBS_TABLE) - .set(JOB_SCOPE_FIELD, connectionId.toString()) - .set(JOB_CREATED_AT_FIELD, TIMESTAMP.plusDays(creationOffset)) - .execute(); - assertEquals(1, insertCount); - - return ctx.select(JOB_ID_FIELD) - .from(JOBS_TABLE) - .where(JOB_SCOPE_FIELD.eq(connectionId.toString())) - .orderBy(JOB_CREATED_AT_FIELD.desc()) - .limit(1) - .fetchOne() - .get(JOB_ID_FIELD); - } - - /* - * @param creationOffset Set the creation timestamp to {@code TIMESTAMP} + this passed in offset. - */ - private static void createAttempt(final DSLContext ctx, - final long jobId, - final int attemptNumber, - final JobOutput attemptOutput, - final long creationOffset) { - final int insertCount = ctx.insertInto(ATTEMPTS_TABLE) - .set(ATTEMPT_JOB_ID_FIELD, jobId) - .set(ATTEMPT_NUMBER_FIELD, attemptNumber) - .set(ATTEMPT_OUTPUT_FIELD, JSONB.valueOf(Jsons.serialize(attemptOutput))) - .set(ATTEMPT_CREATED_AT_FIELD, TIMESTAMP.plusDays(creationOffset)) - .execute(); - assertEquals(1, insertCount); - - ctx.select(ATTEMPT_ID_FIELD) - .from(ATTEMPTS_TABLE) - .where(ATTEMPT_JOB_ID_FIELD.eq(jobId), ATTEMPT_NUMBER_FIELD.eq(attemptNumber)) - .fetchOne() - .get(ATTEMPT_ID_FIELD); - } - - /** - * Create an JobOutput object whose output type is StandardSyncOutput. 
- *
- * @param state The state object within a StandardSyncOutput.
- */
-  private static JobOutput createAttemptOutput(final State state) {
-    final StandardSyncOutput standardSyncOutput = new StandardSyncOutput().withState(state);
-    return new JobOutput().withOutputType(OutputType.SYNC).withSync(standardSyncOutput);
-  }
-
   private static void checkSyncStates(final DSLContext ctx,
                                       final Set<StandardSyncState> standardSyncStates,
                                       @Nullable final OffsetDateTime expectedTimestamp) {
diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_55_1_002__AddGenerationTableTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_55_1_002__AddGenerationTableTest.java
new file mode 100644
index 00000000000..fb1f4608fa2
--- /dev/null
+++ b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_55_1_002__AddGenerationTableTest.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.db.instance.configs.migrations;
+
+import static io.airbyte.db.instance.configs.migrations.V0_55_1_002__AddGenerationTable.STREAM_GENERATION_TABLE_NAME;
+import static org.jooq.impl.DSL.field;
+import static org.jooq.impl.DSL.table;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import io.airbyte.db.factory.FlywayFactory;
+import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest;
+import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator;
+import io.airbyte.db.instance.development.DevDatabaseMigrator;
+import java.util.Set;
+import java.util.stream.Collectors;
+import org.flywaydb.core.Flyway;
+import org.flywaydb.core.api.migration.BaseJavaMigration;
+import org.jooq.DSLContext;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+public class V0_55_1_002__AddGenerationTableTest extends AbstractConfigsDatabaseTest {
+
+  @BeforeEach
+  void beforeEach() {
+    final Flyway flyway =
+        FlywayFactory.create(dataSource, "V0_55_1_001__AddRefreshesTable", ConfigsDatabaseMigrator.DB_IDENTIFIER,
+            ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION);
+    final ConfigsDatabaseMigrator configsDbMigrator = new ConfigsDatabaseMigrator(database, flyway);
+
+    final BaseJavaMigration previousMigration = new V0_55_1_001__AddRefreshesTable();
+    final DevDatabaseMigrator devConfigsDbMigrator = new DevDatabaseMigrator(configsDbMigrator, previousMigration.getVersion());
+    devConfigsDbMigrator.createBaseline();
+  }
+
+  @Test
+  public void test() {
+    final DSLContext dslContext = getDslContext();
+    final boolean tableExists = generationTableExists(dslContext);
+
+    assertFalse(tableExists);
+
+    V0_55_1_002__AddGenerationTable.createGenerationTable(dslContext);
+
+    final boolean tableExistsPostMigration = generationTableExists(dslContext);
+
+    assertTrue(tableExistsPostMigration);
+
+    final Set<String> index = dslContext.select()
+        .from(table("pg_indexes"))
+        .where(field("tablename").eq(STREAM_GENERATION_TABLE_NAME))
+        .fetch()
+        .stream()
+        .map(c -> c.getValue("indexdef", String.class))
+        .collect(Collectors.toSet());
+    assertEquals(3, index.size());
+    assertTrue(index.contains("CREATE UNIQUE INDEX stream_generation_pkey ON public.stream_generation USING btree (id)"));
+    assertTrue(index.contains(
+        "CREATE INDEX stream_generation_connection_id_stream_name_generation_id_idx "
+            + "ON public.stream_generation USING btree (connection_id, stream_name, generation_id DESC)"));
+    assertTrue(index.contains(
+        "CREATE INDEX stream_generation_connection_id_stream_name_stream_namespac_idx ON public.stream_generation "
+            + "USING btree (connection_id, stream_name, stream_namespace, generation_id DESC)"));
+  }
+
+  private static boolean generationTableExists(final DSLContext dslContext) {
+    final int size = dslContext.select()
+        .from(table("pg_tables"))
+        .where(field("tablename").eq(STREAM_GENERATION_TABLE_NAME))
+        .fetch()
+        .size();
+    return size > 0;
+  }
+
+}
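Both migration tests share the same recipe: baseline Flyway at the migration immediately prior, call the new migration's static helper directly, then assert on `pg_indexes`. The pg_catalog query condensed into Kotlin (same jOOQ calls as the Java above; illustrative only):

```kotlin
import org.jooq.DSLContext
import org.jooq.impl.DSL.field
import org.jooq.impl.DSL.table

// Collect the definition of every index on a table, as reported by Postgres.
fun indexDefinitions(ctx: DSLContext, tableName: String): Set<String> =
  ctx.select()
    .from(table("pg_indexes"))
    .where(field("tablename").eq(tableName))
    .fetch()
    .map { record -> record.get("indexdef", String::class.java) }
    .toSet()
```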
diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_55_1_003__EditRefreshTableTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_55_1_003__EditRefreshTableTest.java
new file mode 100644
index 00000000000..e195a13501a
--- /dev/null
+++ b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_55_1_003__EditRefreshTableTest.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.db.instance.configs.migrations;
+
+import static io.airbyte.db.instance.configs.migrations.V0_55_1_003__EditRefreshTable.STREAM_REFRESHES_TABLE;
+import static org.jooq.impl.DSL.field;
+import static org.jooq.impl.DSL.table;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import io.airbyte.db.factory.FlywayFactory;
+import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest;
+import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator;
+import io.airbyte.db.instance.development.DevDatabaseMigrator;
+import java.util.Set;
+import java.util.stream.Collectors;
+import org.flywaydb.core.Flyway;
+import org.flywaydb.core.api.migration.BaseJavaMigration;
+import org.jooq.DSLContext;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+public class V0_55_1_003__EditRefreshTableTest extends AbstractConfigsDatabaseTest {
+
+  @BeforeEach
+  void beforeEach() {
+    final Flyway flyway =
+        FlywayFactory.create(dataSource, "V0_55_1_002__AddGenerationTable", ConfigsDatabaseMigrator.DB_IDENTIFIER,
+            ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION);
+    final ConfigsDatabaseMigrator configsDbMigrator = new ConfigsDatabaseMigrator(database, flyway);
+
+    final BaseJavaMigration previousMigration = new V0_55_1_002__AddGenerationTable();
+    final DevDatabaseMigrator devConfigsDbMigrator = new DevDatabaseMigrator(configsDbMigrator, previousMigration.getVersion());
+    devConfigsDbMigrator.createBaseline();
+  }
+
+  @Test
+  public void test() {
+    final DSLContext dslContext = getDslContext();
+    V0_55_1_003__EditRefreshTable.editRefreshTable(dslContext);
+    final Set<String> index = dslContext.select()
+        .from(table("pg_indexes"))
+        .where(field("tablename").eq(STREAM_REFRESHES_TABLE))
+        .fetch()
+        .stream()
+        .map(c -> c.getValue("indexdef", String.class))
+        .collect(Collectors.toSet());
+    assertEquals(4, index.size());
+    assertTrue(index.contains(
+        "CREATE UNIQUE INDEX stream_refreshes_pkey ON public.stream_refreshes USING btree (id)"));
+    assertTrue(index.contains(
+        "CREATE INDEX stream_refreshes_connection_id_idx ON public.stream_refreshes USING btree (connection_id)"));
+    assertTrue(index.contains(
+        "CREATE INDEX stream_refreshes_connection_id_stream_name_idx ON public.stream_refreshes "
+            + "USING btree (connection_id, stream_name)"));
+    assertTrue(index.contains(
+        "CREATE INDEX stream_refreshes_connection_id_stream_name_stream_namespace_idx ON public.stream_refreshes"
+            + " USING btree (connection_id, stream_name, stream_namespace)"));
+  }
+
+}
diff --git a/airbyte-db/jooq/build.gradle.kts
b/airbyte-db/jooq/build.gradle.kts index 8de680c5d64..00684268d53 100644 --- a/airbyte-db/jooq/build.gradle.kts +++ b/airbyte-db/jooq/build.gradle.kts @@ -1,114 +1,114 @@ import nu.studer.gradle.jooq.JooqGenerate plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - alias(libs.plugins.nu.studer.jooq) + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + alias(libs.plugins.nu.studer.jooq) } configurations.all { - resolutionStrategy { - force(libs.platform.testcontainers.postgresql) - } + resolutionStrategy { + force(libs.platform.testcontainers.postgresql) + } } dependencies { - implementation(libs.jooq.meta) - implementation(libs.jooq) - implementation(libs.postgresql) - implementation(libs.bundles.flyway) - implementation(project(":airbyte-db:db-lib")) + implementation(libs.jooq.meta) + implementation(libs.jooq) + implementation(libs.postgresql) + implementation(libs.bundles.flyway) + implementation(project(":airbyte-db:db-lib")) - // jOOQ code generation) - implementation(libs.jooq.codegen) - implementation(libs.platform.testcontainers.postgresql) + // jOOQ code generation) + implementation(libs.jooq.codegen) + implementation(libs.platform.testcontainers.postgresql) - // These are required because gradle might be using lower version of Jna from other - // library transitive dependency. Can be removed if we can figure out which library is the cause. - // Refer: https://github.com/testcontainers/testcontainers-java/issues/3834#issuecomment-825409079 - implementation(libs.jna) - implementation(libs.jna.platform) + // These are required because gradle might be using lower version of Jna from other + // library transitive dependency. Can be removed if we can figure out which library is the cause. + // Refer: https://github.com/testcontainers/testcontainers-java/issues/3834#issuecomment-825409079 + implementation(libs.jna) + implementation(libs.jna.platform) - // The jOOQ code generator(only has access to classes added to the jooqGenerator configuration - jooqGenerator(project(":airbyte-db:db-lib")) { - isTransitive = false - } - jooqGenerator(project(":airbyte-commons")) { - isTransitive = false - } - jooqGenerator(project(":airbyte-config:config-models")) { - isTransitive = false - } - jooqGenerator(libs.bundles.flyway) - jooqGenerator(libs.guava) - jooqGenerator(libs.hikaricp) - jooqGenerator(libs.jackson.datatype) - jooqGenerator(libs.postgresql) - jooqGenerator(libs.slf4j.simple) - jooqGenerator(libs.platform.testcontainers.postgresql) + // The jOOQ code generator(only has access to classes added to the jooqGenerator configuration + jooqGenerator(project(":airbyte-db:db-lib")) { + isTransitive = false + } + jooqGenerator(project(":airbyte-commons")) { + isTransitive = false + } + jooqGenerator(project(":airbyte-config:config-models")) { + isTransitive = false + } + jooqGenerator(libs.bundles.flyway) + jooqGenerator(libs.guava) + jooqGenerator(libs.hikaricp) + jooqGenerator(libs.jackson.datatype) + jooqGenerator(libs.postgresql) + jooqGenerator(libs.slf4j.simple) + jooqGenerator(libs.platform.testcontainers.postgresql) } jooq { - version = libs.versions.jooq - edition = nu.studer.gradle.jooq.JooqEdition.OSS + version = libs.versions.jooq + edition = nu.studer.gradle.jooq.JooqEdition.OSS - configurations { - create("configsDatabase") { - generateSchemaSourceOnCompilation = true - jooqConfiguration.apply { - generator.apply { - name = "org.jooq.codegen.DefaultGenerator" - database.apply { - name = 
"io.airbyte.db.instance.configs.ConfigsFlywayMigrationDatabase" - inputSchema = "public" - excludes = "airbyte_configs_migrations" - } - target.apply { - packageName = "io.airbyte.db.instance.configs.jooq.generated" - directory = "build/generated/configsDatabase/src/main/java" - } - } - } + configurations { + create("configsDatabase") { + generateSchemaSourceOnCompilation = true + jooqConfiguration.apply { + generator.apply { + name = "org.jooq.codegen.DefaultGenerator" + database.apply { + name = "io.airbyte.db.instance.configs.ConfigsFlywayMigrationDatabase" + inputSchema = "public" + excludes = "airbyte_configs_migrations" + } + target.apply { + packageName = "io.airbyte.db.instance.configs.jooq.generated" + directory = "build/generated/configsDatabase/src/main/java" + } } + } + } - create("jobsDatabase") { - generateSchemaSourceOnCompilation = true - jooqConfiguration.apply { - generator.apply { - name = "org.jooq.codegen.DefaultGenerator" - database.apply { - name = "io.airbyte.db.instance.jobs.JobsFlywayMigrationDatabase" - inputSchema = "public" - excludes = "airbyte_jobs_migrations" - } - target.apply { - packageName = "io.airbyte.db.instance.jobs.jooq.generated" - directory = "build/generated/jobsDatabase/src/main/java" - } - } - } + create("jobsDatabase") { + generateSchemaSourceOnCompilation = true + jooqConfiguration.apply { + generator.apply { + name = "org.jooq.codegen.DefaultGenerator" + database.apply { + name = "io.airbyte.db.instance.jobs.JobsFlywayMigrationDatabase" + inputSchema = "public" + excludes = "airbyte_jobs_migrations" + } + target.apply { + packageName = "io.airbyte.db.instance.jobs.jooq.generated" + directory = "build/generated/jobsDatabase/src/main/java" + } } + } } + } } sourceSets["main"].java { - srcDirs( - tasks.named("generateConfigsDatabaseJooq").flatMap { it.outputDir }, - tasks.named("generateJobsDatabaseJooq").flatMap { it.outputDir }, - ) + srcDirs( + tasks.named("generateConfigsDatabaseJooq").flatMap { it.outputDir }, + tasks.named("generateJobsDatabaseJooq").flatMap { it.outputDir }, + ) } sourceSets["main"].java { - srcDirs("$buildDir/generated/configsDatabase/src/main/java", "$buildDir/generated/jobsDatabase/src/main/java") + srcDirs("$buildDir/generated/configsDatabase/src/main/java", "$buildDir/generated/jobsDatabase/src/main/java") } tasks.named("generateConfigsDatabaseJooq") { - allInputsDeclared = true - outputs.cacheIf { true } + allInputsDeclared = true + outputs.cacheIf { true } } tasks.named("generateJobsDatabaseJooq") { - allInputsDeclared = true - outputs.cacheIf { true } + allInputsDeclared = true + outputs.cacheIf { true } } diff --git a/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt b/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt index 53d09c91364..57065609302 100644 --- a/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt +++ b/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt @@ -146,8 +146,6 @@ object FieldSelectionWorkspaces : EnvVar(envVar = "FIELD_SELECTION_WORKSPACES") object RunSocatInConnectorContainer : Temporary(key = "platform.run-socat-in-connector-container", default = false) -object FailSyncIfTooBig : Temporary(key = "platform.fail-sync-if-too-big", default = false) - object DefaultOrgForNewWorkspace : Temporary(key = "platform.set-default-org-for-new-workspace", default = false) object WorkloadHeartbeatRate : Permanent(key = "workload.heartbeat.rate", default = 5) @@ -172,6 +170,8 @@ object UseWorkloadApi : Temporary(key = "platform.use-workload-api", de object EmitStateStatsToSegment : 
Temporary(key = "platform.emit-state-stats-segment", default = true) +object LogsForStripeChecksumDebugging : Temporary(key = "platform.logs-for-stripe-checksum-debug", default = false) + object AddInitialCreditsForWorkspace : Temporary(key = "add-credits-at-workspace-creation-for-org", default = 0) object WorkloadApiRouting : Permanent(key = "workload-api-routing", default = "workload_default") @@ -194,6 +194,12 @@ object UseWorkloadApiForDiscover : Temporary(key = "platform.use-worklo object UseWorkloadApiForSpec : Temporary(key = "platform.use-workload-api-for-spec", default = false) -object EnforceMutexKeyOnCreate : Temporary(key = "platform.enforce-mutex-key-on-create", default = false) - object ActivateRefreshes : Temporary(key = "platform.activate-refreshes", default = false) + +object WriteOutputCatalogToObjectStorage : Temporary(key = "platform.write-output-catalog-to-object-storage", default = false) + +object NullOutputCatalogOnSyncOutput : Temporary(key = "platform.null-output-catalog-on-sync-output", default = false) + +object UseCustomK8sInitCheck : Temporary(key = "platform.use-custom-k8s-init-check", default = true) + +object UseClear : Temporary(key = "connection.clearNotReset", default = false) diff --git a/airbyte-json-validation/build.gradle.kts b/airbyte-json-validation/build.gradle.kts index fed6824bd5a..9dc6ad460ec 100644 --- a/airbyte-json-validation/build.gradle.kts +++ b/airbyte-json-validation/build.gradle.kts @@ -1,18 +1,18 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - implementation(project(":airbyte-commons")) - implementation(libs.guava) - implementation("com.networknt:json-schema-validator:1.0.72") - // needed so that we can follow $ref when parsing json. jackson does not support this natively. - implementation("me.andrz.jackson:jackson-json-reference-core:0.3.2") + implementation(project(":airbyte-commons")) + implementation(libs.guava) + implementation("com.networknt:json-schema-validator:1.0.72") + // needed so that we can follow $ref when parsing json. jackson does not support this natively. 
+ implementation("me.andrz.jackson:jackson-json-reference-core:0.3.2") - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(libs.junit.pioneer) } diff --git a/airbyte-keycloak-setup/Dockerfile b/airbyte-keycloak-setup/Dockerfile index ab40d89e5de..b02ae3769b1 100644 --- a/airbyte-keycloak-setup/Dockerfile +++ b/airbyte-keycloak-setup/Dockerfile @@ -1,11 +1,12 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.1.0 -FROM ${JDK_IMAGE} AS keycloak-setup +ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.2.1 +FROM scratch as builder WORKDIR /app - -USER root ADD airbyte-app.tar /app -RUN chown -R airbyte:airbyte /app + +FROM ${JDK_IMAGE} AS keycloak-setup +WORKDIR /app +COPY --chown=airbyte:airbyte --from=builder /app /app USER airbyte:airbyte ENTRYPOINT ["/bin/bash", "-c", "airbyte-app/bin/airbyte-keycloak-setup"] diff --git a/airbyte-keycloak-setup/build.gradle.kts b/airbyte-keycloak-setup/build.gradle.kts index 8a904297ee8..e364ab896ee 100644 --- a/airbyte-keycloak-setup/build.gradle.kts +++ b/airbyte-keycloak-setup/build.gradle.kts @@ -1,48 +1,48 @@ import java.util.Properties plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) - - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.bundles.keycloak.client) - - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-auth")) - implementation(project(":airbyte-commons-micronaut")) - implementation(project(":airbyte-commons-micronaut-security")) - implementation(project(":airbyte-data")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-db:jooq")) - - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.bundles.junit) - testImplementation(libs.junit.jupiter.system.stubs) - - testImplementation(project(":airbyte-test-utils")) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) + + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.keycloak.client) + + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-auth")) + implementation(project(":airbyte-commons-micronaut")) + implementation(project(":airbyte-commons-micronaut-security")) + implementation(project(":airbyte-data")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-db:jooq")) + + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + + 
testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.bundles.junit) + testImplementation(libs.junit.jupiter.system.stubs) + + testImplementation(project(":airbyte-test-utils")) } val env = Properties().apply { - load(rootProject.file(".env.dev").inputStream()) + load(rootProject.file(".env.dev").inputStream()) } airbyte { - application { - mainClass = "io.airbyte.keycloak.setup.Application" - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - } - docker { - imageName = "keycloak-setup" - } + application { + mainClass = "io.airbyte.keycloak.setup.Application" + defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + } + docker { + imageName = "keycloak-setup" + } } diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ConfigurationMapService.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ConfigurationMapService.java index 8798ee137f5..7dc1c916bbb 100644 --- a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ConfigurationMapService.java +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ConfigurationMapService.java @@ -19,6 +19,8 @@ @Singleton public class ConfigurationMapService { + public static final String HTTPS_PREFIX = "https://"; + public static final String WELL_KNOWN_OPENID_CONFIGURATION_SUFFIX = ".well-known/openid-configuration"; private final String webappUrl; private final AirbyteKeycloakConfiguration keycloakConfiguration; @@ -59,8 +61,15 @@ private String getProviderRedirectUrl(final IdentityProviderConfiguration provid } private String getProviderDiscoveryUrl(final IdentityProviderConfiguration provider) { - final String domainWithTrailingSlash = provider.getDomain().endsWith("/") ? provider.getDomain() : provider.getDomain() + "/"; - return "https://" + domainWithTrailingSlash + ".well-known/openid-configuration"; + String domain = provider.getDomain(); + if (!domain.startsWith(HTTPS_PREFIX)) { + domain = HTTPS_PREFIX + domain; + } + if (!domain.endsWith(WELL_KNOWN_OPENID_CONFIGURATION_SUFFIX)) { + domain = domain.endsWith("/") ? 
domain : domain + "/"; + domain = domain + WELL_KNOWN_OPENID_CONFIGURATION_SUFFIX; + } + return domain; } } diff --git a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/ConfigurationMapServiceTest.java b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/ConfigurationMapServiceTest.java index f34348e0fe7..8088217e52b 100644 --- a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/ConfigurationMapServiceTest.java +++ b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/ConfigurationMapServiceTest.java @@ -14,6 +14,8 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.keycloak.admin.client.resource.IdentityProvidersResource; import org.keycloak.admin.client.resource.RealmResource; import org.mockito.InjectMocks; @@ -40,9 +42,17 @@ public void setUp() { configurationMapService = new ConfigurationMapService(WEBAPP_URL, keycloakConfiguration); } - @Test - void testImportProviderFrom() { - when(identityProviderConfiguration.getDomain()).thenReturn("trial-577.okta.com"); + @ParameterizedTest + @ValueSource(strings = { + "trial-577.okta.com", + "https://trial-577.okta.com", + "trial-577.okta.com/.well-known/openid-configuration", + "https://trial-577.okta.com/.well-known/openid-configuration", + "trial-577.okta.com/", + "https://trial-577.okta.com/", + }) + void testImportProviderFrom(String url) { + when(identityProviderConfiguration.getDomain()).thenReturn(url); when(realmResource.identityProviders()).thenReturn(identityProvidersResource); Map importFromMap = new HashMap<>(); diff --git a/airbyte-keycloak/Dockerfile b/airbyte-keycloak/Dockerfile index 958d3043b78..7f9158f8a6b 100644 --- a/airbyte-keycloak/Dockerfile +++ b/airbyte-keycloak/Dockerfile @@ -11,4 +11,21 @@ WORKDIR /opt/keycloak COPY bin/scripts/entrypoint.sh entrypoint.sh COPY bin/themes themes +# Doing this instead of creating a separate file and copying it to ensure that we get any keycloak updates to this conf file. +RUN cp conf/cache-ispn.xml conf/cache-ispn-override.xml && \ +sed -i conf/cache-ispn-override.xml -e 's///g' && \ +sed -i conf/cache-ispn-override.xml -e 's///g' && \ +# Make sure that the two lines we wanted to be there are actually there +# i.e. keycloak didn't change its config file +grep '' conf/cache-ispn-override.xml -q && \ +grep '' conf/cache-ispn-override.xml -q && \ +# Create the directory for the infinispan global-state persistence +mkdir -p /opt/keycloak/data/infinispan && \ +# Inserting the block after the start tag +sed -i '/<\/global-state>' conf/cache-ispn-override.xml && \ +# Make sure that the block is actually there +# i.e. 
keycloak didn't change its config file +grep '' conf/cache-ispn-override.xml -q + + ENTRYPOINT ["./entrypoint.sh"] diff --git a/airbyte-keycloak/build.gradle.kts b/airbyte-keycloak/build.gradle.kts index 1c44ffcc4f2..001ef13adff 100644 --- a/airbyte-keycloak/build.gradle.kts +++ b/airbyte-keycloak/build.gradle.kts @@ -1,24 +1,24 @@ plugins { - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") } airbyte { - docker { - imageName = "keycloak" - } + docker { + imageName = "keycloak" + } } val copyTheme = tasks.register("copyTheme") { - from("themes") - into("build/airbyte/docker/bin/themes") + from("themes") + into("build/airbyte/docker/bin/themes") } val copyScripts = tasks.register("copyScripts") { - from("scripts") - into("build/airbyte/docker/bin/scripts") + from("scripts") + into("build/airbyte/docker/bin/scripts") } tasks.named("dockerBuildImage") { - dependsOn(copyScripts, copyTheme) + dependsOn(copyScripts, copyTheme) } diff --git a/airbyte-keycloak/scripts/entrypoint.sh b/airbyte-keycloak/scripts/entrypoint.sh index de58afa63d7..651b72f05b9 100755 --- a/airbyte-keycloak/scripts/entrypoint.sh +++ b/airbyte-keycloak/scripts/entrypoint.sh @@ -10,7 +10,8 @@ export KC_HOSTNAME_URL=$KEYCLOAK_HOSTNAME_URL export KC_HTTP_PORT=$KEYCLOAK_PORT export KC_HOSTNAME_ADMIN_URL=$KEYCLOAK_HOSTNAME_ADMIN_URL -bin/kc.sh build --cache=ispn --cache-stack=kubernetes --health-enabled=true --http-relative-path /auth +# cache-config is relative to conf directory +bin/kc.sh build --cache=ispn --cache-stack=kubernetes --health-enabled=true --http-relative-path /auth --cache-config-file=cache-ispn-override.xml bin/kc.sh start --optimized --proxy edge --hostname-strict false diff --git a/airbyte-metrics/readme.md b/airbyte-metrics/README.md similarity index 100% rename from airbyte-metrics/readme.md rename to airbyte-metrics/README.md diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceUtils.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceUtils.java index 25d1b898feb..49f1aab078b 100644 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceUtils.java +++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceUtils.java @@ -19,8 +19,10 @@ import java.io.StringWriter; import java.nio.file.Path; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.UUID; +import java.util.stream.Collectors; /** * Collection of utility methods to help with performance tracing. @@ -37,6 +39,20 @@ public class ApmTraceUtils { */ public static final String TAG_PREFIX = "metadata"; + /** + * Converts the provided metric attributes to tags and adds them to the currently active span, if + * one exists.
+   * All tags added via this method will use the default {@link #TAG_PREFIX} namespace.
+   *
+   * @param attrs A list of attributes to be converted to tags and added to the currently active span.
+   */
+  public static void addTagsToTrace(final List<MetricAttribute> attrs) {
+    final Map<String, String> tags = attrs.stream()
+        .collect(Collectors.toMap(MetricAttribute::key, MetricAttribute::value));
+
+    addTagsToTrace(tags, TAG_PREFIX);
+  }
+
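A short usage sketch for the new overload; `connection_id` is a stand-in literal here, while `MetricTags.STATUS` is the real constant visible in the MetricTags hunk below:

```java
import io.airbyte.metrics.lib.ApmTraceUtils;
import io.airbyte.metrics.lib.MetricAttribute;
import io.airbyte.metrics.lib.MetricTags;
import java.util.List;
import java.util.UUID;

class TraceTaggingSketch {

  static void tagFailure(final UUID connectionId) {
    // Reuse the attributes already emitted to the metrics client so traces
    // and metrics stay correlated; each lands on the span as "metadata.<key>".
    ApmTraceUtils.addTagsToTrace(List.of(
        new MetricAttribute("connection_id", connectionId.toString()), // key literal assumed
        new MetricAttribute(MetricTags.STATUS, "failed")));
  }

}
```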
  /**
   * Adds all the provided tags to the currently active span, if one exists.
   * All tags added via this method will use the default {@link #TAG_PREFIX} namespace.
diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java
index f4a0bfa90d8..31f88599ac7 100644
--- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java
+++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java
@@ -46,7 +46,7 @@ public class MetricTags {
   public static final String NOTIFICATION_CLIENT = "notification_client";
   public static final String RECORD_COUNT_TYPE = "record_count_type";
   public static final String RELEASE_STAGE = "release_stage";
-  public static final String RESET_WORKFLOW_FAILURE_CAUSE = "failure_cause";
+  public static final String FAILURE_CAUSE = "failure_cause";
   public static final String SOURCE_ID = "source_id";
   public static final String SOURCE_IMAGE = "source_image";
   public static final String STATUS = "status";
@@ -56,6 +56,14 @@ public class MetricTags {
   public static final String USER_TYPE = "user_type"; // real user, service account, data plane user, etc
   public static final String WILL_RETRY = "will_retry";

+  // payload metric tags
+  public static final String URI_NULL = "uri_null";
+  public static final String URI_ID = "uri_id";
+  public static final String URI_VERSION = "uri_version";
+  public static final String PAYLOAD_NAME = "payload_name";
+  public static final String IS_MATCH = "is_match";
+  public static final String IS_MISS = "is_miss";
+
   public static String getReleaseStage(final ReleaseStage stage) {
     return stage != null ? stage.value() : UNKNOWN;
   }
diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java
index 35f4bdf5d33..de9f9f0c58b 100644
--- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java
+++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java
@@ -150,10 +150,6 @@
   MetricEmittingApps.WORKER,
   "json_string_length",
   "string length of a raw json string"),
-  RECORD_SIZE_ERROR(
-      MetricEmittingApps.WORKER,
-      "record_size_error",
-      "length of a raw record json string exceeding the limit"),
   KUBE_POD_PROCESS_CREATE_TIME_MILLISECS(
   MetricEmittingApps.WORKER,
   "kube_pod_process_create_time_millisecs",
@@ -442,7 +438,19 @@ public enum OssMetricsRegistry implements MetricsRegistry {
   PAYLOAD_SIZE_EXCEEDED(MetricEmittingApps.WORKER,
   "payload_size_exceeded",
-  "Detected payload size was over 4mb Temporal limit");
+  "Detected payload size was over 4mb Temporal limit"),
+
+  PAYLOAD_FAILURE_WRITE(MetricEmittingApps.WORKER,
+  "payload_failure_write",
+  "Failure writing the activity payload to storage."),
+
+  PAYLOAD_FAILURE_READ(MetricEmittingApps.WORKER,
+  "payload_failure_read",
+  "Failure reading the activity payload from storage."),
+
+  PAYLOAD_VALIDATION_RESULT(MetricEmittingApps.WORKER,
+  "payload_validation_result",
+  "The result of comparing the payload in object storage to the one passed from temporal.");

   private final MetricEmittingApp application;
   private final String metricName;
diff --git a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/ApmTraceUtilsTest.java b/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/ApmTraceUtilsTest.java
index 77f453f68aa..372d7335526 100644
---
a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/ApmTraceUtilsTest.java +++ b/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/ApmTraceUtilsTest.java @@ -29,6 +29,7 @@ import java.io.PrintWriter; import java.io.StringWriter; import java.nio.file.Path; +import java.util.List; import java.util.Map; import java.util.UUID; import org.junit.After; @@ -65,6 +66,21 @@ void testAddingTags() { verify(span, times(1)).setTag(String.format(TAG_FORMAT, TAG_PREFIX, TAG_2), VALUE_2); } + @Test + void convertsAndAddsAttributes() { + final Span span = mock(Span.class); + final Tracer tracer = mock(Tracer.class); + when(tracer.activeSpan()).thenReturn(span); + + GlobalTracerTestUtil.setGlobalTracerUnconditionally(tracer); + + final var attrs = List.of(new MetricAttribute(TAG_1, VALUE_1), new MetricAttribute(TAG_2, VALUE_2)); + ApmTraceUtils.addTagsToTrace(attrs); + + verify(span, times(1)).setTag(String.format(TAG_FORMAT, TAG_PREFIX, TAG_1), VALUE_1); + verify(span, times(1)).setTag(String.format(TAG_FORMAT, TAG_PREFIX, TAG_2), VALUE_2); + } + @Test void testAddingTagsWithPrefix() { final Span span = mock(Span.class); diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index 2526d773b98..bc9696a01a5 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -1,11 +1,12 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.1.0 -FROM ${JDK_IMAGE} +ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.2.1 +FROM scratch as builder WORKDIR /app - -USER root ADD airbyte-app.tar /app -RUN chown -R airbyte:airbyte /app + +FROM ${JDK_IMAGE} +WORKDIR /app +COPY --chown=airbyte:airbyte --from=builder /app /app USER airbyte:airbyte ENTRYPOINT ["/bin/bash", "-c", "airbyte-app/bin/airbyte-metrics-reporter"] diff --git a/airbyte-metrics/reporter/build.gradle.kts b/airbyte-metrics/reporter/build.gradle.kts index ae093138cba..3709f9b5d8c 100644 --- a/airbyte-metrics/reporter/build.gradle.kts +++ b/airbyte-metrics/reporter/build.gradle.kts @@ -1,55 +1,55 @@ plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") } configurations { - create("jdbc") + create("jdbc") } configurations.all { - resolutionStrategy { - force (libs.jooq) - } + resolutionStrategy { + force(libs.jooq) + } } dependencies { - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-db:jooq")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(libs.jooq) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-db:jooq")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(libs.jooq) - runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.snakeyaml) - testAnnotationProcessor(platform(libs.micronaut.platform)) - 
testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.postgresql) - testImplementation(libs.platform.testcontainers.postgresql) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.postgresql) + testImplementation(libs.platform.testcontainers.postgresql) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(libs.junit.pioneer) } airbyte { - application { - name = "airbyte-metrics-reporter" - mainClass = "io.airbyte.metrics.reporter.Application" - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - } - docker { - imageName = "metrics-reporter" - } + application { + name = "airbyte-metrics-reporter" + mainClass = "io.airbyte.metrics.reporter.Application" + defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + } + docker { + imageName = "metrics-reporter" + } } diff --git a/airbyte-micronaut-temporal/build.gradle.kts b/airbyte-micronaut-temporal/build.gradle.kts index bc8bb26c7dd..bc33187eb8d 100644 --- a/airbyte-micronaut-temporal/build.gradle.kts +++ b/airbyte-micronaut-temporal/build.gradle.kts @@ -1,25 +1,25 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(libs.bundles.micronaut) - implementation(libs.byte.buddy) - implementation(libs.guava) - implementation(libs.spring.core) - implementation(libs.temporal.sdk) { - exclude( module = "guava") - } + implementation(libs.bundles.micronaut) + implementation(libs.byte.buddy) + implementation(libs.guava) + implementation(libs.spring.core) + implementation(libs.temporal.sdk) { + exclude(module = "guava") + } - implementation(project(":airbyte-commons-temporal-core")) + implementation(project(":airbyte-commons-temporal-core")) - testImplementation(libs.assertj.core) - testImplementation(libs.bundles.junit) - testImplementation(libs.junit.pioneer) - testImplementation(libs.mockito.inline) - testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.assertj.core) + testImplementation(libs.bundles.junit) + testImplementation(libs.junit.pioneer) + testImplementation(libs.mockito.inline) + testRuntimeOnly(libs.junit.jupiter.engine) } diff --git a/airbyte-notification/readme.md b/airbyte-notification/README.md similarity index 100% rename from airbyte-notification/readme.md rename to airbyte-notification/README.md diff --git a/airbyte-notification/build.gradle.kts b/airbyte-notification/build.gradle.kts index a37faed343f..92e8713b160 100644 --- a/airbyte-notification/build.gradle.kts +++ b/airbyte-notification/build.gradle.kts @@ -1,39 +1,39 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - 
id("io.airbyte.gradle.publish") - id("org.jetbrains.kotlin.jvm") - id("org.jetbrains.kotlin.kapt") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + id("org.jetbrains.kotlin.jvm") + id("org.jetbrains.kotlin.kapt") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - kapt(libs.bundles.micronaut.annotation.processor) + kapt(libs.bundles.micronaut.annotation.processor) - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(libs.okhttp) - implementation("org.apache.httpcomponents:httpclient:4.5.13") - implementation("org.commonmark:commonmark:0.21.0") + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(libs.okhttp) + implementation("org.apache.httpcomponents:httpclient:4.5.13") + implementation("org.commonmark:commonmark:0.21.0") - implementation(libs.guava) - implementation(libs.bundles.apache) - implementation(libs.commons.io) - implementation(platform(libs.fasterxml)) - implementation(libs.bundles.jackson) - // TODO remove this, it"s used for String.isEmpty check) - implementation(libs.bundles.log4j) + implementation(libs.guava) + implementation(libs.bundles.apache) + implementation(libs.commons.io) + implementation(platform(libs.fasterxml)) + implementation(libs.bundles.jackson) + // TODO remove this, it"s used for String.isEmpty check) + implementation(libs.bundles.log4j) - testImplementation(libs.mockk) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) + testImplementation(libs.mockk) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) - testImplementation(libs.mockito.inline) - testImplementation(libs.mockwebserver) + testImplementation(libs.junit.pioneer) + testImplementation(libs.mockito.inline) + testImplementation(libs.mockwebserver) } diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java b/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java index c1c082bdc15..70b6a8ac38a 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java @@ -28,6 +28,7 @@ import java.net.http.HttpResponse; import java.util.Comparator; import java.util.List; +import java.util.Optional; import org.apache.logging.log4j.util.Strings; import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; @@ -66,7 +67,7 @@ public boolean notifyJobFailure(final SyncSummary summary, summary.getErrorMessage(), summary.getConnection().getUrl(), String.valueOf(summary.getJobId())); - return notifyJson(buildJobCompletedNotification(summary, legacyMessage).toJsonNode()); + return notifyJson(buildJobCompletedNotification(summary, "Sync failure occurred", legacyMessage, 
Optional.empty()).toJsonNode());
   }

   @Override
@@ -81,19 +82,26 @@ public boolean notifyJobSuccess(final SyncSummary summary,
         summary.getErrorMessage(),
         summary.getConnection().getUrl(),
         String.valueOf(summary.getJobId()));
-    return notifyJson(buildJobCompletedNotification(summary, legacyMessage).toJsonNode());
+    return notifyJson(buildJobCompletedNotification(summary, "Sync completed", legacyMessage, Optional.empty()).toJsonNode());
   }

   @NotNull
-  static Notification buildJobCompletedNotification(final SyncSummary summary, final String text) {
+  static Notification buildJobCompletedNotification(final SyncSummary summary,
+                                                    final String titleText,
+                                                    final String legacyText,
+                                                    final Optional<String> topContent) {
     Notification notification = new Notification();
-    notification.setText(text);
+    notification.setText(legacyText);
     Section title = notification.addSection();
     String connectionLink = Notification.createLink(summary.getConnection().getName(), summary.getConnection().getUrl());
-    String titleText = summary.isSuccess() ? "Sync completed" : "Sync failure occurred";
     title.setText(String.format("%s: %s", titleText, connectionLink));

-    Section description = notification.addSection();
+    if (topContent.isPresent()) {
+      final Section topSection = notification.addSection();
+      topSection.setText(topContent.get());
+    }
+
+    Section description = notification.addSection();
     final Field sourceLabel = description.addField();
     sourceLabel.setType("mrkdwn");
     sourceLabel.setText("*Source:*");
@@ -130,11 +138,11 @@ static Notification buildJobCompletedNotification(final SyncSummary summary, fin
     Section summarySection = notification.addSection();
     summarySection.setText(String.format("""
         *Sync Summary:*
-        %d record(s) loaded / %d record(s) extracted
-        %s loaded / %s extracted
+        %d record(s) extracted / %d record(s) loaded
+        %s extracted / %s loaded
         """,
-        summary.getRecordsCommitted(), summary.getRecordsEmitted(),
-        summary.getBytesCommittedFormatted(), summary.getBytesEmittedFormatted()));
+        summary.getRecordsEmitted(), summary.getRecordsCommitted(),
+        summary.getBytesEmittedFormatted(), summary.getBytesCommittedFormatted()));

     return notification;
   }
@@ -143,38 +151,35 @@ static Notification buildJobCompletedNotification(final SyncSummary summary, fin
   public boolean notifyConnectionDisabled(final SyncSummary summary,
                                           final String receiverEmail)
       throws IOException, InterruptedException {
-    final String message = renderTemplate(
+    String legacyMessage = renderTemplate(
         "slack/auto_disable_slack_notification_template.txt",
         summary.getSource().getName(),
         summary.getDestination().getName(),
         summary.getErrorMessage(),
         summary.getWorkspace().getId().toString(),
         summary.getConnection().getId().toString());
-
-    final String webhookUrl = config.getWebhook();
-    if (!Strings.isEmpty(webhookUrl)) {
-      return notify(message);
-    }
-    return false;
+    String message = """
+        Your connection has been repeatedly failing and has been automatically disabled.
+ """; + return notifyJson(buildJobCompletedNotification(summary, "Connection disabled", legacyMessage, Optional.of(message)).toJsonNode()); } @Override public boolean notifyConnectionDisableWarning(final SyncSummary summary, final String receiverEmail) throws IOException, InterruptedException { - final String message = renderTemplate( + String legacyMessage = renderTemplate( "slack/auto_disable_warning_slack_notification_template.txt", summary.getSource().getName(), summary.getDestination().getName(), summary.getErrorMessage(), summary.getWorkspace().getId().toString(), summary.getConnection().getId().toString()); - - final String webhookUrl = config.getWebhook(); - if (!Strings.isEmpty(webhookUrl)) { - return notify(message); - } - return false; + String message = """ + Your connection has been repeatedly failing. Please address any issues to ensure your syncs continue to run. + """; + return notifyJson( + buildJobCompletedNotification(summary, "Warning - repeated connection failures", legacyMessage, Optional.of(message)).toJsonNode()); } @Override diff --git a/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java b/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java index 83834349bc3..5e1116d4f02 100644 --- a/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java +++ b/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java @@ -173,7 +173,7 @@ void testNotifyConnectionDisabled() throws IOException, InterruptedException { .workspace(WorkspaceInfo.builder().id(WORKSPACE_ID).build()) .destination(DestinationInfo.builder().name(DESTINATION_TEST).build()) .source(SourceInfo.builder().name(SOURCE_TEST).build()) - .connection(ConnectionInfo.builder().id(CONNECTION_ID).build()) + .connection(ConnectionInfo.builder().id(CONNECTION_ID).name(CONNECTION_NAME).url("http://connection").build()) .errorMessage("job description.") .build(); assertTrue(client.notifyConnectionDisabled(summary, "")); @@ -200,7 +200,7 @@ void testNotifyConnectionDisabledWarning() throws IOException, InterruptedExcept .workspace(WorkspaceInfo.builder().id(WORKSPACE_ID).build()) .destination(DestinationInfo.builder().name(DESTINATION_TEST).build()) .source(SourceInfo.builder().name(SOURCE_TEST).build()) - .connection(ConnectionInfo.builder().id(CONNECTION_ID).build()) + .connection(ConnectionInfo.builder().id(CONNECTION_ID).name(CONNECTION_NAME).url("http://connection").build()) .errorMessage("job description.") .build(); assertTrue(client.notifyConnectionDisableWarning(summary, "")); @@ -359,7 +359,7 @@ public void handle(final HttpExchange t) throws IOException { response = "No notification message or message missing `text` node"; t.sendResponseHeaders(500, response.length()); } else { - response = String.format("Wrong notification messge: %s", message.get("text").asText()); + response = String.format("Wrong notification message: %s", message.get("text").asText()); t.sendResponseHeaders(500, response.length()); } final OutputStream os = t.getResponseBody(); diff --git a/airbyte-oauth/readme.md b/airbyte-oauth/README.md similarity index 100% rename from airbyte-oauth/readme.md rename to airbyte-oauth/README.md diff --git a/airbyte-oauth/build.gradle.kts b/airbyte-oauth/build.gradle.kts index 7ad77afa77f..34a0b33d075 100644 --- a/airbyte-oauth/build.gradle.kts +++ b/airbyte-oauth/build.gradle.kts @@ -1,26 +1,26 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - 
id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - implementation(platform("com.fasterxml.jackson:jackson-bom:2.13.0")) - implementation(libs.bundles.jackson) - implementation(libs.guava) - implementation(libs.google.cloud.storage) - implementation(libs.bundles.apache) - implementation(libs.appender.log4j2) - implementation(libs.aws.java.sdk.s3) - implementation(libs.aws.java.sdk.sts) + implementation(platform("com.fasterxml.jackson:jackson-bom:2.13.0")) + implementation(libs.bundles.jackson) + implementation(libs.guava) + implementation(libs.google.cloud.storage) + implementation(libs.bundles.apache) + implementation(libs.appender.log4j2) + implementation(libs.aws.java.sdk.s3) + implementation(libs.aws.java.sdk.sts) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-json-validation")) - implementation(libs.airbyte.protocol) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-json-validation")) + implementation(libs.airbyte.protocol) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) } diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java index 6e7ecb93a92..ab363bd80c9 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java @@ -20,6 +20,7 @@ import io.airbyte.oauth.flows.LeverOAuthFlow; import io.airbyte.oauth.flows.LinkedinAdsOAuthFlow; import io.airbyte.oauth.flows.MailchimpOAuthFlow; +import io.airbyte.oauth.flows.MicrosoftAzureBlobStorageOAuthFlow; import io.airbyte.oauth.flows.MicrosoftBingAdsOAuthFlow; import io.airbyte.oauth.flows.MicrosoftOneDriveOAuthFlow; import io.airbyte.oauth.flows.MicrosoftSharepointOAuthFlow; @@ -76,6 +77,7 @@ public OAuthImplementationFactory(final HttpClient httpClient) { builder.put("airbyte/source-amazon-ads", new AmazonAdsOAuthFlow(httpClient)); builder.put("airbyte/source-amazon-seller-partner", new AmazonSellerPartnerOAuthFlow(httpClient)); builder.put("airbyte/source-asana", new AsanaOAuthFlow(httpClient)); + builder.put("airbyte/source-azure-blob-storage", new MicrosoftAzureBlobStorageOAuthFlow(httpClient)); builder.put("airbyte/source-bing-ads", new MicrosoftBingAdsOAuthFlow(httpClient)); builder.put("airbyte/source-drift", new DriftOAuthFlow(httpClient)); builder.put("airbyte/source-facebook-marketing", new FacebookMarketingOAuthFlow(httpClient)); diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MicrosoftAzureBlobStorageOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MicrosoftAzureBlobStorageOAuthFlow.java new file mode 100644 index 00000000000..11636bce6fc --- /dev/null +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MicrosoftAzureBlobStorageOAuthFlow.java @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights 
reserved.
+ */
+
+package io.airbyte.oauth.flows;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableMap;
+import io.airbyte.oauth.BaseOAuth2Flow;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.net.http.HttpClient;
+import java.util.Map;
+import java.util.UUID;
+import java.util.function.Supplier;
+import org.apache.http.client.utils.URIBuilder;
+
+/**
+ * Microsoft Azure Blob Storage OAuth.
+ */
+public class MicrosoftAzureBlobStorageOAuthFlow extends BaseOAuth2Flow {
+
+  private static final String fieldName = "tenant_id";
+
+  public MicrosoftAzureBlobStorageOAuthFlow(final HttpClient httpClient) {
+    super(httpClient);
+  }
+
+  @VisibleForTesting
+  public MicrosoftAzureBlobStorageOAuthFlow(final HttpClient httpClient, final Supplier<String> stateSupplier) {
+    super(httpClient, stateSupplier);
+  }
+
+  private String getScopes() {
+    return "offline_access%20https://storage.azure.com/.default";
+  }
+
+  @Override
+  protected String formatConsentUrl(final UUID definitionId,
+                                    final String clientId,
+                                    final String redirectUrl,
+                                    final JsonNode inputOAuthConfiguration)
+      throws IOException {
+
+    final String tenantId;
+    try {
+      tenantId = getConfigValueUnsafe(inputOAuthConfiguration, fieldName);
+    } catch (final IllegalArgumentException e) {
+      throw new IOException("Failed to get " + fieldName + " value from input configuration", e);
+    }
+
+    try {
+      return new URIBuilder()
+          .setScheme("https")
+          .setHost("login.microsoftonline.com")
+          .setPath(tenantId + "/oauth2/v2.0/authorize")
+          .addParameter("client_id", clientId)
+          .addParameter("response_type", "code")
+          .addParameter("redirect_uri", redirectUrl)
+          .addParameter("response_mode", "query")
+          .addParameter("state", getState())
+          .build().toString() + "&scope=" + getScopes();
+    } catch (final URISyntaxException e) {
+      throw new IOException("Failed to format Consent URL for OAuth flow", e);
+    }
+  }
+
+  @Override
+  protected Map<String, String> getAccessTokenQueryParameters(final String clientId,
+                                                              final String clientSecret,
+                                                              final String authCode,
+                                                              final String redirectUrl) {
+    return ImmutableMap.<String, String>builder()
+        .put("client_id", clientId)
+        .put("client_secret", clientSecret)
+        .put("code", authCode)
+        .put("redirect_uri", redirectUrl)
+        .put("grant_type", "authorization_code")
+        .build();
+  }
+
+  @Override
+  protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) {
+    final String tenantId = getConfigValueUnsafe(inputOAuthConfiguration, fieldName);
+    return "https://login.microsoftonline.com/" + tenantId + "/oauth2/v2.0/token";
+  }
+
+}
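One detail worth noting in `formatConsentUrl` above: the scope string is already percent-encoded (`%20` between scopes) and is concatenated after `build()` on purpose. Passing it through `addParameter` would encode it a second time, assuming Apache HttpClient's usual query-parameter encoding. A small sketch of the pitfall:

```java
import org.apache.http.client.utils.URIBuilder;

class ScopeEncodingSketch {

  public static void main(final String[] args) throws Exception {
    final String scopes = "offline_access%20https://storage.azure.com/.default";

    // addParameter() percent-encodes the value, so the pre-encoded "%20"
    // would become "%2520" and the scope list would no longer parse.
    final String doubleEncoded = new URIBuilder("https://login.microsoftonline.com/tenant/oauth2/v2.0/authorize")
        .addParameter("scope", scopes)
        .build()
        .toString();
    System.out.println(doubleEncoded); // scope value arrives mangled

    // Hence the manual "&scope=" concatenation after build() in the flow above.
    System.out.println("...&scope=" + scopes);
  }

}
```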
+ */ + +package io.airbyte.oauth.flows; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.oauth.BaseOAuthFlow; +import java.util.Map; +import org.junit.jupiter.api.Test; + +@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") +class MicrosoftAzureBlobStorageOAuthFlowTest extends BaseOAuthFlowTest { + + @Override + protected BaseOAuthFlow getOAuthFlow() { + return new MicrosoftAzureBlobStorageOAuthFlow(getHttpClient(), this::getConstantState); + } + + @Override + protected String getExpectedConsentUrl() { + return "https://login.microsoftonline.com/test_tenant_id/oauth2/v2.0/authorize?client_id=test_client_id&response_type=code&redirect_uri=https%3A%2F%2Fairbyte.io&response_mode=query&state=state&scope=offline_access%20https://storage.azure.com/.default"; + } + + @Override + protected JsonNode getInputOAuthConfiguration() { + return Jsons.jsonNode(Map.of("tenant_id", "test_tenant_id")); + } + + @Override + protected JsonNode getUserInputFromConnectorConfigSpecification() { + return getJsonSchema(Map.of("tenant_id", Map.of("type", "string"))); + } + + @Test + @Override + void testEmptyInputCompleteDestinationOAuth() {} + + @Test + @Override + void testDeprecatedCompleteDestinationOAuth() {} + + @Test + @Override + void testDeprecatedCompleteSourceOAuth() {} + + @Test + @Override + void testEmptyInputCompleteSourceOAuth() {} + +} diff --git a/airbyte-persistence/job-persistence/build.gradle.kts b/airbyte-persistence/job-persistence/build.gradle.kts index 07a23d70f55..041ed7041c2 100644 --- a/airbyte-persistence/job-persistence/build.gradle.kts +++ b/airbyte-persistence/job-persistence/build.gradle.kts @@ -1,59 +1,67 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + kotlin("jvm") + kotlin("kapt") } configurations.all { - resolutionStrategy { - force(libs.platform.testcontainers.postgresql) - } + resolutionStrategy { + force(libs.platform.testcontainers.postgresql) + } } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - - implementation(platform(libs.fasterxml)) - implementation(libs.bundles.jackson) - implementation(libs.spotbugs.annotations) - implementation(libs.guava) - implementation(libs.commons.io) - implementation(libs.bundles.apache) - // TODO: remove this, it's pulled in for a Strings.notEmpty() check - implementation(libs.bundles.log4j) - - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-protocol")) - implementation(project(":airbyte-oauth")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-db:jooq")) - implementation(project(":airbyte-db:db-lib")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-notification")) - implementation(project(":airbyte-analytics")) - implementation(project(":airbyte-metrics:metrics-lib")) - - implementation(libs.sentry.java) - implementation(libs.otel.semconv) - implementation(libs.otel.sdk) - implementation(libs.otel.sdk.testing) - implementation(libs.micrometer.statsd) - implementation(platform(libs.otel.bom)) - implementation("io.opentelemetry:opentelemetry-api") - implementation("io.opentelemetry:opentelemetry-sdk") - 
implementation("io.opentelemetry:opentelemetry-exporter-otlp") - implementation(libs.apache.commons.collections) - implementation(libs.datadog.statsd.client) - - testImplementation(project(":airbyte-config:config-persistence")) - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.platform.testcontainers.postgresql) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - - testImplementation(libs.junit.pioneer) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(libs.bundles.micronaut.annotation.processor) + + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) + + implementation(platform(libs.fasterxml)) + implementation(libs.bundles.jackson) + implementation(libs.spotbugs.annotations) + implementation(libs.guava) + implementation(libs.commons.io) + implementation(libs.bundles.apache) + // TODO: remove this, it's pulled in for a Strings.notEmpty() check + implementation(libs.bundles.log4j) + + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-protocol")) + implementation(project(":airbyte-oauth")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-db:jooq")) + implementation(project(":airbyte-db:db-lib")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-notification")) + implementation(project(":airbyte-analytics")) + implementation(project(":airbyte-metrics:metrics-lib")) + + implementation(libs.sentry.java) + implementation(libs.otel.semconv) + implementation(libs.otel.sdk) + implementation(libs.otel.sdk.testing) + implementation(libs.micrometer.statsd) + implementation(platform(libs.otel.bom)) + implementation("io.opentelemetry:opentelemetry-api") + implementation("io.opentelemetry:opentelemetry-sdk") + implementation("io.opentelemetry:opentelemetry-exporter-otlp") + implementation(libs.apache.commons.collections) + implementation(libs.datadog.statsd.client) + implementation(libs.bundles.micronaut.data.jdbc) + implementation(libs.bundles.micronaut.kotlin) + + testImplementation(project(":airbyte-config:config-persistence")) + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.platform.testcontainers.postgresql) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + + testImplementation(libs.junit.pioneer) } diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobCreator.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobCreator.java index 64a796aa28f..2dc69021e31 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobCreator.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobCreator.java @@ -14,6 +14,7 @@ import io.airbyte.config.JobResetConnectionConfig; import io.airbyte.config.JobSyncConfig; import io.airbyte.config.JobTypeResourceLimit.JobType; +import io.airbyte.config.RefreshConfig; import io.airbyte.config.ResetSourceConfiguration; import io.airbyte.config.ResourceRequirements; import io.airbyte.config.ResourceRequirementsType; @@ 
-23,10 +24,17 @@ import io.airbyte.config.StandardSourceDefinition.SourceType; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncOperation; +import io.airbyte.config.StateWrapper; import io.airbyte.config.SyncResourceRequirements; import io.airbyte.config.SyncResourceRequirementsKey; import io.airbyte.config.helpers.ResourceRequirementsUtils; +import io.airbyte.config.persistence.RefreshJobStateUpdater; +import io.airbyte.config.persistence.StatePersistence; +import io.airbyte.config.persistence.StreamRefreshesRepository; +import io.airbyte.config.persistence.domain.StreamRefresh; +import io.airbyte.config.persistence.helper.GenerationBumper; import io.airbyte.config.provider.ResourceRequirementsProvider; +import io.airbyte.featureflag.ActivateRefreshes; import io.airbyte.featureflag.Connection; import io.airbyte.featureflag.Context; import io.airbyte.featureflag.DestResourceOverrides; @@ -63,13 +71,25 @@ public class DefaultJobCreator implements JobCreator { private final JobPersistence jobPersistence; private final ResourceRequirementsProvider resourceRequirementsProvider; private final FeatureFlagClient featureFlagClient; + private final GenerationBumper generationBumper; + private final StatePersistence statePersistence; + private final RefreshJobStateUpdater refreshJobStateUpdater; + private final StreamRefreshesRepository streamRefreshesRepository; public DefaultJobCreator(final JobPersistence jobPersistence, final ResourceRequirementsProvider resourceRequirementsProvider, - final FeatureFlagClient featureFlagClient) { + final FeatureFlagClient featureFlagClient, + final GenerationBumper generationBumper, + final StatePersistence statePersistence, + final RefreshJobStateUpdater refreshJobStateUpdater, + final StreamRefreshesRepository streamRefreshesRepository) { this.jobPersistence = jobPersistence; this.resourceRequirementsProvider = resourceRequirementsProvider; this.featureFlagClient = featureFlagClient; + this.generationBumper = generationBumper; + this.statePersistence = statePersistence; + this.refreshJobStateUpdater = refreshJobStateUpdater; + this.streamRefreshesRepository = streamRefreshesRepository; } @Override @@ -115,6 +135,85 @@ public Optional createSyncJob(final SourceConnection source, return jobPersistence.enqueueJob(standardSync.getConnectionId().toString(), jobConfig); } + @Override + public Optional createRefreshConnection(final StandardSync standardSync, + final String sourceDockerImageName, + final Version sourceProtocolVersion, + final String destinationDockerImageName, + final Version destinationProtocolVersion, + final List standardSyncOperations, + @Nullable final JsonNode webhookOperationConfigs, + final StandardSourceDefinition sourceDefinition, + final StandardDestinationDefinition destinationDefinition, + final ActorDefinitionVersion sourceDefinitionVersion, + final ActorDefinitionVersion destinationDefinitionVersion, + final UUID workspaceId, + final List streamsToRefresh) + throws IOException { + final boolean canRunRefreshes = featureFlagClient.boolVariation(ActivateRefreshes.INSTANCE, new Multi( + List.of( + new Workspace(workspaceId), + new Connection(standardSync.getConnectionId()), + new SourceDefinition(sourceDefinition.getSourceDefinitionId()), + new DestinationDefinition(destinationDefinition.getDestinationDefinitionId())))); + + if (!canRunRefreshes) { + throw new IllegalStateException("Trying to create a refresh job for a destination which doesn't support refreshes"); + } + + final SyncResourceRequirements 
syncResourceRequirements = + getSyncResourceRequirements(workspaceId, standardSync, sourceDefinition, destinationDefinition, false); + + final RefreshConfig refreshConfig = new RefreshConfig() + .withNamespaceDefinition(standardSync.getNamespaceDefinition()) + .withNamespaceFormat(standardSync.getNamespaceFormat()) + .withPrefix(standardSync.getPrefix()) + .withSourceDockerImage(sourceDockerImageName) + .withSourceProtocolVersion(sourceProtocolVersion) + .withDestinationDockerImage(destinationDockerImageName) + .withDestinationProtocolVersion(destinationProtocolVersion) + .withOperationSequence(standardSyncOperations) + .withWebhookOperationConfigs(webhookOperationConfigs) + .withConfiguredAirbyteCatalog(standardSync.getCatalog()) + .withSyncResourceRequirements(syncResourceRequirements) + .withIsSourceCustomConnector(sourceDefinition.getCustom()) + .withIsDestinationCustomConnector(destinationDefinition.getCustom()) + .withWorkspaceId(workspaceId) + .withSourceDefinitionVersionId(sourceDefinitionVersion.getVersionId()) + .withDestinationDefinitionVersionId(destinationDefinitionVersion.getVersionId()) + .withStreamsToRefresh( + streamsToRefresh.stream().map(streamRefresh -> new StreamDescriptor() + .withName(streamRefresh.getStreamName()) + .withNamespace(streamRefresh.getStreamNamespace())).toList()); + + final JobConfig jobConfig = new JobConfig() + .withConfigType(ConfigType.REFRESH) + .withRefresh(refreshConfig); + + final Optional maybeJobId = jobPersistence.enqueueJob(standardSync.getConnectionId().toString(), jobConfig); + + if (maybeJobId.isPresent()) { + final long jobId = maybeJobId.get(); + generationBumper.updateGenerationForStreams(standardSync.getConnectionId(), jobId, streamsToRefresh); + final Optional currentState = statePersistence.getCurrentState(standardSync.getConnectionId()); + updateStateAndDeleteRefreshes(standardSync.getConnectionId(), streamsToRefresh, currentState); + } + + return maybeJobId; + } + + // TODO: Add Transactional annotation + private void updateStateAndDeleteRefreshes(final UUID connectionId, + final List streamsToRefresh, + final Optional currentState) + throws IOException { + if (currentState.isPresent()) { + refreshJobStateUpdater.updateStateWrapperForRefresh(connectionId, currentState.get(), streamsToRefresh); + } + streamsToRefresh.forEach( + s -> streamRefreshesRepository.deleteByConnectionIdAndStreamNameAndStreamNamespace(connectionId, s.getStreamName(), s.getStreamNamespace())); + } + @Override public Optional createResetConnectionJob(final DestinationConnection destination, final StandardSync standardSync, diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java index 793d029c8d1..f8dc2b833c7 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java @@ -434,10 +434,6 @@ private static JobOutput parseJobOutputFromString(final String jobOutputString) // TODO feature flag this for data types rollout // CatalogMigrationV1Helper.upgradeSchemaIfNeeded(jobOutput.getDiscoverCatalog().getCatalog()); CatalogMigrationV1Helper.downgradeSchemaIfNeeded(jobOutput.getDiscoverCatalog().getCatalog()); - } else if (jobOutput.getOutputType() == OutputType.SYNC && jobOutput.getSync() != null) { - // TODO feature flag this for data 
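
Worth pausing on the refresh path above: once the job is enqueued, `DefaultJobCreator` bumps the generation of every refreshed stream, prunes those streams out of the persisted connection state via `RefreshJobStateUpdater`, and finally deletes the refresh markers from `StreamRefreshesRepository`. `RefreshJobStateUpdater` itself is not included in this diff, so the sketch below only illustrates the pruning rule implied by the tests further down, using simplified, hypothetical types:

```java
import java.util.List;
import java.util.Set;

// Hypothetical simplified shapes; the real code works on StateWrapper/AirbyteStateMessage.
record StreamId(String name, String namespace) {}

record StreamState(StreamId descriptor, String stateBlob) {}

final class RefreshStatePruning {

  // Keep per-stream state only for streams that are NOT being refreshed, so refreshed
  // streams re-sync from scratch while every other stream keeps its cursor.
  static List<StreamState> prune(final List<StreamState> current, final Set<StreamId> toRefresh) {
    return current.stream()
        .filter(state -> !toRefresh.contains(state.descriptor()))
        .toList();
  }
}
```

For GLOBAL state the tests expect the same per-stream pruning, with the shared state nulled out only once every stream in the state is being refreshed.
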
types rollout - // CatalogMigrationV1Helper.upgradeSchemaIfNeeded(jobOutput.getSync().getOutputCatalog()); - CatalogMigrationV1Helper.downgradeSchemaIfNeeded(jobOutput.getSync().getOutputCatalog()); } return jobOutput; } @@ -935,12 +931,10 @@ public List listJobs(final Set configTypes, .and(updatedAtStart == null ? DSL.noCondition() : JOBS.UPDATED_AT.ge(updatedAtStart)) .and(updatedAtEnd == null ? DSL.noCondition() : JOBS.UPDATED_AT.le(updatedAtEnd)) .orderBy(JOBS.CREATED_AT.desc(), JOBS.ID.desc()) - .limit(limit) - .offset(offset) .getSQL(ParamType.INLINED) + ") AS jobs"; LOGGER.debug("jobs subquery: {}", jobsSubquery); - return getJobsFromResult(ctx.fetch(jobSelectAndJoin(jobsSubquery) + buildJobOrderByString(orderByField, orderByMethod))); + return getJobsFromResult(ctx.fetch(jobSelectAndJoin(jobsSubquery) + buildJobOrderByString(orderByField, orderByMethod, limit, offset))); }); } @@ -975,11 +969,9 @@ public List listJobs(final Set configTypes, .and(updatedAtStart == null ? DSL.noCondition() : JOBS.UPDATED_AT.ge(updatedAtStart)) .and(updatedAtEnd == null ? DSL.noCondition() : JOBS.UPDATED_AT.le(updatedAtEnd)) .orderBy(JOBS.CREATED_AT.desc(), JOBS.ID.desc()) - .limit(limit) - .offset(offset) .getSQL(ParamType.INLINED) + ") AS jobs"; - return getJobsFromResult(ctx.fetch(jobSelectAndJoin(jobsSubquery) + buildJobOrderByString(orderByField, orderByMethod))); + return getJobsFromResult(ctx.fetch(jobSelectAndJoin(jobsSubquery) + buildJobOrderByString(orderByField, orderByMethod, limit, offset))); }); } @@ -1437,7 +1429,7 @@ private String removeUnsupportedUnicode(final String value) { return value != null ? value.replaceAll("\\u0000|\\\\u0000", "") : null; } - private String buildJobOrderByString(final String orderByField, final String orderByMethod) { + private String buildJobOrderByString(final String orderByField, final String orderByMethod, final int limit, final int offset) { // Set up maps and values final Map fieldMap = Map.of( OrderByField.CREATED_AT, JOBS.CREATED_AT.getName(), @@ -1457,7 +1449,7 @@ private String buildJobOrderByString(final String orderByField, final String ord sortMethod = orderByMethod.toUpperCase(); } - return String.format("ORDER BY jobs.%s %s ", field, sortMethod); + return String.format("ORDER BY jobs.%s %s LIMIT %d OFFSET %d", field, sortMethod, limit, offset); } private enum OrderByField { diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobCreator.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobCreator.java index ef9c216b881..b6aff1a7dba 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobCreator.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobCreator.java @@ -13,6 +13,7 @@ import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncOperation; +import io.airbyte.config.persistence.domain.StreamRefresh; import io.airbyte.protocol.models.StreamDescriptor; import jakarta.annotation.Nullable; import java.io.IOException; @@ -75,4 +76,19 @@ Optional createResetConnectionJob(DestinationConnection destination, UUID workspaceId) throws IOException; + Optional createRefreshConnection(final StandardSync standardSync, + final String sourceDockerImageName, + final Version sourceProtocolVersion, + final String destinationDockerImageName, + final Version destinationProtocolVersion, + final List standardSyncOperations, + @Nullable final JsonNode 
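
The `listJobs` change above is a correctness fix, not a refactor: previously the subquery applied `LIMIT`/`OFFSET` while still ordered by `created_at DESC`, and only afterwards did the outer query re-order by the caller's field, so a page ordered by `updated_at` could be cut from the wrong rows. Moving `LIMIT %d OFFSET %d` into the `ORDER BY` string applies paging after the requested ordering, which is what the new `testListJobsOrderedByUpdatedAt` test pins down. A minimal, self-contained illustration of the difference (plain Java streams, illustrative data only):

```java
import java.util.Comparator;
import java.util.List;

public final class OrderThenLimit {

  record Job(long id, long createdAt, long updatedAt) {}

  public static void main(final String[] args) {
    final List<Job> jobs = List.of(
        new Job(1, 100, 150),  // created first, also updated first
        new Job(2, 200, 400)); // created last, updated last

    // Old behavior: cut the page by created_at DESC, then re-order the page.
    final Job oldFirst = jobs.stream()
        .sorted(Comparator.comparingLong(Job::createdAt).reversed())
        .limit(1) // page is cut before the requested ordering
        .sorted(Comparator.comparingLong(Job::updatedAt))
        .findFirst().orElseThrow();

    // New behavior: order by updated_at ASC first, then take the page.
    final Job newFirst = jobs.stream()
        .sorted(Comparator.comparingLong(Job::updatedAt))
        .limit(1)
        .findFirst().orElseThrow();

    System.out.println(oldFirst.id()); // 2 -- wrong row for "first by updated_at"
    System.out.println(newFirst.id()); // 1 -- what the caller asked for
  }
}
```
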
webhookOperationConfigs, + final StandardSourceDefinition sourceDefinition, + final StandardDestinationDefinition destinationDefinition, + final ActorDefinitionVersion sourceDefinitionVersion, + final ActorDefinitionVersion destinationDefinitionVersion, + final UUID workspaceId, + final List<StreamRefresh> streamsToRefresh) + throws IOException; + } diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobNotifier.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobNotifier.java index 78907e8a5c2..487cae93b0a 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobNotifier.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobNotifier.java @@ -196,31 +196,6 @@ private void submitToMetricClient(final String action, final String notification metricTriggerAttribute); } - /** - * This method allows for the alert to be sent without the customerio configuration set in the - * database. - * <p>
    - * This is only needed because there is no UI element to allow for users to create that - * configuration. - * <p>
    - * Once that exists, this can be removed and we should be using `notifyJobByEmail`. The alert is - * sent to the email associated with the workspace. - * - * @param reason for notification - * @param action tracking action for telemetry - * @param job job notification is for - * @param attemptStats sync stats for each attempts - */ - public void notifyJobByEmail(final String reason, final String action, final Job job, List attemptStats) { - try { - final UUID workspaceId = workspaceHelper.getWorkspaceForJobIdIgnoreExceptions(job.getId()); - final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, true); - notifyJob(action, job, attemptStats, workspace); - } catch (final Exception e) { - LOGGER.error("Unable to read configuration:", e); - } - } - private String getJobDescription(final Job job, final String reason) { final Instant jobStartedDate = Instant.ofEpochSecond(job.getStartedAtInSecond().orElse(job.getCreatedAtInSecond())); final DateTimeFormatter formatter = DateTimeFormatter.ofLocalizedDateTime(FormatStyle.FULL) diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactory.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactory.java index 76dfe0aef70..77c984cd8f9 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactory.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactory.java @@ -19,8 +19,10 @@ import io.airbyte.config.persistence.ConfigInjector; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.domain.StreamRefresh; import io.airbyte.persistence.job.DefaultJobCreator; import io.airbyte.persistence.job.WorkspaceHelper; +import io.airbyte.persistence.job.helper.model.JobCreatorInput; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.util.List; @@ -56,66 +58,25 @@ public DefaultSyncJobFactory(final boolean connectorSpecificResourceDefaultsEnab } @Override - public Long create(final UUID connectionId) { + public Long createSync(final UUID connectionId) { try { - final StandardSync standardSync = configRepository.getStandardSync(connectionId); - final UUID workspaceId = workspaceHelper.getWorkspaceForSourceId(standardSync.getSourceId()); - final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, true); - final SourceConnection sourceConnection = configRepository.getSourceConnection(standardSync.getSourceId()); - final DestinationConnection destinationConnection = configRepository.getDestinationConnection(standardSync.getDestinationId()); - final JsonNode sourceConfiguration = oAuthConfigSupplier.injectSourceOAuthParameters( - sourceConnection.getSourceDefinitionId(), - sourceConnection.getSourceId(), - sourceConnection.getWorkspaceId(), - sourceConnection.getConfiguration()); - sourceConnection.withConfiguration(configInjector.injectConfig(sourceConfiguration, sourceConnection.getSourceDefinitionId())); - final JsonNode destinationConfiguration = oAuthConfigSupplier.injectDestinationOAuthParameters( - destinationConnection.getDestinationDefinitionId(), - destinationConnection.getDestinationId(), - destinationConnection.getWorkspaceId(), - destinationConnection.getConfiguration()); - destinationConnection - 
.withConfiguration(configInjector.injectConfig(destinationConfiguration, destinationConnection.getDestinationDefinitionId())); - final StandardSourceDefinition sourceDefinition = configRepository - .getStandardSourceDefinition(sourceConnection.getSourceDefinitionId()); - final StandardDestinationDefinition destinationDefinition = configRepository - .getStandardDestinationDefinition(destinationConnection.getDestinationDefinitionId()); - - final ActorDefinitionVersion sourceVersion = - actorDefinitionVersionHelper.getSourceVersion(sourceDefinition, workspaceId, standardSync.getSourceId()); - final ActorDefinitionVersion destinationVersion = - actorDefinitionVersionHelper.getDestinationVersion(destinationDefinition, workspaceId, standardSync.getDestinationId()); - - final String sourceImageName = sourceVersion.getDockerRepository() + ":" + sourceVersion.getDockerImageTag(); - final String destinationImageName = destinationVersion.getDockerRepository() + ":" + destinationVersion.getDockerImageTag(); - - final List standardSyncOperations = Lists.newArrayList(); - for (final var operationId : standardSync.getOperationIds()) { - final StandardSyncOperation standardSyncOperation = configRepository.getStandardSyncOperation(operationId); - standardSyncOperations.add(standardSyncOperation); - } - - // for OSS users, make it possible to ignore default actor-level resource requirements - if (!connectorSpecificResourceDefaultsEnabled) { - sourceDefinition.setResourceRequirements(null); - destinationDefinition.setResourceRequirements(null); - } + final JobCreatorInput jobCreatorInput = getJobCreatorInput(connectionId); return jobCreator.createSyncJob( - sourceConnection, - destinationConnection, - standardSync, - sourceImageName, - new Version(sourceVersion.getProtocolVersion()), - destinationImageName, - new Version(destinationVersion.getProtocolVersion()), - standardSyncOperations, - workspace.getWebhookOperationConfigs(), - sourceDefinition, - destinationDefinition, - sourceVersion, - destinationVersion, - workspace.getWorkspaceId()) + jobCreatorInput.getSource(), + jobCreatorInput.getDestination(), + jobCreatorInput.getStandardSync(), + jobCreatorInput.getSourceDockerImageName(), + jobCreatorInput.getSourceProtocolVersion(), + jobCreatorInput.getDestinationDockerImageName(), + jobCreatorInput.getDestinationProtocolVersion(), + jobCreatorInput.getStandardSyncOperations(), + jobCreatorInput.getWebhookOperationConfigs(), + jobCreatorInput.getSourceDefinition(), + jobCreatorInput.getDestinationDefinition(), + jobCreatorInput.getSourceDefinitionVersion(), + jobCreatorInput.getDestinationDefinitionVersion(), + jobCreatorInput.getWorkspaceId()) .orElseThrow(() -> new IllegalStateException("We shouldn't be trying to create a new sync job if there is one running already.")); } catch (final IOException | JsonValidationException | ConfigNotFoundException e) { @@ -123,4 +84,91 @@ public Long create(final UUID connectionId) { } } + @Override + public Long createRefresh(UUID connectionId, List streamsToRefresh) { + try { + final JobCreatorInput jobCreatorInput = getJobCreatorInput(connectionId); + + return jobCreator.createRefreshConnection( + jobCreatorInput.getStandardSync(), + jobCreatorInput.getSourceDockerImageName(), + jobCreatorInput.getSourceProtocolVersion(), + jobCreatorInput.getDestinationDockerImageName(), + jobCreatorInput.getDestinationProtocolVersion(), + jobCreatorInput.getStandardSyncOperations(), + jobCreatorInput.getWebhookOperationConfigs(), + jobCreatorInput.getSourceDefinition(), + 
jobCreatorInput.getDestinationDefinition(), + jobCreatorInput.getSourceDefinitionVersion(), + jobCreatorInput.getDestinationDefinitionVersion(), + jobCreatorInput.getWorkspaceId(), + streamsToRefresh) + .orElseThrow(() -> new IllegalStateException("We shouldn't be trying to create a new refresh job if there is one running already.")); + + } catch (final IOException | JsonValidationException | ConfigNotFoundException e) { + throw new RuntimeException(e); + } + } + + private JobCreatorInput getJobCreatorInput(UUID connectionId) throws JsonValidationException, ConfigNotFoundException, IOException { + final StandardSync standardSync = configRepository.getStandardSync(connectionId); + final UUID workspaceId = workspaceHelper.getWorkspaceForSourceId(standardSync.getSourceId()); + final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, true); + final SourceConnection sourceConnection = configRepository.getSourceConnection(standardSync.getSourceId()); + final DestinationConnection destinationConnection = configRepository.getDestinationConnection(standardSync.getDestinationId()); + final JsonNode sourceConfiguration = oAuthConfigSupplier.injectSourceOAuthParameters( + sourceConnection.getSourceDefinitionId(), + sourceConnection.getSourceId(), + sourceConnection.getWorkspaceId(), + sourceConnection.getConfiguration()); + sourceConnection.withConfiguration(configInjector.injectConfig(sourceConfiguration, sourceConnection.getSourceDefinitionId())); + final JsonNode destinationConfiguration = oAuthConfigSupplier.injectDestinationOAuthParameters( + destinationConnection.getDestinationDefinitionId(), + destinationConnection.getDestinationId(), + destinationConnection.getWorkspaceId(), + destinationConnection.getConfiguration()); + destinationConnection + .withConfiguration(configInjector.injectConfig(destinationConfiguration, destinationConnection.getDestinationDefinitionId())); + final StandardSourceDefinition sourceDefinition = configRepository + .getStandardSourceDefinition(sourceConnection.getSourceDefinitionId()); + final StandardDestinationDefinition destinationDefinition = configRepository + .getStandardDestinationDefinition(destinationConnection.getDestinationDefinitionId()); + + final ActorDefinitionVersion sourceVersion = + actorDefinitionVersionHelper.getSourceVersion(sourceDefinition, workspaceId, standardSync.getSourceId()); + final ActorDefinitionVersion destinationVersion = + actorDefinitionVersionHelper.getDestinationVersion(destinationDefinition, workspaceId, standardSync.getDestinationId()); + + final String sourceImageName = sourceVersion.getDockerRepository() + ":" + sourceVersion.getDockerImageTag(); + final String destinationImageName = destinationVersion.getDockerRepository() + ":" + destinationVersion.getDockerImageTag(); + + final List<StandardSyncOperation> standardSyncOperations = Lists.newArrayList(); + for (final var operationId : standardSync.getOperationIds()) { + final StandardSyncOperation standardSyncOperation = configRepository.getStandardSyncOperation(operationId); + standardSyncOperations.add(standardSyncOperation); + } + + // for OSS users, make it possible to ignore default actor-level resource requirements + if (!connectorSpecificResourceDefaultsEnabled) { + sourceDefinition.setResourceRequirements(null); + destinationDefinition.setResourceRequirements(null); + } + + return new JobCreatorInput( + sourceConnection, + destinationConnection, + standardSync, + sourceImageName, + new Version(sourceVersion.getProtocolVersion()), + destinationImageName, +
new Version(destinationVersion.getProtocolVersion()), + standardSyncOperations, + workspace.getWebhookOperationConfigs(), + sourceDefinition, + destinationDefinition, + sourceVersion, + destinationVersion, + workspaceId); + } + } diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/SyncJobFactory.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/SyncJobFactory.java index 97c2cae991e..b41869aa6ec 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/SyncJobFactory.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/SyncJobFactory.java @@ -4,6 +4,8 @@ package io.airbyte.persistence.job.factory; +import io.airbyte.config.persistence.domain.StreamRefresh; +import java.util.List; import java.util.UUID; /** @@ -17,6 +19,14 @@ public interface SyncJobFactory { * @param connectionId connection id * @return job id */ - Long create(UUID connectionId); + Long createSync(UUID connectionId); + + /** + * Create refresh job for given connection id. + * + * @param connectionId connection id + * @param streamsToRefresh streams to refresh + * @return job id + */ + Long createRefresh(UUID connectionId, List<StreamRefresh> streamsToRefresh); } diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/Job.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/Job.java index ebef2cfbeb7..378e01aa07b 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/Job.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/Job.java @@ -187,19 +187,6 @@ public Optional getLastFailedAttempt() { .findFirst(); } - /** - * Get the last attempt by created_at for the job that had an output. - * - * @return the last attempt. empty optional, if there have been no attempts with outputs. - */ - public Optional getLastAttemptWithOutput() { - return getAttempts() - .stream() - .sorted(Comparator.comparing(Attempt::getCreatedAtInSecond).reversed()) - .filter(a -> a.getOutput().isPresent() && a.getOutput().get().getSync() != null && a.getOutput().get().getSync().getState() != null) - .findFirst(); - } - /** * Get the last attempt by created_at for the job.
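
With `SyncJobFactory` above split into `createSync` and `createRefresh`, a caller that previously had the single `create(connectionId)` entry point now chooses explicitly. A hypothetical caller sketch (assumes the `airbyte-persistence` types from this diff on the classpath; the routing rule is illustrative, not taken from the diff):

```java
import io.airbyte.config.persistence.domain.StreamRefresh;
import io.airbyte.persistence.job.factory.SyncJobFactory;
import java.util.List;
import java.util.UUID;

final class JobDispatchSketch {

  // Create a refresh job when stream refreshes are pending, otherwise a regular sync.
  static Long dispatch(final SyncJobFactory factory, final UUID connectionId, final List<StreamRefresh> streamsToRefresh) {
    return streamsToRefresh.isEmpty()
        ? factory.createSync(connectionId)
        : factory.createRefresh(connectionId, streamsToRefresh);
  }
}
```
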
* diff --git a/airbyte-persistence/job-persistence/src/main/kotlin/io/airbyte/persistence/job/helper/model/JobCreatorInput.kt b/airbyte-persistence/job-persistence/src/main/kotlin/io/airbyte/persistence/job/helper/model/JobCreatorInput.kt new file mode 100644 index 00000000000..8665dd9dc8d --- /dev/null +++ b/airbyte-persistence/job-persistence/src/main/kotlin/io/airbyte/persistence/job/helper/model/JobCreatorInput.kt @@ -0,0 +1,29 @@ +package io.airbyte.persistence.job.helper.model + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.version.Version +import io.airbyte.config.ActorDefinitionVersion +import io.airbyte.config.DestinationConnection +import io.airbyte.config.SourceConnection +import io.airbyte.config.StandardDestinationDefinition +import io.airbyte.config.StandardSourceDefinition +import io.airbyte.config.StandardSync +import io.airbyte.config.StandardSyncOperation +import java.util.UUID + +data class JobCreatorInput( + val source: SourceConnection, + val destination: DestinationConnection, + val standardSync: StandardSync, + val sourceDockerImageName: String, + val sourceProtocolVersion: Version, + val destinationDockerImageName: String, + val destinationProtocolVersion: Version, + val standardSyncOperations: List, + val webhookOperationConfigs: JsonNode?, + val sourceDefinition: StandardSourceDefinition, + val destinationDefinition: StandardDestinationDefinition, + val sourceDefinitionVersion: ActorDefinitionVersion, + val destinationDefinitionVersion: ActorDefinitionVersion, + val workspaceId: UUID, +) diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobCreatorTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobCreatorTest.java index d973642571f..b1d938fae22 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobCreatorTest.java +++ b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobCreatorTest.java @@ -5,8 +5,10 @@ package io.airbyte.persistence.job; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -28,6 +30,7 @@ import io.airbyte.config.JobTypeResourceLimit.JobType; import io.airbyte.config.OperatorNormalization; import io.airbyte.config.OperatorNormalization.Option; +import io.airbyte.config.RefreshConfig; import io.airbyte.config.ResetSourceConfiguration; import io.airbyte.config.ResourceRequirements; import io.airbyte.config.ResourceRequirementsType; @@ -38,13 +41,26 @@ import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncOperation; import io.airbyte.config.StandardSyncOperation.OperatorType; +import io.airbyte.config.StateType; +import io.airbyte.config.StateWrapper; import io.airbyte.config.SyncResourceRequirements; import io.airbyte.config.SyncResourceRequirementsKey; +import io.airbyte.config.persistence.RefreshJobStateUpdater; +import io.airbyte.config.persistence.StatePersistence; +import io.airbyte.config.persistence.StreamRefreshesRepository; +import io.airbyte.config.persistence.domain.StreamRefresh; +import io.airbyte.config.persistence.helper.GenerationBumper; import 
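
One collaborator worth calling out before the test changes: `GenerationBumper.updateGenerationForStreams(connectionId, jobId, streamsToRefresh)` is invoked right after a refresh job is enqueued, but its implementation is not part of this diff. Conceptually, a refresh starts a new "generation" of data for each refreshed stream, so the destination can tell records written before the refresh from records written after it. A toy, in-memory sketch of that idea (hypothetical; the real bumper persists generations per connection and stream):

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

final class GenerationCounterSketch {

  private final Map<String, Long> generationByStream = new ConcurrentHashMap<>();

  // Advance the stream's generation; records written by the new refresh job can
  // then be tagged with the bumped value.
  long bump(final String connectionId, final String streamName, final String streamNamespace) {
    final String key = connectionId + "/" + streamNamespace + "/" + streamName;
    return generationByStream.merge(key, 1L, Long::sum);
  }
}
```
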
io.airbyte.config.provider.ResourceRequirementsProvider; +import io.airbyte.featureflag.ActivateRefreshes; import io.airbyte.featureflag.DestResourceOverrides; +import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.OrchestratorResourceOverrides; import io.airbyte.featureflag.SourceResourceOverrides; import io.airbyte.featureflag.TestClient; +import io.airbyte.featureflag.UseResourceRequirementsVariant; +import io.airbyte.protocol.models.AirbyteGlobalState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStreamState; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; @@ -54,6 +70,8 @@ import io.airbyte.protocol.models.StreamDescriptor; import io.airbyte.protocol.models.SyncMode; import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -99,11 +117,15 @@ class DefaultJobCreatorTest { private static final UUID WORKSPACE_ID = UUID.randomUUID(); private JobPersistence jobPersistence; + private StatePersistence statePersistence; + private RefreshJobStateUpdater refreshJobStateUpdater; + private StreamRefreshesRepository streamRefreshesRepository; private JobCreator jobCreator; private ResourceRequirementsProvider resourceRequirementsProvider; private ResourceRequirements workerResourceRequirements; private ResourceRequirements sourceResourceRequirements; private ResourceRequirements destResourceRequirements; + private GenerationBumper generationBumper; private static final JsonNode PERSISTED_WEBHOOK_CONFIGS; @@ -179,9 +201,10 @@ class DefaultJobCreatorTest { String.format("{\"webhookConfigs\": [{\"id\": \"%s\", \"name\": \"%s\", \"authToken\": {\"_secret\": \"a-secret_v1\"}}]}", WEBHOOK_CONFIG_ID, WEBHOOK_NAME)); - STANDARD_SOURCE_DEFINITION = new StandardSourceDefinition().withCustom(false); - STANDARD_SOURCE_DEFINITION_WITH_SOURCE_TYPE = new StandardSourceDefinition().withSourceType(SourceType.DATABASE).withCustom(false); - STANDARD_DESTINATION_DEFINITION = new StandardDestinationDefinition().withCustom(false); + STANDARD_SOURCE_DEFINITION = new StandardSourceDefinition().withSourceDefinitionId(UUID.randomUUID()).withCustom(false); + STANDARD_SOURCE_DEFINITION_WITH_SOURCE_TYPE = + new StandardSourceDefinition().withSourceDefinitionId(UUID.randomUUID()).withSourceType(SourceType.DATABASE).withCustom(false); + STANDARD_DESTINATION_DEFINITION = new StandardDestinationDefinition().withDestinationDefinitionId(UUID.randomUUID()).withCustom(false); SOURCE_DEFINITION_VERSION = new ActorDefinitionVersion().withVersionId(UUID.randomUUID()); DESTINATION_DEFINITION_VERSION = new ActorDefinitionVersion().withVersionId(UUID.randomUUID()); @@ -190,6 +213,9 @@ class DefaultJobCreatorTest { @BeforeEach void setup() { jobPersistence = mock(JobPersistence.class); + statePersistence = mock(StatePersistence.class); + refreshJobStateUpdater = new RefreshJobStateUpdater(statePersistence); + streamRefreshesRepository = mock(StreamRefreshesRepository.class); workerResourceRequirements = new ResourceRequirements() .withCpuLimit("0.2") .withCpuRequest("0.2") @@ -208,49 +234,369 @@ void setup() { resourceRequirementsProvider = mock(ResourceRequirementsProvider.class); when(resourceRequirementsProvider.getResourceRequirements(any(), any(), any())) .thenReturn(workerResourceRequirements); - jobCreator = new 
DefaultJobCreator(jobPersistence, resourceRequirementsProvider, new TestClient()); + generationBumper = mock(GenerationBumper.class); + jobCreator = + new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, new TestClient(), generationBumper, statePersistence, + refreshJobStateUpdater, + streamRefreshesRepository); + } + + @Test + void testCreateRefreshJobWithStreamState() throws IOException { + final String streamToRefresh = "name"; + final String streamToNotRefresh = "stream-not-refresh"; + final String streamNamespace = "namespace"; + + final FeatureFlagClient mFeatureFlagClient = mock(TestClient.class); + when(mFeatureFlagClient.boolVariation(eq(ActivateRefreshes.INSTANCE), any())).thenReturn(true); + when(mFeatureFlagClient.stringVariation(eq(UseResourceRequirementsVariant.INSTANCE), any())).thenReturn("default"); + when(jobPersistence.enqueueJob(any(), any())).thenReturn(Optional.of(1L)); + + final AirbyteStateMessage stateMessageFromRefreshStream = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToRefresh).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor", 1)))); + + final AirbyteStateMessage stateMessageFromNonRefreshStream = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToNotRefresh).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor-2", 2)))); + + final StateWrapper stateWrapper = new StateWrapper().withStateType(StateType.STREAM) + .withStateMessages(Arrays.asList(stateMessageFromRefreshStream, stateMessageFromNonRefreshStream)); + + when(statePersistence.getCurrentState(STANDARD_SYNC.getConnectionId())).thenReturn(Optional.of(stateWrapper)); + + jobCreator = + new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, mFeatureFlagClient, generationBumper, statePersistence, + refreshJobStateUpdater, + streamRefreshesRepository); + + final Optional expectedSourceType = Optional.of("database"); + final ResourceRequirements destStderrResourceRequirements = new ResourceRequirements().withCpuLimit("10"); + final ResourceRequirements destStdinResourceRequirements = new ResourceRequirements().withCpuLimit("11"); + final ResourceRequirements destStdoutResourceRequirements = new ResourceRequirements().withCpuLimit("12"); + final ResourceRequirements heartbeatResourceRequirements = new ResourceRequirements().withCpuLimit("13"); + final ResourceRequirements srcStderrResourceRequirements = new ResourceRequirements().withCpuLimit("14"); + final ResourceRequirements srcStdoutResourceRequirements = new ResourceRequirements().withCpuLimit("14"); + + mockResourcesRequirement(expectedSourceType, + destStderrResourceRequirements, + destStdinResourceRequirements, + destStdoutResourceRequirements, + heartbeatResourceRequirements, + srcStderrResourceRequirements, + srcStdoutResourceRequirements); + + final SyncResourceRequirements expectedSyncResourceRequirements = getExpectedResourcesRequirement(destStderrResourceRequirements, + destStdinResourceRequirements, + destStdoutResourceRequirements, + heartbeatResourceRequirements, + srcStderrResourceRequirements, + srcStdoutResourceRequirements); + + final RefreshConfig refreshConfig = getRefreshConfig(expectedSyncResourceRequirements, List.of( + new 
StreamDescriptor().withName(streamToRefresh).withNamespace(streamNamespace))); + + final JobConfig jobConfig = new JobConfig() + .withConfigType(ConfigType.REFRESH) + .withRefresh(refreshConfig); + + final String expectedScope = STANDARD_SYNC.getConnectionId().toString(); + when(jobPersistence.enqueueJob(expectedScope, jobConfig)).thenReturn(Optional.of(JOB_ID)); + + List refreshes = + List.of(new StreamRefresh(UUID.randomUUID(), STANDARD_SYNC.getConnectionId(), streamToRefresh, streamNamespace, null)); + + jobCreator.createRefreshConnection( + STANDARD_SYNC, + SOURCE_IMAGE_NAME, + SOURCE_PROTOCOL_VERSION, + DESTINATION_IMAGE_NAME, + DESTINATION_PROTOCOL_VERSION, + List.of(STANDARD_SYNC_OPERATION), + PERSISTED_WEBHOOK_CONFIGS, + STANDARD_SOURCE_DEFINITION_WITH_SOURCE_TYPE, + STANDARD_DESTINATION_DEFINITION, + SOURCE_DEFINITION_VERSION, + DESTINATION_DEFINITION_VERSION, + WORKSPACE_ID, + refreshes); + + verify(jobPersistence).enqueueJob(expectedScope, jobConfig); + verify(generationBumper).updateGenerationForStreams(STANDARD_SYNC.getConnectionId(), JOB_ID, refreshes); + + final StateWrapper expected = + new StateWrapper().withStateType(StateType.STREAM).withStateMessages(Collections.singletonList(stateMessageFromNonRefreshStream)); + verify(statePersistence).updateOrCreateState(STANDARD_SYNC.getConnectionId(), expected); + verify(streamRefreshesRepository).deleteByConnectionIdAndStreamNameAndStreamNamespace(STANDARD_SYNC.getConnectionId(), streamToRefresh, + streamNamespace); + } + + @Test + void testCreateRefreshJobWithGlobalState() throws IOException { + final String streamToRefresh = "name"; + final String streamToNotRefresh = "stream-not-refresh"; + final String streamNamespace = "namespace"; + + final FeatureFlagClient mFeatureFlagClient = mock(TestClient.class); + when(mFeatureFlagClient.boolVariation(eq(ActivateRefreshes.INSTANCE), any())).thenReturn(true); + when(mFeatureFlagClient.stringVariation(eq(UseResourceRequirementsVariant.INSTANCE), any())).thenReturn("default"); + when(jobPersistence.enqueueJob(any(), any())).thenReturn(Optional.of(1L)); + + final AirbyteStreamState stateMessageFromRefreshStream = new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToRefresh).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor", 1))); + + final AirbyteStreamState stateMessageFromNonRefreshStream = new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToNotRefresh).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor-2", 2))); + + final JsonNode sharedState = Jsons.jsonNode(ImmutableMap.of("shared-state", 5)); + + final AirbyteStateMessage existingStateMessage = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState().withSharedState(sharedState) + .withStreamStates(Arrays.asList(stateMessageFromRefreshStream, stateMessageFromNonRefreshStream))); + + final StateWrapper stateWrapper = new StateWrapper().withStateType(StateType.GLOBAL).withGlobal(existingStateMessage); + + when(statePersistence.getCurrentState(STANDARD_SYNC.getConnectionId())).thenReturn(Optional.of(stateWrapper)); + + jobCreator = + new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, mFeatureFlagClient, generationBumper, statePersistence, + refreshJobStateUpdater, + streamRefreshesRepository); + + final Optional expectedSourceType = Optional.of("database"); + final ResourceRequirements 
destStderrResourceRequirements = new ResourceRequirements().withCpuLimit("10"); + final ResourceRequirements destStdinResourceRequirements = new ResourceRequirements().withCpuLimit("11"); + final ResourceRequirements destStdoutResourceRequirements = new ResourceRequirements().withCpuLimit("12"); + final ResourceRequirements heartbeatResourceRequirements = new ResourceRequirements().withCpuLimit("13"); + final ResourceRequirements srcStderrResourceRequirements = new ResourceRequirements().withCpuLimit("14"); + final ResourceRequirements srcStdoutResourceRequirements = new ResourceRequirements().withCpuLimit("14"); + + mockResourcesRequirement(expectedSourceType, + destStderrResourceRequirements, + destStdinResourceRequirements, + destStdoutResourceRequirements, + heartbeatResourceRequirements, + srcStderrResourceRequirements, + srcStdoutResourceRequirements); + + final SyncResourceRequirements expectedSyncResourceRequirements = getExpectedResourcesRequirement(destStderrResourceRequirements, + destStdinResourceRequirements, + destStdoutResourceRequirements, + heartbeatResourceRequirements, + srcStderrResourceRequirements, + srcStdoutResourceRequirements); + + final RefreshConfig refreshConfig = getRefreshConfig(expectedSyncResourceRequirements, List.of( + new StreamDescriptor().withName(streamToRefresh).withNamespace(streamNamespace))); + + final JobConfig jobConfig = new JobConfig() + .withConfigType(ConfigType.REFRESH) + .withRefresh(refreshConfig); + + jobCreator.createRefreshConnection( + STANDARD_SYNC, + SOURCE_IMAGE_NAME, + SOURCE_PROTOCOL_VERSION, + DESTINATION_IMAGE_NAME, + DESTINATION_PROTOCOL_VERSION, + List.of(STANDARD_SYNC_OPERATION), + PERSISTED_WEBHOOK_CONFIGS, + STANDARD_SOURCE_DEFINITION_WITH_SOURCE_TYPE, + STANDARD_DESTINATION_DEFINITION, + SOURCE_DEFINITION_VERSION, + DESTINATION_DEFINITION_VERSION, + WORKSPACE_ID, + List.of(new StreamRefresh(UUID.randomUUID(), STANDARD_SYNC.getConnectionId(), streamToRefresh, streamNamespace, null))); + + final String expectedScope = STANDARD_SYNC.getConnectionId().toString(); + verify(jobPersistence).enqueueJob(expectedScope, jobConfig); + + final AirbyteStateMessage expectedStateMessage = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal( + new AirbyteGlobalState().withSharedState(sharedState).withStreamStates(Collections.singletonList(stateMessageFromNonRefreshStream))); + + final StateWrapper expected = new StateWrapper().withStateType(StateType.GLOBAL).withGlobal(expectedStateMessage); + verify(statePersistence).updateOrCreateState(STANDARD_SYNC.getConnectionId(), expected); + verify(streamRefreshesRepository).deleteByConnectionIdAndStreamNameAndStreamNamespace(STANDARD_SYNC.getConnectionId(), streamToRefresh, + streamNamespace); + } + + private static RefreshConfig getRefreshConfig(final SyncResourceRequirements expectedSyncResourceRequirements, + final List streamToRefresh) { + return new RefreshConfig() + .withNamespaceDefinition(STANDARD_SYNC.getNamespaceDefinition()) + .withNamespaceFormat(STANDARD_SYNC.getNamespaceFormat()) + .withPrefix(STANDARD_SYNC.getPrefix()) + .withSourceDockerImage(SOURCE_IMAGE_NAME) + .withSourceProtocolVersion(SOURCE_PROTOCOL_VERSION) + .withDestinationDockerImage(DESTINATION_IMAGE_NAME) + .withDestinationProtocolVersion(DESTINATION_PROTOCOL_VERSION) + .withConfiguredAirbyteCatalog(STANDARD_SYNC.getCatalog()) + .withOperationSequence(List.of(STANDARD_SYNC_OPERATION)) + .withSyncResourceRequirements(expectedSyncResourceRequirements) + 
.withWebhookOperationConfigs(PERSISTED_WEBHOOK_CONFIGS) + .withIsSourceCustomConnector(false) + .withIsDestinationCustomConnector(false) + .withWorkspaceId(WORKSPACE_ID) + .withSourceDefinitionVersionId(SOURCE_DEFINITION_VERSION.getVersionId()) + .withDestinationDefinitionVersionId(DESTINATION_DEFINITION_VERSION.getVersionId()) + .withStreamsToRefresh(streamToRefresh); + } + + @Test + void testCreateRefreshJobWithFullGlobalState() throws IOException { + final String streamToRefresh = "name"; + final String streamToRefresh2 = "stream-refresh2"; + final String streamNamespace = "namespace"; + + final FeatureFlagClient mFeatureFlagClient = mock(TestClient.class); + when(mFeatureFlagClient.boolVariation(eq(ActivateRefreshes.INSTANCE), any())).thenReturn(true); + when(mFeatureFlagClient.stringVariation(eq(UseResourceRequirementsVariant.INSTANCE), any())).thenReturn("default"); + when(jobPersistence.enqueueJob(any(), any())).thenReturn(Optional.of(1L)); + + final AirbyteStreamState stateMessageFromRefreshStream = new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToRefresh).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor", 1))); + + final AirbyteStreamState stateMessageFromNonRefreshStream = new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToRefresh2).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor-2", 2))); + + final JsonNode sharedState = Jsons.jsonNode(ImmutableMap.of("shared-state", 5)); + + final AirbyteStateMessage existingStateMessage = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState().withSharedState(sharedState) + .withStreamStates(Arrays.asList(stateMessageFromRefreshStream, stateMessageFromNonRefreshStream))); + + final StateWrapper stateWrapper = new StateWrapper().withStateType(StateType.GLOBAL).withGlobal(existingStateMessage); + + when(statePersistence.getCurrentState(STANDARD_SYNC.getConnectionId())).thenReturn(Optional.of(stateWrapper)); + + jobCreator = + new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, mFeatureFlagClient, generationBumper, statePersistence, + refreshJobStateUpdater, + streamRefreshesRepository); + + final Optional expectedSourceType = Optional.of("database"); + final ResourceRequirements destStderrResourceRequirements = new ResourceRequirements().withCpuLimit("10"); + final ResourceRequirements destStdinResourceRequirements = new ResourceRequirements().withCpuLimit("11"); + final ResourceRequirements destStdoutResourceRequirements = new ResourceRequirements().withCpuLimit("12"); + final ResourceRequirements heartbeatResourceRequirements = new ResourceRequirements().withCpuLimit("13"); + final ResourceRequirements srcStderrResourceRequirements = new ResourceRequirements().withCpuLimit("14"); + final ResourceRequirements srcStdoutResourceRequirements = new ResourceRequirements().withCpuLimit("14"); + + mockResourcesRequirement(expectedSourceType, + destStderrResourceRequirements, + destStdinResourceRequirements, + destStdoutResourceRequirements, + heartbeatResourceRequirements, + srcStderrResourceRequirements, + srcStdoutResourceRequirements); + + final SyncResourceRequirements expectedSyncResourceRequirements = getExpectedResourcesRequirement(destStderrResourceRequirements, + destStdinResourceRequirements, + destStdoutResourceRequirements, + heartbeatResourceRequirements, + srcStderrResourceRequirements, + 
srcStdoutResourceRequirements); + + final RefreshConfig refreshConfig = getRefreshConfig(expectedSyncResourceRequirements, List.of( + new StreamDescriptor().withName(streamToRefresh).withNamespace(streamNamespace), + new StreamDescriptor().withName(streamToRefresh2).withNamespace(streamNamespace))); + + final JobConfig jobConfig = new JobConfig() + .withConfigType(ConfigType.REFRESH) + .withRefresh(refreshConfig); + + jobCreator.createRefreshConnection( + STANDARD_SYNC, + SOURCE_IMAGE_NAME, + SOURCE_PROTOCOL_VERSION, + DESTINATION_IMAGE_NAME, + DESTINATION_PROTOCOL_VERSION, + List.of(STANDARD_SYNC_OPERATION), + PERSISTED_WEBHOOK_CONFIGS, + STANDARD_SOURCE_DEFINITION_WITH_SOURCE_TYPE, + STANDARD_DESTINATION_DEFINITION, + SOURCE_DEFINITION_VERSION, + DESTINATION_DEFINITION_VERSION, + WORKSPACE_ID, + List.of(new StreamRefresh(UUID.randomUUID(), STANDARD_SYNC.getConnectionId(), streamToRefresh, streamNamespace, null), + new StreamRefresh(UUID.randomUUID(), STANDARD_SYNC.getConnectionId(), streamToRefresh2, streamNamespace, null))); + + final String expectedScope = STANDARD_SYNC.getConnectionId().toString(); + verify(jobPersistence).enqueueJob(expectedScope, jobConfig); + + final AirbyteStateMessage expectedStateMessage = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState().withSharedState(null).withStreamStates(Collections.emptyList())); + + final StateWrapper expected = new StateWrapper().withStateType(StateType.GLOBAL).withGlobal(expectedStateMessage); + verify(statePersistence).updateOrCreateState(STANDARD_SYNC.getConnectionId(), expected); + verify(streamRefreshesRepository).deleteByConnectionIdAndStreamNameAndStreamNamespace(STANDARD_SYNC.getConnectionId(), streamToRefresh, + streamNamespace); + verify(streamRefreshesRepository).deleteByConnectionIdAndStreamNameAndStreamNamespace(STANDARD_SYNC.getConnectionId(), streamToRefresh2, + streamNamespace); + } + + @Test + void testFailToCreateRefreshIfNotAllowed() { + final FeatureFlagClient mFeatureFlagClient = mock(TestClient.class); + when(mFeatureFlagClient.boolVariation(eq(ActivateRefreshes.INSTANCE), any())).thenReturn(false); + jobCreator = + new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, mFeatureFlagClient, generationBumper, statePersistence, + refreshJobStateUpdater, + streamRefreshesRepository); + + assertThrows(IllegalStateException.class, () -> jobCreator.createRefreshConnection( + STANDARD_SYNC, + SOURCE_IMAGE_NAME, + SOURCE_PROTOCOL_VERSION, + DESTINATION_IMAGE_NAME, + DESTINATION_PROTOCOL_VERSION, + List.of(STANDARD_SYNC_OPERATION), + PERSISTED_WEBHOOK_CONFIGS, + STANDARD_SOURCE_DEFINITION_WITH_SOURCE_TYPE, + STANDARD_DESTINATION_DEFINITION, + SOURCE_DEFINITION_VERSION, + DESTINATION_DEFINITION_VERSION, + WORKSPACE_ID, + List.of())); } @Test void testCreateSyncJob() throws IOException { final Optional expectedSourceType = Optional.of("database"); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.ORCHESTRATOR, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(workerResourceRequirements); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.SOURCE, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(sourceResourceRequirements); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(destResourceRequirements); - // More explicit resource requirements to verify data mapping final 
ResourceRequirements destStderrResourceRequirements = new ResourceRequirements().withCpuLimit("10"); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION_STDERR, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(destStderrResourceRequirements); final ResourceRequirements destStdinResourceRequirements = new ResourceRequirements().withCpuLimit("11"); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION_STDIN, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(destStdinResourceRequirements); final ResourceRequirements destStdoutResourceRequirements = new ResourceRequirements().withCpuLimit("12"); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION_STDOUT, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(destStdoutResourceRequirements); final ResourceRequirements heartbeatResourceRequirements = new ResourceRequirements().withCpuLimit("13"); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.HEARTBEAT, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(heartbeatResourceRequirements); final ResourceRequirements srcStderrResourceRequirements = new ResourceRequirements().withCpuLimit("14"); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.SOURCE_STDERR, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(srcStderrResourceRequirements); final ResourceRequirements srcStdoutResourceRequirements = new ResourceRequirements().withCpuLimit("14"); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.SOURCE_STDOUT, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(srcStdoutResourceRequirements); - final SyncResourceRequirements expectedSyncResourceRequirements = new SyncResourceRequirements() - .withConfigKey(new SyncResourceRequirementsKey().withVariant(DEFAULT_VARIANT).withSubType("database")) - .withDestination(destResourceRequirements) - .withDestinationStdErr(destStderrResourceRequirements) - .withDestinationStdIn(destStdinResourceRequirements) - .withDestinationStdOut(destStdoutResourceRequirements) - .withOrchestrator(workerResourceRequirements) - .withHeartbeat(heartbeatResourceRequirements) - .withSource(sourceResourceRequirements) - .withSourceStdErr(srcStderrResourceRequirements) - .withSourceStdOut(srcStdoutResourceRequirements); + mockResourcesRequirement(expectedSourceType, + destStderrResourceRequirements, + destStdinResourceRequirements, + destStdoutResourceRequirements, + heartbeatResourceRequirements, + srcStderrResourceRequirements, + srcStdoutResourceRequirements); + + final SyncResourceRequirements expectedSyncResourceRequirements = getExpectedResourcesRequirement(destStderrResourceRequirements, + destStdinResourceRequirements, + destStdoutResourceRequirements, + heartbeatResourceRequirements, + srcStderrResourceRequirements, + srcStdoutResourceRequirements); final JobSyncConfig jobSyncConfig = new JobSyncConfig() .withNamespaceDefinition(STANDARD_SYNC.getNamespaceDefinition()) @@ -294,6 +640,53 @@ void testCreateSyncJob() throws IOException { verify(jobPersistence).enqueueJob(expectedScope, jobConfig); } + private void mockResourcesRequirement(final Optional expectedSourceType, + final ResourceRequirements destStderrResourceRequirements, + final ResourceRequirements destStdinResourceRequirements, + final ResourceRequirements destStdoutResourceRequirements, + final ResourceRequirements heartbeatResourceRequirements, + final ResourceRequirements 
srcStderrResourceRequirements, + final ResourceRequirements srcStdoutResourceRequirements) { + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.ORCHESTRATOR, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(workerResourceRequirements); + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.SOURCE, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(sourceResourceRequirements); + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(destResourceRequirements); + // More explicit resource requirements to verify data mapping + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION_STDERR, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(destStderrResourceRequirements); + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION_STDIN, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(destStdinResourceRequirements); + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION_STDOUT, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(destStdoutResourceRequirements); + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.HEARTBEAT, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(heartbeatResourceRequirements); + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.SOURCE_STDERR, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(srcStderrResourceRequirements); + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.SOURCE_STDOUT, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(srcStdoutResourceRequirements); + } + + private SyncResourceRequirements getExpectedResourcesRequirement(final ResourceRequirements destStderrResourceRequirements, + final ResourceRequirements destStdinResourceRequirements, + final ResourceRequirements destStdoutResourceRequirements, + final ResourceRequirements heartbeatResourceRequirements, + final ResourceRequirements srcStderrResourceRequirements, + final ResourceRequirements srcStdoutResourceRequirements) { + return new SyncResourceRequirements() + .withConfigKey(new SyncResourceRequirementsKey().withVariant(DEFAULT_VARIANT).withSubType("database")) + .withDestination(destResourceRequirements) + .withDestinationStdErr(destStderrResourceRequirements) + .withDestinationStdIn(destStdinResourceRequirements) + .withDestinationStdOut(destStdoutResourceRequirements) + .withOrchestrator(workerResourceRequirements) + .withHeartbeat(heartbeatResourceRequirements) + .withSource(sourceResourceRequirements) + .withSourceStdErr(srcStderrResourceRequirements) + .withSourceStdOut(srcStdoutResourceRequirements); + } + @Test void testCreateSyncJobEnsureNoQueuing() throws IOException { final JobSyncConfig jobSyncConfig = new JobSyncConfig() @@ -548,7 +941,9 @@ void testDestinationResourceReqsOverrides(final String cpuReqOverride, .withMemoryRequest("800Mi"); final var jobCreator = new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, - new TestClient(Map.of(DestResourceOverrides.INSTANCE.getKey(), Jsons.serialize(overrides)))); + new TestClient(Map.of(DestResourceOverrides.INSTANCE.getKey(), Jsons.serialize(overrides))), generationBumper, statePersistence, + refreshJobStateUpdater, + streamRefreshesRepository); jobCreator.createSyncJob( SOURCE_CONNECTION, @@ -612,7 +1007,8 @@ void 
testOrchestratorResourceReqsOverrides(final String cpuReqOverride, .withMemoryRequest("800Mi"); final var jobCreator = new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, - new TestClient(Map.of(OrchestratorResourceOverrides.INSTANCE.getKey(), Jsons.serialize(overrides)))); + new TestClient(Map.of(OrchestratorResourceOverrides.INSTANCE.getKey(), Jsons.serialize(overrides))), generationBumper, statePersistence, + refreshJobStateUpdater, streamRefreshesRepository); final var standardSync = new StandardSync() .withConnectionId(UUID.randomUUID()) @@ -688,7 +1084,8 @@ void testSourceResourceReqsOverrides(final String cpuReqOverride, .withMemoryRequest("800Mi"); final var jobCreator = new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, - new TestClient(Map.of(SourceResourceOverrides.INSTANCE.getKey(), Jsons.serialize(overrides)))); + new TestClient(Map.of(SourceResourceOverrides.INSTANCE.getKey(), Jsons.serialize(overrides))), generationBumper, statePersistence, + refreshJobStateUpdater, streamRefreshesRepository); jobCreator.createSyncJob( SOURCE_CONNECTION, @@ -744,7 +1141,9 @@ void ignoresOverridesIfJsonStringWeird(final String weirdness) throws IOExceptio .withMemoryRequest("800Mi"); final var jobCreator = new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, - new TestClient(Map.of(DestResourceOverrides.INSTANCE.getKey(), Jsons.serialize(weirdness)))); + new TestClient(Map.of(DestResourceOverrides.INSTANCE.getKey(), Jsons.serialize(weirdness))), generationBumper, statePersistence, + refreshJobStateUpdater, + streamRefreshesRepository); jobCreator.createSyncJob( SOURCE_CONNECTION, diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java index d215448f72a..1f614dee095 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java +++ b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java @@ -2034,6 +2034,35 @@ void testListJobsWithMultipleAttemptsInDescOrder() throws IOException { assertEquals(jobId2, actualList.get(0).getId()); } + @Test + @DisplayName("Should apply limits after ordering by the key provided by the caller") + void testListJobsOrderedByUpdatedAt() throws IOException { + + final var jobId1 = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); + final var job1Attempt1 = jobPersistence.createAttempt(jobId1, LOG_PATH); + + final var laterTime = NOW.plusSeconds(1000); + when(timeSupplier.get()).thenReturn(laterTime); + final var jobId2 = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); + final var job2Attempt1LogPath = LOG_PATH.resolve("3"); + final var job2Attempt1 = jobPersistence.createAttempt(jobId2, job2Attempt1LogPath); + jobPersistence.succeedAttempt(jobId2, job2Attempt1); + + final var evenLaterTime = NOW.plusSeconds(3000); + when(timeSupplier.get()).thenReturn(evenLaterTime); + jobPersistence.succeedAttempt(jobId1, job1Attempt1); + + String configId = null; + final List updatedAtJobs = + jobPersistence.listJobs(Set.of(SPEC_JOB_CONFIG.getConfigType()), configId, 1, 0, null, null, null, null, null, "UPDATED_AT", "ASC"); + assertEquals(1, updatedAtJobs.size()); + assertEquals(jobId2, updatedAtJobs.get(0).getId()); + final List createdAtJobs = + jobPersistence.listJobs(Set.of(SPEC_JOB_CONFIG.getConfigType()), configId, 1, 
diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactoryTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactoryTest.java index 95321dc475f..bb3da2b6896 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactoryTest.java +++ b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactoryTest.java @@ -134,7 +134,7 @@ void createSyncJobFromConnectionId() throws JsonValidationException, ConfigNotFo final SyncJobFactory factory = new DefaultSyncJobFactory(true, jobCreator, configRepository, oAuthConfigSupplier, configInjector, workspaceHelper, actorDefinitionVersionHelper); - final long actualJobId = factory.create(connectionId); + final long actualJobId = factory.createSync(connectionId); assertEquals(jobId, actualJobId); verify(jobCreator) diff --git a/airbyte-proxy/build.gradle.kts b/airbyte-proxy/build.gradle.kts index 95fd2f83bad..50e0d5510b2 100644 --- a/airbyte-proxy/build.gradle.kts +++ b/airbyte-proxy/build.gradle.kts @@ -1,40 +1,40 @@ plugins { - id("io.airbyte.gradle.jvm") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") } airbyte { - docker { - imageName = "proxy" - } + docker { + imageName = "proxy" + } } val prepareBuild = tasks.register<Copy>("prepareBuild") { - from(layout.projectDirectory.file("nginx-auth.conf.template")) - from(layout.projectDirectory.file("nginx-no-auth.conf.template")) - from(layout.projectDirectory.file("run.sh")) - from(layout.projectDirectory.file("401.html")) + from(layout.projectDirectory.file("nginx-auth.conf.template")) + from(layout.projectDirectory.file("nginx-no-auth.conf.template")) + from(layout.projectDirectory.file("run.sh")) + from(layout.projectDirectory.file("401.html")) - into(layout.buildDirectory.dir("airbyte/docker")) + into(layout.buildDirectory.dir("airbyte/docker")) } tasks.named("dockerBuildImage") { - dependsOn(prepareBuild) - inputs.file("../.env") + dependsOn(prepareBuild) + inputs.file("../.env") } val bashTest = tasks.register<Exec>("bashTest") { - inputs.file(layout.projectDirectory.file("nginx-auth.conf.template")) - inputs.file(layout.projectDirectory.file("nginx-no-auth.conf.template")) - inputs.file(layout.projectDirectory.file("run.sh")) - inputs.file(layout.projectDirectory.file("401.html")) - outputs.upToDateWhen { true } - dependsOn(tasks.named("dockerBuildImage")) - commandLine("./test.sh") + inputs.file(layout.projectDirectory.file("nginx-auth.conf.template")) + inputs.file(layout.projectDirectory.file("nginx-no-auth.conf.template")) + inputs.file(layout.projectDirectory.file("run.sh")) + inputs.file(layout.projectDirectory.file("401.html")) + outputs.upToDateWhen { true } + dependsOn(tasks.named("dockerBuildImage")) + commandLine("./test.sh") } // we can't override the "test" command, so we can make our bash test a dependency) tasks.named("test") { - dependsOn(bashTest) + dependsOn(bashTest) } diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index 6efa706ddf7..1e7865f87c9 100644 ---
a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -1,19 +1,16 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.1.0 -FROM ${JDK_IMAGE} AS server +ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.2.1 -EXPOSE 8000 5005 +FROM scratch as builder +WORKDIR /app +ADD airbyte-app.tar /app +FROM ${JDK_IMAGE} AS server +EXPOSE 8000 5005 ARG VERSION=dev - ENV APPLICATION airbyte-server ENV VERSION ${VERSION} - WORKDIR /app - -# This is automatically unzipped by Docker -USER root -ADD airbyte-app.tar /app -RUN chown -R airbyte:airbyte /app +COPY --chown=airbyte:airbyte --from=builder /app /app USER airbyte:airbyte # wait for upstream dependencies to become available before starting server diff --git a/airbyte-server/readme.md b/airbyte-server/README.md similarity index 100% rename from airbyte-server/readme.md rename to airbyte-server/README.md diff --git a/airbyte-server/build.gradle.kts b/airbyte-server/build.gradle.kts index 36b244db7cd..33918acbab2 100644 --- a/airbyte-server/build.gradle.kts +++ b/airbyte-server/build.gradle.kts @@ -1,180 +1,186 @@ import java.util.Properties plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") + kotlin("jvm") + kotlin("kapt") } configurations.all { - resolutionStrategy { - // Ensure that the versions defined in deps.toml are used) - // instead of versions from transitive dependencies) - // Force to avoid updated version(brought in transitively from Micronaut 3.8+) - // that is incompatible with our current Helm setup) - force (libs.flyway.core, libs.s3, libs.aws.java.sdk.s3, libs.sts, libs.aws.java.sdk.sts) - } + resolutionStrategy { + // Ensure that the versions defined in deps.toml are used) + // instead of versions from transitive dependencies) + // Force to avoid updated version(brought in transitively from Micronaut 3.8+) + // that is incompatible with our current Helm setup) + force(libs.flyway.core, libs.s3, libs.aws.java.sdk.s3, libs.sts, libs.aws.java.sdk.sts) + } } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) - annotationProcessor(libs.micronaut.jaxrs.processor) - - kapt(platform(libs.micronaut.platform)) - kapt(libs.bundles.micronaut.annotation.processor) - kapt(libs.micronaut.jaxrs.processor) - - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.bundles.micronaut.cache) - implementation(libs.bundles.micronaut.data.jdbc) - implementation(libs.bundles.micronaut.metrics) - implementation(libs.micronaut.jaxrs.server) - implementation(libs.micronaut.http) - implementation(libs.micronaut.security) - implementation(libs.bundles.flyway) - implementation(libs.s3) - implementation(libs.sts) - implementation(libs.aws.java.sdk.s3) - implementation(libs.aws.java.sdk.sts) - implementation(libs.reactor.core) - implementation(libs.slugify) - implementation(libs.temporal.sdk) - implementation(libs.bundles.datadog) - implementation(libs.sentry.java) - implementation(libs.swagger.annotations) - implementation(libs.google.cloud.storage) - implementation(libs.cron.utils) - implementation(libs.log4j.slf4j2.impl) // Because cron-utils uses slf4j 2.0+ - implementation(libs.jakarta.ws.rs.api) - 
implementation(libs.jakarta.validation.api ) - - implementation(project(":airbyte-analytics")) - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-auth")) - implementation(project(":airbyte-commons-converters")) - implementation(project(":airbyte-commons-license")) - implementation(project(":airbyte-commons-micronaut")) - implementation(project(":airbyte-commons-micronaut-security")) - implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-commons-temporal-core")) - implementation(project(":airbyte-commons-server")) - implementation(project(":airbyte-commons-with-dependencies")) - implementation(project(":airbyte-config:init")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-config:config-secrets")) - implementation(project(":airbyte-config:specs")) - implementation(project(":airbyte-data")) - implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-db:jooq")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-notification")) - implementation(project(":airbyte-oauth")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-persistence:job-persistence")) - - runtimeOnly(libs.javax.databind) - - // Required for local database secret hydration) - runtimeOnly(libs.hikaricp) - runtimeOnly(libs.h2.database) - - testCompileOnly(libs.lombok) - testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) - testAnnotationProcessor(libs.micronaut.jaxrs.processor) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - - testImplementation(libs.bundles.micronaut.test) - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.postgresql) - testImplementation(libs.platform.testcontainers.postgresql) - testImplementation(libs.mockwebserver) - testImplementation(libs.mockito.inline) - testImplementation(libs.reactor.test) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) - testImplementation(libs.mockk) - testImplementation(libs.micronaut.http.client) - - testRuntimeOnly(libs.junit.jupiter.engine) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) + annotationProcessor(libs.micronaut.jaxrs.processor) + + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) + kapt(libs.micronaut.jaxrs.processor) + + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.micronaut.cache) + implementation(libs.bundles.micronaut.data.jdbc) + implementation(libs.bundles.micronaut.metrics) + implementation(libs.micronaut.jaxrs.server) + implementation(libs.micronaut.http) + implementation(libs.micronaut.security) + implementation(libs.bundles.flyway) + implementation(libs.s3) + implementation(libs.sts) + implementation(libs.aws.java.sdk.s3) + implementation(libs.aws.java.sdk.sts) + 
implementation(libs.reactor.core) + implementation(libs.slugify) + implementation(libs.temporal.sdk) + implementation(libs.bundles.datadog) + implementation(libs.sentry.java) + implementation(libs.swagger.annotations) + implementation(libs.google.cloud.storage) + implementation(libs.cron.utils) + implementation(libs.log4j.slf4j2.impl) // Because cron-utils uses slf4j 2.0+ + implementation(libs.jakarta.ws.rs.api) + implementation(libs.jakarta.validation.api) + + implementation(project(":airbyte-analytics")) + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-auth")) + implementation(project(":airbyte-commons-converters")) + implementation(project(":airbyte-commons-license")) + implementation(project(":airbyte-commons-micronaut")) + implementation(project(":airbyte-commons-micronaut-security")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-commons-temporal-core")) + implementation(project(":airbyte-commons-server")) + implementation(project(":airbyte-commons-with-dependencies")) + implementation(project(":airbyte-config:init")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-config:config-secrets")) + implementation(project(":airbyte-config:specs")) + implementation(project(":airbyte-data")) + implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-db:jooq")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-notification")) + implementation(project(":airbyte-oauth")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-persistence:job-persistence")) + + runtimeOnly(libs.javax.databind) + + // Required for local database secret hydration) + runtimeOnly(libs.hikaricp) + runtimeOnly(libs.h2.database) + + testCompileOnly(libs.lombok) + testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) + testAnnotationProcessor(libs.micronaut.jaxrs.processor) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + + testImplementation(libs.bundles.micronaut.test) + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.postgresql) + testImplementation(libs.platform.testcontainers.postgresql) + testImplementation(libs.mockwebserver) + testImplementation(libs.mockito.inline) + testImplementation(libs.reactor.test) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) + testImplementation(libs.mockk) + testImplementation(libs.micronaut.http.client) + + testRuntimeOnly(libs.junit.jupiter.engine) } // we want to be able to access the generated db files from config/init when we build the server docker image.) 
val copySeed = tasks.register<Copy>("copySeed") { - from ("${project(":airbyte-config:init").buildDir}/resources/main/config") - into ("$buildDir/config_init/resources/main/config") - dependsOn(project(":airbyte-config:init").tasks.named("processResources")) + from("${project(":airbyte-config:init").buildDir}/resources/main/config") + into("$buildDir/config_init/resources/main/config") + dependsOn(project(":airbyte-config:init").tasks.named("processResources")) } // need to make sure that the files are in the resource directory before copying.) // tests require the seed to exist.) tasks.named("test") { - dependsOn(copySeed) + dependsOn(copySeed) } tasks.named("assemble") { - dependsOn(copySeed) + dependsOn(copySeed) } val env = Properties().apply { - load(rootProject.file(".env.dev").inputStream()) + load(rootProject.file(".env.dev").inputStream()) } airbyte { - application { - mainClass = "io.airbyte.server.Application" - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - @Suppress("UNCHECKED_CAST") - localEnvVars.putAll(env.toMap() as Map<String, String>) - localEnvVars.putAll(mapOf( - "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: "undefined"), - "AIRBYTE_VERSION" to env["VERSION"].toString(), - "DATABASE_USER" to env["DATABASE_USER"].toString(), - "DATABASE_PASSWORD" to env["DATABASE_PASSWORD"].toString(), - "CONFIG_DATABASE_USER" to (env["CONFIG_DATABASE_USER"]?.toString() ?: ""), - "CONFIG_DATABASE_PASSWORD" to (env["CONFIG_DATABASE_PASSWORD"]?.toString() ?: ""), - // we map the docker pg db to port 5433 so it does not conflict with other pg instances. - "DATABASE_URL" to "jdbc:postgresql://localhost:5433/${env["DATABASE_DB"]}", - "CONFIG_DATABASE_URL" to "jdbc:postgresql://localhost:5433/${env["CONFIG_DATABASE_DB"]}", - "RUN_DATABASE_MIGRATION_ON_STARTUP" to "true", - "WORKSPACE_ROOT" to env["WORKSPACE_ROOT"].toString(), - "CONFIG_ROOT" to "/tmp/airbyte_config", - "TRACKING_STRATEGY" to env["TRACKING_STRATEGY"].toString(), - "TEMPORAL_HOST" to "localhost:7233", - "MICRONAUT_ENVIRONMENTS" to "control-plane", - )) - } - - docker { - imageName = "server" - } - - spotbugs { - excludes = listOf(" \n" + - " \n" + - " \n" + - " \n" + - " ") - } + application { + mainClass = "io.airbyte.server.Application" + defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + @Suppress("UNCHECKED_CAST") + localEnvVars.putAll(env.toMap() as Map<String, String>) + localEnvVars.putAll( + mapOf( + "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: "undefined"), + "AIRBYTE_VERSION" to env["VERSION"].toString(), + "DATABASE_USER" to env["DATABASE_USER"].toString(), + "DATABASE_PASSWORD" to env["DATABASE_PASSWORD"].toString(), + "CONFIG_DATABASE_USER" to (env["CONFIG_DATABASE_USER"]?.toString() ?: ""), + "CONFIG_DATABASE_PASSWORD" to (env["CONFIG_DATABASE_PASSWORD"]?.toString() ?: ""), + // we map the docker pg db to port 5433 so it does not conflict with other pg instances.
+ "DATABASE_URL" to "jdbc:postgresql://localhost:5433/${env["DATABASE_DB"]}", + "CONFIG_DATABASE_URL" to "jdbc:postgresql://localhost:5433/${env["CONFIG_DATABASE_DB"]}", + "RUN_DATABASE_MIGRATION_ON_STARTUP" to "true", + "WORKSPACE_ROOT" to env["WORKSPACE_ROOT"].toString(), + "CONFIG_ROOT" to "/tmp/airbyte_config", + "TRACKING_STRATEGY" to env["TRACKING_STRATEGY"].toString(), + "TEMPORAL_HOST" to "localhost:7233", + "MICRONAUT_ENVIRONMENTS" to "control-plane", + ) + ) + } + + docker { + imageName = "server" + } + + spotbugs { + excludes = listOf( + " \n" + + " \n" + + " \n" + + " \n" + + " " + ) + } } tasks.named("test") { - environment(mapOf( - "AIRBYTE_VERSION" to env["VERSION"], - "MICRONAUT_ENVIRONMENTS" to "test", - "SERVICE_NAME" to project.name, - )) + environment( + mapOf( + "AIRBYTE_VERSION" to env["VERSION"], + "MICRONAUT_ENVIRONMENTS" to "test", + "SERVICE_NAME" to project.name, + ) + ) } // The DuplicatesStrategy will be required while this module is mixture of kotlin and java _with_ lombok dependencies.) @@ -183,5 +189,5 @@ tasks.named("test") { // keepJavacAnnotationProcessors enabled, which causes duplicate META-INF files to be generated.) // Once lombok has been removed, this can also be removed.) tasks.withType().configureEach { - duplicatesStrategy = DuplicatesStrategy.EXCLUDE + duplicatesStrategy = DuplicatesStrategy.EXCLUDE } \ No newline at end of file diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java index 5163598a126..a44047f7b32 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java @@ -12,6 +12,7 @@ import io.airbyte.api.generated.ConnectionApi; import io.airbyte.api.model.generated.ActorDefinitionRequestBody; +import io.airbyte.api.model.generated.BooleanRead; import io.airbyte.api.model.generated.ConnectionAutoPropagateResult; import io.airbyte.api.model.generated.ConnectionAutoPropagateSchemaChange; import io.airbyte.api.model.generated.ConnectionCreate; @@ -25,6 +26,7 @@ import io.airbyte.api.model.generated.ConnectionStatusesRequestBody; import io.airbyte.api.model.generated.ConnectionStreamHistoryReadItem; import io.airbyte.api.model.generated.ConnectionStreamHistoryRequestBody; +import io.airbyte.api.model.generated.ConnectionStreamRefreshRequestBody; import io.airbyte.api.model.generated.ConnectionStreamRequestBody; import io.airbyte.api.model.generated.ConnectionSyncProgressReadItem; import io.airbyte.api.model.generated.ConnectionSyncResultRead; @@ -41,6 +43,7 @@ import io.airbyte.commons.server.handlers.MatchSearchHandler; import io.airbyte.commons.server.handlers.OperationsHandler; import io.airbyte.commons.server.handlers.SchedulerHandler; +import io.airbyte.commons.server.handlers.StreamRefreshesHandler; import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors; import io.airbyte.commons.temporal.TemporalJobType; import io.airbyte.commons.temporal.scheduling.RouterService; @@ -54,6 +57,7 @@ import io.micronaut.scheduling.annotation.ExecuteOn; import io.micronaut.security.annotation.Secured; import io.micronaut.security.rules.SecurityRule; +import java.util.ArrayList; import java.util.List; @Controller("/api/v1/connections") @@ -67,19 +71,22 @@ public class ConnectionApiController implements ConnectionApi { private final RouterService routerService; private final StreamStatusesHandler 
streamStatusesHandler; private final MatchSearchHandler matchSearchHandler; + private final StreamRefreshesHandler streamRefreshesHandler; public ConnectionApiController(final ConnectionsHandler connectionsHandler, final OperationsHandler operationsHandler, final SchedulerHandler schedulerHandler, final RouterService routerService, final StreamStatusesHandler streamStatusesHandler, - final MatchSearchHandler matchSearchHandler) { + final MatchSearchHandler matchSearchHandler, + final StreamRefreshesHandler streamRefreshesHandler) { this.connectionsHandler = connectionsHandler; this.operationsHandler = operationsHandler; this.schedulerHandler = schedulerHandler; this.routerService = routerService; this.streamStatusesHandler = streamStatusesHandler; this.matchSearchHandler = matchSearchHandler; + this.streamRefreshesHandler = streamRefreshesHandler; } @Override @@ -124,6 +131,16 @@ public ConnectionReadList listConnectionsForWorkspacesPaginated( return ApiHelper.execute(() -> connectionsHandler.listConnectionsForWorkspaces(listConnectionsForWorkspacesRequestBody)); } + @Post(uri = "/refresh") + @Secured({WORKSPACE_EDITOR, ORGANIZATION_EDITOR}) + @ExecuteOn(AirbyteTaskExecutors.SCHEDULER) + @Override + public BooleanRead refreshConnectionStream(@Body final ConnectionStreamRefreshRequestBody connectionStreamRefreshRequestBody) { + return ApiHelper.execute(() -> new BooleanRead().value(streamRefreshesHandler.createRefreshesForConnection( + connectionStreamRefreshRequestBody.getConnectionId(), + connectionStreamRefreshRequestBody.getStreams() != null ? connectionStreamRefreshRequestBody.getStreams() : new ArrayList<>()))); + } + @Override @Post(uri = "/list_all") @Secured({WORKSPACE_READER, ORGANIZATION_READER}) @@ -234,6 +251,22 @@ public JobInfoRead resetConnectionStream(@Body final ConnectionStreamRequestBody return ApiHelper.execute(() -> schedulerHandler.resetConnectionStream(connectionStreamRequestBody)); } + @Override + @Post(uri = "/clear") + @Secured({WORKSPACE_EDITOR, ORGANIZATION_EDITOR}) + @ExecuteOn(AirbyteTaskExecutors.SCHEDULER) + public JobInfoRead clearConnection(@Body ConnectionIdRequestBody connectionIdRequestBody) { + return ApiHelper.execute(() -> schedulerHandler.resetConnection(connectionIdRequestBody)); + } + + @Override + @Post(uri = "/clear/stream") + @Secured({WORKSPACE_EDITOR, ORGANIZATION_EDITOR}) + @ExecuteOn(AirbyteTaskExecutors.SCHEDULER) + public JobInfoRead clearConnectionStream(@Body ConnectionStreamRequestBody connectionStreamRequestBody) { + return ApiHelper.execute(() -> schedulerHandler.resetConnectionStream(connectionStreamRequestBody)); + } + @Override @Post(uri = "/apply_schema_change") @Secured({WORKSPACE_EDITOR, ORGANIZATION_EDITOR})
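A rough client-side sketch of the new `/refresh` route added above. The host, port, auth, and the exact JSON shape of `ConnectionStreamRefreshRequestBody` and `BooleanRead` are assumptions made for illustration; only the route, the editor-level security rule, and the empty-streams fallback come from the diff itself.

```kotlin
import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse

fun main() {
    // Placeholder connection id; an empty "streams" list asks to refresh everything,
    // mirroring the ArrayList fallback in the controller above.
    val payload = """{"connectionId":"00000000-0000-0000-0000-000000000000","streams":[]}"""
    val request = HttpRequest.newBuilder()
        .uri(URI.create("http://localhost:8000/api/v1/connections/refresh"))
        .header("Content-Type", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString(payload))
        .build()
    val response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString())
    println("${response.statusCode()} ${response.body()}") // a BooleanRead, e.g. {"value":true}
}
```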
diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/PermissionApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/PermissionApiController.java index 15079cfc05a..2419284f5ce 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/PermissionApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/PermissionApiController.java @@ -81,10 +81,11 @@ public PermissionRead getPermission(@Body final PermissionIdRequestBody permissi @Secured({ORGANIZATION_ADMIN, WORKSPACE_ADMIN}) @Post("/update") @Override - public PermissionRead updatePermission(@Body final PermissionUpdate permissionUpdate) { - return ApiHelper.execute(() -> { validatePermissionUpdate(permissionUpdate); - return permissionHandler.updatePermission(permissionUpdate); + public void updatePermission(@Body final PermissionUpdate permissionUpdate) { + ApiHelper.execute(() -> { validatePermissionUpdate(permissionUpdate); + permissionHandler.updatePermission(permissionUpdate); + return null; }); } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/UserInvitationApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/UserInvitationApiController.java index 5ccd003beaa..83faa81a361 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/UserInvitationApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/UserInvitationApiController.java @@ -5,7 +5,9 @@ package io.airbyte.server.apis; import static io.airbyte.commons.auth.AuthRoleConstants.ORGANIZATION_ADMIN; +import static io.airbyte.commons.auth.AuthRoleConstants.ORGANIZATION_READER; import static io.airbyte.commons.auth.AuthRoleConstants.WORKSPACE_ADMIN; +import static io.airbyte.commons.auth.AuthRoleConstants.WORKSPACE_READER; import io.airbyte.api.generated.UserInvitationApi; import io.airbyte.api.model.generated.InviteCodeRequestBody; @@ -63,7 +65,7 @@ public UserInvitationRead getUserInvitation(@PathParam("inviteCode") final Strin @Post @Path("/list_pending") @Override - @Secured({WORKSPACE_ADMIN, ORGANIZATION_ADMIN}) + @Secured({WORKSPACE_READER, ORGANIZATION_READER}) public List<UserInvitationRead> listPendingInvitations(@Body final UserInvitationListRequestBody invitationListRequestBody) { return userInvitationHandler.getPendingInvitations(invitationListRequestBody); } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/WorkspaceApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/WorkspaceApiController.java index 5574b6b23dc..62d6fe3c83f 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/WorkspaceApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/WorkspaceApiController.java @@ -208,8 +208,11 @@ public WorkspaceRead getWorkspaceByConnectionId(@Body final ConnectionIdRequestB return ApiHelper.execute(() -> workspacesHandler.getWorkspaceByConnectionId(connectionIdRequestBody, false)); } + @Post("/get_by_connection_id_with_tombstone") + @Secured({WORKSPACE_READER, ORGANIZATION_READER}) + @ExecuteOn(AirbyteTaskExecutors.IO) @Override - public WorkspaceRead getWorkspaceByConnectionIdWithTombstone(ConnectionIdRequestBody connectionIdRequestBody) { + public WorkspaceRead getWorkspaceByConnectionIdWithTombstone(@Body final ConnectionIdRequestBody connectionIdRequestBody) { return ApiHelper.execute(() -> workspacesHandler.getWorkspaceByConnectionId(connectionIdRequestBody, true)); } diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/ApplicationBeanFactory.java b/airbyte-server/src/main/java/io/airbyte/server/config/ApplicationBeanFactory.java index 899ee4c6452..4302df5e9e3 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/config/ApplicationBeanFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/config/ApplicationBeanFactory.java @@ -7,6 +7,10 @@ import io.airbyte.analytics.TrackingClient; import io.airbyte.commons.features.EnvVariableFeatureFlags; import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.commons.server.handlers.helpers.BuilderProjectUpdater; +import io.airbyte.commons.server.handlers.helpers.CompositeBuilderProjectUpdater; +import io.airbyte.commons.server.handlers.helpers.ConfigRepositoryBuilderProjectUpdater; +import io.airbyte.commons.server.handlers.helpers.LocalFileSystemBuilderProjectUpdater; import io.airbyte.commons.server.scheduler.EventRunner; import
io.airbyte.commons.server.scheduler.TemporalEventRunner; import io.airbyte.commons.temporal.TemporalClient; @@ -16,6 +20,10 @@ import io.airbyte.config.persistence.ActorDefinitionVersionHelper; import io.airbyte.config.persistence.ConfigInjector; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.RefreshJobStateUpdater; +import io.airbyte.config.persistence.StatePersistence; +import io.airbyte.config.persistence.StreamRefreshesRepository; +import io.airbyte.config.persistence.helper.GenerationBumper; import io.airbyte.config.secrets.JsonSecretsProcessor; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.metrics.lib.MetricClient; @@ -38,6 +46,7 @@ import jakarta.inject.Singleton; import java.net.http.HttpClient; import java.nio.file.Path; +import java.util.List; import java.util.UUID; import java.util.function.Supplier; @@ -87,8 +96,13 @@ public JobNotifier jobNotifier( @Singleton public DefaultJobCreator defaultJobCreator(final JobPersistence jobPersistence, final WorkerConfigsProvider workerConfigsProvider, - final FeatureFlagClient featureFlagClient) { - return new DefaultJobCreator(jobPersistence, workerConfigsProvider, featureFlagClient); + final FeatureFlagClient featureFlagClient, + final GenerationBumper generationBumper, + final StatePersistence statePersistence, + final RefreshJobStateUpdater refreshJobStateUpdater, + final StreamRefreshesRepository streamRefreshesRepository) { + return new DefaultJobCreator(jobPersistence, workerConfigsProvider, featureFlagClient, generationBumper, statePersistence, refreshJobStateUpdater, + streamRefreshesRepository); } @SuppressWarnings("ParameterName") @@ -163,4 +177,15 @@ public HttpClient httpClient() { return HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); } + @Singleton + public BuilderProjectUpdater builderProjectUpdater(ConfigRepository configRepository) { + final var pathToConnectors = io.airbyte.commons.envvar.EnvVar.PATH_TO_CONNECTORS.fetch(); + ConfigRepositoryBuilderProjectUpdater configRepositoryProjectUpdater = new ConfigRepositoryBuilderProjectUpdater(configRepository); + if (pathToConnectors == null || pathToConnectors.isEmpty()) { + return configRepositoryProjectUpdater; + } else { + return new CompositeBuilderProjectUpdater(List.of(configRepositoryProjectUpdater, new LocalFileSystemBuilderProjectUpdater())); + } + } + } diff --git a/airbyte-server/src/main/java/io/airbyte/server/pro/AirbyteAuthInternalTokenValidator.java b/airbyte-server/src/main/java/io/airbyte/server/pro/AirbyteAuthInternalTokenValidator.java index 3bce5c37adc..ee4b7016275 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/pro/AirbyteAuthInternalTokenValidator.java +++ b/airbyte-server/src/main/java/io/airbyte/server/pro/AirbyteAuthInternalTokenValidator.java @@ -4,6 +4,8 @@ package io.airbyte.server.pro; +import static io.airbyte.config.persistence.UserPersistence.DEFAULT_USER_ID; + import io.airbyte.commons.auth.AirbyteAuthConstants; import io.airbyte.commons.license.annotation.RequiresAirbyteProEnabled; import io.airbyte.commons.server.support.RbacRoleHelper; @@ -31,7 +33,7 @@ public class AirbyteAuthInternalTokenValidator implements TokenValidator validateToken(final String token, final HttpRequest request) { if (validateAirbyteAuthInternalToken(token)) { return Flux.create(emitter -> { - emitter.next(getAuthentication(token)); + emitter.next(getAuthentication()); emitter.complete(); }); } else { @@ -44,10 +46,10 @@ private Boolean 
validateAirbyteAuthInternalToken(final String token) { return AirbyteAuthConstants.VALID_INTERNAL_SERVICE_NAMES.contains(token); } - private Authentication getAuthentication(final String token) { + private Authentication getAuthentication() { // set the Authentication username to the token value, which must be a valid internal service name. // for now, all internal services get instance admin roles. - return Authentication.build(token, RbacRoleHelper.getInstanceAdminRoles()); + return Authentication.build(DEFAULT_USER_ID.toString(), RbacRoleHelper.getInstanceAdminRoles()); } } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt index 00d80059a8b..ab59ae28477 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt @@ -95,7 +95,7 @@ open class ConnectionsController( val validStreams: Map<String, AirbyteStreamAndConfiguration> = AirbyteCatalogHelper.getValidStreams( - Objects.requireNonNull(airbyteCatalogFromDiscoverSchema), + Objects.requireNonNull(airbyteCatalogFromDiscoverSchema), ) // check user configs @@ -117,7 +117,14 @@ open class ConnectionsController( for (streamConfiguration in connectionCreateRequest.configurations.streams) { val validStreamAndConfig = validStreams[streamConfiguration.name] val schemaStream = validStreamAndConfig!!.stream - val schemaConfig = validStreamAndConfig.config + val updatedValidStreamAndConfig = AirbyteStreamAndConfiguration() + updatedValidStreamAndConfig.stream = schemaStream + updatedValidStreamAndConfig.config = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + validStreamAndConfig.config, + schemaStream, + streamConfiguration, + ) val validDestinationSyncModes = trackingHelper.callWithTracker( @@ -130,23 +137,21 @@ open class ConnectionsController( // set user configs trackingHelper.callWithTracker( { - AirbyteCatalogHelper.setAndValidateStreamConfig( - streamConfiguration, - validDestinationSyncModes, - schemaStream!!, - schemaConfig!!, + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = validDestinationSyncModes, + airbyteStream = schemaStream, ) }, CONNECTIONS_PATH, POST, userId, ) - configuredCatalog!!.addStreamsItem(validStreamAndConfig) + configuredCatalog!!.addStreamsItem(updatedValidStreamAndConfig) } } else { // no user supplied stream configs, return all streams with full refresh overwrite - configuredCatalog = airbyteCatalogFromDiscoverSchema - AirbyteCatalogHelper.setAllStreamsFullRefreshOverwrite(configuredCatalog!!) + configuredCatalog = AirbyteCatalogHelper.updateAllStreamsFullRefreshOverwrite(airbyteCatalogFromDiscoverSchema!!)
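// [editor's note] In this else-branch the controller now builds a fresh catalog instead of mutating the discovered one: updateAllStreamsFullRefreshOverwrite (shown later in this diff) copies every stream and pins each copied config to syncMode FULL_REFRESH with destinationSyncMode OVERWRITE.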
} val finalConfiguredCatalog = configuredCatalog @@ -332,7 +337,7 @@ open class ConnectionsController( val validStreams: Map<String, AirbyteStreamAndConfiguration> = AirbyteCatalogHelper.getValidStreams( - Objects.requireNonNull(airbyteCatalogFromDiscoverSchema), + Objects.requireNonNull(airbyteCatalogFromDiscoverSchema), ) // check user configs @@ -354,7 +359,14 @@ open class ConnectionsController( for (streamConfiguration in connectionPatchRequest.configurations.streams) { val validStreamAndConfig = validStreams[streamConfiguration.name] val schemaStream = validStreamAndConfig!!.stream - val schemaConfig = validStreamAndConfig.config + val updatedValidStreamAndConfig = AirbyteStreamAndConfiguration() + updatedValidStreamAndConfig.stream = schemaStream + updatedValidStreamAndConfig.config = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + validStreamAndConfig.config, + schemaStream, + streamConfiguration, + ) val validDestinationSyncModes = trackingHelper.callWithTracker( @@ -367,18 +379,17 @@ open class ConnectionsController( // set user configs trackingHelper.callWithTracker( { - AirbyteCatalogHelper.setAndValidateStreamConfig( - streamConfiguration, - validDestinationSyncModes, - schemaStream!!, - schemaConfig!!, + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = validDestinationSyncModes, + airbyteStream = schemaStream, ) }, CONNECTIONS_PATH, POST, userId, ) - configuredCatalog!!.addStreamsItem(validStreamAndConfig) + configuredCatalog!!.addStreamsItem(updatedValidStreamAndConfig) } } else { // no user supplied stream configs, return all existing streams diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/WorkspacesController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/WorkspacesController.kt index 0be4a84b53d..e47563e2f97 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/WorkspacesController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/WorkspacesController.kt @@ -5,10 +5,12 @@ package io.airbyte.server.apis.publicapi.controllers import io.airbyte.api.model.generated.PermissionType +import io.airbyte.commons.auth.OrganizationAuthRole import io.airbyte.commons.server.authorization.ApiAuthorizationHelper import io.airbyte.commons.server.authorization.Scope import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors import io.airbyte.commons.server.support.CurrentUserService +import io.airbyte.config.persistence.OrganizationPersistence.DEFAULT_ORGANIZATION_ID import io.airbyte.public_api.generated.PublicWorkspacesApi import io.airbyte.public_api.model.generated.WorkspaceCreateRequest import io.airbyte.public_api.model.generated.WorkspaceOAuthCredentialsRequest @@ -30,7 +32,7 @@ val logger = KotlinLogging.logger {} @Controller(WORKSPACES_PATH) @Secured(SecurityRule.IS_AUTHENTICATED) open class WorkspacesController( - private val workspaceService: WorkspaceService, + protected val workspaceService: WorkspaceService, private val apiAuthorizationHelper: ApiAuthorizationHelper, private val currentUserService: CurrentUserService, ) : PublicWorkspacesApi { @@ -55,7 +57,12 @@ open class WorkspacesController( @ExecuteOn(AirbyteTaskExecutors.IO) override fun publicCreateWorkspace(workspaceCreateRequest: WorkspaceCreateRequest?): Response { - // As long as user is authenticated, they can proceed.
+ // Now that we have orgs everywhere, ensure the user is at least an organization editor + apiAuthorizationHelper.ensureUserHasAnyRequiredRoleOrThrow( + Scope.ORGANIZATION, + listOf(DEFAULT_ORGANIZATION_ID.toString()), + setOf(OrganizationAuthRole.ORGANIZATION_EDITOR), + ) return workspaceService.controllerCreateWorkspace(workspaceCreateRequest!!) } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt index aa3ea309a55..84183d803f7 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt @@ -57,9 +57,19 @@ object AirbyteCatalogHelper { * * @param config config to be set */ - fun setConfigDefaultFullRefreshOverwrite(config: AirbyteStreamConfiguration?) { - config!!.syncMode = SyncMode.FULL_REFRESH - config.destinationSyncMode = DestinationSyncMode.OVERWRITE + fun updateConfigDefaultFullRefreshOverwrite(config: AirbyteStreamConfiguration?): AirbyteStreamConfiguration { + val updatedStreamConfiguration = AirbyteStreamConfiguration() + config?.let { + updatedStreamConfiguration.aliasName = config.aliasName + updatedStreamConfiguration.cursorField = config.cursorField + updatedStreamConfiguration.fieldSelectionEnabled = config.fieldSelectionEnabled + updatedStreamConfiguration.selected = config.selected + updatedStreamConfiguration.selectedFields = config.selectedFields + updatedStreamConfiguration.suggested = config.suggested + } + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.OVERWRITE + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + return updatedStreamConfiguration } /** @@ -67,11 +77,17 @@ object AirbyteCatalogHelper { * * @param airbyteCatalog The catalog to be modified */ - fun setAllStreamsFullRefreshOverwrite(airbyteCatalog: AirbyteCatalog) { - for (schemaStreams in airbyteCatalog.streams) { - val config = schemaStreams.config!! 
- setConfigDefaultFullRefreshOverwrite(config) - } + fun updateAllStreamsFullRefreshOverwrite(airbyteCatalog: AirbyteCatalog): AirbyteCatalog { + val updatedAirbyteCatalog = AirbyteCatalog() + updatedAirbyteCatalog.streams = + airbyteCatalog.streams.stream().map { stream: AirbyteStreamAndConfiguration -> + val updatedAirbyteStreamAndConfiguration = AirbyteStreamAndConfiguration() + updatedAirbyteStreamAndConfiguration.config = updateConfigDefaultFullRefreshOverwrite(stream.config) + updatedAirbyteStreamAndConfiguration.stream = stream.stream + updatedAirbyteStreamAndConfiguration + }.toList() + + return updatedAirbyteCatalog } /** @@ -142,16 +158,11 @@ object AirbyteCatalogHelper { } catch (e: NumberFormatException) { log.debug("Invalid cron expression: " + connectionSchedule.cronExpression) log.debug("NumberFormatException: $e") - throw ConnectionConfigurationProblem.invalidCronExpressionUnderOneHour( - connectionSchedule.cronExpression, - ) + throw ConnectionConfigurationProblem.invalidCronExpressionUnderOneHour(connectionSchedule.cronExpression) } catch (e: IllegalArgumentException) { log.debug("Invalid cron expression: " + connectionSchedule.cronExpression) log.debug("IllegalArgumentException: $e") - throw ConnectionConfigurationProblem.invalidCronExpression( - connectionSchedule.cronExpression, - e.message, - ) + throw ConnectionConfigurationProblem.invalidCronExpression(connectionSchedule.cronExpression, e.message) } } } @@ -160,6 +171,80 @@ object AirbyteCatalogHelper { // check that the first seconds and hour values are not * } + fun updateAirbyteStreamConfiguration( + config: AirbyteStreamConfiguration, + airbyteStream: AirbyteStream, + streamConfiguration: StreamConfiguration, + ): AirbyteStreamConfiguration { + val updatedStreamConfiguration = AirbyteStreamConfiguration() + // Set stream config as selected + updatedStreamConfiguration.selected = true + updatedStreamConfiguration.aliasName = config.aliasName + updatedStreamConfiguration.fieldSelectionEnabled = config.fieldSelectionEnabled + updatedStreamConfiguration.suggested = config.suggested + + if (streamConfiguration.syncMode == null) { + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.OVERWRITE + updatedStreamConfiguration.cursorField = config.cursorField + updatedStreamConfiguration.primaryKey = config.primaryKey + } else { + when (streamConfiguration.syncMode) { + ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> { + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.APPEND + updatedStreamConfiguration.cursorField = config.cursorField + updatedStreamConfiguration.primaryKey = config.primaryKey + } + + ConnectionSyncModeEnum.INCREMENTAL_APPEND -> { + updatedStreamConfiguration.syncMode(SyncMode.INCREMENTAL) + updatedStreamConfiguration.destinationSyncMode(DestinationSyncMode.APPEND) + updatedStreamConfiguration.cursorField(selectCursorField(airbyteStream, streamConfiguration)) + updatedStreamConfiguration.primaryKey(selectPrimaryKey(airbyteStream, streamConfiguration)) + } + + ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> { + updatedStreamConfiguration.syncMode = SyncMode.INCREMENTAL + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.APPEND_DEDUP + updatedStreamConfiguration.cursorField = selectCursorField(airbyteStream, streamConfiguration) + updatedStreamConfiguration.primaryKey = selectPrimaryKey(airbyteStream, streamConfiguration) + } 
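// [editor's note] Net effect of this when-block: FULL_REFRESH_APPEND maps to (FULL_REFRESH, APPEND) keeping the discovered cursor and primary key; INCREMENTAL_APPEND maps to (INCREMENTAL, APPEND) and INCREMENTAL_DEDUPED_HISTORY to (INCREMENTAL, APPEND_DEDUP), both resolving cursor and primary key via the select* helpers below; anything else falls back to (FULL_REFRESH, OVERWRITE).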
+ + else -> { + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.OVERWRITE + updatedStreamConfiguration.cursorField = config.cursorField + updatedStreamConfiguration.primaryKey = config.primaryKey + } + } + } + + return updatedStreamConfiguration + } + + private fun selectCursorField( + airbyteStream: AirbyteStream, + streamConfiguration: StreamConfiguration, + ): List<String>? { + return if (airbyteStream.sourceDefinedCursor != null && airbyteStream.sourceDefinedCursor!!) { + airbyteStream.defaultCursorField + } else if (streamConfiguration.cursorField != null && streamConfiguration.cursorField.isNotEmpty()) { + streamConfiguration.cursorField + } else { + airbyteStream.defaultCursorField + } + } + + private fun selectPrimaryKey( + airbyteStream: AirbyteStream, + streamConfiguration: StreamConfiguration, + ): List<List<String>>? { + return (airbyteStream.sourceDefinedPrimaryKey ?: emptyList()).ifEmpty { + streamConfiguration.primaryKey + } + } + /** * Validates a stream's configurations and sets those configurations in the * `AirbyteStreamConfiguration` object. Logic comes from @@ -168,19 +253,14 @@ object AirbyteCatalogHelper { * @param streamConfiguration The configuration input of a specific stream provided by the caller. * @param validDestinationSyncModes All the valid destination sync modes for a destination * @param airbyteStream The immutable schema defined by the source - * @param config The configuration of a stream consumed by the config-api * @return True if no exceptions. Needed so it can be used inside TrackingHelper.callWithTracker */ - fun setAndValidateStreamConfig( + fun validateStreamConfig( streamConfiguration: StreamConfiguration, - validDestinationSyncModes: List<DestinationSyncMode>, + validDestinationSyncModes: List<DestinationSyncMode>, airbyteStream: AirbyteStream, - config: AirbyteStreamConfiguration, ): Boolean { - // Set stream config as selected - config.selected = true if (streamConfiguration.syncMode == null) { - setConfigDefaultFullRefreshOverwrite(config) return true } @@ -193,49 +273,33 @@ object AirbyteCatalogHelper { validCombinedSyncModes, ) } - when (streamConfiguration.syncMode) { - ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> { - config.syncMode = SyncMode.FULL_REFRESH - config.destinationSyncMode = DestinationSyncMode.APPEND - } + when (streamConfiguration.syncMode) { ConnectionSyncModeEnum.INCREMENTAL_APPEND -> { - config.syncMode = SyncMode.INCREMENTAL - config.destinationSyncMode = DestinationSyncMode.APPEND - setAndValidateCursorField(streamConfiguration.cursorField, airbyteStream, config) + validateCursorField(streamConfiguration.cursorField, airbyteStream) } ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> { - config.syncMode = SyncMode.INCREMENTAL - config.destinationSyncMode = DestinationSyncMode.APPEND_DEDUP - setAndValidateCursorField(streamConfiguration.cursorField, airbyteStream, config) - setAndValidatePrimaryKey(streamConfiguration.primaryKey, airbyteStream, config) + validateCursorField(streamConfiguration.cursorField, airbyteStream) + validatePrimaryKey(streamConfiguration.primaryKey, airbyteStream) } - else -> { - // always valid - setConfigDefaultFullRefreshOverwrite(config) - } + else -> {} } return true } - private fun setAndValidateCursorField( + private fun validateCursorField( cursorField: List<String>?, airbyteStream: AirbyteStream, - config: AirbyteStreamConfiguration, ) { if (airbyteStream.sourceDefinedCursor != null && airbyteStream.sourceDefinedCursor!!)
{ if (!cursorField.isNullOrEmpty()) { // if cursor given is not empty and is NOT the same as the default, throw error - if (java.util.Set.copyOf(cursorField) != java.util.Set.copyOf(airbyteStream.defaultCursorField)) { - throw ConnectionConfigurationProblem.sourceDefinedCursorFieldProblem( - airbyteStream.name, - airbyteStream.defaultCursorField!!, - ) + if (java.util.Set.copyOf(cursorField) != java.util.Set.copyOf(airbyteStream.defaultCursorField)) { + throw ConnectionConfigurationProblem.sourceDefinedCursorFieldProblem(airbyteStream.name, airbyteStream.defaultCursorField!!) } } - config.cursorField = airbyteStream.defaultCursorField // this probably isn't necessary and should be already set } else { if (!cursorField.isNullOrEmpty()) { // validate cursor field @@ -243,44 +307,46 @@ object AirbyteCatalogHelper { if (!validCursorFields.contains(cursorField)) { throw ConnectionConfigurationProblem.invalidCursorField(airbyteStream.name, validCursorFields) } - config.cursorField = cursorField } else { // no default or given cursor field if (airbyteStream.defaultCursorField == null || airbyteStream.defaultCursorField!!.isEmpty()) { throw ConnectionConfigurationProblem.missingCursorField(airbyteStream.name) } - config.cursorField = airbyteStream.defaultCursorField // this probably isn't necessary and should be already set } } } - private fun setAndValidatePrimaryKey( + private fun validatePrimaryKey( primaryKey: List<List<String>>?, airbyteStream: AirbyteStream, - config: AirbyteStreamConfiguration, ) { - // if no source defined primary key - if (airbyteStream.sourceDefinedPrimaryKey == null || airbyteStream.sourceDefinedPrimaryKey!!.isEmpty()) { - if (!primaryKey.isNullOrEmpty()) { - // validate primary key - val validPrimaryKey: List<List<String>> = getStreamFields(airbyteStream.jsonSchema!!) - - // todo maybe check that they don't provide the same primary key twice? - for (singlePrimaryKey in primaryKey) { - if (!validPrimaryKey.contains(singlePrimaryKey)) { // todo double check if the .contains() for list of strings works as intended - throw ConnectionConfigurationProblem.invalidPrimaryKey(airbyteStream.name, validPrimaryKey) - } - } - config.primaryKey = primaryKey - } else { - throw ConnectionConfigurationProblem.missingPrimaryKey(airbyteStream.name) + // Validate that if a source defined primary key exists, that's the one we use. + // Currently, the UI only supports this, and there are likely assumptions baked into the platform that require this to be true.
+ val sourceDefinedPrimaryKeyExists = !airbyteStream.sourceDefinedPrimaryKey.isNullOrEmpty() + val configuredPrimaryKeyExists = !primaryKey.isNullOrEmpty() + + if (sourceDefinedPrimaryKeyExists && configuredPrimaryKeyExists) { + if (airbyteStream.sourceDefinedPrimaryKey != primaryKey) { + throw ConnectionConfigurationProblem.primaryKeyAlreadyDefined(airbyteStream.name, airbyteStream.sourceDefinedPrimaryKey) } - } else { - // source defined primary key exists - if (!primaryKey.isNullOrEmpty()) { - throw ConnectionConfigurationProblem.primaryKeyAlreadyDefined(airbyteStream.name) - } else { - config.primaryKey = airbyteStream.sourceDefinedPrimaryKey // this probably isn't necessary and should be already set + } + + // Ensure that we've passed at least some kind of primary key + val noPrimaryKey = !configuredPrimaryKeyExists && !sourceDefinedPrimaryKeyExists + if (noPrimaryKey) { + throw ConnectionConfigurationProblem.missingPrimaryKey(airbyteStream.name) + } + + // Validate the actual key passed in + val validPrimaryKey: List<List<String>> = getStreamFields(airbyteStream.jsonSchema!!) + + for (singlePrimaryKey in primaryKey!!) { + if (!validPrimaryKey.contains(singlePrimaryKey)) { // todo double check if the .contains() for list of strings works as intended + throw ConnectionConfigurationProblem.invalidPrimaryKey(airbyteStream.name, validPrimaryKey) + } + + if (singlePrimaryKey.distinct() != singlePrimaryKey) { + throw ConnectionConfigurationProblem.duplicatePrimaryKey(airbyteStream.name, primaryKey) } } } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/PaginationMapper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/PaginationMapper.kt index 85d1400d6fa..5d7c6939a51 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/PaginationMapper.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/PaginationMapper.kt @@ -44,8 +44,8 @@ object PaginationMapper { limit: Int, offset: Int, ): Optional<Int> { - // If we have no more entries or we had no entries this page, just return empty - no next URL - return if (CollectionUtils.isEmpty(collection) || collection.size < limit) { + // If we have no more entries, or we had no entries this page, just return empty - no next URL + return if (CollectionUtils.isEmpty(collection)) { Optional.empty() } else { Optional.of(offset + limit)
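The `PaginationMapper` change above drops the `collection.size < limit` short-circuit: a short but non-empty page now still yields a next link, and pagination only stops on an empty page. A self-contained sketch of the new rule (names invented for illustration):

```kotlin
// Mirrors the post-change rule in the PaginationMapper function above:
// an empty page means no next URL; any non-empty page advances offset by limit.
fun nextOffset(limit: Int, offset: Int, page: List<Any>): Int? =
    if (page.isEmpty()) null else offset + limit

fun main() {
    check(nextOffset(10, 0, List(10) { it }) == 10)  // full page: next offset is 10
    check(nextOffset(10, 10, List(3) { it }) == 20)  // short page still paginates now
    check(nextOffset(10, 20, emptyList()) == null)   // empty page: pagination ends
    println("pagination rule verified")
}
```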
diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/WorkspaceService.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/WorkspaceService.kt index 154ed66e274..2de3aac667a 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/WorkspaceService.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/WorkspaceService.kt @@ -8,6 +8,7 @@ import io.airbyte.api.model.generated.ListResourcesForWorkspacesRequestBody import io.airbyte.api.model.generated.Pagination import io.airbyte.api.model.generated.WorkspaceCreate import io.airbyte.api.model.generated.WorkspaceIdRequestBody +import io.airbyte.api.model.generated.WorkspaceUpdateName import io.airbyte.commons.server.handlers.WorkspacesHandler import io.airbyte.commons.server.support.CurrentUserService import io.airbyte.config.persistence.OrganizationPersistence.DEFAULT_ORGANIZATION_ID @@ -20,6 +21,7 @@ import io.airbyte.server.apis.publicapi.apiTracking.TrackingHelper import io.airbyte.server.apis.publicapi.constants.DELETE import io.airbyte.server.apis.publicapi.constants.GET import io.airbyte.server.apis.publicapi.constants.HTTP_RESPONSE_BODY_DEBUG_MESSAGE +import io.airbyte.server.apis.publicapi.constants.PATCH import io.airbyte.server.apis.publicapi.constants.POST import io.airbyte.server.apis.publicapi.constants.WORKSPACES_PATH import io.airbyte.server.apis.publicapi.constants.WORKSPACES_WITH_ID_PATH @@ -134,21 +136,49 @@ open class WorkspaceServiceImpl( } /** - * No-op in OSS. + * Updates a workspace name in OSS. */ override fun updateWorkspace( workspaceId: UUID, workspaceUpdateRequest: WorkspaceUpdateRequest, ): WorkspaceResponse { - // Update workspace in the cloud version of the airbyte API currently only supports name updates, but we don't have name updates in OSS. - return WorkspaceResponse() + val workspaceUpdate = + WorkspaceUpdateName().apply { + this.name = workspaceUpdateRequest.name + this.workspaceId = workspaceId + } + val result = + kotlin.runCatching { workspacesHandler.updateWorkspaceName(workspaceUpdate) } + .onFailure { + log.error("Error for updateWorkspace", it) + ConfigClientErrorHandler.handleError(it, workspaceId.toString()) + } + log.debug(HTTP_RESPONSE_BODY_DEBUG_MESSAGE + result) + return WorkspaceResponseMapper.from(result.getOrNull()!!) } override fun controllerUpdateWorkspace( workspaceId: UUID, workspaceUpdateRequest: WorkspaceUpdateRequest, ): Response { - return Response.status(Response.Status.NOT_IMPLEMENTED).build() + val userId: UUID = currentUserService.currentUser.userId + + val workspaceResponse: Any = + trackingHelper.callWithTracker( + { updateWorkspace(workspaceId, workspaceUpdateRequest) }, + WORKSPACES_WITH_ID_PATH, + PATCH, + userId, + ) + trackingHelper.trackSuccess( + WORKSPACES_WITH_ID_PATH, + PATCH, + userId, + ) + return Response + .status(Response.Status.OK.statusCode) + .entity(workspaceResponse) + .build() } /** diff --git a/airbyte-server/src/main/resources/application.yml b/airbyte-server/src/main/resources/application.yml index 2a2cf7d698a..3a16b2cc138 100644 --- a/airbyte-server/src/main/resources/application.yml +++ b/airbyte-server/src/main/resources/application.yml @@ -55,6 +55,7 @@ micronaut: max-content-length: 52428800 # 50MB access-logger: enabled: ${HTTP_ACCESS_LOG_ENABLED:true} + max-header-size: ${NETTY_MAX_HEADER_SIZE:32768} idle-timeout: ${HTTP_IDLE_TIMEOUT:5m} http: client: diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/PermissionApiControllerTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/PermissionApiControllerTest.java index 247a783f8e9..eb4ff27ceb9 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/PermissionApiControllerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/apis/PermissionApiControllerTest.java @@ -46,12 +46,10 @@ void testGetPermission() throws ConfigNotFoundException, IOException { } @Test - void testUpdatePermission() throws ConfigNotFoundException, IOException, JsonValidationException { + void testUpdatePermission() throws ConfigNotFoundException, IOException { final UUID userId = UUID.randomUUID(); Mockito.when(permissionHandler.getPermission(Mockito.any())) .thenReturn(new PermissionRead().userId(userId)); - Mockito.when(permissionHandler.updatePermission(Mockito.any())) - .thenReturn(new PermissionRead().userId(userId)); final String path = "/api/v1/permissions/update"; testEndpointStatus( HttpRequest.POST(path, new PermissionUpdate().permissionId(UUID.randomUUID())), @@ -59,7 +57,7 @@ void testUpdatePermission() throws ConfigNotFoundException, IOException,
JsonVal } @Test - void testDeletePermission() throws IOException { + void testDeletePermission() { Mockito.doNothing().when(permissionHandler).deletePermission(Mockito.any()); final String path = "/api/v1/permissions/delete"; testEndpointStatus( diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelperTest.kt new file mode 100644 index 00000000000..1b131766076 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelperTest.kt @@ -0,0 +1,601 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.apis.publicapi.helpers + +import io.airbyte.api.model.generated.AirbyteCatalog +import io.airbyte.api.model.generated.AirbyteStream +import io.airbyte.api.model.generated.AirbyteStreamAndConfiguration +import io.airbyte.api.model.generated.AirbyteStreamConfiguration +import io.airbyte.api.model.generated.DestinationSyncMode +import io.airbyte.api.model.generated.SelectedFieldInfo +import io.airbyte.api.model.generated.SyncMode +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.server.errors.problems.ConnectionConfigurationProblem +import io.airbyte.public_api.model.generated.AirbyteApiConnectionSchedule +import io.airbyte.public_api.model.generated.ConnectionSyncModeEnum +import io.airbyte.public_api.model.generated.ScheduleTypeEnum +import io.airbyte.public_api.model.generated.StreamConfiguration +import io.airbyte.public_api.model.generated.StreamConfigurations +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertFalse +import org.junit.jupiter.api.Assertions.assertThrows +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.Test +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.EnumSource + +internal class AirbyteCatalogHelperTest { + @Test + internal fun `test that a stream configuration is not empty`() { + val streamConfigurations: StreamConfigurations = mockk() + + every { streamConfigurations.streams } returns listOf(mockk()) + + assertTrue(AirbyteCatalogHelper.hasStreamConfigurations(streamConfigurations)) + } + + @Test + internal fun `test that a stream configuration is empty`() { + val streamConfigurations: StreamConfigurations = mockk() + + every { streamConfigurations.streams } returns listOf() + + assertFalse(AirbyteCatalogHelper.hasStreamConfigurations(streamConfigurations)) + + every { streamConfigurations.streams } returns null + + assertFalse(AirbyteCatalogHelper.hasStreamConfigurations(streamConfigurations)) + + assertFalse(AirbyteCatalogHelper.hasStreamConfigurations(null)) + } + + @Test + internal fun `test that a copy of the AirbyteStreamConfiguration is returned when it is updated to full refresh overwrite mode`() { + val originalStreamConfiguration = createAirbyteStreamConfiguration() + + val updatedStreamConfiguration = AirbyteCatalogHelper.updateConfigDefaultFullRefreshOverwrite(config = originalStreamConfiguration) + assertFalse(originalStreamConfiguration === updatedStreamConfiguration) + assertEquals(SyncMode.FULL_REFRESH, updatedStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, updatedStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a copy of the AirbyteCatalog is returned when all of its 
streams are updated to full refresh overwrite mode`() {
+    val originalAirbyteCatalog = createAirbyteCatalog()
+    val updatedAirbyteCatalog = AirbyteCatalogHelper.updateAllStreamsFullRefreshOverwrite(airbyteCatalog = originalAirbyteCatalog)
+    assertFalse(originalAirbyteCatalog === updatedAirbyteCatalog)
+    updatedAirbyteCatalog.streams.stream().forEach { stream ->
+      assertEquals(SyncMode.FULL_REFRESH, stream.config?.syncMode)
+      assertEquals(DestinationSyncMode.OVERWRITE, stream.config?.destinationSyncMode)
+    }
+  }
+
+  @Test
+  internal fun `test that streams can be validated`() {
+    val referenceCatalog = createAirbyteCatalog()
+    val streamConfiguration = StreamConfiguration()
+    streamConfiguration.name = "name1"
+    val streamConfigurations = StreamConfigurations()
+    streamConfigurations.streams = listOf(streamConfiguration)
+
+    assertTrue(AirbyteCatalogHelper.validateStreams(referenceCatalog = referenceCatalog, streamConfigurations = streamConfigurations))
+  }
+
+  @Test
+  internal fun `test that a stream with an invalid name is considered to be invalid`() {
+    val referenceCatalog = createAirbyteCatalog()
+    val streamConfiguration = StreamConfiguration()
+    streamConfiguration.name = "unknown"
+    val streamConfigurations = StreamConfigurations()
+    streamConfigurations.streams = listOf(streamConfiguration)
+
+    val throwable =
+      assertThrows(ConnectionConfigurationProblem::class.java) {
+        AirbyteCatalogHelper.validateStreams(referenceCatalog = referenceCatalog, streamConfigurations = streamConfigurations)
+      }
+    assertEquals(true, throwable.message?.contains("Invalid stream found"))
+  }
+
+  @Test
+  internal fun `test that a stream configuration with duplicate streams is considered to be invalid`() {
+    val referenceCatalog = createAirbyteCatalog()
+    val streamConfiguration1 = StreamConfiguration()
+    streamConfiguration1.name = "name1"
+    val streamConfiguration2 = StreamConfiguration()
+    streamConfiguration2.name = "name1"
+    val streamConfigurations = StreamConfigurations()
+    streamConfigurations.streams = listOf(streamConfiguration1, streamConfiguration2)
+
+    val throwable =
+      assertThrows(ConnectionConfigurationProblem::class.java) {
+        AirbyteCatalogHelper.validateStreams(referenceCatalog = referenceCatalog, streamConfigurations = streamConfigurations)
+      }
+    assertEquals(true, throwable.message?.contains("Duplicate stream found in configuration"))
+  }
+
+  @Test
+  internal fun `test that valid streams can be retrieved from the AirbyteCatalog`() {
+    val airbyteCatalog = createAirbyteCatalog()
+    val validStreamNames = AirbyteCatalogHelper.getValidStreams(airbyteCatalog = airbyteCatalog)
+    assertEquals(airbyteCatalog.streams.map { it.stream?.name }.toSet(), validStreamNames.keys)
+  }
+
+  @Test
+  internal fun `test that the cron configuration can be validated`() {
+    val connectionSchedule = AirbyteApiConnectionSchedule()
+    connectionSchedule.scheduleType = ScheduleTypeEnum.CRON
+    connectionSchedule.cronExpression = "0 15 10 * * ? 
* UTC" + assertTrue(AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule)) + assertFalse(connectionSchedule.cronExpression.contains("UTC")) + + connectionSchedule.scheduleType = ScheduleTypeEnum.MANUAL + assertTrue(AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule)) + } + + @Test + internal fun `test that the cron configuration with a missing cron expression is invalid`() { + val connectionSchedule = AirbyteApiConnectionSchedule() + connectionSchedule.scheduleType = ScheduleTypeEnum.CRON + connectionSchedule.cronExpression = null + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule) + } + assertEquals(true, throwable.message?.contains("Missing cron expression in the schedule.")) + } + + @Test + internal fun `test that the cron configuration with an invalid cron expression length is invalid`() { + val connectionSchedule = AirbyteApiConnectionSchedule() + connectionSchedule.scheduleType = ScheduleTypeEnum.CRON + connectionSchedule.cronExpression = "0 15 10 * * ? * * * *" + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule) + } + assertEquals(true, throwable.message?.contains("Cron expression contains 10 parts but we expect one of [6, 7]")) + } + + @Test + internal fun `test that the cron configuration with an invalid cron expression is invalid`() { + val connectionSchedule = AirbyteApiConnectionSchedule() + connectionSchedule.scheduleType = ScheduleTypeEnum.CRON + connectionSchedule.cronExpression = "not a valid cron expression string" + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule) + } + assertEquals(true, throwable.message?.contains("Failed to parse cron expression. 
Invalid chars in expression!"))
+  }
+
+  @ParameterizedTest
+  @EnumSource(ConnectionSyncModeEnum::class)
+  internal fun `test that when a stream configuration is updated, the correct sync modes are set based on the stream configuration`(
+    connectionSyncMode: ConnectionSyncModeEnum,
+  ) {
+    val cursorField = "cursor"
+    val primaryKeyColumn = "primary"
+    val airbyteStream = AirbyteStream()
+    val airbyteStreamConfiguration = createAirbyteStreamConfiguration()
+    val streamConfiguration = StreamConfiguration()
+    streamConfiguration.syncMode = connectionSyncMode
+    streamConfiguration.cursorField = listOf(cursorField)
+    streamConfiguration.primaryKey = listOf(listOf(primaryKeyColumn))
+
+    val updatedAirbyteStreamConfiguration =
+      AirbyteCatalogHelper.updateAirbyteStreamConfiguration(
+        config = airbyteStreamConfiguration,
+        airbyteStream = airbyteStream,
+        streamConfiguration = streamConfiguration,
+      )
+    assertEquals(true, updatedAirbyteStreamConfiguration.selected)
+    assertEquals(getSyncMode(connectionSyncMode), updatedAirbyteStreamConfiguration.syncMode)
+    assertEquals(getDestinationSyncMode(connectionSyncMode), updatedAirbyteStreamConfiguration.destinationSyncMode)
+  }
+
+  @Test
+  internal fun `test that when a stream configuration does not have a configured sync mode, the updated configuration uses full refresh overwrite`() {
+    val cursorField = "cursor"
+    val primaryKeyColumn = "primary"
+    val airbyteStream = AirbyteStream()
+    val airbyteStreamConfiguration = createAirbyteStreamConfiguration()
+    val streamConfiguration = StreamConfiguration()
+    streamConfiguration.syncMode = null
+    streamConfiguration.cursorField = listOf(cursorField)
+    streamConfiguration.primaryKey = listOf(listOf(primaryKeyColumn))
+
+    val updatedAirbyteStreamConfiguration =
+      AirbyteCatalogHelper.updateAirbyteStreamConfiguration(
+        config = airbyteStreamConfiguration,
+        airbyteStream = airbyteStream,
+        streamConfiguration = streamConfiguration,
+      )
+
+    assertEquals(true, updatedAirbyteStreamConfiguration.selected)
+    assertEquals(SyncMode.FULL_REFRESH, updatedAirbyteStreamConfiguration.syncMode)
+    assertEquals(DestinationSyncMode.OVERWRITE, updatedAirbyteStreamConfiguration.destinationSyncMode)
+  }
+
+  @Test
+  internal fun `test that when validating a stream without a sync mode, the sync mode is set to full refresh and the stream is considered valid`() {
+    val airbyteStream = AirbyteStream()
+    val streamConfiguration = StreamConfiguration()
+    streamConfiguration.syncMode = null
+    val airbyteStreamConfiguration =
+      AirbyteCatalogHelper.updateAirbyteStreamConfiguration(
+        config = createAirbyteStreamConfiguration(),
+        airbyteStream = airbyteStream,
+        streamConfiguration = streamConfiguration,
+      )
+
+    assertTrue(AirbyteCatalogHelper.validateStreamConfig(streamConfiguration, listOf(), airbyteStream))
+    assertEquals(SyncMode.FULL_REFRESH, airbyteStreamConfiguration.syncMode)
+    assertEquals(DestinationSyncMode.OVERWRITE, airbyteStreamConfiguration.destinationSyncMode)
+    assertEquals(true, airbyteStreamConfiguration.selected)
+  }
+
+  @Test
+  internal fun `test that if the stream configuration contains an invalid sync mode, the stream is considered invalid`() {
+    val airbyteStream = AirbyteStream()
+    val streamConfiguration = StreamConfiguration()
+    airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL)
+    streamConfiguration.syncMode = ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE
+    streamConfiguration.name = "stream-name"
+
+    val throwable =
+      assertThrows(ConnectionConfigurationProblem::class.java) { 
AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.OVERWRITE), + airbyteStream = airbyteStream, + ) + } + assertEquals(true, throwable.message?.contains("Cannot set sync mode to ${streamConfiguration.syncMode} for stream")) + } + + @Test + internal fun `test that a stream configuration with FULL_REFRESH_APPEND is always considered to be valid`() { + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.supportedSyncModes = listOf(SyncMode.FULL_REFRESH) + streamConfiguration.syncMode = ConnectionSyncModeEnum.FULL_REFRESH_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + createAirbyteStreamConfiguration(), + airbyteStream, + streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.FULL_REFRESH, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a stream configuration with FULL_REFRESH_OVERWRITE is always considered to be valid`() { + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.supportedSyncModes = listOf(SyncMode.FULL_REFRESH) + streamConfiguration.syncMode = ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.OVERWRITE), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.FULL_REFRESH, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is only valid if the source defined cursor field is also valid`() { + val cursorField = "cursor" + val airbyteStream = AirbyteStream() + val airbyteStreamConfiguration = createAirbyteStreamConfiguration() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.sourceDefinedCursor = true + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is invalid if the source defined cursor field is invalid`() { + val cursorField = "cursor" + val streamName = "stream-name" + val airbyteStream = AirbyteStream() + val 
streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.name = streamName + airbyteStream.sourceDefinedCursor = true + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf("other") + streamConfiguration.name = airbyteStream.name + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ) + } + assertEquals(true, throwable.message?.contains("Do not include a cursor field configuration for this stream")) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is only valid if the source cursor field is also valid`() { + val cursorField = "cursor" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}}}") + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is invalid if the source cursor field is invalid`() { + val cursorField = "cursor" + val otherCursorField = "other" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(otherCursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$otherCursorField\": {}}}") + airbyteStream.name = "name" + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + 
AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ) + } + assertEquals( + true, + throwable.message?.contains( + "Invalid cursor field for stream: ${airbyteStream.name}. The list of valid cursor fields include: [[$otherCursorField]]", + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is invalid if there is no cursor field`() { + val cursorField = "cursor" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf() + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}}}") + airbyteStream.name = "name" + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf() + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ) + } + assertEquals( + true, + throwable.message?.contains( + "No default cursor field for stream: ${airbyteStream.name}. 
Please include a cursor field configuration for this stream.", + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that an INCREMENTAL_DEDUPED_HISTORY stream is only valid if the source defined cursor and primary key field are also valid`() { + val cursorField = "cursor" + val primaryKey = "primary" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}, \"$primaryKey\": {}}}") + airbyteStream.sourceDefinedCursor = true + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.primaryKey = listOf(listOf(primaryKey)) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND_DEDUP), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND_DEDUP, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + assertEquals(listOf(listOf(primaryKey)), airbyteStreamConfiguration.primaryKey) + } + + @Test + internal fun `test that an INCREMENTAL_DEDUPED_HISTORY stream is only valid if the source cursor field and primary key field are also valid`() { + val cursorField = "cursor" + val primaryKey = "primary" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}, \"$primaryKey\": {}}}") + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.primaryKey = listOf(listOf(primaryKey)) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND_DEDUP), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND_DEDUP, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + assertEquals(listOf(listOf(primaryKey)), airbyteStreamConfiguration.primaryKey) + } + + @Test + internal fun `test that the combined sync modes are valid`() { + val validSourceSyncModes = listOf(SyncMode.FULL_REFRESH) + val validDestinationSyncModes = 
listOf(DestinationSyncMode.OVERWRITE)
+
+    val combinedSyncModes =
+      AirbyteCatalogHelper.validCombinedSyncModes(
+        validSourceSyncModes = validSourceSyncModes,
+        validDestinationSyncModes = validDestinationSyncModes,
+      )
+    assertEquals(1, combinedSyncModes.size)
+    assertEquals(listOf(ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE).first(), combinedSyncModes.first())
+  }
+
+  private fun createAirbyteCatalog(): AirbyteCatalog {
+    val airbyteCatalog = AirbyteCatalog()
+    val streams = mutableListOf<AirbyteStreamAndConfiguration>()
+    for (i in 1..5) {
+      val streamAndConfiguration = AirbyteStreamAndConfiguration()
+      val stream = AirbyteStream()
+      stream.name = "name$i"
+      stream.namespace = "namespace"
+      streamAndConfiguration.stream = stream
+      streamAndConfiguration.config = createAirbyteStreamConfiguration()
+      streams += streamAndConfiguration
+    }
+    airbyteCatalog.streams(streams)
+    return airbyteCatalog
+  }
+
+  private fun createAirbyteStreamConfiguration(): AirbyteStreamConfiguration {
+    val airbyteStreamConfiguration = AirbyteStreamConfiguration()
+    airbyteStreamConfiguration.aliasName = "alias"
+    airbyteStreamConfiguration.cursorField = listOf("cursor")
+    airbyteStreamConfiguration.destinationSyncMode = DestinationSyncMode.APPEND
+    airbyteStreamConfiguration.fieldSelectionEnabled = true
+    airbyteStreamConfiguration.primaryKey = listOf(listOf("primary"))
+    airbyteStreamConfiguration.selected = false
+    airbyteStreamConfiguration.selectedFields = listOf(SelectedFieldInfo())
+    airbyteStreamConfiguration.suggested = false
+    airbyteStreamConfiguration.syncMode = SyncMode.INCREMENTAL
+    return airbyteStreamConfiguration
+  }
+
+  private fun getSyncMode(connectionSyncMode: ConnectionSyncModeEnum): SyncMode {
+    return when (connectionSyncMode) {
+      ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE -> SyncMode.FULL_REFRESH
+      ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> SyncMode.FULL_REFRESH
+      ConnectionSyncModeEnum.INCREMENTAL_APPEND -> SyncMode.INCREMENTAL
+      ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> SyncMode.INCREMENTAL
+    }
+  }
+
+  private fun getDestinationSyncMode(connectionSyncMode: ConnectionSyncModeEnum): DestinationSyncMode {
+    return when (connectionSyncMode) {
+      ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE -> DestinationSyncMode.OVERWRITE
+      ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> DestinationSyncMode.APPEND
+      ConnectionSyncModeEnum.INCREMENTAL_APPEND -> DestinationSyncMode.APPEND
+      ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> DestinationSyncMode.APPEND_DEDUP
+    }
+  }
+}
diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/PaginationMapperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/PaginationMapperTest.kt
index 0da6ee20632..113eb213c57 100644
--- a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/PaginationMapperTest.kt
+++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/PaginationMapperTest.kt
@@ -23,14 +23,21 @@ class PaginationMapperTest {
 
   @Test
   fun `test that it can generate next URLs`() {
-    var noOffsetBuilder = PaginationMapper.getBuilder(publicApiHost, removePublicApiPathPrefix(SOURCES_PATH))
-    PaginationMapper.getNextUrl(listOf("a", "b", "c"), 4, 0, noOffsetBuilder)
+    val lessResultsBuilder = PaginationMapper.getBuilder(publicApiHost, removePublicApiPathPrefix(SOURCES_PATH))
+    PaginationMapper.getNextUrl(listOf("a", "b", "c"), 4, 0, lessResultsBuilder)
+    assertEquals(
+      "$publicApiHost/v1/sources?limit=4&offset=4",
+      lessResultsBuilder.build().toString(),
+    )
+
+    val 
noResultsBuilder = PaginationMapper.getBuilder(publicApiHost, removePublicApiPathPrefix(SOURCES_PATH))
+    PaginationMapper.getNextUrl(emptyList(), 4, 0, noResultsBuilder)
     assertEquals(
       "$publicApiHost/v1/sources",
-      noOffsetBuilder.build().toString(),
+      noResultsBuilder.build().toString(),
     )
 
-    var offsetLimitBuilder = PaginationMapper.getBuilder(publicApiHost, removePublicApiPathPrefix(SOURCES_PATH))
+    val offsetLimitBuilder = PaginationMapper.getBuilder(publicApiHost, removePublicApiPathPrefix(SOURCES_PATH))
     PaginationMapper.getNextUrl(listOf("a", "b", "c"), 2, 0, offsetLimitBuilder)
     assertEquals(
       "$publicApiHost/v1/sources?limit=2&offset=2",
diff --git a/airbyte-temporal/Dockerfile b/airbyte-temporal/Dockerfile
index c04074c7d4b..9e1cf243964 100644
--- a/airbyte-temporal/Dockerfile
+++ b/airbyte-temporal/Dockerfile
@@ -1,5 +1,5 @@
 # A test described in the README is available to test a version update
-FROM temporalio/auto-setup:1.22.3
+FROM temporalio/auto-setup:1.22.7
 
 ENV TEMPORAL_HOME /etc/temporal
diff --git a/airbyte-temporal/build.gradle.kts b/airbyte-temporal/build.gradle.kts
index d699ef53873..2014497b8a4 100644
--- a/airbyte-temporal/build.gradle.kts
+++ b/airbyte-temporal/build.gradle.kts
@@ -1,20 +1,20 @@
 plugins {
-    id("io.airbyte.gradle.jvm.lib")
-    id("io.airbyte.gradle.docker")
-    id("io.airbyte.gradle.publish")
+  id("io.airbyte.gradle.jvm.lib")
+  id("io.airbyte.gradle.docker")
+  id("io.airbyte.gradle.publish")
 }
 
 airbyte {
-    docker {
-        imageName = "temporal"
-    }
+  docker {
+    imageName = "temporal"
+  }
 }
 
 val copyScripts = tasks.register<Copy>("copyScripts") {
-    from("scripts")
-    into("build/airbyte/docker/")
+  from("scripts")
+  into("build/airbyte/docker/")
 }
 
 tasks.named("dockerBuildImage") {
-    dependsOn(copyScripts)
+  dependsOn(copyScripts)
 }
diff --git a/airbyte-test-utils/README.md b/airbyte-test-utils/README.md
new file mode 100644
index 00000000000..fb22cc4b538
--- /dev/null
+++ b/airbyte-test-utils/README.md
@@ -0,0 +1,20 @@
+# airbyte-test-utils
+
+Shared Java code for executing TestContainers and other helpers.
+
+## Stage database setup
+
+When we run acceptance tests on an environment that is not `stage`, a test container will be used for each connector that requires a database. Each test container will be used for only one test case, and it will be deleted once the test case completes.
+
+When we run acceptance tests on stage, things are slightly more complex, but we try to have the same behavior. Instead of using a test container for each connector that requires a database, we will use a CloudSQL database for each connector. Similarly to the test containers, each CloudSQL database will be used for only one test case, and it will be deleted once the test case completes.
+
+It's important to understand how the different components communicate when running on stage.
+
+![Stage network setup](stage_network_setup.png)
+
+- It is possible to communicate with the `CloudSQL Instance` via both its private IP and its public IP.
+- The same `CloudSQL Instance` is used for all the tests, but each test case will create its own databases inside this instance.
+- We run the acceptance tests from an `AWS Test Runner` (EC2 instance), which is behind Tailscale, so it can communicate with the CloudSQL instance using its private IP. We need to be able to access the CloudSQL instance from the test runners since the tests will access these databases to validate their content. 
+- The only IPs that are allowed to connect to the CloudSQL instance via its public IP are the ones that belong to stage Dataplanes (both `GCP Dataplane` and `AWS Dataplane`). Note that this is not a workaround for the sake of our tests; it is the same setup that real users have.
+
+
diff --git a/airbyte-test-utils/build.gradle.kts b/airbyte-test-utils/build.gradle.kts
index f35125c69b1..c85d5a828f1 100644
--- a/airbyte-test-utils/build.gradle.kts
+++ b/airbyte-test-utils/build.gradle.kts
@@ -1,39 +1,44 @@
 plugins {
-    id("io.airbyte.gradle.jvm.lib")
-    id("io.airbyte.gradle.publish")
+  id("io.airbyte.gradle.jvm.lib")
+  id("io.airbyte.gradle.publish")
 }
 
 configurations.all {
-    exclude( group = "io.micronaut.jaxrs")
-    exclude( group = "io.micronaut.sql")
+  exclude(group = "io.micronaut.jaxrs")
+  exclude(group = "io.micronaut.sql")
 
-    resolutionStrategy {
-        // Force to avoid(updated version(brought in transitively from Micronaut)
-        force(libs.platform.testcontainers.postgresql)
-    }
+  resolutionStrategy {
+    // Force to avoid an updated version brought in transitively from Micronaut
+    force(libs.platform.testcontainers.postgresql)
+  }
 }
 
 dependencies {
-    api(project(":airbyte-db:db-lib"))
-    implementation(project(":airbyte-commons"))
-    implementation(project(":airbyte-api"))
-    implementation(project(":airbyte-commons-temporal"))
-    implementation(project(":airbyte-commons-worker"))
+  api(project(":airbyte-db:db-lib"))
+  api(project(":airbyte-db:jooq"))
+  api(project(":airbyte-config:config-models"))
+  api(project(":airbyte-config:config-persistence"))
+  implementation(project(":airbyte-commons"))
+  implementation(project(":airbyte-api"))
+  implementation(project(":airbyte-commons-temporal"))
+  implementation(project(":airbyte-commons-worker"))
 
-    implementation(libs.bundles.kubernetes.client)
-    implementation(libs.bundles.flyway)
-    implementation(libs.temporal.sdk)
+  implementation(libs.bundles.kubernetes.client)
+  implementation(libs.bundles.flyway)
+  implementation(libs.temporal.sdk)
+  implementation(libs.google.cloud.api.client)
+  implementation(libs.google.cloud.sqladmin)
 
-    api(libs.junit.jupiter.api)
+  api(libs.junit.jupiter.api)
 
-    // Mark as compile only(to avoid leaking transitively to connectors
-    compileOnly(libs.platform.testcontainers.postgresql)
+  // Mark as compile only to avoid leaking transitively to connectors
+  compileOnly(libs.platform.testcontainers.postgresql)
 
-    testImplementation(libs.platform.testcontainers.postgresql)
+  testImplementation(libs.platform.testcontainers.postgresql)
 
-    testRuntimeOnly(libs.junit.jupiter.engine)
-    testImplementation(libs.bundles.junit)
-    testImplementation(libs.assertj.core)
+  testRuntimeOnly(libs.junit.jupiter.engine)
+  testImplementation(libs.bundles.junit)
+  testImplementation(libs.assertj.core)
 
-    testImplementation(libs.junit.pioneer)
+  testImplementation(libs.junit.pioneer)
 }
diff --git a/airbyte-test-utils/readme.md b/airbyte-test-utils/readme.md
deleted file mode 100644
index f75ba4e74b0..00000000000
--- a/airbyte-test-utils/readme.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# airbyte-test-utils
-
-Shared Java code for executing TestContainers and other helpers. 
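To make the per-test-case database lifecycle described in the `airbyte-test-utils` README above concrete, here is a minimal sketch of how a test case might provision and tear down its database with the `CloudSqlDatabaseProvisioner` introduced later in this change. The class, its method signatures, and the `GCP_PROJECT_ID`/`CLOUD_SQL_INSTANCE_ID` environment variables are taken from this diff; the `StageDatabaseLifecycleSketch` wrapper is illustrative only, and the sketch assumes GCP Application Default Credentials are available to the test runner.

```java
import io.airbyte.test.utils.CloudSqlDatabaseProvisioner;
import java.util.UUID;

public class StageDatabaseLifecycleSketch {

  public static void main(final String[] args) throws Exception {
    // The harness reads these from the environment (see assignEnvVars below).
    final String projectId = System.getenv("GCP_PROJECT_ID");
    final String instanceId = System.getenv("CLOUD_SQL_INSTANCE_ID");

    // Uses Application Default Credentials to talk to the CloudSQL Admin API.
    final CloudSqlDatabaseProvisioner provisioner = new CloudSqlDatabaseProvisioner();

    // One database per test case, with a random suffix to avoid collisions
    // when tests run in parallel against the shared stage instance.
    final String databaseName = provisioner.createDatabase(
        projectId, instanceId, "acceptance_test_" + UUID.randomUUID());
    try {
      // ... connect via the instance's private IP and run the test case ...
    } finally {
      // Mirror the test-container behavior: drop the database once the test completes.
      provisioner.deleteDatabase(projectId, instanceId, databaseName);
    }
  }

}
```

The `acceptance_test_` prefix and random suffix mirror `generateRandomCloudSqlDatabaseName()` in `AcceptanceTestHarness`, which is what prevents name collisions when test cases run concurrently against the one shared stage instance.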
diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AcceptanceTestHarness.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AcceptanceTestHarness.java index f497fb8366f..05179fc9830 100644 --- a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AcceptanceTestHarness.java +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AcceptanceTestHarness.java @@ -103,8 +103,6 @@ import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.test.container.AirbyteTestContainer; -import io.fabric8.kubernetes.client.DefaultKubernetesClient; -import io.fabric8.kubernetes.client.KubernetesClient; import io.temporal.client.WorkflowClient; import io.temporal.serviceclient.WorkflowServiceStubs; import java.io.File; @@ -114,6 +112,7 @@ import java.net.URISyntaxException; import java.net.UnknownHostException; import java.nio.file.Path; +import java.security.GeneralSecurityException; import java.sql.SQLException; import java.time.Duration; import java.time.Instant; @@ -156,6 +155,7 @@ public class AcceptanceTestHarness { private static final Logger LOGGER = LoggerFactory.getLogger(AcceptanceTestHarness.class); + private static final UUID DEFAULT_ORGANIZATION_ID = UUID.fromString("00000000-0000-0000-0000-000000000000"); private static final String DOCKER_COMPOSE_FILE_NAME = "docker-compose.yaml"; // assume env file is one directory level up from airbyte-tests. private static final File ENV_FILE = Path.of(System.getProperty("user.dir")).getParent().resolve(".env").toFile(); @@ -179,9 +179,6 @@ public class AcceptanceTestHarness { private static final String SOURCE_USERNAME = "sourceusername"; public static final String SOURCE_PASSWORD = "hunter2"; public static final String PUBLIC_SCHEMA_NAME = "public"; - public static final String STAGING_SCHEMA_NAME = "staging"; - public static final String COOL_EMPLOYEES_TABLE_NAME = "cool_employees"; - public static final String AWESOME_PEOPLE_TABLE_NAME = "awesome_people"; public static final String PUBLIC = "public"; private static final String DEFAULT_POSTGRES_INIT_SQL_FILE = "postgres_init.sql"; @@ -191,6 +188,8 @@ public class AcceptanceTestHarness { public static final int MAX_TRIES = 5; public static final int MAX_ALLOWED_SECOND_PER_RUN = 120; + private static final String CLOUD_SQL_DATABASE_PREFIX = "acceptance_test_"; + // NOTE: we include `INCOMPLETE` here because the job may still retry; see // https://docs.airbyte.com/understanding-airbyte/jobs/. 
public static final Set IN_PROGRESS_JOB_STATUSES = Set.of(JobStatus.PENDING, JobStatus.INCOMPLETE, JobStatus.RUNNING); @@ -203,6 +202,7 @@ public class AcceptanceTestHarness { private static boolean isMac; private static boolean useExternalDeployment; private static boolean ensureCleanSlate; + private CloudSqlDatabaseProvisioner cloudSqlDatabaseProvisioner; /** * When the acceptance tests are run against a local instance of docker-compose or KUBE then these @@ -211,6 +211,9 @@ public class AcceptanceTestHarness { */ private PostgreSQLContainer sourcePsql; private PostgreSQLContainer destinationPsql; + private String sourceDatabaseName; + private String destinationDatabaseName; + private AirbyteTestContainer airbyteTestContainer; private AirbyteApiClient apiClient; @@ -218,8 +221,6 @@ public class AcceptanceTestHarness { private final UUID defaultWorkspaceId; private final String postgresSqlInitFile; - private KubernetesClient kubernetesClient; - private final List sourceIds = Lists.newArrayList(); private final List connectionIds = Lists.newArrayList(); private final List destinationIds = Lists.newArrayList(); @@ -227,11 +228,13 @@ public class AcceptanceTestHarness { private final List sourceDefinitionIds = Lists.newArrayList(); private DataSource sourceDataSource; private DataSource destinationDataSource; - private String postgresPassword; - public KubernetesClient getKubernetesClient() { - return kubernetesClient; - } + private String gcpProjectId; + private String cloudSqlInstanceId; + private String cloudSqlInstanceUsername; + private String cloudSqlInstancePassword; + private String cloudSqlInstancePrivateIp; + private String cloudSqlInstancePublicIp; public void removeConnection(final UUID connection) { connectionIds.remove(connection); @@ -241,7 +244,7 @@ public AcceptanceTestHarness(final AirbyteApiClient apiClient, final WebBackendApi webBackendApi, final UUID defaultWorkspaceId, final String postgresSqlInitFile) - throws URISyntaxException, IOException, InterruptedException { + throws URISyntaxException, IOException, InterruptedException, GeneralSecurityException { // reads env vars to assign static variables assignEnvVars(); this.apiClient = apiClient; @@ -260,12 +263,16 @@ public AcceptanceTestHarness(final AirbyteApiClient apiClient, destinationPsql = new PostgreSQLContainer(DESTINATION_POSTGRES_IMAGE_NAME); destinationPsql.start(); - } - - if (isKube && !isGke) { - // TODO(mfsiega-airbyte): get the Kube client to work with GKE tests. We don't use it yet but we - // will want to someday. - kubernetesClient = new DefaultKubernetesClient(); + } else { + this.cloudSqlDatabaseProvisioner = new CloudSqlDatabaseProvisioner(); + sourceDatabaseName = cloudSqlDatabaseProvisioner.createDatabase( + gcpProjectId, + cloudSqlInstanceId, + generateRandomCloudSqlDatabaseName()); + destinationDatabaseName = cloudSqlDatabaseProvisioner.createDatabase( + gcpProjectId, + cloudSqlInstanceId, + generateRandomCloudSqlDatabaseName()); } // by default use airbyte deployment governed by a test container. 
@@ -289,7 +296,7 @@ public AcceptanceTestHarness(final AirbyteApiClient apiClient, } public AcceptanceTestHarness(final AirbyteApiClient apiClient, final WebBackendApi webBackendApi, final UUID defaultWorkspaceId) - throws URISyntaxException, IOException, InterruptedException { + throws URISyntaxException, IOException, InterruptedException, GeneralSecurityException { this(apiClient, webBackendApi, defaultWorkspaceId, DEFAULT_POSTGRES_INIT_SQL_FILE); } @@ -314,9 +321,17 @@ public void stopDbAndContainers() { public void setup() throws SQLException, URISyntaxException, IOException, ApiException { if (isGke) { // Prepare the database data sources. - LOGGER.info("postgresPassword: {}", postgresPassword); - sourceDataSource = GKEPostgresConfig.getSourceDataSource(postgresPassword); - destinationDataSource = GKEPostgresConfig.getDestinationDataSource(postgresPassword); + LOGGER.info("postgresPassword: {}", cloudSqlInstancePassword); + sourceDataSource = GKEPostgresConfig.getDataSource( + cloudSqlInstanceUsername, + cloudSqlInstancePassword, + cloudSqlInstancePrivateIp, + sourceDatabaseName); + destinationDataSource = GKEPostgresConfig.getDataSource( + cloudSqlInstanceUsername, + cloudSqlInstancePassword, + cloudSqlInstancePrivateIp, + destinationDatabaseName); // seed database. GKEPostgresConfig.runSqlScript(Path.of(MoreResources.readResourceAsFile(postgresSqlInitFile).toURI()), getSourceDatabase()); } else { @@ -324,16 +339,18 @@ public void setup() throws SQLException, URISyntaxException, IOException, ApiExc sourceDataSource = Databases.createDataSource(sourcePsql); destinationDataSource = Databases.createDataSource(destinationPsql); - } - // Pinning Postgres destination version - final DestinationDefinitionRead postgresDestDef = getPostgresDestinationDefinition(); - if (!postgresDestDef.getDockerImageTag().equals(POSTGRES_DESTINATION_CONNECTOR_VERSION)) { - LOGGER.info("Setting postgres destination connector to version {}...", POSTGRES_DESTINATION_CONNECTOR_VERSION); - try { - updateDestinationDefinitionVersion(postgresDestDef.getDestinationDefinitionId(), POSTGRES_DESTINATION_CONNECTOR_VERSION); - } catch (final ApiException e) { - LOGGER.error("Error while updating destination definition version", e); + // Pinning Postgres destination version. This doesn't work on GKE since the + // airbyte-cron will revert this change. On GKE we are pinning the version by + // adding an entry to the scoped_configuration table. 
+ final DestinationDefinitionRead postgresDestDef = getPostgresDestinationDefinition(); + if (!postgresDestDef.getDockerImageTag().equals(POSTGRES_DESTINATION_CONNECTOR_VERSION)) { + LOGGER.info("Setting postgres destination connector to version {}...", POSTGRES_DESTINATION_CONNECTOR_VERSION); + try { + updateDestinationDefinitionVersion(postgresDestDef.getDestinationDefinitionId(), POSTGRES_DESTINATION_CONNECTOR_VERSION); + } catch (final ApiException e) { + LOGGER.error("Error while updating destination definition version", e); + } } } } @@ -362,6 +379,15 @@ public void cleanup() { if (isGke) { DataSourceFactory.close(sourceDataSource); DataSourceFactory.close(destinationDataSource); + + cloudSqlDatabaseProvisioner.deleteDatabase( + gcpProjectId, + cloudSqlInstanceId, + sourceDatabaseName); + cloudSqlDatabaseProvisioner.deleteDatabase( + gcpProjectId, + cloudSqlInstanceId, + destinationDatabaseName); } else { destinationPsql.stop(); sourcePsql.stop(); @@ -432,9 +458,12 @@ private void assignEnvVars() { && System.getenv("USE_EXTERNAL_DEPLOYMENT").equalsIgnoreCase("true"); ensureCleanSlate = System.getenv("ENSURE_CLEAN_SLATE") != null && System.getenv("ENSURE_CLEAN_SLATE").equalsIgnoreCase("true"); - postgresPassword = System.getenv("POSTGRES_PASSWORD") != null - ? System.getenv("POSTGRES_PASSWORD") - : "admin123"; + gcpProjectId = System.getenv("GCP_PROJECT_ID"); + cloudSqlInstanceId = System.getenv("CLOUD_SQL_INSTANCE_ID"); + cloudSqlInstanceUsername = System.getenv("CLOUD_SQL_INSTANCE_USERNAME"); + cloudSqlInstancePassword = System.getenv("CLOUD_SQL_INSTANCE_PASSWORD"); + cloudSqlInstancePrivateIp = System.getenv("CLOUD_SQL_INSTANCE_PRIVATE_IP"); + cloudSqlInstancePublicIp = System.getenv("CLOUD_SQL_INSTANCE_PUBLIC_IP"); } private WorkflowClient getWorkflowClient() { @@ -767,25 +796,29 @@ public List retrieveRecordsFromDatabase(final Database database, final } public JsonNode getSourceDbConfig() { - return getDbConfig(sourcePsql, false, false, Type.SOURCE); + return getDbConfig(sourcePsql, false, false, sourceDatabaseName); } public JsonNode getDestinationDbConfig() { - return getDbConfig(destinationPsql, false, true, Type.DESTINATION); + return getDbConfig(destinationPsql, false, true, destinationDatabaseName); } public JsonNode getDestinationDbConfigWithHiddenPassword() { - return getDbConfig(destinationPsql, true, true, Type.DESTINATION); + return getDbConfig(destinationPsql, true, true, destinationDatabaseName); } public JsonNode getDbConfig(final PostgreSQLContainer psql, final boolean hiddenPassword, final boolean withSchema, - final Type connectorType) { + final String databaseName) { try { final Map dbConfig = - (isKube && isGke) ? GKEPostgresConfig.dbConfig(connectorType, hiddenPassword ? null : postgresPassword, withSchema) - : localConfig(psql, hiddenPassword, withSchema); + (isKube && isGke) ? GKEPostgresConfig.dbConfig( + hiddenPassword ? 
null : cloudSqlInstancePassword, + withSchema, + cloudSqlInstanceUsername, + cloudSqlInstancePublicIp, + databaseName) : localConfig(psql, hiddenPassword, withSchema); final var config = Jsons.jsonNode(dbConfig); LOGGER.info("Using db config: {}", Jsons.toPrettyString(config)); return config; @@ -1213,7 +1246,7 @@ public void createWorkspaceWithId(UUID workspaceId) throws Exception { .createWorkspaceIfNotExist(new WorkspaceCreateWithId() .id(workspaceId) .email("acceptance-tests@airbyte.io") - .name("Airbyte Acceptance Tests" + UUID.randomUUID())), + .name("Airbyte Acceptance Tests" + UUID.randomUUID()).organizationId(DEFAULT_ORGANIZATION_ID)), "create workspace", 10, FINAL_INTERVAL_SECS, MAX_TRIES); } @@ -1227,14 +1260,6 @@ public StreamStatusReadList getStreamStatuses(UUID connectionId, Long jobId, Int "get stream statuses", JITTER_MAX_INTERVAL_SECS, FINAL_INTERVAL_SECS, MAX_TRIES); } - /** - * Connector type. - */ - public enum Type { - SOURCE, - DESTINATION - } - public void setIncrementalAppendSyncMode(final AirbyteCatalog airbyteCatalog, final List cursorField) { airbyteCatalog.getStreams().forEach(stream -> { stream.getConfig().syncMode(SyncMode.INCREMENTAL) @@ -1303,4 +1328,8 @@ public void compareCatalog(AirbyteCatalog actual) { assertEquals(expected, actual); } + private static String generateRandomCloudSqlDatabaseName() { + return CLOUD_SQL_DATABASE_PREFIX + UUID.randomUUID(); + } + } diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/BaseConfigDatabaseTest.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/BaseConfigDatabaseTest.java index f88a60aaf05..b915348d49e 100644 --- a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/BaseConfigDatabaseTest.java +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/BaseConfigDatabaseTest.java @@ -4,6 +4,10 @@ package io.airbyte.test.utils; +import static io.airbyte.db.instance.configs.jooq.generated.Tables.PERMISSION; + +import io.airbyte.config.Permission; +import io.airbyte.config.persistence.PermissionPersistenceHelper; import io.airbyte.db.Database; import io.airbyte.db.factory.DSLContextFactory; import io.airbyte.db.factory.DataSourceFactory; @@ -11,9 +15,11 @@ import io.airbyte.db.init.DatabaseInitializationException; import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; import io.airbyte.db.instance.configs.ConfigsDatabaseTestProvider; +import io.airbyte.db.instance.configs.jooq.generated.Tables; import io.airbyte.db.instance.test.TestDatabaseProviders; import java.io.IOException; import java.sql.SQLException; +import java.time.OffsetDateTime; import javax.sql.DataSource; import org.flywaydb.core.Flyway; import org.jooq.DSLContext; @@ -136,6 +142,7 @@ protected static void truncateAllTables() throws SQLException { state, stream_reset, stream_refreshes, + stream_generation, \"user\", user_invitation, sso_config, @@ -145,6 +152,30 @@ protected static void truncateAllTables() throws SQLException { """)); } + /** + * This method used to live on PermissionPersistence, but it was deprecated in favor of the new + * PermissionService backed by a Micronaut Data repository. Many tests depended on this method, so + * rather than keep it in the deprecated PermissionPersistence, a simplified version is implemented + * here for tests only. 
+ */ + protected static void writePermission(final Permission permission) throws SQLException { + final io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType permissionType = + PermissionPersistenceHelper.convertConfigPermissionTypeToJooqPermissionType(permission.getPermissionType()); + + final OffsetDateTime timestamp = OffsetDateTime.now(); + + database.query(ctx -> ctx + .insertInto(Tables.PERMISSION) + .set(PERMISSION.ID, permission.getPermissionId()) + .set(PERMISSION.PERMISSION_TYPE, permissionType) + .set(PERMISSION.USER_ID, permission.getUserId()) + .set(PERMISSION.WORKSPACE_ID, permission.getWorkspaceId()) + .set(PERMISSION.ORGANIZATION_ID, permission.getOrganizationId()) + .set(PERMISSION.CREATED_AT, timestamp) + .set(PERMISSION.UPDATED_AT, timestamp) + .execute()); + } + private static void createDbContainer() { container = new PostgreSQLContainer<>("postgres:13-alpine") .withDatabaseName("airbyte") diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/CloudSqlDatabaseProvisioner.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/CloudSqlDatabaseProvisioner.java new file mode 100644 index 00000000000..3057ec467fc --- /dev/null +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/CloudSqlDatabaseProvisioner.java @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.test.utils; + +import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; +import com.google.api.client.googleapis.json.GoogleJsonResponseException; +import com.google.api.client.json.gson.GsonFactory; +import com.google.api.services.sqladmin.SQLAdmin; +import com.google.api.services.sqladmin.model.Database; +import com.google.api.services.sqladmin.model.Operation; +import com.google.auth.http.HttpCredentialsAdapter; +import com.google.auth.oauth2.GoogleCredentials; +import com.google.common.annotations.VisibleForTesting; +import java.io.IOException; +import java.security.GeneralSecurityException; +import java.util.concurrent.Callable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Creates and deletes GCP CloudSQL databases. 
+ */
+public class CloudSqlDatabaseProvisioner {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(CloudSqlDatabaseProvisioner.class);
+
+  private static final String SQL_OPERATION_DONE_STATUS = "DONE";
+  private static final int DEFAULT_MAX_POLL_ATTEMPTS = 10;
+  private static final int DEFAULT_MAX_API_CALL_ATTEMPTS = 10;
+  private static final String APPLICATION_NAME = "cloud-sql-database-provisioner";
+
+  private final SQLAdmin sqlAdmin;
+  private final int maxPollAttempts;
+  private final int maxApiCallAttempts;
+
+  @VisibleForTesting
+  CloudSqlDatabaseProvisioner(SQLAdmin sqlAdmin, int maxPollAttempts, int maxApiCallAttempts) {
+    this.sqlAdmin = sqlAdmin;
+    this.maxPollAttempts = maxPollAttempts;
+    this.maxApiCallAttempts = maxApiCallAttempts;
+  }
+
+  public CloudSqlDatabaseProvisioner() throws GeneralSecurityException, IOException {
+    this.sqlAdmin = new SQLAdmin.Builder(
+        GoogleNetHttpTransport.newTrustedTransport(),
+        GsonFactory.getDefaultInstance(),
+        new HttpCredentialsAdapter(GoogleCredentials.getApplicationDefault())).setApplicationName(APPLICATION_NAME).build();
+    this.maxPollAttempts = DEFAULT_MAX_POLL_ATTEMPTS;
+    this.maxApiCallAttempts = DEFAULT_MAX_API_CALL_ATTEMPTS;
+  }
+
+  public synchronized String createDatabase(String projectId, String instanceId, String databaseName) throws IOException, InterruptedException {
+    Database database = new Database().setName(databaseName);
+    Operation operation = runWithRetry(() -> sqlAdmin.databases().insert(projectId, instanceId, database).execute());
+    pollOperation(projectId, operation.getName());
+
+    return databaseName;
+  }
+
+  public synchronized void deleteDatabase(String projectId, String instanceId, String databaseName) throws IOException, InterruptedException {
+    Operation operation = runWithRetry(() -> sqlAdmin.databases().delete(projectId, instanceId, databaseName).execute());
+    pollOperation(projectId, operation.getName());
+  }
+
+  /**
+   * Database operations are asynchronous. This method polls the operation until it is done.
+   */
+  @VisibleForTesting
+  void pollOperation(String projectId, String operationName) throws IOException, InterruptedException {
+    int pollAttempts = 0;
+    while (pollAttempts < maxPollAttempts) {
+      Operation operation = sqlAdmin.operations().get(projectId, operationName).execute();
+      if (operation.getStatus().equals(SQL_OPERATION_DONE_STATUS)) {
+        return;
+      }
+      Thread.sleep(1000);
+      pollAttempts += 1;
+    }
+
+    throw new RuntimeException("Operation " + operationName + " did not complete successfully");
+  }
+
+  /**
+   * If another operation is already in progress on the same CloudSQL instance, the API returns a
+   * 409 error. This method retries API calls that fail with a 409 error.
+   */
+  @VisibleForTesting
+  Operation runWithRetry(Callable<Operation> callable) throws InterruptedException {
+    int attempts = 0;
+    while (attempts < maxApiCallAttempts) {
+      try {
+        return callable.call();
+      } catch (Exception e) {
+        if (e instanceof GoogleJsonResponseException && ((GoogleJsonResponseException) e).getStatusCode() == 409) {
+          attempts++;
+          LOGGER.info("Attempt " + attempts + " failed with 409 error");
+          LOGGER.info("Exception thrown by API: " + e.getMessage());
+          Thread.sleep(1000);
+        } else {
+          throw new RuntimeException(e);
+        }
+      }
+    }
+    throw new RuntimeException("Max retries exceeded. 
Could not complete operation."); + } + +} diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/GKEPostgresConfig.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/GKEPostgresConfig.java index 11d789d4787..9f18f42ba71 100644 --- a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/GKEPostgresConfig.java +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/GKEPostgresConfig.java @@ -8,7 +8,6 @@ import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.JdbcUtils; -import io.airbyte.test.utils.AcceptanceTestHarness.Type; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Path; @@ -20,33 +19,24 @@ /** * This class is used to provide information related to the test databases for running the - * {@link AcceptanceTestHarness} on GKE. We launch 2 postgres databases in GKE as pods which act as - * source and destination and the tests run against them. In order to allow the test instance to - * connect to these databases we use port forwarding Refer - * tools/bin/gke-kube-acceptance-test/acceptance_test_kube_gke.sh for more info + * {@link AcceptanceTestHarness} on GKE. */ class GKEPostgresConfig { - // NOTE: these two hosts refer to services named `acceptance-test-postgres-[source|destination]-svc` - // in the `acceptance-tests` namespace, running in the same cluster as the check/discover/sync - // workers. - // - // The namespace here needs to be in sync with the namespaces created in - // tools/bin/gke-kube-acceptance-test/acceptance_test_kube_gke.sh. - private static final String SOURCE_HOST = "acceptance-test-postgres-source-svc.acceptance-tests.svc.cluster.local"; - private static final String DESTINATION_HOST = "acceptance-test-postgres-destination-svc.acceptance-tests.svc.cluster.local"; private static final Integer PORT = 5432; - private static final String USERNAME = "postgresadmin"; - private static final String DB = "postgresdb"; - static Map dbConfig(final Type connectorType, final String password, final boolean withSchema) { + static Map dbConfig(final String password, + final boolean withSchema, + String username, + String cloudSqlInstanceIp, + String databaseName) { final Map dbConfig = new HashMap<>(); - dbConfig.put(JdbcUtils.HOST_KEY, connectorType == Type.SOURCE ? SOURCE_HOST : DESTINATION_HOST); + dbConfig.put(JdbcUtils.HOST_KEY, cloudSqlInstanceIp); dbConfig.put(JdbcUtils.PASSWORD_KEY, password == null ? "**********" : password); dbConfig.put(JdbcUtils.PORT_KEY, PORT); - dbConfig.put(JdbcUtils.DATABASE_KEY, DB); - dbConfig.put(JdbcUtils.USERNAME_KEY, USERNAME); + dbConfig.put(JdbcUtils.DATABASE_KEY, databaseName); + dbConfig.put(JdbcUtils.USERNAME_KEY, username); dbConfig.put(JdbcUtils.JDBC_URL_PARAMS, "connectTimeout=60"); if (withSchema) { @@ -56,20 +46,9 @@ static Map dbConfig(final Type connectorType, final String passw return dbConfig; } - static DataSource getDestinationDataSource(final String password) { - // Note: we set the connection timeout to 30s. The underlying Hikari default is also 30s -- - // https://github.com/brettwooldridge/HikariCP#frequently-used -- but our DataSourceFactory - // overrides that to MAX_INTEGER unless we explicitly specify it. 
- return DataSourceFactory.create(USERNAME, password, DatabaseDriver.POSTGRESQL.getDriverClassName(), - "jdbc:postgresql://localhost:4000/postgresdb", Map.of(PGProperty.CONNECT_TIMEOUT.getName(), "60")); - } - - static DataSource getSourceDataSource(final String password) { - // Note: we set the connection timeout to 30s. The underlying Hikari default is also 30s -- - // https://github.com/brettwooldridge/HikariCP#frequently-used -- but our DataSourceFactory - // overrides that to MAX_INTEGER unless we explicitly specify it. - return DataSourceFactory.create(USERNAME, password, DatabaseDriver.POSTGRESQL.getDriverClassName(), - "jdbc:postgresql://localhost:2000/postgresdb", Map.of(PGProperty.CONNECT_TIMEOUT.getName(), "60")); + static DataSource getDataSource(final String username, final String password, String cloudSqlInstanceIp, String databaseName) { + return DataSourceFactory.create(username, password, DatabaseDriver.POSTGRESQL.getDriverClassName(), + "jdbc:postgresql://" + cloudSqlInstanceIp + ":5432/" + databaseName, Map.of(PGProperty.CONNECT_TIMEOUT.getName(), "60")); } static void runSqlScript(final Path scriptFilePath, final Database db) throws SQLException, IOException { diff --git a/airbyte-test-utils/src/test/java/io/airbyte/test/utils/CloudSqlDatabaseProvisionerTest.java b/airbyte-test-utils/src/test/java/io/airbyte/test/utils/CloudSqlDatabaseProvisionerTest.java new file mode 100644 index 00000000000..8bf975eb163 --- /dev/null +++ b/airbyte-test-utils/src/test/java/io/airbyte/test/utils/CloudSqlDatabaseProvisionerTest.java @@ -0,0 +1,141 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.test.utils; + +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.google.api.client.googleapis.json.GoogleJsonResponseException; +import com.google.api.services.sqladmin.SQLAdmin; +import com.google.api.services.sqladmin.SQLAdmin.Operations; +import com.google.api.services.sqladmin.model.Database; +import com.google.api.services.sqladmin.model.Operation; +import java.io.IOException; +import java.util.concurrent.Callable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class CloudSqlDatabaseProvisionerTest { + + private static final String PROJECT_ID = "project-id"; + private static final String INSTANCE_ID = "instance-id"; + private static final String DATABASE_NAME = "database-name"; + private static final int POLL_ATTEMPTS = 2; + private static final int API_CALL_ATTEMPTS = 2; + + @Mock + private SQLAdmin sqlAdmin; + @Mock + private SQLAdmin.Databases databases; + @Mock + private Operations operations; + @Mock + private Operations.Get getOperation; + @Mock + private SQLAdmin.Databases.Insert insertDatabase; + @Mock + private SQLAdmin.Databases.Delete deleteDatabase; + @Mock + private Operation operation; + @Mock + private GoogleJsonResponseException googleJsonResponseException; + @Mock + private Callable callable; + + private CloudSqlDatabaseProvisioner provisioner; + + @BeforeEach + void setUp() { + provisioner = new 
CloudSqlDatabaseProvisioner(sqlAdmin, POLL_ATTEMPTS, API_CALL_ATTEMPTS); + } + + @Test + void testCreateDatabase() throws IOException, InterruptedException { + mockOperation(); + when(operation.getStatus()).thenReturn("DONE"); + when(sqlAdmin.databases()).thenReturn(databases); + when(databases.insert(anyString(), anyString(), any(Database.class))).thenReturn(insertDatabase); + when(insertDatabase.execute()).thenReturn(operation); + when(operation.getName()).thenReturn("operation-name"); + + provisioner.createDatabase(PROJECT_ID, INSTANCE_ID, DATABASE_NAME); + + verify(databases).insert(PROJECT_ID, INSTANCE_ID, new Database().setName(DATABASE_NAME)); + verify(insertDatabase).execute(); + } + + @Test + void testDeleteDatabase() throws IOException, InterruptedException { + mockOperation(); + when(operation.getStatus()).thenReturn("DONE"); + when(sqlAdmin.databases()).thenReturn(databases); + when(databases.delete(anyString(), anyString(), anyString())).thenReturn(deleteDatabase); + when(deleteDatabase.execute()).thenReturn(operation); + when(operation.getName()).thenReturn("operation-name"); + + provisioner.deleteDatabase(PROJECT_ID, INSTANCE_ID, DATABASE_NAME); + + verify(databases).delete(PROJECT_ID, INSTANCE_ID, DATABASE_NAME); + verify(deleteDatabase).execute(); + } + + @Test + void testPollOperationNotDoneAfterMaxStatusChecks() throws IOException { + mockOperation(); + when(operation.getStatus()) + .thenReturn("PENDING") + .thenReturn("RUNNING") + .thenReturn("DONE"); + assertThrows(RuntimeException.class, () -> provisioner.pollOperation(PROJECT_ID, "operation-name")); + } + + @Test + void testPollOperationDoneBeforeMaxStatusChecks() throws IOException { + mockOperation(); + when(operation.getStatus()) + .thenReturn("PENDING") + .thenReturn("DONE"); + assertDoesNotThrow(() -> provisioner.pollOperation(PROJECT_ID, "operation-name")); + } + + private void mockOperation() throws IOException { + when(sqlAdmin.operations()).thenReturn(operations); + when(operations.get(eq(PROJECT_ID), anyString())).thenReturn(getOperation); + when(getOperation.execute()).thenReturn(operation); + } + + @Test + void testMoreThanMaxAttempts() throws Exception { + when(callable.call()).thenThrow(googleJsonResponseException); + when(googleJsonResponseException.getStatusCode()).thenReturn(409); + assertThrows(RuntimeException.class, () -> provisioner.runWithRetry(callable)); + } + + @Test + void testNoRetry() throws Exception { + when(callable.call()).thenThrow(new RuntimeException()); + assertThrows(RuntimeException.class, () -> provisioner.runWithRetry(callable)); + } + + @Test + void testOneRetry() throws Exception { + when(googleJsonResponseException.getStatusCode()).thenReturn(409); + when(callable.call()) + .thenThrow(googleJsonResponseException) + .thenReturn(null); + + assertDoesNotThrow(() -> provisioner.runWithRetry(callable)); + } + +} diff --git a/airbyte-test-utils/stage_network_setup.png b/airbyte-test-utils/stage_network_setup.png new file mode 100644 index 00000000000..4ac000d9b58 Binary files /dev/null and b/airbyte-test-utils/stage_network_setup.png differ diff --git a/airbyte-tests/build.gradle.kts b/airbyte-tests/build.gradle.kts index 9360db83edf..89804a67fab 100644 --- a/airbyte-tests/build.gradle.kts +++ b/airbyte-tests/build.gradle.kts @@ -1,38 +1,38 @@ import org.gradle.api.tasks.testing.logging.TestLogEvent plugins { - id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.jvm.lib") } @Suppress("UnstableApiUsage") testing { - registerTestSuite(name="acceptanceTest", 
type="acceptance-test", dirName="test-acceptance") { - implementation.add(project()) - - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-auth")) - implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-tests")) - implementation(project(":airbyte-test-utils")) - implementation(project(":airbyte-commons-worker")) - - - - implementation(libs.failsafe) - implementation(libs.jackson.databind) - implementation(libs.okhttp) - implementation(libs.temporal.sdk) - implementation(libs.platform.testcontainers.postgresql) - implementation(libs.postgresql) - - // needed for fabric to connect to k8s. - runtimeOnly(libs.bouncycastle.bcpkix) - runtimeOnly(libs.bouncycastle.bcprov) - } + registerTestSuite(name = "acceptanceTest", type = "acceptance-test", dirName = "test-acceptance") { + implementation.add(project()) + + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-auth")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-tests")) + implementation(project(":airbyte-test-utils")) + implementation(project(":airbyte-commons-worker")) + + + + implementation(libs.failsafe) + implementation(libs.jackson.databind) + implementation(libs.okhttp) + implementation(libs.temporal.sdk) + implementation(libs.platform.testcontainers.postgresql) + implementation(libs.postgresql) + + // needed for fabric to connect to k8s. + runtimeOnly(libs.bouncycastle.bcpkix) + runtimeOnly(libs.bouncycastle.bcprov) + } } /** @@ -44,62 +44,62 @@ testing { */ @Suppress("UnstableApiUsage") fun registerTestSuite(name: String, type: String, dirName: String, deps: JvmComponentDependencies.() -> Unit) { - testing { - suites.register(name) { - testType.set(type) - - deps(dependencies) - - sources { - java { - setSrcDirs(listOf("src/$dirName/java")) - } - resources { - setSrcDirs(listOf("src/$dirName/resources")) - } - } - - targets.all { - testTask.configure { - - val parallelExecutionEnabled = System.getenv()["TESTS_PARALLEL_EXECUTION_ENABLED"] ?: "true" - systemProperties = mapOf("junit.jupiter.execution.parallel.enabled" to parallelExecutionEnabled) - - testLogging { - events = setOf(TestLogEvent.PASSED, TestLogEvent.FAILED, TestLogEvent.STARTED, TestLogEvent.SKIPPED) - } - shouldRunAfter(suites.named("test")) - // Ensure they re-run since these are integration tests. 
- outputs.upToDateWhen { false } - } - } + testing { + suites.register(name) { + testType.set(type) + + deps(dependencies) + + sources { + java { + setSrcDirs(listOf("src/$dirName/java")) + } + resources { + setSrcDirs(listOf("src/$dirName/resources")) } + } - configurations.named("${name}Implementation") { - extendsFrom(configurations.getByName("testImplementation")) + targets.all { + testTask.configure { + + val parallelExecutionEnabled = System.getenv()["TESTS_PARALLEL_EXECUTION_ENABLED"] ?: "true" + systemProperties = mapOf("junit.jupiter.execution.parallel.enabled" to parallelExecutionEnabled) + + testLogging { + events = setOf(TestLogEvent.PASSED, TestLogEvent.FAILED, TestLogEvent.STARTED, TestLogEvent.SKIPPED) + } + shouldRunAfter(suites.named("test")) + // Ensure they re-run since these are integration tests. + outputs.upToDateWhen { false } } + } } + + configurations.named("${name}Implementation") { + extendsFrom(configurations.getByName("testImplementation")) + } + } } configurations.configureEach { - // Temporary hack to avoid dependency conflicts - exclude(group="io.micronaut.email") + // Temporary hack to avoid dependency conflicts + exclude(group = "io.micronaut.email") } dependencies { - implementation(project(":airbyte-api")) - implementation(project(":airbyte-container-orchestrator")) + implementation(project(":airbyte-api")) + implementation(project(":airbyte-container-orchestrator")) - testImplementation("com.airbyte:api:0.39.2") + testImplementation("com.airbyte:api:0.39.2") - implementation(libs.bundles.kubernetes.client) - implementation(libs.platform.testcontainers) + implementation(libs.bundles.kubernetes.client) + implementation(libs.platform.testcontainers) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) } tasks.withType().configureEach { - duplicatesStrategy = DuplicatesStrategy.INCLUDE + duplicatesStrategy = DuplicatesStrategy.INCLUDE } diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AcceptanceTestsResources.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AcceptanceTestsResources.java index 0cbb62fa5aa..72d7dcc2e67 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AcceptanceTestsResources.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AcceptanceTestsResources.java @@ -5,6 +5,7 @@ package io.airbyte.test.acceptance; import static io.airbyte.commons.auth.AirbyteAuthConstants.X_AIRBYTE_AUTH_HEADER; +import static io.airbyte.config.persistence.OrganizationPersistence.DEFAULT_ORGANIZATION_ID; import static io.airbyte.test.acceptance.AcceptanceTestConstants.IS_ENTERPRISE_TRUE; import static io.airbyte.test.acceptance.AcceptanceTestConstants.X_AIRBYTE_AUTH_HEADER_TEST_CLIENT_VALUE; @@ -42,6 +43,7 @@ import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; +import java.security.GeneralSecurityException; import java.sql.SQLException; import java.time.Duration; import java.util.Collections; @@ -276,7 +278,7 @@ void runSmallSyncForAWorkspaceId(final UUID workspaceId) throws Exception { StreamStatusJobType.SYNC); } - void init() throws URISyntaxException, IOException, InterruptedException, ApiException { + void init() throws URISyntaxException, IOException, InterruptedException, ApiException, GeneralSecurityException { // 
TODO(mfsiega-airbyte): clean up and centralize the way we do config. final boolean isGke = System.getenv().containsKey(IS_GKE); // Set up the API client. @@ -318,7 +320,8 @@ void init() throws URISyntaxException, IOException, InterruptedException, ApiExc // NOTE: the API client can't create workspaces in GKE deployments, so we need to provide a // workspace ID in that environment. workspaceId = System.getenv(AIRBYTE_ACCEPTANCE_TEST_WORKSPACE_ID) == null ? apiClient.getWorkspaceApi() - .createWorkspace(new WorkspaceCreate().email("acceptance-tests@airbyte.io").name("Airbyte Acceptance Tests" + UUID.randomUUID())) + .createWorkspace(new WorkspaceCreate().email("acceptance-tests@airbyte.io").name("Airbyte Acceptance Tests" + UUID.randomUUID()) + .organizationId(DEFAULT_ORGANIZATION_ID)) .getWorkspaceId() : UUID.fromString(System.getenv(AIRBYTE_ACCEPTANCE_TEST_WORKSPACE_ID)); LOGGER.info("workspaceId = " + workspaceId); diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java index 7331bcb527b..0d270edc9b0 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java @@ -7,45 +7,25 @@ import static io.airbyte.commons.auth.AirbyteAuthConstants.X_AIRBYTE_AUTH_HEADER; import static io.airbyte.test.acceptance.AcceptanceTestConstants.IS_ENTERPRISE_TRUE; import static io.airbyte.test.acceptance.AcceptanceTestConstants.X_AIRBYTE_AUTH_HEADER_TEST_CLIENT_VALUE; -import static io.airbyte.test.utils.AcceptanceTestHarness.COLUMN_ID; import static io.airbyte.test.utils.AcceptanceTestHarness.PUBLIC_SCHEMA_NAME; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; import io.airbyte.api.client.AirbyteApiClient; import io.airbyte.api.client.invoker.generated.ApiClient; -import io.airbyte.api.client.invoker.generated.ApiException; import io.airbyte.api.client.model.generated.AirbyteCatalog; -import io.airbyte.api.client.model.generated.AirbyteStream; -import io.airbyte.api.client.model.generated.AttemptInfoRead; -import io.airbyte.api.client.model.generated.ConnectionState; import io.airbyte.api.client.model.generated.DestinationDefinitionIdRequestBody; import io.airbyte.api.client.model.generated.DestinationDefinitionRead; -import io.airbyte.api.client.model.generated.DestinationRead; import io.airbyte.api.client.model.generated.DestinationSyncMode; import io.airbyte.api.client.model.generated.JobInfoRead; -import io.airbyte.api.client.model.generated.JobRead; -import io.airbyte.api.client.model.generated.JobStatus; import io.airbyte.api.client.model.generated.SourceDefinitionIdRequestBody; import io.airbyte.api.client.model.generated.SourceDefinitionRead; import io.airbyte.api.client.model.generated.SourceDiscoverSchemaRead; -import io.airbyte.api.client.model.generated.SourceRead; import io.airbyte.api.client.model.generated.StreamStatusJobType; import io.airbyte.api.client.model.generated.StreamStatusRunState; import io.airbyte.api.client.model.generated.SyncMode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.MoreBooleans; import 
io.airbyte.test.utils.AcceptanceTestHarness; import io.airbyte.test.utils.Asserts; import io.airbyte.test.utils.TestConnectionCreate; -import java.io.IOException; import java.net.URI; -import java.net.URISyntaxException; -import java.util.List; import java.util.Optional; import java.util.UUID; import org.junit.jupiter.api.AfterAll; @@ -90,7 +70,7 @@ class AdvancedAcceptanceTests { private static final String AIRBYTE_SERVER_HOST = Optional.ofNullable(System.getenv("AIRBYTE_SERVER_HOST")).orElse("http://localhost:8001"); @BeforeAll - static void init() throws URISyntaxException, IOException, InterruptedException, ApiException { + static void init() throws Exception { final URI url = new URI(AIRBYTE_SERVER_HOST); final var apiClient = new AirbyteApiClient( new ApiClient().setScheme(url.getScheme()) @@ -152,147 +132,10 @@ void testManualSync() throws Exception { Asserts.assertSourceAndDestinationDbRawRecordsInSync(testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, conn.getNamespaceFormat(), false, false); - LOGGER.info("===== before stream"); Asserts.assertStreamStatuses(testHarness, workspaceId, connectionId, connectionSyncRead.getJob().getId(), StreamStatusRunState.COMPLETE, StreamStatusJobType.SYNC); testHarness.cleanup(); } - @Test - void testCheckpointing() throws Exception { - final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition(workspaceId); - final DestinationDefinitionRead destinationDefinition = testHarness.createE2eDestinationDefinition(workspaceId); - - final SourceRead source = testHarness.createSource( - "E2E Test Source -" + UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, "EXCEPTION_AFTER_N") - .put("throw_after_n_records", 100) - .build())); - - final DestinationRead destination = testHarness.createDestination( - "E2E Test Destination -" + UUID.randomUUID(), - workspaceId, - destinationDefinition.getDestinationDefinitionId(), - Jsons.jsonNode(ImmutableMap.of(TYPE, "SILENT"))); - - final UUID sourceId = source.getSourceId(); - final UUID destinationId = destination.getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - final AirbyteStream stream = catalog.getStreams().get(0).getStream(); - - assertEquals( - Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL), - stream.getSupportedSyncModes()); - assertTrue(MoreBooleans.isTruthy(stream.getSourceDefinedCursor())); - - final SyncMode syncMode = SyncMode.INCREMENTAL; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND; - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(syncMode) - .cursorField(List.of(COLUMN_ID)) - .selected(true) - .destinationSyncMode(destinationSyncMode)); - final UUID connectionId = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()).build()) - .getConnectionId(); - final JobInfoRead connectionSyncRead1 = testHarness.syncConnection(connectionId); - - // wait to get out of pending. - final JobRead runningJob = testHarness.waitWhileJobHasStatus(connectionSyncRead1.getJob(), Sets.newHashSet(JobStatus.PENDING)); - // wait to get out of running. - testHarness.waitWhileJobHasStatus(runningJob, Sets.newHashSet(JobStatus.RUNNING)); - // now cancel it so that we freeze state! 
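
The removed checkpointing test relies on an E2E source that deliberately fails after a fixed record count, so the cancelled sync is guaranteed to leave checkpointed state behind. A hedged sketch of the source config it built; the key names mirror the removed code, and Jackson is already on the test classpath per the build file above:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;

// Builds the same kind of config the removed test passed to createSource:
// a source that throws after 100 records, dying with state already emitted.
public class E2eSourceConfigSketch {

  public static void main(String[] args) {
    final ObjectMapper mapper = new ObjectMapper();
    final JsonNode sourceConfig = mapper.valueToTree(Map.of(
        "type", "EXCEPTION_AFTER_N",
        "throw_after_n_records", 100));
    System.out.println(sourceConfig); // key order may vary
  }
}
```
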
- try { - testHarness.cancelSync(connectionSyncRead1.getJob().getId()); - } catch (final Exception e) { - LOGGER.error("error:", e); - } - - final ConnectionState connectionState = testHarness.waitForConnectionState(connectionId); - - /* - * the source is set to emit a state message every 5th message. because of the multithreaded nature, - * we can't guarantee exactly what checkpoint will be registered. what we can do is send enough - * messages to make sure that we check point at least once. - */ - assertNotNull(connectionState.getState()); - assertTrue(connectionState.getState().get(COLUMN1).isInt()); - LOGGER.info("state value: {}", connectionState.getState().get(COLUMN1).asInt()); - assertTrue(connectionState.getState().get(COLUMN1).asInt() > 0); - assertEquals(0, connectionState.getState().get(COLUMN1).asInt() % 5); - } - - // verify that when the worker uses backpressure from pipes that no records are lost. - @Test - void testBackpressure() throws Exception { - final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition(workspaceId); - final DestinationDefinitionRead destinationDefinition = testHarness.createE2eDestinationDefinition(workspaceId); - - final SourceRead source = testHarness.createSource( - "E2E Test Source -" + UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, "INFINITE_FEED") - .put("max_records", 5000) - .build())); - - final DestinationRead destination = testHarness.createDestination( - "E2E Test Destination -" + UUID.randomUUID(), - workspaceId, - destinationDefinition.getDestinationDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, "THROTTLED") - .put("millis_per_record", 1) - .build())); - - final UUID sourceId = source.getSourceId(); - final UUID destinationId = destination.getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - catalog.getStreams().forEach(s -> s.getConfig().selected(true)); - - final UUID connectionId = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()).build()) - .getConnectionId(); - final JobInfoRead connectionSyncRead1 = testHarness.syncConnection(connectionId); - - // wait to get out of pending. - final JobRead runningJob = testHarness.waitWhileJobHasStatus(connectionSyncRead1.getJob(), Sets.newHashSet(JobStatus.PENDING)); - // wait to get out of running. 
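
The checkpointing assertions above reduce to one invariant: since the source emits a state message every fifth record, whatever cursor survives the cancel must be a positive multiple of five, even though its exact value is timing-dependent. A plain-Java restatement of that invariant:

```java
// The exact frozen cursor is nondeterministic, but it must be checkpoint-aligned.
public class CheckpointInvariantSketch {

  static void assertCheckpointConsistent(final int cursorValue) {
    if (cursorValue <= 0 || cursorValue % 5 != 0) {
      throw new AssertionError("unexpected checkpoint value: " + cursorValue);
    }
  }

  public static void main(String[] args) {
    assertCheckpointConsistent(95);  // plausible frozen cursor
    assertCheckpointConsistent(100); // also fine; 97 would fail
  }
}
```
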
- testHarness.waitWhileJobHasStatus(runningJob, Sets.newHashSet(JobStatus.RUNNING)); - - final JobInfoRead jobInfo = testHarness.getJobInfoRead(runningJob.getId()); - final AttemptInfoRead attemptInfoRead = jobInfo.getAttempts().get(jobInfo.getAttempts().size() - 1); - assertNotNull(attemptInfoRead); - - int expectedMessageNumber = 0; - final int max = 10_000; - for (final String logLine : attemptInfoRead.getLogs().getLogLines()) { - if (expectedMessageNumber > max) { - break; - } - - if (logLine.contains("received record: ") && logLine.contains("\"type\": \"RECORD\"")) { - assertTrue( - logLine.contains(String.format("\"column1\": \"%s\"", expectedMessageNumber)), - String.format("Expected %s but got: %s", expectedMessageNumber, logLine)); - expectedMessageNumber++; - } - } - } - } diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ApiAcceptanceTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ApiAcceptanceTests.java index ba8df5af9b7..85000ecf2fb 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ApiAcceptanceTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ApiAcceptanceTests.java @@ -37,9 +37,6 @@ import io.airbyte.test.utils.AcceptanceTestHarness; import io.airbyte.test.utils.Asserts; import io.airbyte.test.utils.TestConnectionCreate; -import java.io.IOException; -import java.net.URISyntaxException; -import java.sql.SQLException; import java.util.List; import java.util.Set; import java.util.UUID; @@ -81,7 +78,7 @@ class ApiAcceptanceTests { private UUID workspaceId; @BeforeEach - void setup() throws SQLException, URISyntaxException, IOException, ApiException, InterruptedException { + void setup() throws Exception { testResources = new AcceptanceTestsResources(); testResources.init(); testHarness = testResources.getTestHarness(); diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ConnectorBuilderTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ConnectorBuilderTests.java index f1f51799a75..59eb5320cce 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ConnectorBuilderTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ConnectorBuilderTests.java @@ -4,6 +4,7 @@ package io.airbyte.test.acceptance; +import static io.airbyte.config.persistence.OrganizationPersistence.DEFAULT_ORGANIZATION_ID; import static org.junit.jupiter.api.Assertions.assertEquals; import com.fasterxml.jackson.core.JsonProcessingException; @@ -32,10 +33,7 @@ import io.airbyte.test.utils.AcceptanceTestHarness; import io.airbyte.test.utils.Databases; import io.airbyte.test.utils.SchemaTableNamePair; -import java.io.IOException; import java.net.URI; -import java.net.URISyntaxException; -import java.sql.SQLException; import java.util.Optional; import java.util.Set; import java.util.UUID; @@ -166,7 +164,7 @@ public class ConnectorBuilderTests { } @BeforeAll - static void init() throws URISyntaxException, IOException, InterruptedException, ApiException, SQLException { + static void init() throws Exception { final URI url = new URI(AIRBYTE_SERVER_HOST); final var underlyingApiClient = new ApiClient().setScheme(url.getScheme()) .setHost(url.getHost()) @@ -174,7 +172,8 @@ static void init() throws URISyntaxException, IOException, InterruptedException, .setBasePath("/api"); apiClient = new AirbyteApiClient(underlyingApiClient); workspaceId = apiClient.getWorkspaceApi() - .createWorkspace(new 
WorkspaceCreate().email("acceptance-tests@airbyte.io").name("Airbyte Acceptance Tests" + UUID.randomUUID().toString())) + .createWorkspace(new WorkspaceCreate().email("acceptance-tests@airbyte.io").name("Airbyte Acceptance Tests" + UUID.randomUUID().toString()) + .organizationId(DEFAULT_ORGANIZATION_ID)) .getWorkspaceId(); testHarness = new AcceptanceTestHarness(apiClient, null, workspaceId); testHarness.setup(); diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java index 7c74b999435..4813efa54c1 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java @@ -4,6 +4,7 @@ package io.airbyte.test.acceptance; +import static io.airbyte.config.persistence.OrganizationPersistence.DEFAULT_ORGANIZATION_ID; import static org.junit.Assert.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -38,6 +39,7 @@ import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; +import java.security.GeneralSecurityException; import java.util.List; import java.util.Optional; import java.util.UUID; @@ -114,7 +116,7 @@ private void createTestConnections() throws Exception { .build()); } - void init() throws ApiException, URISyntaxException, IOException, InterruptedException { + void init() throws ApiException, URISyntaxException, IOException, InterruptedException, GeneralSecurityException { // TODO(mfsiega-airbyte): clean up and centralize the way we do config. final boolean isGke = System.getenv().containsKey(IS_GKE); // Set up the API client. @@ -139,7 +141,8 @@ void init() throws ApiException, URISyntaxException, IOException, InterruptedExc final var webBackendApi = new WebBackendApi(underlyingWebBackendApiClient); final UUID workspaceId = System.getenv().get(AIRBYTE_ACCEPTANCE_TEST_WORKSPACE_ID) == null ? 
apiClient.getWorkspaceApi() - .createWorkspace(new WorkspaceCreate().email("acceptance-tests@airbyte.io").name("Airbyte Acceptance Tests" + UUID.randomUUID())) + .createWorkspace(new WorkspaceCreate().email("acceptance-tests@airbyte.io").name("Airbyte Acceptance Tests" + UUID.randomUUID()) + .organizationId(DEFAULT_ORGANIZATION_ID)) .getWorkspaceId() : UUID.fromString(System.getenv().get(AIRBYTE_ACCEPTANCE_TEST_WORKSPACE_ID)); diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java index a5c029a1fb6..afd56a5d870 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java @@ -4,51 +4,33 @@ package io.airbyte.test.acceptance; -import static io.airbyte.test.acceptance.AcceptanceTestsResources.DISABLE_TEMPORAL_TESTS_IN_GKE; import static io.airbyte.test.acceptance.AcceptanceTestsResources.FINAL_INTERVAL_SECS; -import static io.airbyte.test.acceptance.AcceptanceTestsResources.GERALT; import static io.airbyte.test.acceptance.AcceptanceTestsResources.IS_GKE; import static io.airbyte.test.acceptance.AcceptanceTestsResources.JITTER_MAX_INTERVAL_SECS; import static io.airbyte.test.acceptance.AcceptanceTestsResources.KUBE; import static io.airbyte.test.acceptance.AcceptanceTestsResources.MAX_TRIES; -import static io.airbyte.test.acceptance.AcceptanceTestsResources.STATE_AFTER_SYNC_ONE; -import static io.airbyte.test.acceptance.AcceptanceTestsResources.STATE_AFTER_SYNC_TWO; import static io.airbyte.test.acceptance.AcceptanceTestsResources.TRUE; import static io.airbyte.test.acceptance.AcceptanceTestsResources.WITHOUT_SCD_TABLE; import static io.airbyte.test.acceptance.AcceptanceTestsResources.WITH_SCD_TABLE; import static io.airbyte.test.utils.AcceptanceTestHarness.COLUMN_ID; import static io.airbyte.test.utils.AcceptanceTestHarness.COLUMN_NAME; -import static io.airbyte.test.utils.AcceptanceTestHarness.POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION; import static io.airbyte.test.utils.AcceptanceTestHarness.PUBLIC; import static io.airbyte.test.utils.AcceptanceTestHarness.PUBLIC_SCHEMA_NAME; import static io.airbyte.test.utils.AcceptanceTestHarness.STREAM_NAME; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Sets; import io.airbyte.api.client.AirbyteApiClient; import io.airbyte.api.client.invoker.generated.ApiException; import io.airbyte.api.client.model.generated.AirbyteCatalog; -import io.airbyte.api.client.model.generated.AttemptInfoRead; -import io.airbyte.api.client.model.generated.AttemptStatus; import io.airbyte.api.client.model.generated.CheckConnectionRead; import io.airbyte.api.client.model.generated.ConnectionRead; import io.airbyte.api.client.model.generated.ConnectionScheduleData; -import io.airbyte.api.client.model.generated.ConnectionScheduleDataBasicSchedule; -import io.airbyte.api.client.model.generated.ConnectionScheduleDataBasicSchedule.TimeUnitEnum; import 
io.airbyte.api.client.model.generated.ConnectionScheduleDataCron; import io.airbyte.api.client.model.generated.ConnectionScheduleType; -import io.airbyte.api.client.model.generated.ConnectionState; -import io.airbyte.api.client.model.generated.DestinationDefinitionRead; -import io.airbyte.api.client.model.generated.DestinationRead; import io.airbyte.api.client.model.generated.DestinationSyncMode; -import io.airbyte.api.client.model.generated.JobConfigType; import io.airbyte.api.client.model.generated.JobInfoRead; import io.airbyte.api.client.model.generated.JobRead; import io.airbyte.api.client.model.generated.JobStatus; @@ -58,27 +40,19 @@ import io.airbyte.api.client.model.generated.SourceDiscoverSchemaRead; import io.airbyte.api.client.model.generated.SourceRead; import io.airbyte.api.client.model.generated.StreamDescriptor; -import io.airbyte.api.client.model.generated.StreamState; import io.airbyte.api.client.model.generated.StreamStatusJobType; import io.airbyte.api.client.model.generated.StreamStatusRunState; import io.airbyte.api.client.model.generated.SyncMode; import io.airbyte.api.client.model.generated.WebBackendConnectionUpdate; import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.temporal.scheduling.state.WorkflowState; import io.airbyte.db.Database; import io.airbyte.test.utils.AcceptanceTestHarness; import io.airbyte.test.utils.Asserts; import io.airbyte.test.utils.Databases; import io.airbyte.test.utils.SchemaTableNamePair; import io.airbyte.test.utils.TestConnectionCreate; -import io.temporal.client.WorkflowQueryException; -import java.io.IOException; -import java.net.URISyntaxException; -import java.sql.SQLException; import java.time.Duration; -import java.util.Collections; import java.util.List; -import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; @@ -87,7 +61,6 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInfo; import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; @@ -139,7 +112,7 @@ class SyncAcceptanceTests { UUID workspaceId; @BeforeEach - void setup() throws SQLException, URISyntaxException, IOException, ApiException, InterruptedException { + void setup() throws Exception { testResources = new AcceptanceTestsResources(); testResources.init(); testHarness = testResources.getTestHarness(); @@ -351,393 +324,6 @@ void testIncrementalSync() throws Exception { testResources.runIncrementalSyncForAWorkspaceId(workspaceId); } - @Test - @DisabledIfEnvironmentVariable(named = IS_GKE, - matches = TRUE, - disabledReason = DISABLE_TEMPORAL_TESTS_IN_GKE) - void testUpdateConnectionWhenWorkflowUnreachable() throws Exception { - // This test only covers the specific behavior of updating a connection that does not have an - // underlying temporal workflow. - // Also, this test doesn't verify correctness of the schedule update applied, as adding the ability - // to query a workflow for its current - // schedule is out of scope for the issue (https://github.com/airbytehq/airbyte/issues/11215). This - // test just ensures that the underlying workflow - // is running after the update method is called. 
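
The contract that comment describes can be summarized in a few lines. This sketch uses a hypothetical interface, not the real harness API: querying a terminated workflow fails, while an update request recreates the workflow so later queries succeed.

```java
import java.util.UUID;

// Hypothetical stand-in for the acceptance-test harness.
public class WorkflowRecoverySketch {

  interface Harness {
    void terminateTemporalWorkflow(UUID connectionId);
    boolean workflowIsReachable(UUID connectionId);
    void updateConnectionSchedule(UUID connectionId, String schedule);
  }

  static void verifyUpdateRepairsWorkflow(final Harness harness, final UUID connectionId) {
    harness.terminateTemporalWorkflow(connectionId);
    if (harness.workflowIsReachable(connectionId)) {
      throw new AssertionError("query on a terminated workflow should fail");
    }
    harness.updateConnectionSchedule(connectionId, "every 1 hour"); // any schedule triggers the repair
    if (!harness.workflowIsReachable(connectionId)) {
      throw new AssertionError("updateConnection should have recreated the workflow");
    }
  }
}
```
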
- final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(SyncMode.INCREMENTAL) - .selected(true) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .primaryKey(List.of(List.of(COLUMN_NAME)))); - - LOGGER.info("Testing connection update when temporal is in a terminal state"); - final UUID connectionId = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()) - .getConnectionId(); - - testHarness.terminateTemporalWorkflow(connectionId); - // This should throw an exception since the workflow is terminated and does not exist. - assertThrows(WorkflowQueryException.class, () -> testHarness.getWorkflowState(connectionId)); - - // we should still be able to update the connection when the temporal workflow is in this state - testHarness.updateConnectionSchedule( - connectionId, - ConnectionScheduleType.BASIC, - new ConnectionScheduleData().basicSchedule(new ConnectionScheduleDataBasicSchedule().timeUnit(TimeUnitEnum.HOURS).units(1L))); - // updateConnection should recreate the workflow. Querying for it should not throw an exception. - assertDoesNotThrow(() -> testHarness.getWorkflowState(connectionId)); - } - - @Test - @DisabledIfEnvironmentVariable(named = IS_GKE, - matches = TRUE, - disabledReason = DISABLE_TEMPORAL_TESTS_IN_GKE) - void testManualSyncRepairsWorkflowWhenWorkflowUnreachable() throws Exception { - // This test only covers the specific behavior of updating a connection that does not have an - // underlying temporal workflow. 
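
The test body below waits a fixed 500 ms for the workflow to be recreated before asserting. When such fixed waits turn flaky, a bounded polling helper is the usual alternative; a small sketch, with helper names of my own invention:

```java
import java.time.Duration;
import java.util.function.BooleanSupplier;

// Poll a condition with a deadline instead of sleeping a fixed interval.
public class AwaitConditionSketch {

  static void awaitTrue(final BooleanSupplier condition, final Duration timeout) throws InterruptedException {
    final long deadline = System.nanoTime() + timeout.toNanos();
    while (!condition.getAsBoolean()) {
      if (System.nanoTime() - deadline > 0) {
        throw new AssertionError("condition not met within " + timeout);
      }
      Thread.sleep(50); // poll interval
    }
  }

  public static void main(String[] args) throws InterruptedException {
    final long start = System.currentTimeMillis();
    awaitTrue(() -> System.currentTimeMillis() - start >= 200, Duration.ofSeconds(2));
  }
}
```
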
- final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition( - workspaceId); - final SourceRead source = testHarness.createSource( - E2E_TEST_SOURCE + UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, INFINITE_FEED) - .put(MAX_RECORDS, 5000) - .put(MESSAGE_INTERVAL, 100) - .build())); - final UUID sourceId = source.getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(SyncMode.INCREMENTAL) - .selected(true) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .primaryKey(List.of(List.of(COLUMN_NAME)))); - - LOGGER.info("Testing manual sync when temporal is in a terminal state"); - final UUID connectionId = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()) - .getConnectionId(); - - LOGGER.info("Starting first manual sync"); - final JobInfoRead firstJobInfo = testHarness.syncConnection(connectionId); - LOGGER.info("Terminating workflow during first sync"); - testHarness.terminateTemporalWorkflow(connectionId); - - LOGGER.info("Submitted another manual sync"); - testHarness.syncConnection(connectionId); - - LOGGER.info("Waiting for workflow to be recreated..."); - Thread.sleep(500); - - final WorkflowState workflowState = testHarness.getWorkflowState(connectionId); - assertTrue(workflowState.isRunning()); - assertTrue(workflowState.isSkipScheduling()); - - // verify that the first manual sync was marked as failed - final JobInfoRead terminatedJobInfo = testHarness.getJobInfoRead(firstJobInfo.getJob().getId()); - assertEquals(JobStatus.FAILED, terminatedJobInfo.getJob().getStatus()); - } - - @Test - @DisabledIfEnvironmentVariable(named = IS_GKE, - matches = TRUE, - disabledReason = DISABLE_TEMPORAL_TESTS_IN_GKE) - void testResetConnectionRepairsWorkflowWhenWorkflowUnreachable() throws Exception { - // This test only covers the specific behavior of updating a connection that does not have an - // underlying temporal workflow. 
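
The two reset tests removed below pin down the reset semantics: a reset issued against a terminated workflow still yields a RESET_CONNECTION job, and a reset issued mid-sync cancels the running sync job first. A toy model of those transitions; the enum and record are stand-ins for the generated API types, not the real models:

```java
import java.util.ArrayList;
import java.util.List;

// Illustrative only: a reset cancels the in-flight sync and enqueues a reset job.
public class ResetSemanticsSketch {

  enum JobStatus { RUNNING, CANCELLED }

  record Job(String configType, JobStatus status) {}

  static List<Job> reset(final Job runningSync) {
    final List<Job> jobs = new ArrayList<>();
    jobs.add(new Job(runningSync.configType(), JobStatus.CANCELLED));
    jobs.add(new Job("reset_connection", JobStatus.RUNNING));
    return jobs;
  }

  public static void main(String[] args) {
    System.out.println(reset(new Job("sync", JobStatus.RUNNING)));
  }
}
```
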
- final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - catalog.getStreams().forEach(s -> s.getConfig() - .selected(true) - .syncMode(SyncMode.INCREMENTAL) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .primaryKey(List.of(List.of(COLUMN_NAME)))); - - LOGGER.info("Testing reset connection when temporal is in a terminal state"); - final UUID connectionId = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()) - .getConnectionId(); - - testHarness.terminateTemporalWorkflow(connectionId); - - final JobInfoRead jobInfoRead = testHarness.resetConnection(connectionId); - assertEquals(JobConfigType.RESET_CONNECTION, jobInfoRead.getJob().getConfigType()); - } - - @Test - void testResetCancelsRunningSync() throws Exception { - final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition( - workspaceId); - - final SourceRead source = testHarness.createSource( - E2E_TEST_SOURCE + UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, INFINITE_FEED) - .put(MESSAGE_INTERVAL, 1000) - .put(MAX_RECORDS, Duration.ofMinutes(5).toSeconds()) - .build())); - - final UUID sourceId = source.getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - final SyncMode srcSyncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode dstSyncMode = DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(srcSyncMode).selected(true).destinationSyncMode(dstSyncMode)); - final UUID connectionId = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()) - .getConnectionId(); - final JobInfoRead connectionSyncRead = testHarness.syncConnection(connectionId); - - // wait to get out of PENDING - final JobRead jobRead = testHarness.waitWhileJobHasStatus(connectionSyncRead.getJob(), Set.of(JobStatus.PENDING)); - assertEquals(JobStatus.RUNNING, jobRead.getStatus()); - - // send reset request while sync is still running - final JobInfoRead jobInfoRead = testHarness.resetConnection(connectionId); - - // verify that sync job was cancelled - final JobRead connectionSyncReadAfterReset = testHarness.getJobInfoRead(connectionSyncRead.getJob().getId()).getJob(); - assertEquals(JobStatus.CANCELLED, connectionSyncReadAfterReset.getStatus()); - - // wait for the reset to complete - testHarness.waitForSuccessfulJob(jobInfoRead.getJob()); - // TODO enable once stream status for resets has been fixed - // testHarness.assertStreamStatuses(workspaceId, connectionId, StreamStatusRunState.COMPLETE, - // StreamStatusJobType.RESET); - } - - // TODO (Angel): Enable once we fix the docker compose tests - @Test - @EnabledIfEnvironmentVariable(named = KUBE, - matches = TRUE) - @DisabledIfEnvironmentVariable(named = IS_GKE, - matches = TRUE, - disabledReason = SLOW_TEST_IN_GKE) - void 
testSyncAfterUpgradeToPerStreamState(final TestInfo testInfo) throws Exception { - LOGGER.info("Starting {}", testInfo.getDisplayName()); - // create custom source so that we don't share the source that is also being used by other tests - // Set the source to a version that does not support per-stream state - SourceDefinitionRead postgresSourceDefinition = - testHarness.createPostgresSourceDefinition(workspaceId, - POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION); - final SourceRead customPostgres = - testHarness.createSource("custom postgres", workspaceId, - postgresSourceDefinition.getSourceDefinitionId(), testHarness.getSourceDbConfig()); - final UUID sourceId = customPostgres.getSourceId(); - final UUID customSourceDefinitionId = customPostgres.getSourceDefinitionId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - - // Fetch the current/most recent source definition version - final SourceDefinitionRead sourceDefinitionRead = - testHarness.getSourceDefinition(testHarness.getPostgresSourceDefinitionId()); - final String currentSourceDefintionVersion = sourceDefinitionRead.getDockerImageTag(); - - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(SyncMode.INCREMENTAL) - .selected(true) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(DestinationSyncMode.APPEND)); - final var conn = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()); - LOGGER.info("Beginning {} sync 1", testInfo.getDisplayName()); - - final var connectionId = conn.getConnectionId(); - final JobInfoRead connectionSyncRead1 = testHarness.syncConnection(connectionId); - testHarness.waitForSuccessfulJob(connectionSyncRead1.getJob()); - LOGGER.info(STATE_AFTER_SYNC_ONE, testHarness.getConnectionState(connectionId)); - - Asserts.assertSourceAndDestinationDbRawRecordsInSync( - testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, - conn.getNamespaceFormat(), - false, WITHOUT_SCD_TABLE); - - // Set source to a version that supports per-stream state - testHarness.updateSourceDefinitionVersion(customSourceDefinitionId, - currentSourceDefintionVersion); - LOGGER.info("Upgraded source connector per-stream state supported version {}.", - currentSourceDefintionVersion); - - // add new records and run again. - final Database src = testHarness.getSourceDatabase(); - final var dst = testHarness.getDestinationDatabase(); - // get contents of source before mutating records. - final List expectedRecords = testHarness.retrieveRecordsFromDatabase(src, STREAM_NAME); - expectedRecords.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 6).put(COLUMN_NAME, - GERALT).build())); - // add a new record - src.query(ctx -> ctx.execute("INSERT INTO id_and_name(id, name) VALUES(6, 'geralt')")); - // mutate a record that was already synced with out updating its cursor value. if we are actually - // full refreshing, this record will appear in the output and cause the test to fail. if we are, - // correctly, doing incremental, we will not find this value in the destination. 
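
That comment is the crux of the removed test: an incremental read only returns rows whose cursor exceeds the checkpoint, so a row mutated in place (same id) is invisible to the next sync while a freshly inserted row is picked up. A minimal sketch of that filtering logic, with hypothetical row data:

```java
import java.util.List;
import java.util.Map;

// The mutated id=2 row stays below the cursor and is never re-read;
// the inserted id=6 row is above it and is emitted.
public class IncrementalCursorSketch {

  static List<Map<String, Object>> incrementalRead(final List<Map<String, Object>> rows, final int cursor) {
    return rows.stream().filter(r -> (int) r.get("id") > cursor).toList();
  }

  public static void main(String[] args) {
    final List<Map<String, Object>> rows = List.of(
        Map.<String, Object>of("id", 2, "name", "yennefer"), // mutated after sync 1, cursor untouched
        Map.<String, Object>of("id", 6, "name", "geralt"));  // inserted after sync 1
    System.out.println(incrementalRead(rows, 5)); // only the id=6 row
  }
}
```
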
- src.query(ctx -> ctx.execute("UPDATE id_and_name SET name='yennefer' WHERE id=2")); - - LOGGER.info("Starting {} sync 2", testInfo.getDisplayName()); - final JobInfoRead connectionSyncRead2 = testHarness.syncConnection(connectionId); - testHarness.waitForSuccessfulJob(connectionSyncRead2.getJob()); - LOGGER.info(STATE_AFTER_SYNC_TWO, testHarness.getConnectionState(connectionId)); - - Asserts.assertRawDestinationContains(dst, expectedRecords, conn.getNamespaceFormat(), - STREAM_NAME); - - // reset back to no data. - LOGGER.info("Starting {} reset", testInfo.getDisplayName()); - final JobInfoRead jobInfoRead = testHarness.resetConnection(connectionId); - testHarness.waitWhileJobHasStatus(jobInfoRead.getJob(), - Sets.newHashSet(JobStatus.PENDING, JobStatus.RUNNING, JobStatus.INCOMPLETE, JobStatus.FAILED)); - // This is a band-aid to prevent some race conditions where the job status was updated but we may - // still be cleaning up some data in the reset table. This would be an argument for reworking the - // source of truth of the replication workflow state to be in DB rather than in Memory and - // serialized automagically by temporal - testHarness.waitWhileJobIsRunning(jobInfoRead.getJob(), Duration.ofMinutes(1)); - - LOGGER.info("state after reset: {}", testHarness.getConnectionState(connectionId)); - - Asserts.assertRawDestinationContains(dst, Collections.emptyList(), conn.getNamespaceFormat(), - STREAM_NAME); - - // sync one more time. verify it is the equivalent of a full refresh. - final String expectedState = - """ - { - "cursor":"6", - "version":2, - "state_type":"cursor_based", - "stream_name":"id_and_name", - "cursor_field":["id"], - "stream_namespace":"public", - "cursor_record_count":1}" - """; - LOGGER.info("Starting {} sync 3", testInfo.getDisplayName()); - final JobInfoRead connectionSyncRead3 = testHarness.syncConnection(connectionId); - testHarness.waitForSuccessfulJob(connectionSyncRead3.getJob()); - final ConnectionState state = testHarness.getConnectionState(connectionId); - LOGGER.info("state after sync 3: {}", state); - - Asserts.assertSourceAndDestinationDbRawRecordsInSync( - testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, - conn.getNamespaceFormat(), - false, WITHOUT_SCD_TABLE); - assertNotNull(state.getStreamState()); - assertEquals(1, state.getStreamState().size()); - final StreamState idAndNameState = state.getStreamState().get(0); - assertEquals(new StreamDescriptor().namespace(PUBLIC).name(STREAM_NAME), - idAndNameState.getStreamDescriptor()); - assertEquals(Jsons.deserialize(expectedState), idAndNameState.getStreamState()); - } - - @Test - @DisabledIfEnvironmentVariable(named = IS_GKE, - matches = TRUE, - disabledReason = SLOW_TEST_IN_GKE) - void testSyncAfterUpgradeToPerStreamStateWithNoNewData(final TestInfo testInfo) throws Exception { - LOGGER.info("Starting {}", testInfo.getDisplayName()); - // create custom source so that we don't share the source that is also being used by other tests - // Set the source to a version that does not support per-stream state - SourceDefinitionRead postgresSourceDefinition = testHarness.createPostgresSourceDefinition(workspaceId, POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION); - final SourceRead customPostgres = - testHarness.createSource("custom postgres", workspaceId, postgresSourceDefinition.getSourceDefinitionId(), testHarness.getSourceDbConfig()); - final UUID sourceId = customPostgres.getSourceId(); - final UUID customSourceDefinitionId = customPostgres.getSourceDefinitionId(); - 
final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - - // Fetch the current/most recent source definition version - final SourceDefinitionRead sourceDefinitionRead = testHarness.getSourceDefinition(testHarness.getPostgresSourceDefinitionId()); - final String currentSourceDefintionVersion = sourceDefinitionRead.getDockerImageTag(); - - // Set the source to a version that does not support per-stream state - LOGGER.info("Setting source connector to pre-per-stream state version {}...", - POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION); - testHarness.updateSourceDefinitionVersion(customSourceDefinitionId, POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION); - - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(SyncMode.INCREMENTAL) - .selected(true) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(DestinationSyncMode.APPEND)); - final var conn = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()); - LOGGER.info("Beginning {} sync 1", testInfo.getDisplayName()); - final var connectionId = conn.getConnectionId(); - final JobInfoRead connectionSyncRead1 = testHarness.syncConnection(connectionId); - testHarness.waitForSuccessfulJob(connectionSyncRead1.getJob()); - LOGGER.info(STATE_AFTER_SYNC_ONE, testHarness.getConnectionState(connectionId)); - - Asserts.assertSourceAndDestinationDbRawRecordsInSync( - testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, - conn.getNamespaceFormat(), - false, WITHOUT_SCD_TABLE); - - // Set source to a version that supports per-stream state - testHarness.updateSourceDefinitionVersion(customSourceDefinitionId, currentSourceDefintionVersion); - LOGGER.info("Upgraded source connector per-stream state supported version {}.", currentSourceDefintionVersion); - - // sync one more time. 
verify that nothing has been synced - LOGGER.info("Starting {} sync 2", testInfo.getDisplayName()); - final JobInfoRead connectionSyncRead2 = testHarness.syncConnection(connectionId); - testHarness.waitForSuccessfulJob(connectionSyncRead2.getJob()); - LOGGER.info(STATE_AFTER_SYNC_TWO, testHarness.getConnectionState(connectionId)); - - final JobInfoRead syncJob = testHarness.getJobInfoRead(connectionSyncRead2.getJob().getId()); - final Optional result = syncJob.getAttempts().stream() - .min((a, b) -> Long.compare(b.getAttempt().getEndedAt(), a.getAttempt().getEndedAt())); - - assertTrue(result.isPresent()); - assertEquals(0, result.get().getAttempt().getRecordsSynced()); - assertEquals(0, result.get().getAttempt().getTotalStats().getRecordsEmitted()); - Asserts.assertSourceAndDestinationDbRawRecordsInSync( - testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, - conn.getNamespaceFormat(), - false, WITHOUT_SCD_TABLE); - } - @Test @DisabledIfEnvironmentVariable(named = IS_GKE, matches = TRUE, @@ -994,70 +580,6 @@ void testIncrementalDedupeSyncRemoveOneColumn() throws Exception { testHarness.assertNormalizedDestinationContainsIdColumn(conn.getNamespaceFormat(), expectedNormalizedRecords); } - @Test - @Disabled - void testFailureTimeout() throws Exception { - final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition( - workspaceId); - final DestinationDefinitionRead destinationDefinition = testHarness.createE2eDestinationDefinition( - workspaceId); - - final SourceRead source = testHarness.createSource( - E2E_TEST_SOURCE + UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, INFINITE_FEED) - .put(MAX_RECORDS, 1000) - .put(MESSAGE_INTERVAL, 100) - .build())); - - // Destination fails after processing 5 messages, so the job should fail after the graceful close - // timeout of 1 minute - final DestinationRead destination = testHarness.createDestination( - "E2E Test Destination -" + UUID.randomUUID(), - workspaceId, - destinationDefinition.getDestinationDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, "FAILING") - .put("num_messages", 5) - .build())); - - final UUID sourceId = source.getSourceId(); - final UUID destinationId = destination.getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - - final UUID connectionId = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()) - .getConnectionId(); - - final JobInfoRead connectionSyncRead1 = testHarness.syncConnection(connectionId); - - // wait to get out of pending. - final JobRead runningJob = testHarness.waitWhileJobHasStatus(connectionSyncRead1.getJob(), Sets.newHashSet(JobStatus.PENDING)); - - // wait for job for max of 3 minutes, by which time the job attempt should have failed - testHarness.waitWhileJobHasStatus(runningJob, Sets.newHashSet(JobStatus.RUNNING), Duration.ofMinutes(3)); - - final JobInfoRead jobInfo = testHarness.getJobInfoRead(runningJob.getId()); - // Only look at the first attempt. It's possible that in the time between leaving RUNNING and - // retrieving the job info, we'll have started a new attempt. 
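
The removed failure-timeout test also encodes a useful cleanup pattern: assert on the first attempt only (later attempts may already exist by the time the job info is fetched), and cancel the job in a finally block so a failed assertion never leaves a job retrying. A sketch against a hypothetical interface, not the real harness API:

```java
// Illustrative assert-then-cancel pattern.
public class AssertThenCancelSketch {

  interface Jobs {
    String firstAttemptStatus(long jobId);
    void cancel(long jobId);
  }

  static void assertFirstAttemptFailed(final Jobs jobs, final long jobId) {
    try {
      if (!"FAILED".equals(jobs.firstAttemptStatus(jobId))) {
        throw new AssertionError("expected first attempt to fail");
      }
    } finally {
      jobs.cancel(jobId); // always stop the job, even when the assertion throws
    }
  }
}
```
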
- final AttemptInfoRead attemptInfoRead = jobInfo.getAttempts().get(0); - - // assert that the job attempt failed, and cancel the job regardless of status to prevent retries - try { - assertEquals(AttemptStatus.FAILED, attemptInfoRead.getAttempt().getStatus()); - } finally { - testHarness.cancelSync(runningJob.getId()); - } - } - static void assertDestinationDbEmpty(final Database dst) throws Exception { final Set destinationTables = Databases.listAllTables(dst); diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadBasicAcceptanceTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadBasicAcceptanceTests.java index 00a2828429f..572f662f8dd 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadBasicAcceptanceTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadBasicAcceptanceTests.java @@ -10,13 +10,9 @@ import static io.airbyte.test.acceptance.AcceptanceTestsResources.TRUE; import static org.junit.jupiter.api.Assertions.assertEquals; -import io.airbyte.api.client.invoker.generated.ApiException; import io.airbyte.api.client.model.generated.AirbyteCatalog; import io.airbyte.api.client.model.generated.CheckConnectionRead; import io.airbyte.api.client.model.generated.CheckConnectionRead.StatusEnum; -import java.io.IOException; -import java.net.URISyntaxException; -import java.sql.SQLException; import java.util.UUID; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; @@ -39,7 +35,7 @@ public class WorkloadBasicAcceptanceTests { static final UUID RUN_DISCOVER_WITH_WORKLOAD_WORKSPACE_ID = UUID.fromString("3851861d-ac0b-440c-bd60-408cf9e7fc0e"); @BeforeEach - void setup() throws SQLException, URISyntaxException, IOException, ApiException, InterruptedException { + void setup() throws Exception { testResources.init(); testResources.setup(); } diff --git a/airbyte-webapp/Dockerfile b/airbyte-webapp/Dockerfile index f29bf9ba3cc..b396496a25c 100644 --- a/airbyte-webapp/Dockerfile +++ b/airbyte-webapp/Dockerfile @@ -6,8 +6,12 @@ EXPOSE 8080 USER root COPY bin/build /usr/share/nginx/html -RUN find /usr/share/nginx/html -type d -exec chmod 755 '{}' \; -o -type f -exec chmod 644 '{}' \; -RUN chown -R nginx:nginx /usr/share/nginx/html + +RUN < // Cast to Map +val engines = parsedJson["engines"] as? Map<*, *> // Safely cast to Map if 'engines' exists +val pnpmVer = engines?.get("pnpm")?.toString()?.trim() // Extract 'pnpm' as String and trim + +/* +This array should contain a path to all configs that are common to most build tasks and +might affect them (i.e. if any of those files change we want to rerun most tasks) +*/ +val commonConfigs = listOf( + ".env", + ".env.production", + "package.json", + "pnpm-lock.yaml", + "tsconfig.json", + ".prettierrc.js" +) + +configure { + download = true + version = nodeVersion + pnpmVersion = pnpmVer + distBaseUrl = "https://nodejs.org/dist" +} + +tasks.named("pnpmInstall") { + /* + Add patches folder to inputs of pnpmInstall task, since it has pnpm-lock.yml as an output + thus wouldn't rerun in case a patch get changed + */ + inputs.dir("patches") +} + +// fileTree to watch node_modules, but exclude the .cache dir since that might have changes on every build +val nodeModules = fileTree("node_modules") { + exclude(".cache") +} + +/* +fileTree to watch the public dir but exclude the auto generated buildInfo.json. It's content is anyway a +content hash, depending on the other files. 
+*/
+val publicDir = fileTree("public") {
+  exclude("buildInfo.json")
+}
+
+tasks.register<PnpmTask>("pnpmBuild") {
+  dependsOn(tasks.named("pnpmInstall"))
+
+
+  environment.put("VERSION", rootProject.ext.get("version") as String)
+
+  args = listOf("build")
+
+  inputs.property("cloudEnv", System.getenv("WEBAPP_BUILD_CLOUD_ENV") ?: "")
+  inputs.files(commonConfigs)
+  inputs.files(nodeModules)
+  inputs.files(publicDir)
+  inputs.file(".eslintrc.js")
+  inputs.file(".stylelintrc")
+  inputs.file("orval.config.ts")
+  inputs.file("vite.config.mts")
+  inputs.file("index.html")
+  inputs.dir("scripts")
+  inputs.dir("src")
+
+  outputs.dir("build/app")
+}
+
+tasks.register<PnpmTask>("test") {
+  dependsOn(tasks.named("assemble"))
+
+  args = listOf("run", "test:ci")
+  inputs.files(commonConfigs)
+  inputs.file("jest.config.ts")
+  inputs.file("babel.config.js")
+  inputs.dir("src")
+
+  /*
+  The test task has no outputs, so we always treat the outputs as up to date
+  as long as the inputs have not changed
+  */
+  outputs.upToDateWhen { true }
+}
+
+tasks.register<PnpmTask>("e2etest") {
+  dependsOn(tasks.named("pnpmInstall"))
+
+  /*
+  If the cypressWebappKey property has been set from the outside (see tools/bin/e2e_test.sh),
+  we'll record the cypress session; otherwise we're not recording
+  */
+  val recordCypress = project.hasProperty("cypressWebappKey") && project.property("cypressWebappKey") as Boolean
+  if (recordCypress) {
+    environment.put("CYPRESS_KEY", project.property("cypressWebappKey") as String)
+    args = listOf("run", "cypress:ci:record")
+  } else {
+    args = listOf("run", "cypress:ci")
+  }
+
+  /*
+  Mark the outputs as never up to date, to ensure we always run the tests.
+  We want this because they are e2e tests and can depend on other factors e.g., external dependencies.
+  */
+  outputs.upToDateWhen { false }
+}
+
+tasks.register<PnpmTask>("cloudE2eTest") {
+  dependsOn(tasks.named("pnpmInstall"))
+  val recordCypress = project.hasProperty("cypressCloudWebappKey") && project.property("cypressCloudWebappKey") as Boolean
+  if (recordCypress) {
+    environment.put("CYPRESS_KEY", project.property("cypressCloudWebappKey") as String)
+    args = listOf("run", "cloud-test:stage:record")
+  } else {
+    args = listOf("run", "cloud-test:stage")
+  }
+
+  /*
+  Mark the outputs as never up to date, to ensure we always run the tests.
+  We want this because they are e2e tests and can depend on other factors e.g., external dependencies.
+ */ + outputs.upToDateWhen { false } +} + +//tasks.register("validateLinks") { +// dependsOn(tasks.named("pnpmInstall")) +// +// args = listOf("run", "validate-links") +// +// inputs.file("scripts/validate-links.ts") +// inputs.file("src/core/utils/links.ts") +// +// // Configure the up-to-date check to always run in CI environments +// outputs.upToDateWhen { +// System.getenv("CI") == null +// } +//} + + +tasks.register("buildStorybook") { + dependsOn(tasks.named("pnpmInstall")) + + args = listOf("run", "build:storybook") + + inputs.files(commonConfigs) + inputs.files(nodeModules) + inputs.files(publicDir) + inputs.dir(".storybook") + inputs.dir("src") + + outputs.dir("build/storybook") + + environment = mapOf( + "NODE_OPTIONS" to "--max_old_space_size=8192" + ) +} + +tasks.register("copyBuildOutput") { + dependsOn(tasks.named("copyDocker"), tasks.named("pnpmBuild")) + + from("${project.projectDir}/build/app") + into("build/docker/bin/build") +} + +tasks.register("copyNginx") { + dependsOn(tasks.named("copyDocker")) + + from("${project.projectDir}/nginx") + into("build/docker/bin/nginx") +} + +// Those tasks should be run as part of the "check" task +tasks.named("check") { + dependsOn(/* tasks.named("validateLinks"), */ tasks.named("test")) +} + +tasks.named("build") { + dependsOn(tasks.named("buildStorybook")) +} + +tasks.named("buildDockerImage") { + dependsOn(tasks.named("copyDocker"), tasks.named("copyNginx"), tasks.named("copyBuildOutput")) +} + +// Include some cloud-specific tasks only in the airbyte-platform-internal environment +if (file("${project.projectDir}/../../cloud/cloud-webapp/cloud-tasks.gradle").exists()) { + apply(from = "${project.projectDir}/../../cloud/cloud-webapp/cloud-tasks.gradle") +} diff --git a/airbyte-webapp/cypress/cloud-e2e/cloud-login.cy.ts b/airbyte-webapp/cypress/cloud-e2e/cloud-login.cy.ts index 79ff80a0edb..39ef971ea53 100644 --- a/airbyte-webapp/cypress/cloud-e2e/cloud-login.cy.ts +++ b/airbyte-webapp/cypress/cloud-e2e/cloud-login.cy.ts @@ -7,10 +7,10 @@ describe("manually logging in and out of airbyte cloud", () => { cy.visit("/"); // unauthenticated users are redirected to /login assertOnLoginPage(); - - cy.get("[data-testid='login.email']").type(testUser.email); - cy.get("[data-testid='login.password']").type(testUser.password); - cy.get("[data-testid='login.submit']").click(); + cy.get("button").contains("Continue with Email").click(); + cy.get("input[name=username]").type(testUser.email); + cy.get("input[name=password]").type(testUser.password); + cy.get("input[name=login]").click(); cy.hasNavigatedTo("/workspaces"); cy.selectWorkspace(); diff --git a/airbyte-webapp/cypress/commands/cloud.ts b/airbyte-webapp/cypress/commands/cloud.ts index 4feaa8998dc..02e4371dc9e 100644 --- a/airbyte-webapp/cypress/commands/cloud.ts +++ b/airbyte-webapp/cypress/commands/cloud.ts @@ -16,9 +16,11 @@ Cypress.Commands.add("login", (user: TestUserCredentials = testUser) => { } cy.visit("/login"); - cy.get("[data-testid='login.email']", { timeout: 10000 }).type(user.email); - cy.get("[data-testid='login.password']").type(user.password); - cy.get("[data-testid='login.submit']").click(); + cy.get("button").contains("Continue with Email").click(); + cy.get("input[name=username]").type(testUser.email); + cy.get("input[name=password]").type(testUser.password); + cy.get("input[name=login]").click(); + cy.hasNavigatedTo("/workspaces"); }); diff --git a/airbyte-webapp/cypress/commands/common.ts b/airbyte-webapp/cypress/commands/common.ts index b054cba91a8..22719fb6335 
100644 --- a/airbyte-webapp/cypress/commands/common.ts +++ b/airbyte-webapp/cypress/commands/common.ts @@ -30,10 +30,14 @@ export const openConnectorPage = (name: string) => { cy.get("div").contains(name).click(); }; -export const deleteEntity = (confirmationText: string) => { +export const deleteEntity = () => { cy.get("button[data-id='open-delete-modal']").click(); - cy.get("input[id='confirmation-text").type(confirmationText); - cy.get("button[data-id='delete']").click(); + cy.get("input[id='confirmation-text']") + .invoke("attr", "placeholder") + .then((placeholder) => { + cy.get("input[id='confirmation-text']").type(placeholder ?? ""); + cy.get("button[data-id='delete']").click(); + }); }; export const clearApp = () => { diff --git a/airbyte-webapp/cypress/commands/destination.ts b/airbyte-webapp/cypress/commands/destination.ts index ede436cea8f..8c3f6c305df 100644 --- a/airbyte-webapp/cypress/commands/destination.ts +++ b/airbyte-webapp/cypress/commands/destination.ts @@ -54,6 +54,6 @@ export const deleteDestination = (name: string) => { cy.intercept("/api/v1/destinations/delete").as("deleteDestination"); goToDestinationPage(); openConnectorPage(name); - deleteEntity(name); + deleteEntity(); cy.wait("@deleteDestination"); }; diff --git a/airbyte-webapp/cypress/commands/source.ts b/airbyte-webapp/cypress/commands/source.ts index a643c2c961f..727021e88bc 100644 --- a/airbyte-webapp/cypress/commands/source.ts +++ b/airbyte-webapp/cypress/commands/source.ts @@ -64,9 +64,10 @@ export const updateSource = (name: string, field: string, value: string, isDropd }; export const deleteSource = (name: string) => { + cy.log(`Deleting source ${name}`); cy.intercept("/api/v1/sources/delete").as("deleteSource"); goToSourcePage(); openConnectorPage(name); - deleteEntity("Test source cypress"); + deleteEntity(); cy.wait("@deleteSource"); }; diff --git a/airbyte-webapp/cypress/e2e/base.cy.ts b/airbyte-webapp/cypress/e2e/base.cy.ts index a02a4351a91..f6f1a0e4220 100644 --- a/airbyte-webapp/cypress/e2e/base.cy.ts +++ b/airbyte-webapp/cypress/e2e/base.cy.ts @@ -9,6 +9,6 @@ describe("Error handling view", () => { cy.visit("/"); - cy.get("div").contains("Cannot reach server. 
The server may still be starting up.").should("exist"); + cy.get("p").contains("Airbyte is temporarily unavailable.").should("exist"); }); }); diff --git a/airbyte-webapp/cypress/e2e/connection/autoDetectSchema.cy.ts b/airbyte-webapp/cypress/e2e/connection/autoDetectSchema.cy.ts index 61a84949836..5c4f26558a4 100644 --- a/airbyte-webapp/cypress/e2e/connection/autoDetectSchema.cy.ts +++ b/airbyte-webapp/cypress/e2e/connection/autoDetectSchema.cy.ts @@ -80,7 +80,7 @@ describe("Connection - Auto-detect schema changes", () => { it("does not show non-breaking change on list page", () => { connectionListPage.visit(); connectionListPage.getSchemaChangeIcon(connection, "non_breaking").should("not.exist"); - connectionListPage.getManualSyncButton(connection).should("be.enabled"); + connectionListPage.getConnectionStateSwitch(connection).should("be.checked").and("be.enabled"); }); it("shows non-breaking change that can be saved after refresh", () => { @@ -149,7 +149,7 @@ describe("Connection - Auto-detect schema changes", () => { it("shows breaking change on list page", () => { connectionListPage.visit(); connectionListPage.getSchemaChangeIcon(connection, "breaking").should("exist"); - connectionListPage.getManualSyncButton(connection).should("be.disabled"); + connectionListPage.getConnectionStateSwitch(connection).should("not.be.checked").and("not.be.enabled"); }); it("shows breaking change that can be saved after refresh and fix", () => { diff --git a/airbyte-webapp/cypress/e2e/connection/configuration.cy.ts b/airbyte-webapp/cypress/e2e/connection/configuration.cy.ts index 0344effbef9..be5b9189bf8 100644 --- a/airbyte-webapp/cypress/e2e/connection/configuration.cy.ts +++ b/airbyte-webapp/cypress/e2e/connection/configuration.cy.ts @@ -411,7 +411,7 @@ describe("Connection Configuration", () => { connection = connectionResponse; visit(connection); connectionSettings.goToSettingsPage(); - deleteEntity(connection.name); + deleteEntity(); }); }); }); diff --git a/airbyte-webapp/cypress/pages/connection/connectionListPageObject.ts b/airbyte-webapp/cypress/pages/connection/connectionListPageObject.ts index f571df8d5ed..928d4c86995 100644 --- a/airbyte-webapp/cypress/pages/connection/connectionListPageObject.ts +++ b/airbyte-webapp/cypress/pages/connection/connectionListPageObject.ts @@ -1,9 +1,10 @@ import { WebBackendConnectionListItem } from "@src/core/api/types/AirbyteClient"; import { getWorkspaceId } from "commands/api/workspace"; -const statusCell = (connectionId: string) => `[data-testId='statusCell-${connectionId}']`; +const schemaChangeCell = (connectionId: string) => `[data-testid='link-replication-${connectionId}']`; + const changesStatusIcon = (type: string) => `[data-testId='changesStatusIcon-${type}']`; -const manualSyncButton = "button[data-testId='manual-sync-button']"; +const connectionStateSwitch = (connectionId: string) => `[data-testId='connection-state-switch-${connectionId}']`; const newConnectionButton = "[data-testid='new-connection-button']"; export const visit = () => { @@ -13,10 +14,10 @@ export const visit = () => { }; export const getSchemaChangeIcon = (connection: WebBackendConnectionListItem, type: "breaking" | "non_breaking") => - cy.get(`${statusCell(connection.connectionId)} ${changesStatusIcon(type)}`); + cy.get(`${schemaChangeCell(connection.connectionId)} ${changesStatusIcon(type)}`); -export const getManualSyncButton = (connection: WebBackendConnectionListItem) => - cy.get(`${statusCell(connection.connectionId)} ${manualSyncButton}`); +export const 
getConnectionStateSwitch = (connection: WebBackendConnectionListItem) => + cy.get(`${connectionStateSwitch(connection.connectionId)}`); export const clickNewConnectionButton = () => { cy.get(newConnectionButton).click(); diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index 65f6e92632a..49c6757c093 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -84,6 +84,7 @@ "date-fns": "^2.29.3", "dayjs": "^1.11.3", "diff": "^5.1.0", + "escape-string-regexp": "^5.0.0", "firebase": "^10.5.0", "framer-motion": "^6.3.11", "js-yaml": "^4.1.0", diff --git a/airbyte-webapp/pnpm-lock.yaml b/airbyte-webapp/pnpm-lock.yaml index fa81cd3ea88..94e3a04af53 100644 --- a/airbyte-webapp/pnpm-lock.yaml +++ b/airbyte-webapp/pnpm-lock.yaml @@ -96,6 +96,9 @@ dependencies: diff: specifier: ^5.1.0 version: 5.1.0 + escape-string-regexp: + specifier: ^5.0.0 + version: 5.0.0 firebase: specifier: ^10.5.0 version: 10.5.0 diff --git a/airbyte-webapp/src/App.tsx b/airbyte-webapp/src/App.tsx index 044878f7589..8f467618c85 100644 --- a/airbyte-webapp/src/App.tsx +++ b/airbyte-webapp/src/App.tsx @@ -2,7 +2,6 @@ import React, { Suspense } from "react"; import { HelmetProvider } from "react-helmet-async"; import { createBrowserRouter, RouterProvider } from "react-router-dom"; -import { ApiErrorBoundary } from "components/common/ApiErrorBoundary"; import { DevToolsToggle } from "components/DevToolsToggle"; import { QueryProvider, useGetInstanceConfiguration } from "core/api"; @@ -10,6 +9,7 @@ import { InstanceConfigurationResponseEdition, InstanceConfigurationResponseTrackingStrategy, } from "core/api/types/AirbyteClient"; +import { DefaultErrorBoundary } from "core/errors"; import { AnalyticsProvider } from "core/services/analytics"; import { OSSAuthService } from "core/services/auth"; import { defaultOssFeatures, defaultEnterpriseFeatures, FeatureService } from "core/services/features"; @@ -65,11 +65,11 @@ const App: React.FC = () => { }> - + - + diff --git a/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.module.scss b/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.module.scss index 8138cf2246f..d40fbd469e9 100644 --- a/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.module.scss +++ b/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.module.scss @@ -24,6 +24,7 @@ &__summary { overflow: hidden; + flex-grow: 1; } &__modalLoading { diff --git a/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobStats.module.scss b/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobStats.module.scss index c6fc9e07b3b..bd59ad271ce 100644 --- a/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobStats.module.scss +++ b/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobStats.module.scss @@ -6,3 +6,25 @@ overflow: hidden; text-overflow: ellipsis; } + +.seeMore { + color: colors.$grey-500; +} + +.seeMoreIcon { + vertical-align: middle; +} + +.secondaryMessage { + padding: variables.$spacing-sm; + border-radius: variables.$border-radius-md; + font-family: monospace; + + &.errorMessage { + background-color: colors.$red-50; + } + + &.warningMessage { + background-color: colors.$yellow-50; + } +} diff --git a/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobStats.tsx b/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobStats.tsx index 23337a8cb8f..4e14d706979 100644 --- 
a/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobStats.tsx +++ b/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobStats.tsx @@ -1,12 +1,15 @@ +import classNames from "classnames"; import dayjs from "dayjs"; +import { useState } from "react"; import { FormattedMessage, useIntl } from "react-intl"; +import { Button } from "components/ui/Button"; import { FlexContainer } from "components/ui/Flex"; +import { Icon } from "components/ui/Icon"; import { Text } from "components/ui/Text"; import { JobWithAttempts } from "area/connection/types/jobs"; -import { isJobPartialSuccess } from "area/connection/utils/jobs"; -import { AttemptRead, FailureReason } from "core/api/types/AirbyteClient"; +import { failureUiDetailsFromReason } from "core/utils/errorStatusMessage"; import { formatBytes } from "core/utils/numberHelper"; import { useLocalStorage } from "core/utils/useLocalStorage"; @@ -22,7 +25,6 @@ export const JobStats: React.FC = ({ jobWithAttempts }) => { const [showExtendedStats] = useLocalStorage("airbyte_extended-attempts-stats", false); const { job, attempts } = jobWithAttempts; - const isPartialSuccess = isJobPartialSuccess(jobWithAttempts.attempts); const lastAttempt = attempts && attempts[attempts.length - 1]; const start = dayjs(job.createdAt * 1000); @@ -31,26 +33,8 @@ export const JobStats: React.FC = ({ jobWithAttempts }) => { const minutes = Math.abs(end.diff(start, "minute")) - hours * 60; const seconds = Math.abs(end.diff(start, "second")) - minutes * 60 - hours * 3600; - const getFailureFromAttempt = (attempt: AttemptRead): FailureReason | undefined => - attempt.failureSummary?.failures[0]; - - const getFailureOrigin = (attempt: AttemptRead) => { - const failure = getFailureFromAttempt(attempt); - const failureOrigin = failure?.failureOrigin ?? formatMessage({ id: "errorView.unknown" }); - - return `${formatMessage({ - id: "sources.failureOrigin", - })}: ${failureOrigin}`; - }; - - const getExternalFailureMessage = (attempt: AttemptRead) => { - const failure = getFailureFromAttempt(attempt); - const failureMessage = failure?.externalMessage ?? formatMessage({ id: "errorView.unknown" }); - - return `${formatMessage({ - id: "sources.message", - })}: ${failureMessage}`; - }; + const failureUiDetails = failureUiDetailsFromReason(lastAttempt.failureSummary?.failures[0], formatMessage); + const [isSecondaryMessageExpanded, setIsSecondaryMessageExpanded] = useState(false); if (job.status === "running") { return null; @@ -66,47 +50,47 @@ export const JobStats: React.FC = ({ jobWithAttempts }) => { )} {job.aggregatedStats && ( <> - + {formatBytes(job.aggregatedStats.bytesEmitted)} - + | - + - + | - + - + | - + {hours ? : null} {hours || minutes ? 
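        // duration readout: the hours unit renders only when nonzero, and the minutes unit only when
        // there are hours or minutes, per the hour/minute/second arithmetic on job.createdAt and the
        // last attempt's end time computed above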
: null} {showExtendedStats && ( <> - + | - + - + | - + @@ -114,17 +98,48 @@ export const JobStats: React.FC = ({ jobWithAttempts }) => { )} - {job.status === "failed" && lastAttempt && ( - + {job.status === "failed" && failureUiDetails && ( + {formatMessage( + { id: "failureMessage.label" }, { - id: "ui.keyValuePairV3", - }, - { - key: getFailureOrigin(lastAttempt), - value: getExternalFailureMessage(lastAttempt), + type: ( + + {failureUiDetails.typeLabel}: + + ), + message: failureUiDetails.message, } )} + {failureUiDetails?.secondaryMessage && ( + <> +   + + + )} + + )} + {failureUiDetails && isSecondaryMessageExpanded && ( + + {failureUiDetails.secondaryMessage} )} diff --git a/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.module.scss b/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.module.scss index 29ff0e73840..a3b1ce43d99 100644 --- a/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.module.scss +++ b/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.module.scss @@ -3,16 +3,5 @@ .internalFailureContainer { border-radius: variables.$border-radius-md; - background-color: colors.$red-50; padding-bottom: variables.$spacing-md; } - -.internalFailureReason { - max-height: 300px; - overflow-y: auto; - overflow-x: auto; - white-space: nowrap; - background: colors.$red-30; - padding: variables.$spacing-sm variables.$spacing-md; - font-family: monospace; -} diff --git a/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.tsx b/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.tsx index 4821f44c31d..0edcd04e866 100644 --- a/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.tsx +++ b/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.tsx @@ -1,4 +1,3 @@ -import { useMemo } from "react"; import { FormattedMessage, useIntl } from "react-intl"; import { Box } from "components/ui/Box"; @@ -8,6 +7,7 @@ import { Message } from "components/ui/Message"; import { AttemptFailureSummary, FailureType } from "core/api/types/AirbyteClient"; import { copyToClipboard } from "core/utils/clipboard"; +import { failureUiDetailsFromReason } from "core/utils/errorStatusMessage"; import { useNotificationService } from "hooks/services/Notification"; import styles from "./JobLogsModalFailureMessage.module.scss"; @@ -19,33 +19,22 @@ interface JobLogsModalFailureMessageProps { export const JobLogsModalFailureMessage: React.FC = ({ failureSummary }) => { const { registerNotification } = useNotificationService(); const { formatMessage } = useIntl(); + const failureUiDetails = failureUiDetailsFromReason(failureSummary?.failures[0], formatMessage); - const internalFailureReason = useMemo(() => failureSummary?.failures[0]?.internalMessage, [failureSummary]); - - const externalFailureReason = useMemo(() => failureSummary?.failures[0]?.externalMessage, [failureSummary]); - - const failureToShow = useMemo( - () => - !failureSummary || - failureSummary?.failures.some(({ failureType }) => failureType === FailureType.manual_cancellation) - ? "none" - : failureSummary?.failures[0]?.internalMessage - ? "internal" - : failureSummary?.failures[0]?.externalMessage - ? 
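        // (old precedence being replaced by failureUiDetailsFromReason: manual cancellations showed
        // nothing, an internal message beat an external one, and "unknown" was the last resort)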
"external" - : "unknown", - [failureSummary] + const isFailureCancellation = failureSummary?.failures.some( + ({ failureType }) => failureType === FailureType.manual_cancellation ); + const showFailureMessage = !isFailureCancellation && failureUiDetails; - if (failureToShow === "none") { + if (!showFailureMessage) { return null; } const onCopyTextBtnClick = async () => { - if (!internalFailureReason) { + if (!failureUiDetails.secondaryMessage) { return; } - await copyToClipboard(internalFailureReason); + await copyToClipboard(failureUiDetails.secondaryMessage); registerNotification({ type: "success", @@ -56,35 +45,27 @@ export const JobLogsModalFailureMessage: React.FC - {failureToShow === "internal" && ( -

+          {failureUiDetails.secondaryMessage && (
-          {internalFailureReason}
-        )}
-        {failureToShow === "external" && (
-        )}
-        {failureToShow === "unknown" && (
-        )}
+          )}
+          {failureUiDetails.secondaryMessage}
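+          {/* secondaryMessage carries the full internal error text; the copy button wired to onCopyTextBtnClick above puts this same string on the clipboard */}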
    ); }; diff --git a/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.module.scss b/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.module.scss index 29e8533bd97..4660f30789c 100644 --- a/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.module.scss +++ b/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.module.scss @@ -6,6 +6,6 @@ greenVar: colors.$green; darkBlueVar: colors.$dark-blue-300; redVar: colors.$red; - blackVar: colors.$black; - emptyVar: colors.$white; + blackVar: colors.$inverse; + emptyVar: colors.$foreground; } diff --git a/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx b/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx index b46d8aafe09..83d52f00250 100644 --- a/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx +++ b/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx @@ -6,16 +6,14 @@ import { Link } from "components/ui/Link"; import { Table } from "components/ui/Table"; import { useCurrentWorkspaceLink } from "area/workspace/utils"; -import { ConnectionScheduleType, SchemaChange } from "core/api/types/AirbyteClient"; -import { FeatureItem, useFeature } from "core/services/features"; import { RoutePaths } from "pages/routePaths"; -import ConnectionSettingsCell from "./components/ConnectionSettingsCell"; import { ConnectionStatusCell } from "./components/ConnectionStatusCell"; import { ConnectorNameCell } from "./components/ConnectorNameCell"; import { FrequencyCell } from "./components/FrequencyCell"; import { LastSyncCell } from "./components/LastSyncCell"; -import { StatusCell } from "./components/StatusCell"; +import { SchemaChangeCell } from "./components/SchemaChangeCell"; +import { StateSwitchCell } from "./components/StateSwitchCell"; import { StreamsStatusCell } from "./components/StreamStatusCell"; import styles from "./ConnectionTable.module.scss"; import { ConnectionTableDataItem } from "./types"; @@ -28,7 +26,6 @@ interface ConnectionTableProps { const ConnectionTable: React.FC = ({ data, entity, variant }) => { const createLink = useCurrentWorkspaceLink(); - const allowAutoDetectSchema = useFeature(FeatureItem.AllowAutoDetectSchema); const streamCentricUIEnabled = false; const columnHelper = createColumnHelper(); @@ -158,28 +155,29 @@ const ConnectionTable: React.FC = ({ data, entity, variant thClassName: styles.thEnabled, }, cell: (props) => ( - ), enableSorting: false, }), - columnHelper.accessor("connectionId", { + columnHelper.accessor("schemaChange", { header: "", meta: { thClassName: styles.thConnectionSettings, }, - cell: (props) => , + cell: (props) => ( + + ), enableSorting: false, }), ], - [columnHelper, createLink, entity, allowAutoDetectSchema] + [columnHelper, createLink, entity] ); return ( diff --git a/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.module.scss b/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.module.scss deleted file mode 100644 index fd1bf72927d..00000000000 --- a/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.module.scss +++ /dev/null @@ -1,28 +0,0 @@ -@use "scss/variables"; -@use "scss/colors"; - -.button { - min-width: 17px; - font-size: 17px; - border: none; - background: none; - padding: 0; -} - -.link { - color: transparent; - padding: variables.$spacing-xs variables.$spacing-sm; - - &:focus, - &:hover { - color: colors.$grey-600 !important; - } - - 
tr:hover & { - color: colors.$grey-400; - } -} - -.icon { - color: inherit; -} diff --git a/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.tsx b/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.tsx deleted file mode 100644 index 7de3a818713..00000000000 --- a/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.tsx +++ /dev/null @@ -1,33 +0,0 @@ -import React from "react"; - -import { Icon } from "components/ui/Icon"; -import { Link } from "components/ui/Link"; - -import { useCurrentWorkspace } from "hooks/services/useWorkspace"; -import { ConnectionRoutePaths, RoutePaths } from "pages/routePaths"; - -import styles from "./ConnectionSettingsCell.module.scss"; - -interface IProps { - id: string; -} - -const ConnectorCell: React.FC = ({ id }) => { - const { workspaceId } = useCurrentWorkspace(); - - const openSettings = (event: React.MouseEvent) => { - event.stopPropagation(); - }; - - const settingPath = `/${RoutePaths.Workspaces}/${workspaceId}/${RoutePaths.Connections}/${id}/${ConnectionRoutePaths.Replication}`; - - return ( - - ); -}; - -export default ConnectorCell; diff --git a/airbyte-webapp/src/components/EntityTable/components/SchemaChangeCell.tsx b/airbyte-webapp/src/components/EntityTable/components/SchemaChangeCell.tsx new file mode 100644 index 00000000000..fd2f9b41d26 --- /dev/null +++ b/airbyte-webapp/src/components/EntityTable/components/SchemaChangeCell.tsx @@ -0,0 +1,28 @@ +import React from "react"; + +import { Link } from "components/ui/Link"; + +import { ConnectionId, SchemaChange } from "core/api/types/AirbyteClient"; +import { FeatureItem, useFeature } from "core/services/features"; +import { ConnectionRoutePaths } from "pages/routePaths"; + +import { ChangesStatusIcon } from "./ChangesStatusIcon"; + +interface SchemaChangeCellProps { + connectionId: ConnectionId; + schemaChange: SchemaChange; +} + +export const SchemaChangeCell: React.FC = ({ connectionId, schemaChange }) => { + const allowAutoDetectSchema = useFeature(FeatureItem.AllowAutoDetectSchema); + + if (!allowAutoDetectSchema || schemaChange !== SchemaChange.breaking) { + return null; + } + + return ( + + + + ); +}; diff --git a/airbyte-webapp/src/components/EntityTable/components/StateSwitchCell.test.tsx b/airbyte-webapp/src/components/EntityTable/components/StateSwitchCell.test.tsx new file mode 100644 index 00000000000..edef2e92104 --- /dev/null +++ b/airbyte-webapp/src/components/EntityTable/components/StateSwitchCell.test.tsx @@ -0,0 +1,70 @@ +import { render } from "@testing-library/react"; + +import { TestSuspenseBoundary, TestWrapper } from "test-utils"; +import { mockWorkspace } from "test-utils/mock-data/mockWorkspace"; + +import { StateSwitchCell } from "./StateSwitchCell"; + +jest.mock("core/api", () => ({ + useCurrentWorkspace: jest.fn(() => mockWorkspace), + useUpdateConnection: jest.fn(() => ({ + mutateAsync: jest.fn(), + isLoading: false, + })), +})); + +jest.mock("core/utils/rbac", () => ({ + useIntent: jest.fn(() => true), +})); + +const mockId = "mock-id"; + +describe(`${StateSwitchCell.name}`, () => { + it("renders enabled switch", () => { + const { getByTestId } = render( + + + , + { + wrapper: TestWrapper, + } + ); + + const switchElement = getByTestId("connection-state-switch-mock-id"); + + expect(switchElement).toBeEnabled(); + expect(switchElement).toBeChecked(); + }); + + it("renders disabled switch when connection has `breaking` changes", () => { + const { getByTestId } = render( + + + , + { + wrapper: 
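+        // TestWrapper supplies the app-level providers (e.g. intl) the cell needs to render in isolation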
TestWrapper, + } + ); + + expect(getByTestId("connection-state-switch-mock-id")).toBeDisabled(); + }); + + it("renders disabled switch when connection is in loading state", () => { + jest.doMock("core/api", () => ({ + useUpdateConnection: jest.fn(() => ({ + isLoading: true, + })), + })); + + const { getByTestId } = render( + + + , + { + wrapper: TestWrapper, + } + ); + + expect(getByTestId("connection-state-switch-mock-id")).toBeDisabled(); + }); +}); diff --git a/airbyte-webapp/src/components/EntityTable/components/StateSwitchCell.tsx b/airbyte-webapp/src/components/EntityTable/components/StateSwitchCell.tsx new file mode 100644 index 00000000000..4a82cd035e4 --- /dev/null +++ b/airbyte-webapp/src/components/EntityTable/components/StateSwitchCell.tsx @@ -0,0 +1,45 @@ +import React from "react"; + +import { FlexContainer } from "components/ui/Flex"; +import { Switch } from "components/ui/Switch"; + +import { useCurrentWorkspace, useUpdateConnection } from "core/api"; +import { ConnectionId, ConnectionStatus, SchemaChange } from "core/api/types/AirbyteClient"; +import { useIntent } from "core/utils/rbac"; +import { useAnalyticsTrackFunctions } from "hooks/services/ConnectionEdit/useAnalyticsTrackFunctions"; + +interface StateSwitchCellProps { + connectionId: ConnectionId; + enabled?: boolean; + schemaChange?: SchemaChange; +} + +export const StateSwitchCell: React.FC = ({ connectionId, enabled, schemaChange }) => { + const { trackConnectionStatusUpdate } = useAnalyticsTrackFunctions(); + const { workspaceId } = useCurrentWorkspace(); + const canEditConnection = useIntent("EditConnection", { workspaceId }); + const { mutateAsync: updateConnection, isLoading } = useUpdateConnection(); + + const onChange = async ({ target: { checked } }: React.ChangeEvent) => { + const updatedConnection = await updateConnection({ + connectionId, + status: checked ? 
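+      // map the switch position straight onto the API enum: checked → active, unchecked → inactive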
ConnectionStatus.active : ConnectionStatus.inactive, + }); + trackConnectionStatusUpdate(updatedConnection); + }; + + const isDisabled = schemaChange === SchemaChange.breaking || !canEditConnection || isLoading; + + return ( + + + + ); +}; diff --git a/airbyte-webapp/src/components/EntityTable/components/StatusCell.module.scss b/airbyte-webapp/src/components/EntityTable/components/StatusCell.module.scss deleted file mode 100644 index 663c904fc6d..00000000000 --- a/airbyte-webapp/src/components/EntityTable/components/StatusCell.module.scss +++ /dev/null @@ -1,7 +0,0 @@ -.container { - display: flex; - flex-direction: row; - align-content: center; - justify-content: space-between; - width: 120px; -} diff --git a/airbyte-webapp/src/components/EntityTable/components/StatusCell.test.tsx b/airbyte-webapp/src/components/EntityTable/components/StatusCell.test.tsx deleted file mode 100644 index b5e6fc4aa9c..00000000000 --- a/airbyte-webapp/src/components/EntityTable/components/StatusCell.test.tsx +++ /dev/null @@ -1,83 +0,0 @@ -import { render, waitFor } from "@testing-library/react"; - -import { TestWrapper, TestSuspenseBoundary, mockConnection } from "test-utils"; -import { mockWorkspace } from "test-utils/mock-data/mockWorkspace"; - -import { StatusCell } from "./StatusCell"; - -jest.mock("core/api", () => ({ - useConnectionList: jest.fn(() => ({ - connections: [], - })), - useCurrentWorkspace: jest.fn(() => mockWorkspace), - useSyncConnection: jest.fn(() => ({ - mutateAsync: jest.fn(), - })), - useUpdateConnection: jest.fn(() => ({ - mutateAsync: jest.fn(), - isLoading: false, - })), -})); - -jest.mock("core/utils/rbac", () => ({ - useIntent: jest.fn(() => true), -})); - -const mockId = "mock-id"; - -describe("", () => { - it("renders switch when connection has schedule", () => { - const { getByTestId } = render( - - - , - { - wrapper: TestWrapper, - } - ); - - const switchElement = getByTestId("enable-connection-switch"); - - expect(switchElement).toBeEnabled(); - expect(switchElement).toBeChecked(); - }); - - it("renders button when connection does not have schedule", async () => { - const { getByTestId } = render( - - - , - { - wrapper: TestWrapper, - } - ); - - await waitFor(() => expect(getByTestId("manual-sync-button")).toBeEnabled()); - }); - - it("disables switch when hasBreakingChange is true", () => { - const { getByTestId } = render( - - - , - { - wrapper: TestWrapper, - } - ); - - expect(getByTestId("enable-connection-switch")).toBeDisabled(); - }); - - it("disables manual sync button when hasBreakingChange is true", () => { - const { getByTestId } = render( - - - , - { - wrapper: TestWrapper, - } - ); - - expect(getByTestId("manual-sync-button")).toBeDisabled(); - }); -}); diff --git a/airbyte-webapp/src/components/EntityTable/components/StatusCell.tsx b/airbyte-webapp/src/components/EntityTable/components/StatusCell.tsx deleted file mode 100644 index 9b07a619d09..00000000000 --- a/airbyte-webapp/src/components/EntityTable/components/StatusCell.tsx +++ /dev/null @@ -1,51 +0,0 @@ -import React from "react"; - -import { Link } from "components/ui/Link"; - -import { SchemaChange, WebBackendConnectionListItem } from "core/api/types/AirbyteClient"; -import { FeatureItem, useFeature } from "core/services/features"; -import { ConnectionRoutePaths } from "pages/routePaths"; - -import { ChangesStatusIcon } from "./ChangesStatusIcon"; -import styles from "./StatusCell.module.scss"; -import { StatusCellControl } from "./StatusCellControl"; - -interface StatusCellProps { - hasBreakingChange?: 
boolean; - enabled?: boolean; - isSyncing?: boolean; - isManual?: boolean; - id: string; - schemaChange?: SchemaChange; - connection: WebBackendConnectionListItem; -} - -export const StatusCell: React.FC = ({ - enabled, - isManual, - id, - isSyncing, - schemaChange, - hasBreakingChange, - connection, -}) => { - const allowAutoDetectSchema = useFeature(FeatureItem.AllowAutoDetectSchema); - - return ( -
    - - {allowAutoDetectSchema && hasBreakingChange && ( - - - - )} -
    - ); -}; diff --git a/airbyte-webapp/src/components/EntityTable/components/StatusCellControl.tsx b/airbyte-webapp/src/components/EntityTable/components/StatusCellControl.tsx deleted file mode 100644 index bd5fadd4c73..00000000000 --- a/airbyte-webapp/src/components/EntityTable/components/StatusCellControl.tsx +++ /dev/null @@ -1,92 +0,0 @@ -import React from "react"; -import { FormattedMessage } from "react-intl"; - -import { Button } from "components/ui/Button"; -import { Switch } from "components/ui/Switch"; - -import { useCurrentWorkspace, useSyncConnection, useUpdateConnection } from "core/api"; -import { ConnectionStatus, WebBackendConnectionListItem } from "core/api/types/AirbyteClient"; -import { Action, Namespace, getFrequencyFromScheduleData, useAnalyticsService } from "core/services/analytics"; -import { useIntent } from "core/utils/rbac"; - -interface StatusCellControlProps { - hasBreakingChange?: boolean; - enabled?: boolean; - isSyncing?: boolean; - isManual?: boolean; - id: string; - connection: WebBackendConnectionListItem; -} - -export const StatusCellControl: React.FC = ({ - enabled, - isManual, - id, - isSyncing, - hasBreakingChange, - connection, -}) => { - const analyticsService = useAnalyticsService(); - const { mutateAsync: updateConnection, isLoading } = useUpdateConnection(); - const { mutateAsync: syncConnection, isLoading: isSyncStarting } = useSyncConnection(); - - const { workspaceId } = useCurrentWorkspace(); - const canEditConnection = useIntent("EditConnection", { workspaceId }); - const canSyncConnection = useIntent("SyncConnection", { workspaceId }); - - const onRunManualSync = (event: React.SyntheticEvent) => { - event.stopPropagation(); - - if (connection) { - syncConnection(connection); - } - }; - - if (!isManual) { - const onSwitchChange = async (event: React.SyntheticEvent) => { - event.stopPropagation(); - await updateConnection({ - connectionId: id, - status: enabled ? ConnectionStatus.inactive : ConnectionStatus.active, - }).then((updatedConnection) => { - const action = updatedConnection.status === ConnectionStatus.active ? Action.REENABLE : Action.DISABLE; - - analyticsService.track(Namespace.CONNECTION, action, { - frequency: getFrequencyFromScheduleData(connection.scheduleData), - connector_source: connection.source?.sourceName, - connector_source_definition_id: connection.source?.sourceDefinitionId, - connector_destination: connection.destination?.destinationName, - connector_destination_definition_id: connection.destination?.destinationDefinitionId, - }); - }); - }; - - return ( - // this is so we can stop event propagation so the row doesn't receive the click and redirect - // eslint-disable-next-line jsx-a11y/no-static-element-interactions -
    event.stopPropagation()} - onKeyPress={(event: React.SyntheticEvent) => event.stopPropagation()} - > - -
    - ); - } - - return ( - - ); -}; diff --git a/airbyte-webapp/src/components/JobFailure/JobFailure.tsx b/airbyte-webapp/src/components/JobFailure/JobFailure.tsx index 29842b386f0..c50ee379817 100644 --- a/airbyte-webapp/src/components/JobFailure/JobFailure.tsx +++ b/airbyte-webapp/src/components/JobFailure/JobFailure.tsx @@ -145,7 +145,7 @@ export const JobFailure: React.FC = ({ job, fallbackMessage }) expanded={isStacktraceExpanded} messageId="jobs.failure.expandStacktrace" /> - {isStacktraceExpanded && } + {isStacktraceExpanded && } )} {job.logs?.logLines && job.logs.logLines.length > 0 && ( @@ -156,7 +156,7 @@ export const JobFailure: React.FC = ({ job, fallbackMessage }) messageId="jobs.failure.expandLogs" icon={} /> - {isLogsExpanded && } + {isLogsExpanded && } )} diff --git a/airbyte-webapp/src/components/Logs/Logs.tsx b/airbyte-webapp/src/components/Logs/Logs.tsx index cb9afd9a386..80257b3829c 100644 --- a/airbyte-webapp/src/components/Logs/Logs.tsx +++ b/airbyte-webapp/src/components/Logs/Logs.tsx @@ -7,6 +7,7 @@ import styles from "./Logs.module.scss"; interface LogsProps { logsArray?: string[]; maxRows?: number; + follow?: boolean; } const ROW_HEIGHT = 19; @@ -19,7 +20,7 @@ function trimLogs(logs: string[]) { return trimmedLogs; } -const Logs: React.FC = ({ logsArray, maxRows = 21 }) => { +const Logs: React.FC = ({ logsArray, maxRows = 21, follow }) => { const trimmedLogs = trimLogs(logsArray || []); const logsJoin = trimmedLogs.length ? trimmedLogs.join("\n") : "No logs available"; @@ -41,7 +42,7 @@ const Logs: React.FC = ({ logsArray, maxRows = 21 }) => { lineClassName={styles.logLine} highlightLineClassName={styles.highlightLogLine} selectableLines - follow + follow={follow} style={{ background: "transparent" }} scrollToLine={undefined} highlight={[]} diff --git a/airbyte-webapp/src/components/common/ApiErrorBoundary/ApiErrorBoundary.tsx b/airbyte-webapp/src/components/common/ApiErrorBoundary/ApiErrorBoundary.tsx deleted file mode 100644 index 836d6c06dbb..00000000000 --- a/airbyte-webapp/src/components/common/ApiErrorBoundary/ApiErrorBoundary.tsx +++ /dev/null @@ -1,151 +0,0 @@ -import { useQueryErrorResetBoundary } from "@tanstack/react-query"; -import React from "react"; -import { FormattedMessage } from "react-intl"; -import { NavigateFunction, useNavigate } from "react-router-dom"; -import { useLocation } from "react-use"; -import { LocationSensorState } from "react-use/lib/useLocation"; - -import { CommonRequestError } from "core/api"; -import { isFormBuildError } from "core/form/FormBuildError"; -import { trackError } from "core/utils/datadog"; -import { TrackErrorFn } from "hooks/services/AppMonitoringService"; -import { ErrorOccurredView } from "views/common/ErrorOccurredView"; -import { ResourceNotFoundErrorBoundary } from "views/common/ResourceNotFoundErrorBoundary"; -import { StartOverErrorView } from "views/common/StartOverErrorView"; - -import { ServerUnavailableView } from "./ServerUnavailableView"; - -interface ApiErrorBoundaryState { - errorId?: string; - message?: string; - didRetry?: boolean; - retryDelay?: number; -} - -enum ErrorId { - FormBuild = "form.build", - ServerUnavailable = "server.unavailable", - UnknownError = "unknown", -} - -interface ApiErrorBoundaryHookProps { - location: LocationSensorState; - onRetry?: () => void; - navigate: NavigateFunction; - trackError: TrackErrorFn; -} - -interface ApiErrorBoundaryProps { - onError?: (errorId?: string) => void; -} - -const RETRY_DELAY = 2500; - -class ApiErrorBoundaryComponent extends 
React.Component< - React.PropsWithChildren, - ApiErrorBoundaryState -> { - state: ApiErrorBoundaryState = { - retryDelay: RETRY_DELAY, - }; - - static getDerivedStateFromError(error: { message: string; status?: number; __type?: string }): ApiErrorBoundaryState { - if (isFormBuildError(error)) { - return { errorId: ErrorId.FormBuild, message: error.message }; - } - - const isNetworkBoundaryMessage = error.message === "Failed to fetch"; - const is502 = error.status === 502; - - if (isNetworkBoundaryMessage || is502) { - return { errorId: ErrorId.ServerUnavailable, didRetry: false }; - } - - return { errorId: ErrorId.UnknownError, didRetry: false }; - } - - componentDidUpdate(prevProps: ApiErrorBoundaryHookProps) { - const { location } = this.props; - - if (location !== prevProps.location) { - this.setState({ errorId: undefined, didRetry: false }); - this.props.onError?.(undefined); - } else { - this.props.onError?.(this.state.errorId); - } - } - - componentDidCatch(error: Error) { - const context = { - errorBoundary: this.constructor.name, - requestStatus: error instanceof CommonRequestError ? error.status : undefined, - }; - - this.props.trackError(error, context); - } - - retry = () => { - this.setState((state) => ({ - didRetry: true, - errorId: undefined, - retryDelay: Math.round((state?.retryDelay || RETRY_DELAY) * 1.2), - })); - this.props.onRetry?.(); - }; - - render(): React.ReactNode { - const { navigate, children } = this.props; - const { errorId, didRetry, message, retryDelay } = this.state; - - if (errorId === ErrorId.FormBuild) { - return ( - - -
    - - - } - docLink="https://docs.airbyte.com/connector-development/connector-specification-reference/#airbyte-modifications-to-jsonschema" - /> - ); - } - - if (errorId === ErrorId.ServerUnavailable && !didRetry) { - return ; - } - - return !errorId ? ( - } trackError={this.props.trackError}> - {children} - - ) : ( - } - ctaButtonText={} - onCtaButtonClick={() => { - navigate(".."); - }} - /> - ); - } -} - -export const ApiErrorBoundary: React.FC> = ({ children, ...props }) => { - const { reset } = useQueryErrorResetBoundary(); - const location = useLocation(); - const navigate = useNavigate(); - - return ( - - {children} - - ); -}; diff --git a/airbyte-webapp/src/components/common/ApiErrorBoundary/ServerUnavailableView.tsx b/airbyte-webapp/src/components/common/ApiErrorBoundary/ServerUnavailableView.tsx deleted file mode 100644 index 7553d5e6000..00000000000 --- a/airbyte-webapp/src/components/common/ApiErrorBoundary/ServerUnavailableView.tsx +++ /dev/null @@ -1,27 +0,0 @@ -import React, { useEffect } from "react"; -import { FormattedMessage } from "react-intl"; - -import { ErrorOccurredView } from "views/common/ErrorOccurredView"; - -interface ServerUnavailableViewProps { - onRetryClick: () => void; - retryDelay: number; -} - -export const ServerUnavailableView: React.FC = ({ onRetryClick, retryDelay }) => { - useEffect(() => { - const timer: ReturnType = setTimeout(() => { - onRetryClick(); - }, retryDelay); - return () => clearTimeout(timer); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, []); - - return ( - } - ctaButtonText={} - onCtaButtonClick={onRetryClick} - /> - ); -}; diff --git a/airbyte-webapp/src/components/common/ApiErrorBoundary/index.ts b/airbyte-webapp/src/components/common/ApiErrorBoundary/index.ts deleted file mode 100644 index 0f3fb4d7305..00000000000 --- a/airbyte-webapp/src/components/common/ApiErrorBoundary/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./ApiErrorBoundary"; diff --git a/airbyte-webapp/src/components/common/ConnectionDangerBlock/ConnectionDangerBlock.tsx b/airbyte-webapp/src/components/common/ConnectionDangerBlock/ConnectionDangerBlock.tsx index 03f469cc77a..2c1570f2015 100644 --- a/airbyte-webapp/src/components/common/ConnectionDangerBlock/ConnectionDangerBlock.tsx +++ b/airbyte-webapp/src/components/common/ConnectionDangerBlock/ConnectionDangerBlock.tsx @@ -2,13 +2,17 @@ import React, { useCallback } from "react"; import { FormattedMessage } from "react-intl"; import { FormFieldLayout } from "components/connection/ConnectionForm/FormFieldLayout"; +import { useConnectionStatus } from "components/connection/ConnectionStatus/useConnectionStatus"; +import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; import { Card } from "components/ui/Card"; import { FlexContainer } from "components/ui/Flex"; import { Text } from "components/ui/Text"; +import { ConnectionStatus } from "core/api/types/AirbyteClient"; import { useConfirmationModalService } from "hooks/services/ConfirmationModal"; import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; +import { useExperiment } from "hooks/services/Experiment"; import { useDeleteModal } from "hooks/useDeleteModal"; interface DeleteBlockProps { @@ -20,21 +24,42 @@ interface DeleteBlockProps { export const ConnectionDangerBlock: React.FC = ({ onDelete, onReset }) => { const { mode, connection } = useConnectionFormService(); const onDeleteButtonClick = useDeleteModal("connection", onDelete, undefined, connection?.name); + 
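+  // experiment flag: with "connection.clearNotReset" on, the confirmation modal below talks about clearing data instead of resetting it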
const sayClearInsteadOfReset = useExperiment("connection.clearNotReset", false); + const connectionStatus = useConnectionStatus(connection.connectionId ?? ""); const { openConfirmationModal, closeConfirmationModal } = useConfirmationModalService(); const resetWithModal = useCallback(() => { - openConfirmationModal({ - text: `form.resetDataText`, - title: `form.resetData`, - submitButtonText: "form.reset", - cancelButtonText: "form.noNeed", - onSubmit: async () => { - await onReset(); - closeConfirmationModal(); - }, - submitButtonDataId: "reset", - }); - }, [closeConfirmationModal, openConfirmationModal, onReset]); + sayClearInsteadOfReset + ? openConfirmationModal({ + title: "connection.actions.clearData.confirm.title", + text: "connection.actions.clearData.confirm.text", + additionalContent: ( + + + + + + ), + submitButtonText: "connection.stream.actions.clearData.confirm.submit", + cancelButtonText: "connection.stream.actions.clearData.confirm.cancel", + onSubmit: async () => { + await onReset(); + closeConfirmationModal(); + }, + }) + : openConfirmationModal({ + text: `form.resetDataText`, + title: `form.resetData`, + submitButtonText: "form.reset", + cancelButtonText: "form.noNeed", + onSubmit: async () => { + await onReset(); + closeConfirmationModal(); + }, + submitButtonDataId: "reset", + }); + }, [closeConfirmationModal, onReset, openConfirmationModal, sayClearInsteadOfReset]); + const onResetButtonClick = () => { resetWithModal(); }; @@ -45,19 +70,25 @@ export const ConnectionDangerBlock: React.FC = ({ onDelete, on - + - + diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/CreateConnectionFormControls.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/CreateConnectionFormControls.tsx index 91f3bdd13d7..217e542ec4c 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/CreateConnectionFormControls.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionForm/CreateConnectionFormControls.tsx @@ -36,7 +36,7 @@ export const CreateConnectionFormControls: React.FC = () => { }); return ( - + {errorMessage} diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.test.ts b/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.test.ts index 09085efc7c6..ba0fa51b4da 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.test.ts +++ b/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.test.ts @@ -14,7 +14,7 @@ import { analyzeSyncCatalogBreakingChanges } from "./calculateInitialCatalog"; const mockSyncSchemaStream: AirbyteStreamAndConfiguration = { stream: { sourceDefinedCursor: true, - defaultCursorField: ["source_cursor"], + defaultCursorField: ["new_source_cursor"], sourceDefinedPrimaryKey: [["new_primary_key"]], jsonSchema: {}, name: "test", @@ -31,6 +31,26 @@ const mockSyncSchemaStream: AirbyteStreamAndConfiguration = { }, }; +const mockSyncSchemaStreamUserDefined: AirbyteStreamAndConfiguration = { + stream: { + sourceDefinedCursor: true, + defaultCursorField: [], + sourceDefinedPrimaryKey: [], + jsonSchema: {}, + name: "test", + namespace: "namespace-test", + supportedSyncModes: [], + }, + config: { + destinationSyncMode: DestinationSyncMode.append, + selected: false, + syncMode: SyncMode.full_refresh, + cursorField: ["old_cursor"], + primaryKey: [["old_primary_key"]], + aliasName: "", + }, +}; + describe("analyzeSyncCatalogBreakingChanges", () => { it("should return syncCatalog unchanged when schemaChange is 
no_change and catalogDiff is undefined", () => { const syncCatalog: AirbyteCatalog = { streams: [mockSyncSchemaStream] }; @@ -62,7 +82,7 @@ describe("analyzeSyncCatalogBreakingChanges", () => { ], }; const result = analyzeSyncCatalogBreakingChanges(syncCatalog, catalogDiff, SchemaChange.breaking); - expect(result.streams[0].config?.primaryKey).toEqual([]); + expect(result.streams[0].config?.primaryKey).toEqual([["new_primary_key"]]); }); it("should return syncCatalog with transformed streams when there are breaking changes - cursor", () => { @@ -83,6 +103,48 @@ describe("analyzeSyncCatalogBreakingChanges", () => { ], }; const result = analyzeSyncCatalogBreakingChanges(syncCatalog, catalogDiff, SchemaChange.breaking); + expect(result.streams[0].config?.cursorField).toEqual(["new_source_cursor"]); + }); + + it("should return syncCatalog with transformed streams when there are breaking changes - primaryKey - user-defined", () => { + const syncCatalog: AirbyteCatalog = { streams: [mockSyncSchemaStreamUserDefined] }; + const catalogDiff: CatalogDiff = { + transforms: [ + { + transformType: StreamTransformTransformType.update_stream, + streamDescriptor: { name: "test", namespace: "namespace-test" }, + updateStream: [ + { + breaking: true, + transformType: FieldTransformTransformType.remove_field, + fieldName: ["old_primary_key"], + }, + ], + }, + ], + }; + const result = analyzeSyncCatalogBreakingChanges(syncCatalog, catalogDiff, SchemaChange.breaking); + expect(result.streams[0].config?.primaryKey).toEqual([]); + }); + + it("should return syncCatalog with transformed streams when there are breaking changes - cursor - user-defined", () => { + const syncCatalog: AirbyteCatalog = { streams: [mockSyncSchemaStreamUserDefined] }; + const catalogDiff: CatalogDiff = { + transforms: [ + { + transformType: StreamTransformTransformType.update_stream, + streamDescriptor: { name: "test", namespace: "namespace-test" }, + updateStream: [ + { + breaking: true, + transformType: FieldTransformTransformType.remove_field, + fieldName: ["old_cursor"], + }, + ], + }, + ], + }; + const result = analyzeSyncCatalogBreakingChanges(syncCatalog, catalogDiff, SchemaChange.breaking); expect(result.streams[0].config?.cursorField).toEqual([]); }); diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.ts b/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.ts index e33988585ab..ef8d6f0fe06 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.ts +++ b/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.ts @@ -19,20 +19,18 @@ const clearBreakingFieldChanges = ( } const { primaryKey, cursorField } = nodeStream.config; + const stream = nodeStream.stream; let clearPrimaryKey = false; let clearCursorField = false; - for (const streamTransformation of breakingChangesByStream) { if (!streamTransformation.updateStream || !streamTransformation.updateStream?.length) { continue; } - // get all of the removed field paths for this transformation const breakingFieldPaths = streamTransformation.updateStream .filter(({ breaking }) => breaking) .map((update) => update.fieldName); - // if there is a primary key in the config, and any of its field paths were removed, we'll be clearing it if ( !!primaryKey?.length && @@ -40,20 +38,26 @@ const clearBreakingFieldChanges = ( ) { clearPrimaryKey = true; } - // if there is a cursor field, and any of its field path was removed, we'll be clearing it if 
(!!cursorField?.length && breakingFieldPaths.some((path) => isEqual(path, cursorField))) { clearCursorField = true; } } - if (clearPrimaryKey || clearCursorField) { return { ...nodeStream, config: { ...nodeStream.config, - primaryKey: clearPrimaryKey ? [] : nodeStream.config.primaryKey, - cursorField: clearCursorField ? [] : nodeStream.config.cursorField, + primaryKey: stream?.sourceDefinedPrimaryKey // it's possible there's a new source-defined primary key, in which case that should take precedence + ? stream?.sourceDefinedPrimaryKey + : clearPrimaryKey + ? [] + : nodeStream.config.primaryKey, + cursorField: nodeStream.stream?.defaultCursorField + ? nodeStream.stream?.defaultCursorField // likewise, a source-defined cursor should never be cleared + : clearCursorField + ? [] + : nodeStream.config.cursorField, }, }; } diff --git a/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.module.scss b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.module.scss new file mode 100644 index 00000000000..421b431efdc --- /dev/null +++ b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.module.scss @@ -0,0 +1,3 @@ +.switch { + width: 90px; +} diff --git a/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.tsx b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.tsx new file mode 100644 index 00000000000..dce68bf5151 --- /dev/null +++ b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.tsx @@ -0,0 +1,107 @@ +import React from "react"; +import { FormattedMessage } from "react-intl"; +import { useNavigate } from "react-router-dom"; + +import { Box } from "components/ui/Box"; +import { Button } from "components/ui/Button"; +import { FlexContainer } from "components/ui/Flex"; +import { SwitchNext } from "components/ui/SwitchNext"; +import { Text } from "components/ui/Text"; +import { Tooltip } from "components/ui/Tooltip"; + +import { ConnectionStatus } from "core/api/types/AirbyteClient"; +import { useSchemaChanges } from "hooks/connection/useSchemaChanges"; +import { useConnectionEditService } from "hooks/services/ConnectionEdit/ConnectionEditService"; +import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; +import { ConnectionRoutePaths } from "pages/routePaths"; + +import styles from "./ConnectionHeaderControls.module.scss"; +import { FormattedScheduleDataMessage } from "./FormattedScheduleDataMessage"; +import { useConnectionStatus } from "../ConnectionStatus/useConnectionStatus"; +import { useConnectionSyncContext } from "../ConnectionSync/ConnectionSyncContext"; +import { FreeHistoricalSyncIndicator } from "../EnabledControl/FreeHistoricalSyncIndicator"; + +export const ConnectionHeaderControls: React.FC = () => { + const { mode } = useConnectionFormService(); + const { connection, updateConnectionStatus, connectionUpdating } = useConnectionEditService(); + const { hasBreakingSchemaChange } = useSchemaChanges(connection.schemaChange); + const navigate = useNavigate(); + + const connectionStatus = useConnectionStatus(connection.connectionId ?? 
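+    // fall back to an empty id when connectionId is undefined; useConnectionStatus expects a string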
""); + const isReadOnly = mode === "readonly"; + + const { syncStarting, cancelStarting, cancelJob, syncConnection, connectionEnabled, resetStarting, jobResetRunning } = + useConnectionSyncContext(); + + const onScheduleBtnClick = () => { + navigate(`${ConnectionRoutePaths.Settings}`, { + state: { action: "scheduleType" }, + }); + }; + + const onChangeStatus = async (checked: boolean) => + await updateConnectionStatus(checked ? ConnectionStatus.active : ConnectionStatus.inactive); + + const isDisabled = isReadOnly || syncStarting || cancelStarting || resetStarting; + const isStartSyncBtnDisabled = isDisabled || !connectionEnabled; + const isCancelBtnDisabled = isDisabled || connectionUpdating; + const isSwitchDisabled = isDisabled || hasBreakingSchemaChange; + + return ( + + + + + + } + placement="top" + > + + + {!connectionStatus.isRunning && ( + + )} + {connectionStatus.isRunning && cancelJob && ( + + )} + + + + + ); +}; diff --git a/airbyte-webapp/src/components/connection/ConnectionHeaderControls/FormattedScheduleDataMessage.test.tsx b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/FormattedScheduleDataMessage.test.tsx new file mode 100644 index 00000000000..ba81396a96f --- /dev/null +++ b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/FormattedScheduleDataMessage.test.tsx @@ -0,0 +1,58 @@ +import { render } from "@testing-library/react"; + +import { TestWrapper } from "test-utils"; + +import { ConnectionScheduleData, ConnectionScheduleDataBasicScheduleTimeUnit } from "core/api/types/AirbyteClient"; + +import { FormattedScheduleDataMessage, FormattedScheduleDataMessageProps } from "./FormattedScheduleDataMessage"; + +describe("FormattedScheduleDataMessage", () => { + const renderComponent = (props: FormattedScheduleDataMessageProps) => { + return render( + + + + ); + }; + + it("should render 'Manual' schedule type if scheduleData wasn't provided", () => { + const { getByText } = renderComponent({ scheduleType: "manual" }); + expect(getByText("Manual")).toBeInTheDocument(); + }); + + it("should render '24 hours' schedule type", () => { + const scheduleData = { + basicSchedule: { + units: 24, + timeUnit: "hours" as ConnectionScheduleDataBasicScheduleTimeUnit, + }, + }; + const { getByText } = renderComponent({ scheduleType: "basic", scheduleData }); + expect(getByText("Every 24 hours")).toBeInTheDocument(); + }); + + it("should render 'Cron' schedule type with humanized format", () => { + const scheduleData = { + cron: { + cronExpression: "0 0 14 ? 
* THU" as string, + cronTimeZone: "UTC", + }, + }; + const { getByText } = renderComponent({ scheduleType: "cron", scheduleData }); + expect(getByText("At 02:00 PM, only on Thursday")).toBeInTheDocument(); + }); + + it("should NOT render anything", () => { + const scheduleData = { + basic: { + units: 24, + timeUnit: "hours" as ConnectionScheduleDataBasicScheduleTimeUnit, + }, + }; + const { queryByText } = renderComponent({ + scheduleType: "cron", + scheduleData: scheduleData as unknown as ConnectionScheduleData, // for testing purposes + }); + expect(queryByText("24")).toBeNull(); + }); +}); diff --git a/airbyte-webapp/src/components/connection/ConnectionHeaderControls/FormattedScheduleDataMessage.tsx b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/FormattedScheduleDataMessage.tsx new file mode 100644 index 00000000000..d8c16a46eb7 --- /dev/null +++ b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/FormattedScheduleDataMessage.tsx @@ -0,0 +1,45 @@ +import React from "react"; +import { FormattedMessage } from "react-intl"; + +import { ConnectionScheduleData, ConnectionScheduleType } from "core/api/types/AirbyteClient"; +import { humanizeCron } from "core/utils/cron"; + +export interface FormattedScheduleDataMessageProps { + scheduleType?: ConnectionScheduleType; + scheduleData?: ConnectionScheduleData; +} + +/** + * Formats schedule data based on the schedule type and schedule data. + * If schedule type is "manual" returns "Manual". + * If schedule type is "basic" returns "Every {units} {timeUnit}". + * If schedule type is "cron" returns humanized cron expression. + * @param scheduleType + * @param scheduleData + */ +export const FormattedScheduleDataMessage: React.FC = ({ + scheduleType, + scheduleData, +}: { + scheduleType?: ConnectionScheduleType; + scheduleData?: ConnectionScheduleData; +}) => { + if (scheduleType === "manual") { + return ; + } + + if (scheduleType === "basic" && scheduleData?.basicSchedule) { + return ( + + ); + } + + if (scheduleType === "cron" && scheduleData?.cron) { + return <>{humanizeCron(scheduleData.cron.cronExpression)}; + } + + return null; +}; diff --git a/airbyte-webapp/src/components/connection/ConnectionHeaderControls/index.ts b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/index.ts new file mode 100644 index 00000000000..77b3a955a0b --- /dev/null +++ b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/index.ts @@ -0,0 +1 @@ +export { ConnectionHeaderControls } from "./ConnectionHeaderControls"; diff --git a/airbyte-webapp/src/components/connection/ConnectionStatusIndicator/ConnectionStatusIndicator.module.scss b/airbyte-webapp/src/components/connection/ConnectionStatusIndicator/ConnectionStatusIndicator.module.scss index 6d5fab33906..630b78b4d22 100644 --- a/airbyte-webapp/src/components/connection/ConnectionStatusIndicator/ConnectionStatusIndicator.module.scss +++ b/airbyte-webapp/src/components/connection/ConnectionStatusIndicator/ConnectionStatusIndicator.module.scss @@ -4,8 +4,10 @@ .status { position: relative; + transform: scale(1.2); .icon { + transform: scale(1.1); width: 20px; height: 20px; display: flex; @@ -34,9 +36,3 @@ } } } - -.spinner { - position: absolute; - top: -1px; - left: -1px; -} diff --git a/airbyte-webapp/src/components/connection/ConnectionSync/ConnectionSyncContext.tsx b/airbyte-webapp/src/components/connection/ConnectionSync/ConnectionSyncContext.tsx index 655c59af670..99ee01150d8 100644 --- 
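For reference, the `FormattedScheduleDataMessage` tests above pin down all three rendering branches. A usage sketch with the strings those tests assert (requires an intl provider such as the `TestWrapper` used above; the enum casts from the tests are omitted for brevity):

```tsx
import React from "react";

import { FormattedScheduleDataMessage } from "./FormattedScheduleDataMessage";

// One list item per branch, annotated with the exact output the tests expect.
export const ScheduleMessageExamples: React.FC = () => (
  <ul>
    {/* renders "Manual" */}
    <li>
      <FormattedScheduleDataMessage scheduleType="manual" />
    </li>
    {/* renders "Every 24 hours" */}
    <li>
      <FormattedScheduleDataMessage
        scheduleType="basic"
        scheduleData={{ basicSchedule: { units: 24, timeUnit: "hours" } }}
      />
    </li>
    {/* renders "At 02:00 PM, only on Thursday" via humanizeCron */}
    <li>
      <FormattedScheduleDataMessage
        scheduleType="cron"
        scheduleData={{ cron: { cronExpression: "0 0 14 ? * THU", cronTimeZone: "UTC" } }}
      />
    </li>
  </ul>
);
```

Any other combination, such as `scheduleType="cron"` without a `cron` block, renders nothing, which the last test covers.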
a/airbyte-webapp/src/components/connection/ConnectionSync/ConnectionSyncContext.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionSync/ConnectionSyncContext.tsx @@ -8,6 +8,7 @@ import { useListJobsForConnectionStatus, jobsKeys, prependArtificialJobToStatus, + useRefreshConnectionStreams, useResetConnectionStream, } from "core/api"; import { @@ -26,6 +27,8 @@ interface ConnectionSyncContext { jobSyncRunning: boolean; cancelJob: (() => Promise) | undefined; cancelStarting: boolean; + refreshStreams: (streams?: ConnectionStream[]) => Promise; + refreshStarting: boolean; resetStreams: (streams?: ConnectionStream[]) => Promise; resetStarting: boolean; jobResetRunning: boolean; @@ -68,6 +71,10 @@ const useConnectionSyncContextInit = (connection: WebBackendConnectionRead): Con const { mutateAsync: doResetConnection, isLoading: resetStarting } = useResetConnection(); const { mutateAsync: resetStream } = useResetConnectionStream(connection.connectionId); + const { mutateAsync: refreshStreams, isLoading: refreshStarting } = useRefreshConnectionStreams( + connection.connectionId + ); + const resetStreams = useCallback( async (streams?: ConnectionStream[]) => { if (streams) { @@ -99,6 +106,8 @@ const useConnectionSyncContextInit = (connection: WebBackendConnectionRead): Con jobSyncRunning, cancelJob, cancelStarting, + refreshStreams, + refreshStarting, resetStreams, resetStarting, jobResetRunning, diff --git a/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.module.scss b/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.module.scss index 45ffca0140e..67f84d77f76 100644 --- a/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.module.scss +++ b/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.module.scss @@ -1,5 +1,6 @@ -@forward "src/components/ui/Button/Button.module.scss"; +// stylelint-disable-next-line airbyte/no-use-renaming +@use "scss/mixins"; .linkText { - text-decoration: none; + @include mixins.link-text; } diff --git a/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.tsx b/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.tsx index 3f6ed1916c6..3cce3965f29 100644 --- a/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.tsx @@ -124,7 +124,7 @@ export const SelectDestination: React.FC = () => { diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.module.scss b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.module.scss index 51309843cf8..320dafb6570 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.module.scss +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.module.scss @@ -1,3 +1,28 @@ +@use "scss/colors"; +@use "scss/variables"; + +@keyframes highlight { + 0%, + 50% { + position: relative; + box-shadow: variables.$box-shadow-highlight colors.$blue-200; + z-index: 1; + } + + 99% { + z-index: 1; + } + + 100% { + box-shadow: 0 0 0 0 transparent; + z-index: 0; + } +} + .container { width: 300px; + + &.highlighted { + animation: highlight 2s ease-out; + } } diff --git 
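The `ConnectionSyncContext` hunk here follows the pattern already used for resets: wrap the mutation hook once in the context initializer and expose `mutateAsync`/`isLoading` under domain names. A sketch of the wiring, assuming the hook returns a react-query style pair as the hunk suggests:

```tsx
import { useRefreshConnectionStreams } from "core/api";

// Sketch of the initializer addition; the surrounding context plumbing is unchanged.
const useRefreshWiring = (connectionId: string) => {
  const { mutateAsync: refreshStreams, isLoading: refreshStarting } =
    useRefreshConnectionStreams(connectionId);

  // Both values join the context object, so any consumer can write:
  //   const { refreshStreams, refreshStarting } = useConnectionSyncContext();
  //   if (!refreshStarting) { await refreshStreams(); } // optionally pass a stream subset
  return { refreshStreams, refreshStarting };
};
```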
a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.tsx index f4cd0cd00c2..43f3f0fb387 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.tsx @@ -1,5 +1,39 @@ +import classNames from "classnames"; +import { useState } from "react"; +import { Location, useLocation, useNavigate } from "react-router-dom"; +import { useEffectOnce } from "react-use"; + import styles from "./InputContainer.module.scss"; -export const InputContainer: React.FC = ({ children }) => { - return
    <div className={styles.container}>{children}</div>
    ; +export interface LocationWithState extends Location { + state: { action?: "scheduleType" }; +} + +export const InputContainer: React.FC> = ({ + children, + highlightAfterRedirect, +}) => { + const [highlighted, setHighlighted] = useState(false); + const navigate = useNavigate(); + const { state: locationState, pathname } = useLocation() as LocationWithState; + + useEffectOnce(() => { + let highlightTimeout: number; + + if (highlightAfterRedirect && locationState?.action === "scheduleType") { + // remove the redirection info from the location state + navigate(pathname, { replace: true }); + + setHighlighted(true); + highlightTimeout = window.setTimeout(() => { + setHighlighted(false); + }, 1500); + } + + return () => { + window.clearTimeout(highlightTimeout); + }; + }); + + return
    <div className={classNames(styles.container, { [styles.highlighted]: highlighted })}>{children}</div>
    ; }; diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.module.scss b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.module.scss index 2d73d12f941..ecbc6448098 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.module.scss +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.module.scss @@ -1,24 +1,25 @@ @use "scss/colors"; @use "scss/variables"; +@use "scss/mixins"; .linkText { - // base "button" - display: inline-flex; - align-items: center; - justify-content: center; - text-decoration: none; - border-radius: variables.$border-radius-sm; - font-weight: 600; - cursor: pointer; - background-color: colors.$white; + @include mixins.link-text; +} + +.nextLink { + @include mixins.base-button; + + // primary + color: colors.$white; + background-color: colors.$blue-400; + border: 0; - // secondary - color: colors.$grey-400; - border: 1px solid colors.$grey-300; + &:hover { + background-color: colors.$blue-500; + color: colors.$white; + } - // sizeXS - height: variables.$button-height-xs; - font-size: variables.$font-size-sm; - line-height: 15px; - padding: 10px; + &:active { + background-color: colors.$blue-600; + } } diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.tsx index 27a174a0926..91b02d7f44d 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.tsx @@ -67,7 +67,10 @@ const SimplifiedConnectionCreationReplication: React.FC = () => { return ( <> - + @@ -149,7 +152,7 @@ const FirstNav: React.FC = () => { ), search: `?${SOURCE_ID_PARAM}=${source.sourceId}&${DESTINATION_ID_PARAM}=${destination.destinationId}`, }} - className={classNames(styles.linkText)} + className={classNames(styles.nextLink)} onClick={() => { // we're navigating to the next step which retains the creation form's state clearFormChange(CREATE_CONNECTION_FORM_ID); @@ -158,7 +161,7 @@ const FirstNav: React.FC = () => { ) : ( - )} diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionScheduleFormField.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionScheduleFormField.tsx index 54332aee47c..5f550687d0a 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionScheduleFormField.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionScheduleFormField.tsx @@ -126,7 +126,7 @@ const SimplifiedScheduleTypeFormControl: React.FC<{ disabled: boolean }> = ({ di
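The `InputContainer` change above completes a redirect-then-highlight handoff: the connection header navigates to settings with `state: { action: "scheduleType" }`, and the container consumes that state once, clears it with a `replace` navigation so a refresh cannot replay it, then runs the timed `highlight` animation from the SCSS. The same pattern condensed into a reusable hook (a sketch; the hook name is hypothetical):

```tsx
import { useState } from "react";
import { useLocation, useNavigate } from "react-router-dom";
import { useEffectOnce } from "react-use";

// Consume one-shot router state, drop it from history, and expose a timed flag
// that drives a CSS animation class.
export const useOneShotHighlight = (expectedAction: string): boolean => {
  const [highlighted, setHighlighted] = useState(false);
  const navigate = useNavigate();
  const { state, pathname } = useLocation();

  useEffectOnce(() => {
    let timeout: number | undefined;
    if ((state as { action?: string } | null)?.action === expectedAction) {
      navigate(pathname, { replace: true }); // remove the redirection info from history
      setHighlighted(true);
      timeout = window.setTimeout(() => setHighlighted(false), 1500);
    }
    return () => window.clearTimeout(timeout);
  });

  return highlighted;
};
```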
    } /> - + isDisabled={disabled} id={controlId} diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionSettingsCard.module.scss b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionSettingsCard.module.scss index c5229bad8e5..854b0e53650 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionSettingsCard.module.scss +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionSettingsCard.module.scss @@ -1,4 +1,5 @@ @use "scss/colors"; +@use "scss/variables"; .advancedSettings { cursor: pointer; @@ -6,6 +7,7 @@ border: none; text-decoration: underline; color: colors.$blue; + font-size: variables.$font-size-md; font-weight: 600; padding: 0; transition: color 0.2s ease-in; @@ -21,12 +23,6 @@ overflow-x: hidden; } -.hr { - border-width: 0; - border-top: 1px solid colors.$grey-100; - transform: scaleX(2); // expand out of its container to the card edge -} - .hidden { display: none; } diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionSettingsCard.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionSettingsCard.tsx index 08d41fbb492..7028803b1ae 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionSettingsCard.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionSettingsCard.tsx @@ -52,7 +52,7 @@ export const SimplifiedConnectionsSettingsCard: React.FC}
    - + {/* readonly mode disables all elements, including buttons, from the fieldset */} {/* to keep this toggle available, style and attribute a span like a button */} - + {isCreating && ( @@ -74,26 +74,26 @@ export const SimplifiedConnectionsSettingsCard: React.FC )} - {isAdvancedOpen &&
    } - {/* using styles.hidden to show/hide as residency field makes an http request for geographies */} {/* which triggers a suspense boundary - none of the places for a suspense fallback are good UX */} {/* so always render, making the geography request as part of the initial page load */} - - {canEditDataGeographies && } - {!isCreating && ( - - )} - {!isCreating && } - - - {canBackfillNewColumns && } - + + + {canEditDataGeographies && } + {!isCreating && ( + + )} + {!isCreating && } + + + {canBackfillNewColumns && } + + {!isCreating && ( diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.module.scss b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.module.scss index 2aa1d198e42..7762211e8b3 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.module.scss +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.module.scss @@ -8,10 +8,11 @@ .collapsedQuestion { overflow: hidden; height: 0; + transition: height variables.$transition-out; } .expandedQuestion { overflow: hidden; - height: 171px; + height: 131px; transition: height variables.$transition-out; } diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.tsx index d155539d3c9..5a84efc0c7f 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.tsx @@ -3,12 +3,12 @@ import { useFormContext } from "react-hook-form"; import { FormattedMessage } from "react-intl"; import { FormConnectionFormValues } from "components/connection/ConnectionForm/formConfig"; -import { FormFieldLayout } from "components/connection/ConnectionForm/FormFieldLayout"; import { RadioButtonTiles } from "components/connection/CreateConnection/RadioButtonTiles"; import { updateStreamSyncMode } from "components/connection/syncCatalog/SyncCatalog/updateStreamSyncMode"; import { SyncModeValue } from "components/connection/syncCatalog/SyncModeSelect"; import { ControlLabels } from "components/LabeledControl"; import { Badge } from "components/ui/Badge"; +import { Box } from "components/ui/Box"; import { FlexContainer } from "components/ui/Flex"; import { Icon } from "components/ui/Icon"; import { Text } from "components/ui/Text"; @@ -144,7 +144,7 @@ export const SimplifiedSchemaQuestionnaire = () => { const enforcedSelectedDelivery = getEnforcedDelivery(questionnaireOutcomes); const enforcedIncrementOrRefresh = getEnforcedIncrementOrRefresh(supportedSyncModes); - const [selectedDelivery, _setSelectedDelivery] = useState(enforcedSelectedDelivery); + const [selectedDelivery, _setSelectedDelivery] = useState(enforcedSelectedDelivery ?? 
"mirrorSource"); const [selectedIncrementOrRefresh, _setSelectedIncrementOrRefresh] = useState( enforcedIncrementOrRefresh ); @@ -198,12 +198,6 @@ export const SimplifiedSchemaQuestionnaire = () => { return []; }, [selectedDelivery, questionnaireOutcomes.mirrorSource, selectedIncrementOrRefresh]); - // if a source & destination sync mode selection has been made (by default or by the user), show the result - let selectionMessage; - if (selectedModes.length) { - selectionMessage = ; - } - // when a sync mode is selected, choose it for all streams const { trigger, getValues, setValue } = useFormContext(); useEffect(() => { @@ -265,12 +259,9 @@ export const SimplifiedSchemaQuestionnaire = () => { }, [showSecondQuestion, analyticsService]); return ( - + {showFirstQuestion && ( - + @@ -281,42 +272,37 @@ export const SimplifiedSchemaQuestionnaire = () => { } /> - + )}
    - - - - - - - } - /> - - + + + + + + + + } + /> + + +
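Two details in this questionnaire hunk are easy to miss: the delivery answer now defaults to `"mirrorSource"` when nothing is enforced, and a chosen sync mode is fanned out to every stream through the form context. A sketch of that fan-out step (the field path and node shape are assumptions for illustration; `updateStreamSyncMode` is the helper imported in the hunk):

```tsx
import { useFormContext } from "react-hook-form";

import { updateStreamSyncMode } from "components/connection/syncCatalog/SyncCatalog/updateStreamSyncMode";
import { SyncModeValue } from "components/connection/syncCatalog/SyncModeSelect";

// Apply the selected sync mode to every stream that has both a stream and a config.
// Nodes are typed loosely here; the real code uses the generated catalog types.
const useApplySyncModeToAllStreams = () => {
  const { getValues, setValue } = useFormContext();

  return (selectedSyncMode: SyncModeValue) => {
    const streams = getValues("syncCatalog.streams") ?? [];
    setValue(
      "syncCatalog.streams",
      streams.map((node: any) =>
        node.stream && node.config
          ? { ...node, config: updateStreamSyncMode(node.stream, node.config, selectedSyncMode) }
          : node
      ),
      { shouldValidate: true }
    );
  };
};
```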
    - - {selectionMessage && ( - - - {selectionMessage} - - )}
    ); }; diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/__snapshots__/CreateConnectionForm.test.tsx.snap b/airbyte-webapp/src/components/connection/CreateConnectionForm/__snapshots__/CreateConnectionForm.test.tsx.snap index a1e2bcb3ef4..9e4e96c22f6 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/__snapshots__/CreateConnectionForm.test.tsx.snap +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/__snapshots__/CreateConnectionForm.test.tsx.snap @@ -1291,18 +1291,22 @@ exports[`CreateConnectionForm should render with an error 1`] = ` >
[extraction stripped the snapshot markup in this hunk: the updated snapshot wraps the "Test Error" message in additional container elements; only the diff +/- markers and the text survived]
    diff --git a/airbyte-webapp/src/components/connection/syncCatalog/StreamsConfigTable/StreamsConfigTableRow.module.scss b/airbyte-webapp/src/components/connection/syncCatalog/StreamsConfigTable/StreamsConfigTableRow.module.scss index d90dde236ce..869a121d828 100644 --- a/airbyte-webapp/src/components/connection/syncCatalog/StreamsConfigTable/StreamsConfigTableRow.module.scss +++ b/airbyte-webapp/src/components/connection/syncCatalog/StreamsConfigTable/StreamsConfigTableRow.module.scss @@ -14,7 +14,7 @@ 0%, 50% { position: relative; - box-shadow: 0 0 47px -5px colors.$blue-200; + box-shadow: variables.$box-shadow-highlight colors.$blue-200; z-index: 1; } diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/AuthenticationSection.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/AuthenticationSection.tsx index e0c4b343d1e..2ed97001381 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/AuthenticationSection.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/AuthenticationSection.tsx @@ -1,12 +1,10 @@ +import { useFormContext } from "react-hook-form"; import { useIntl } from "react-intl"; import GroupControls from "components/GroupControls"; import { ControlLabels } from "components/LabeledControl"; -import { - OAuthAuthenticatorRefreshTokenUpdater, - SessionTokenAuthenticatorRequestAuthentication, -} from "core/api/types/ConnectorManifest"; +import { SessionTokenAuthenticatorRequestAuthentication } from "core/api/types/ConnectorManifest"; import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; import { links } from "core/utils/links"; @@ -26,11 +24,7 @@ import { API_KEY_AUTHENTICATOR, BASIC_AUTHENTICATOR, BEARER_AUTHENTICATOR, - extractInterpolatedConfigKey, - inferredAuthValues, - OAUTH_ACCESS_TOKEN_INPUT, OAUTH_AUTHENTICATOR, - OAUTH_TOKEN_EXPIRY_DATE_INPUT, SESSION_TOKEN_AUTHENTICATOR, useBuilderWatch, BuilderErrorHandler, @@ -39,7 +33,10 @@ import { SESSION_TOKEN_REQUEST_BEARER_AUTHENTICATOR, NO_AUTH, BuilderFormAuthenticator, + interpolateConfigKey, + BuilderFormOAuthAuthenticator, } from "../types"; +import { LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE, useGetUniqueKey } from "../useLockedInputs"; const AUTH_PATH = "formValues.global.authenticator"; const authPath = (path: T) => `${AUTH_PATH}.${path}` as const; @@ -47,6 +44,7 @@ const authPath = (path: T) => `${AUTH_PATH}.${path}` as const; export const AuthenticationSection: React.FC = () => { const { formatMessage } = useIntl(); const analyticsService = useAnalyticsService(); + const getUniqueKey = useGetUniqueKey(); return ( { "BasicHttpAuthenticator", "OAuthAuthenticator", ]} - onSelect={(type) => + onSelect={(newType) => { analyticsService.track(Namespace.CONNECTOR_BUILDER, Action.AUTHENTICATION_METHOD_SELECT, { actionDescription: "Authentication method selected", - auth_type: type, - }) - } + auth_type: newType, + }); + }} options={[ { label: formatMessage({ id: "connectorBuilder.authentication.method.noAuth" }), default: { type: NO_AUTH } }, { label: formatMessage({ id: "connectorBuilder.authentication.method.apiKey" }), default: { type: API_KEY_AUTHENTICATOR, - ...inferredAuthValues("ApiKeyAuthenticator"), inject_into: { type: "RequestOption", inject_into: "header", field_name: "", }, + api_token: interpolateConfigKey( + getUniqueKey(LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[API_KEY_AUTHENTICATOR].api_token.key) + ), }, children: ( <> @@ -97,7 +97,9 @@ export const AuthenticationSection: React.FC = () => { label: formatMessage({ id: 
"connectorBuilder.authentication.method.bearer" }), default: { type: BEARER_AUTHENTICATOR, - ...(inferredAuthValues("BearerAuthenticator") as Record<"api_token", string>), + api_token: interpolateConfigKey( + getUniqueKey(LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[BEARER_AUTHENTICATOR].api_token.key) + ), }, children: , }, @@ -105,7 +107,12 @@ export const AuthenticationSection: React.FC = () => { label: formatMessage({ id: "connectorBuilder.authentication.method.basicHttp" }), default: { type: BASIC_AUTHENTICATOR, - ...(inferredAuthValues("BasicHttpAuthenticator") as Record<"username" | "password", string>), + username: interpolateConfigKey( + getUniqueKey(LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[BASIC_AUTHENTICATOR].username.key) + ), + password: interpolateConfigKey( + getUniqueKey(LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[BASIC_AUTHENTICATOR].password.key) + ), }, children: ( <> @@ -118,13 +125,18 @@ export const AuthenticationSection: React.FC = () => { label: formatMessage({ id: "connectorBuilder.authentication.method.oAuth" }), default: { type: OAUTH_AUTHENTICATOR, - ...(inferredAuthValues("OAuthAuthenticator") as Record< - "client_id" | "client_secret" | "refresh_token" | "oauth_access_token" | "oauth_token_expiry_date", - string - >), refresh_request_body: [], token_refresh_endpoint: "", grant_type: "refresh_token", + client_id: interpolateConfigKey( + getUniqueKey(LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].client_id.key) + ), + client_secret: interpolateConfigKey( + getUniqueKey(LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].client_secret.key) + ), + refresh_token: interpolateConfigKey( + getUniqueKey(LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].refresh_token.key) + ), }, children: , }, @@ -168,8 +180,10 @@ export const AuthenticationSection: React.FC = () => { const OAuthForm = () => { const { formatMessage } = useIntl(); + const { setValue } = useFormContext(); const grantType = useBuilderWatch(authPath("grant_type")); - const refreshToken = useBuilderWatch(authPath("refresh_token")); + const getUniqueKey = useGetUniqueKey(); + return ( <> { path={authPath("grant_type")} options={["refresh_token", "client_credentials"]} manifestPath="OAuthAuthenticator.properties.grant_type" + onChange={(newValue) => { + if (newValue === "client_credentials") { + setValue(authPath("refresh_token"), undefined); + } else if (newValue === "refresh_token") { + setValue( + authPath("refresh_token"), + interpolateConfigKey( + getUniqueKey(LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].refresh_token.key) + ) + ); + } + }} /> {grantType === "refresh_token" && ( <> - + label={formatMessage({ id: "connectorBuilder.authentication.refreshTokenUpdater.label" })} tooltip={formatMessage({ id: "connectorBuilder.authentication.refreshTokenUpdater.tooltip" })} fieldPath={authPath("refresh_token_updater")} initialValues={{ refresh_token_name: "", - access_token_config_path: [OAUTH_ACCESS_TOKEN_INPUT], - refresh_token_config_path: [extractInterpolatedConfigKey(refreshToken) || ""], - token_expiry_date_config_path: [OAUTH_TOKEN_EXPIRY_DATE_INPUT], + access_token: interpolateConfigKey( + getUniqueKey( + LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].refresh_token_updater + .access_token_config_path.key + ) + ), + token_expiry_date: interpolateConfigKey( + getUniqueKey( + LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].refresh_token_updater + .token_expiry_date_config_path.key + ) + ), }} > { authPath("login_requester"), true ); + 
const getUniqueKey = useGetUniqueKey(); + return ( <> }> @@ -270,7 +307,7 @@ const SessionTokenForm = () => { path={authPath("login_requester.authenticator")} label={formatMessage({ id: "connectorBuilder.authentication.loginRequester.authenticator.label" })} manifestPath="HttpRequester.properties.authenticator" - manifestOptionPaths={["ApiKeyAuthenticator", "BearerAuthenticator", "BasicHttpAuthenticator"]} + manifestOptionPaths={[API_KEY_AUTHENTICATOR, BEARER_AUTHENTICATOR, BASIC_AUTHENTICATOR]} options={[ { label: formatMessage({ id: "connectorBuilder.authentication.method.noAuth" }), @@ -280,12 +317,14 @@ const SessionTokenForm = () => { label: formatMessage({ id: "connectorBuilder.authentication.method.apiKey" }), default: { type: API_KEY_AUTHENTICATOR, - ...inferredAuthValues("ApiKeyAuthenticator"), inject_into: { type: "RequestOption", inject_into: "header", field_name: "", }, + api_token: interpolateConfigKey( + getUniqueKey(LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[API_KEY_AUTHENTICATOR].api_token.key) + ), }, children: ( <> @@ -302,7 +341,9 @@ const SessionTokenForm = () => { label: formatMessage({ id: "connectorBuilder.authentication.method.bearer" }), default: { type: BEARER_AUTHENTICATOR, - ...(inferredAuthValues(BEARER_AUTHENTICATOR) as Record<"api_token", string>), + api_token: interpolateConfigKey( + getUniqueKey(LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[BEARER_AUTHENTICATOR].api_token.key) + ), }, children: , }, @@ -310,7 +351,12 @@ const SessionTokenForm = () => { label: formatMessage({ id: "connectorBuilder.authentication.method.basicHttp" }), default: { type: BASIC_AUTHENTICATOR, - ...(inferredAuthValues(BASIC_AUTHENTICATOR) as Record<"username" | "password", string>), + username: interpolateConfigKey( + getUniqueKey(LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[BASIC_AUTHENTICATOR].username.key) + ), + password: interpolateConfigKey( + getUniqueKey(LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[BASIC_AUTHENTICATOR].password.key) + ), }, children: ( <> diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderField.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderField.tsx index a1f6ec4c475..22c8bf9654d 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderField.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderField.tsx @@ -59,6 +59,8 @@ export type BuilderFieldProps = BaseFieldProps & onChange?: (newValue: string) => void; onBlur?: (value: string) => void; disabled?: boolean; + step?: number; + min?: number; } | { type: "date" | "date-time"; onChange?: (newValue: string) => void } | { type: "boolean"; onChange?: (newValue: boolean) => void; disabled?: boolean; disabledTooltip?: string } @@ -217,6 +219,8 @@ const InnerBuilderField: React.FC = ({ readOnly={readOnly} adornment={adornment} disabled={props.disabled} + step={props.step} + min={props.min} onBlur={(e) => { field.onBlur(); props.onBlur?.(e.target.value); diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderFieldWithInputs.module.scss b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderFieldWithInputs.module.scss index 8376bdea7fe..6be0513c44a 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderFieldWithInputs.module.scss +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderFieldWithInputs.module.scss @@ -7,6 +7,8 @@ border: none; background: none; color: colors.$grey-300; + padding: 0; + padding-right: variables.$spacing-md; &:hover { background: none; @@ -29,7 +31,7 @@ .container 
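The authenticator defaults in `AuthenticationSection` and `SessionTokenForm` above replace the old `inferredAuthValues` spread with explicit locked inputs: each secret field gets a spec key from `LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE`, made collision-safe by `getUniqueKey`, and the stored field value becomes an interpolation into `config`. A sketch of what the two helpers produce, inferred from their call sites (the implementations below are assumptions, not the codebase's):

```typescript
// interpolateConfigKey turns a spec key into a manifest interpolation string.
const interpolateConfigKey = (key: string): string => `{{ config["${key}"] }}`;

// getUniqueKey is assumed to de-duplicate against existing user inputs,
// e.g. "api_key" -> "api_key" if free, otherwise "api_key_2", and so on.

// Selecting Bearer auth therefore seeds the form with something like:
const bearerDefault = {
  type: "BearerAuthenticator",
  api_token: interpolateConfigKey("api_key"), // '{{ config["api_key"] }}'
};
```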
{ position: absolute; - right: variables.$spacing-sm; + right: 0; top: 0; display: flex; flex-direction: column; @@ -48,14 +50,14 @@ background: none; border: none; display: flex; - gap: variables.$spacing-xs; color: inherit; align-items: center; cursor: pointer; + font-size: variables.$font-size-sm; } .inputWithHelper { - padding-right: 55px; + padding-right: 35px; } .newInput { diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderFieldWithInputs.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderFieldWithInputs.tsx index 82dca201580..3c58cce6452 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderFieldWithInputs.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderFieldWithInputs.tsx @@ -13,7 +13,6 @@ import { ConnectorBuilderMainRHFContext } from "services/connectorBuilder/Connec import { BuilderField, BuilderFieldProps } from "./BuilderField"; import styles from "./BuilderFieldWithInputs.module.scss"; import { InputForm, newInputInEditing } from "./InputsForm"; -import { useInferredInputs } from "../useInferredInputs"; export const BuilderFieldWithInputs: React.FC = (props) => { return ( @@ -31,14 +30,13 @@ const UserInputHelper = (props: UserInputHelperProps) => { throw new Error("rhf context not available"); } const inputs = watch("formValues.inputs"); - const inferredInputs = useInferredInputs(); const listOptions = useMemo(() => { - const options: Array> = [...inputs, ...inferredInputs].map((input) => ({ + const options: Array> = inputs.map((input) => ({ label: input.definition.title || input.key, value: input.key, })); return options; - }, [inputs, inferredInputs]); + }, [inputs]); return ; }; @@ -57,7 +55,7 @@ const InnerUserInputHelper = React.memo( selectedValue={undefined} onSelect={(selectedValue) => { if (selectedValue) { - setValue(path, `${getValues(path) || ""}{{ config['${selectedValue}'] }}`, { + setValue(path, `${getValues(path) || ""}{{ config["${selectedValue}"] }}`, { shouldDirty: true, shouldTouch: true, shouldValidate: true, @@ -113,16 +111,7 @@ InnerUserInputHelper.displayName = "InnerUserInputHelper"; const UserInputHelperControlButton: React.FC> = () => { return ( - - {"{{"} - - {"}}"} - - } - placement="top" - > + } placement="top"> ); diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderOneOf.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderOneOf.tsx index 080c99d12d2..981d06c7200 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderOneOf.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderOneOf.tsx @@ -1,5 +1,5 @@ import React from "react"; -import { useController, useFormContext } from "react-hook-form"; +import { useFormContext, useWatch } from "react-hook-form"; import GroupControls from "components/GroupControls"; import { ControlLabels } from "components/LabeledControl"; @@ -39,9 +39,11 @@ export const BuilderOneOf = ({ onSelect, }: BuilderOneOfProps) => { const { setValue, unregister } = useFormContext(); - const { field } = useController({ name: `${path}.type` }); + const fieldName = `${path}.type`; + // Use value from useWatch instead of from useController, since the former will respect updates made to parent paths from setValue + const fieldValue = useWatch({ name: fieldName }); - const selectedOption = options.find((option) => option.default.type === field.value); + const selectedOption = options.find((option) => option.default.type === fieldValue); const { label: finalLabel, tooltip: 
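The `BuilderOneOf` hunk here swaps `useController` for `useWatch`, with the inline comment explaining why: a controller's cached `field.value` can miss `setValue` calls made on a parent path, while `useWatch` subscribes to the path itself. A minimal illustration of the behavior being relied on (must render inside a react-hook-form `FormProvider`; names are illustrative):

```tsx
import React from "react";
import { useFormContext, useWatch } from "react-hook-form";

// Replacing the whole "authenticator" object still updates the watched
// nested "authenticator.type" value on the next render.
const AuthTypeLabel: React.FC = () => {
  const { setValue } = useFormContext();
  const type = useWatch({ name: "authenticator.type" });
  return (
    <button onClick={() => setValue("authenticator", { type: "NoAuth" })}>
      current: {String(type)}
    </button>
  );
};
```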
finalTooltip } = getLabelAndTooltip( label, tooltip, @@ -65,7 +67,7 @@ export const BuilderOneOf = ({ adaptiveWidth={false} selectedValue={selectedOption ?? options[0]} onSelect={(selectedOption: OneOfOption) => { - if (selectedOption.default.type === field.value) { + if (selectedOption.default.type === fieldValue) { return; } // clear all values for this oneOf and set selected option and default values @@ -74,7 +76,7 @@ export const BuilderOneOf = ({ onSelect?.(selectedOption.default.type); }} - data-testid={field.name} + data-testid={fieldName} /> } > diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderSidebar.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderSidebar.tsx index 915f1ccac6c..ce4677c634b 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderSidebar.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderSidebar.tsx @@ -17,7 +17,6 @@ import styles from "./BuilderSidebar.module.scss"; import { Sidebar } from "../Sidebar"; import { useBuilderWatch } from "../types"; import { useBuilderErrors } from "../useBuilderErrors"; -import { useInferredInputs } from "../useInferredInputs"; interface ViewSelectButtonProps { className?: string; @@ -66,8 +65,6 @@ export const BuilderSidebar: React.FC = () => { setValue("view", selectedView); }; - const inferredInputsLength = useInferredInputs().length; - return ( @@ -104,7 +101,7 @@ export const BuilderSidebar: React.FC = () => { diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/ErrorHandlerSection.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/ErrorHandlerSection.tsx index 297259b6748..b4e10cc5182 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/ErrorHandlerSection.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/ErrorHandlerSection.tsx @@ -44,6 +44,8 @@ export const ErrorHandlerSection: React.FC = (props) = type="number" path={buildPath("backoff_strategy.backoff_time_in_seconds")} manifestPath="ConstantBackoffStrategy.properties.backoff_time_in_seconds" + step={1} + min={0} /> ), }, diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/IncrementalSection.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/IncrementalSection.tsx index 73c30457672..4e36adca7d4 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/IncrementalSection.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/IncrementalSection.tsx @@ -28,8 +28,10 @@ import { SMALL_DURATION_OPTIONS, StreamPathFn, builderIncrementalSyncToManifest, + interpolateConfigKey, useBuilderWatch, } from "../types"; +import { LOCKED_INPUT_BY_INCREMENTAL_FIELD_NAME, useGetUniqueKey } from "../useLockedInputs"; interface IncrementalSectionProps { streamFieldPath: StreamPathFn; @@ -39,6 +41,7 @@ interface IncrementalSectionProps { export const IncrementalSection: React.FC = ({ streamFieldPath, currentStreamIndex }) => { const { formatMessage } = useIntl(); const filterMode = useBuilderWatch(streamFieldPath("incrementalSync.filter_mode")); + const getExistingOrUniqueKey = useGetUniqueKey(); return ( = ({ streamFi defaultValue: { datetime_format: "", cursor_datetime_formats: [], - start_datetime: { type: "user_input" }, + start_datetime: { + type: "user_input", + value: interpolateConfigKey( + getExistingOrUniqueKey(LOCKED_INPUT_BY_INCREMENTAL_FIELD_NAME.start_datetime.key, "start_datetime") + ), + }, end_datetime: { type: "now" }, step: "", cursor_field: "", @@ -131,7 +139,12 @@ export const 
IncrementalSection: React.FC = ({ streamFi options={[ { label: formatMessage({ id: "connectorBuilder.incremental.userInput" }), - default: { type: "user_input" }, + default: { + type: "user_input", + value: interpolateConfigKey( + getExistingOrUniqueKey(LOCKED_INPUT_BY_INCREMENTAL_FIELD_NAME.start_datetime.key, "start_datetime") + ), + }, children: ( = ({ streamFi options={[ { label: formatMessage({ id: "connectorBuilder.incremental.userInput" }), - default: { type: "user_input" }, + default: { + type: "user_input", + value: interpolateConfigKey( + getExistingOrUniqueKey(LOCKED_INPUT_BY_INCREMENTAL_FIELD_NAME.end_datetime.key, "end_datetime") + ), + }, children: ( [...inputs, ...inferredInputs].map((input) => input.key), [inputs, inferredInputs]); + const { updateTestingValues } = useConnectorBuilderFormState(); + const formValues = watch("formValues"); + const testingValues = watch("testingValues"); + const usedKeys = useMemo(() => formValues.inputs.map((input) => input.key), [formValues.inputs]); const inputInEditValidation = useMemo( () => yup.object().shape({ // make sure key can only occur once - key: yup.string().notOneOf( - inputInEditing?.isNew - ? usedKeys - : [ - // We need to catch the case where an inferred input is auto-created with - // the same key as an existing input, but we don't want to show an error - // if this input is the only one with this key and the user just decided - // to set the key back to its initial value. - // So, we take n-1 duplicate keys for comparison. I.e. if there is only 1 - // "foo" key, we add 1-1=0 copies of "foo" to the notOneOf array and - // avoid incorrectly flagging it as a duplicate; but if there are 2 "foo" - // keys, we add 2-1=1 copies of "foo" to the notOneOf array and we *do* - // catch the duplicate. - ...usedKeys.filter((key) => key === inputInEditing?.key).slice(1), - // and we still need to compare against the other keys - ...usedKeys.filter((key) => key !== inputInEditing?.key), - ], - "connectorBuilder.duplicateFieldID" - ), + key: yup + .string() + .notOneOf( + inputInEditing?.isNew ? usedKeys : usedKeys.filter((key) => key !== inputInEditing?.key), + "connectorBuilder.duplicateFieldID" + ), required: yup.bool(), definition: yup.object().shape({ title: yup.string().required("form.empty.error"), @@ -139,19 +131,26 @@ export const InputForm = ({ mode: "onChange", }); const onSubmit = async (inputInEditing: InputInEditing) => { - if (inputInEditing.isInferredInputOverride) { - setValue(`formValues.inferredInputOverrides.${inputInEditing.key}`, inputInEditing.definition); - onClose(); - } else { - const newInput = inputInEditingToFormInput(inputInEditing); + const newInput = inputInEditingToFormInput(inputInEditing); + if (inputInEditing.isNew) { + setValue("formValues.inputs", [...formValues.inputs, newInput]); + } else if (inputInEditing.key === inputInEditing.previousKey) { setValue( "formValues.inputs", - inputInEditing.isNew - ? [...inputs, newInput] - : inputs.map((input) => (input.key === inputInEditing.previousKey ? newInput : input)) + formValues.inputs.map((input) => (input.key === inputInEditing.key ? newInput : input)) + ); + } else { + await updateInputKeyAndReferences( + inputInEditing.previousKey!, + newInput, + formValues, + testingValues, + setValue, + updateTestingValues ); - onClose(newInput); } + + onClose(newInput); analyticsService.track( Namespace.CONNECTOR_BUILDER, inputInEditing.isNew ? 
Action.USER_INPUT_CREATE : Action.USER_INPUT_EDIT, @@ -178,7 +177,7 @@ export const InputForm = ({ onDelete={() => { setValue( "formValues.inputs", - inputs.filter((input) => input.key !== inputInEditing.key) + formValues.inputs.filter((input) => input.key !== inputInEditing.key) ); onClose(); analyticsService.track(Namespace.CONNECTOR_BUILDER, Action.USER_INPUT_DELETE, { @@ -195,6 +194,70 @@ export const InputForm = ({ ); }; +async function updateInputKeyAndReferences( + previousKey: string, + newInput: BuilderFormInput, + formValues: BuilderFormValues, + testingValues: ConnectorBuilderProjectTestingValues | undefined, + setValue: UseFormSetValue, + updateTestingValues: TestingValuesUpdate +) { + const newInputs = formValues.inputs.map((input) => (input.key === previousKey ? newInput : input)); + + const stringifiedFormValues = JSON.stringify(formValues); + const escapedPreviousKey = escapeStringRegexp(previousKey); + + // replace {{ ... config.key ... }} style references + const interpolatedConfigReferenceRegexDot = new RegExp( + `(?{{[^}]*?config\\.)(${escapedPreviousKey})(?((\\s|\\.).*?)?}})`, + "g" + ); + const dotReferencesReplaced = stringifiedFormValues.replaceAll( + interpolatedConfigReferenceRegexDot, + `$${newInput.key}$` + ); + + // replace {{ ... config['key'] ... }} style references + const interpolatedConfigReferenceRegexBracket = new RegExp( + `(?{{[^}]*?config\\[('|\\\\")+)(${escapedPreviousKey})(?('|\\\\")+\\].*?}})`, + "g" + ); + const bracketReferencesReplaced = dotReferencesReplaced.replaceAll( + interpolatedConfigReferenceRegexBracket, + `$${newInput.key}$` + ); + + const parsedUpdatedFormValues = JSON.parse(bracketReferencesReplaced); + setValue("formValues", { + ...parsedUpdatedFormValues, + inputs: newInputs, + }); + + // update key in testing values if present + const previousTestingValue = testingValues?.[previousKey]; + if (previousTestingValue) { + try { + const spec = builderInputsToSpec(newInputs); + await updateTestingValues({ + spec: spec?.connection_specification ?? {}, + testingValues: { + ...testingValues, + [previousKey]: undefined, + [newInput.key]: previousTestingValue, + }, + }); + } catch (e) { + // Could not update persisted testing values, likely because another required field does not have a value. + // Instead, just update the testing values in the form state so that the testing values menu uses the new key next time it is opened. + setValue("testingValues", { + ...testingValues, + [previousKey]: undefined, + [newInput.key]: previousTestingValue, + }); + } + } +} + const InputModal = ({ inputInEditing, onClose, @@ -206,9 +269,8 @@ const InputModal = ({ onClose: () => void; onSubmit: (inputInEditing: InputInEditing) => void; }) => { - const isInferredInputOverride = inputInEditing.isInferredInputOverride; const { - formState: { isValid }, + formState: { isValid, isSubmitting }, setValue, handleSubmit, } = useFormContext(); @@ -243,7 +305,7 @@ const InputModal = ({ path="definition.title" type="string" onBlur={(newValue) => { - if (!isInferredInputOverride && !inputInEditing.key) { + if (!values.key) { setValue("key", sluggify(newValue || ""), { shouldValidate: true }); } }} @@ -253,7 +315,6 @@ const InputModal = ({ - {values.type !== "unknown" && !isInferredInputOverride ? ( + {values.type !== "unknown" && !values.isLocked ? ( <> - ) : ( - - ) + } /> )} - {!inputInEditing.isNew && !inputInEditing.isInferredInputOverride && ( + {!inputInEditing.isNew && !inputInEditing.isLocked && (
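Renaming an input key above is implemented as a find-and-replace over the JSON-serialized form state, with one regex per interpolation style (`config.key` and `config['key']` / `config["key"]`); note the named capture groups in the hunk were mangled during extraction, which is why the `(?...)` fragments read oddly. A self-contained version of the idea with its own group names (a sketch over raw strings; the real code also handles JSON-escaped quotes and uses the imported `escapeStringRegexp`):

```typescript
// Escape a key so it can be embedded in a RegExp literally.
const escapeRegExp = (s: string): string => s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");

function renameConfigReferences(serialized: string, previousKey: string, newKey: string): string {
  const escaped = escapeRegExp(previousKey);
  // {{ ... config.oldKey ... }} style references
  const dotStyle = new RegExp(`(?<pre>{{[^}]*?config\\.)(${escaped})(?=(\\s|\\.|}}))`, "g");
  // {{ ... config['oldKey'] ... }} and {{ ... config["oldKey"] ... }} style references
  const bracketStyle = new RegExp(`(?<pre>{{[^}]*?config\\[['"])(${escaped})(?=['"]\\])`, "g");
  return serialized
    .replaceAll(dotStyle, `$<pre>${newKey}`)
    .replaceAll(bracketStyle, `$<pre>${newKey}`);
}

// renameConfigReferences('{{ config["api_key"] }}', "api_key", "token")
//   -> '{{ config["token"] }}'
```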
    - diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/InputsView.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/InputsView.tsx index 0e0587e923f..b945646fb6d 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/InputsView.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/InputsView.tsx @@ -22,19 +22,16 @@ import { BuilderConfigView } from "./BuilderConfigView"; import { KeyboardSensor, PointerSensor } from "./dndSensors"; import { InputForm, InputInEditing, newInputInEditing } from "./InputsForm"; import styles from "./InputsView.module.scss"; -import { BuilderFormInput, orderInputs, useBuilderWatch } from "../types"; -import { useInferredInputs } from "../useInferredInputs"; +import { BuilderFormInput, useBuilderWatch } from "../types"; const supportedTypes = ["string", "integer", "number", "array", "boolean", "enum", "unknown"] as const; export const InputsView: React.FC = () => { const { formatMessage } = useIntl(); const inputs = useBuilderWatch("formValues.inputs"); - const storedInputOrder = useBuilderWatch("formValues.inputOrder"); const { setValue } = useFormContext(); const { permission } = useConnectorBuilderFormState(); const [inputInEditing, setInputInEditing] = useState(undefined); - const inferredInputs = useInferredInputs(); const sensors = useSensors( useSensor(PointerSensor), useSensor(KeyboardSensor, { @@ -42,19 +39,15 @@ export const InputsView: React.FC = () => { }) ); - const { orderedInputs, inputOrder } = useMemo(() => { - const orderedInputs = orderInputs(inputs, inferredInputs, storedInputOrder); - const inputOrder = orderedInputs.map((input) => input.id); - return { orderedInputs, inputOrder }; - }, [inferredInputs, storedInputOrder, inputs]); + const inputsWithIds = useMemo(() => inputs.map((input) => ({ input, id: input.key })), [inputs]); const handleDragEnd = (event: DragEndEvent) => { const { active, over } = event; if (over !== null && active.id !== over.id) { - const oldIndex = inputOrder.indexOf(active.id.toString()); - const newIndex = inputOrder.indexOf(over.id.toString()); - setValue("formValues.inputOrder", arrayMove(inputOrder, oldIndex, newIndex)); + const oldIndex = inputs.findIndex((input) => input.key === active.id.toString()); + const newIndex = inputs.findIndex((input) => input.key === over.id.toString()); + setValue("formValues.inputs", arrayMove(inputs, oldIndex, newIndex)); } }; @@ -66,9 +59,9 @@ export const InputsView: React.FC = () => { - - {orderedInputs.map((input) => ( - + + {inputsWithIds.map((inputWithId) => ( + ))} @@ -115,30 +108,26 @@ function getType(definition: BuilderFormInput["definition"]): InputInEditing["ty return supportedType; } -function formInputToInputInEditing( - { key, definition, required }: BuilderFormInput, - isInferredInputOverride: boolean -): InputInEditing { +function formInputToInputInEditing({ key, definition, required, isLocked }: BuilderFormInput): InputInEditing { return { key, previousKey: key, definition, required, + isLocked, isNew: false, showDefaultValueField: Boolean(definition.default), type: getType(definition), - isInferredInputOverride, }; } interface SortableInputProps { input: BuilderFormInput; - isInferred: boolean; id: string; setInputInEditing: (inputInEditing: InputInEditing) => void; } -const SortableInput: React.FC = ({ input, isInferred, id, setInputInEditing }) => { +const SortableInput: React.FC = ({ input, id, setInputInEditing }) => { const { attributes, listeners, setNodeRef, transform, transition, isDragging } = 
useSortable({ id }); const { permission } = useConnectorBuilderFormState(); const canEdit = permission !== "readOnly"; @@ -164,7 +153,7 @@ const SortableInput: React.FC = ({ input, isInferred, id, se aria-label="Edit" type="button" onClick={() => { - setInputInEditing(formInputToInputInEditing(input, isInferred)); + setInputInEditing(formInputToInputInEditing(input)); }} data-no-dnd="true" > diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/PaginationSection.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/PaginationSection.tsx index 5139aa547fa..184559f0151 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/PaginationSection.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/PaginationSection.tsx @@ -90,6 +90,8 @@ export const PaginationSection: React.FC = ({ streamFiel manifestPath="OffsetIncrement.properties.page_size" path={streamFieldPath("paginator.strategy.page_size")} optional + step={1} + min={1} /> {pageSize ? ( = ({ streamFiel path={streamFieldPath("paginator.strategy.page_size")} manifestPath="PageIncrement.properties.page_size" optional + step={1} + min={1} /> {pageSize ? ( = ({ streamFiel } }} optional + step={1} + min={1} /> {pageSize ? ( @@ -185,7 +186,7 @@ export const StreamTester: React.FC<{ )} {!isFetching && streamReadData && streamReadData.test_read_limit_reached && showLimitWarning && ( = ({ testingVal {showInputsWarning && ( { setShowInputsWarning(false); }} - text={} + text={} /> )} {permission === "adminReadOnly" && ( diff --git a/airbyte-webapp/src/components/connectorBuilder/convertManifestToBuilderForm.ts b/airbyte-webapp/src/components/connectorBuilder/convertManifestToBuilderForm.ts index 0d59c6a4cbc..543a288704d 100644 --- a/airbyte-webapp/src/components/connectorBuilder/convertManifestToBuilderForm.ts +++ b/airbyte-webapp/src/components/connectorBuilder/convertManifestToBuilderForm.ts @@ -1,6 +1,9 @@ import { dump } from "js-yaml"; import cloneDeep from "lodash/cloneDeep"; +import get from "lodash/get"; +import isArray from "lodash/isArray"; import isEqual from "lodash/isEqual"; +import isString from "lodash/isString"; import pick from "lodash/pick"; import { match } from "ts-pattern"; @@ -30,17 +33,17 @@ import { HttpRequesterErrorHandler, NoAuth, SessionTokenAuthenticator, + DatetimeBasedCursorType, } from "core/api/types/ConnectorManifest"; import { removeEmptyProperties } from "core/utils/form"; import { API_KEY_AUTHENTICATOR, - authTypeToKeyToInferredInput, BASIC_AUTHENTICATOR, BEARER_AUTHENTICATOR, BuilderErrorHandler, BuilderFormAuthenticator, - BuilderFormValues, + BuilderFormInput, BuilderIncrementalSync, BuilderPaginator, BuilderRequestBody, @@ -49,20 +52,21 @@ import { DEFAULT_BUILDER_FORM_VALUES, DEFAULT_BUILDER_STREAM_VALUES, extractInterpolatedConfigKey, - getInferredAuthValue, - hasIncrementalSyncUserInput, INCREMENTAL_SYNC_USER_INPUT_DATE_FORMAT, - incrementalSyncInferredInputs, + interpolateConfigKey, isInterpolatedConfigKey, NO_AUTH, - OAUTH_ACCESS_TOKEN_INPUT, OAUTH_AUTHENTICATOR, - OAUTH_TOKEN_EXPIRY_DATE_INPUT, RequestOptionOrPathInject, SESSION_TOKEN_AUTHENTICATOR, YamlString, YamlSupportedComponentName, } from "./types"; +import { + getKeyToDesiredLockedInput, + LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE, + LOCKED_INPUT_BY_INCREMENTAL_FIELD_NAME, +} from "./useLockedInputs"; import { formatJson } from "./utils"; import { AirbyteJSONSchema } from "../../core/jsonSchema/types"; @@ -72,15 +76,7 @@ export const convertToBuilderFormValuesSync = (resolvedManifest: ConnectorManife 
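With inferred inputs removed, input order above is simply the order of `formValues.inputs`, so the drag handler reduces to two index lookups plus dnd-kit's `arrayMove`. The handler extracted as a pure function (a sketch; assumes the `DragEndEvent` shape and `arrayMove` helper used in the hunk):

```typescript
import type { DragEndEvent } from "@dnd-kit/core";
import { arrayMove } from "@dnd-kit/sortable";

// Input keys double as drag ids, so the new order is computed directly
// on the inputs array and written back to the form.
function reorderInputs<T extends { key: string }>(inputs: T[], event: DragEndEvent): T[] {
  const { active, over } = event;
  if (over === null || active.id === over.id) {
    return inputs;
  }
  const oldIndex = inputs.findIndex((input) => input.key === active.id.toString());
  const newIndex = inputs.findIndex((input) => input.key === over.id.toString());
  return arrayMove(inputs, oldIndex, newIndex);
}
```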
const streams = resolvedManifest.streams; if (streams === undefined || streams.length === 0) { - const { inputs, inferredInputOverrides, inputOrder } = manifestSpecAndAuthToBuilder( - resolvedManifest.spec, - undefined, - undefined - ); - builderFormValues.inputs = inputs; - builderFormValues.inferredInputOverrides = inferredInputOverrides; - builderFormValues.inputOrder = inputOrder; - + builderFormValues.inputs = manifestSpecToBuilderInputs(resolvedManifest.spec, { type: NO_AUTH }, []); return removeEmptyProperties(builderFormValues); } @@ -96,19 +92,20 @@ export const convertToBuilderFormValuesSync = (resolvedManifest: ConnectorManife serializedStreamToIndex, streams[0].retriever.requester.url_base, streams[0].retriever.requester.authenticator, - resolvedManifest.metadata + resolvedManifest.metadata, + resolvedManifest.spec ) ); - const { inputs, inferredInputOverrides, auth, inputOrder } = manifestSpecAndAuthToBuilder( - resolvedManifest.spec, + builderFormValues.global.authenticator = manifestAuthenticatorToBuilder( streams[0].retriever.requester.authenticator, + resolvedManifest.spec + ); + builderFormValues.inputs = manifestSpecToBuilderInputs( + resolvedManifest.spec, + builderFormValues.global.authenticator, builderFormValues.streams ); - builderFormValues.inputs = inputs; - builderFormValues.inferredInputOverrides = inferredInputOverrides; - builderFormValues.global.authenticator = auth; - builderFormValues.inputOrder = inputOrder; return removeEmptyProperties(builderFormValues); }; @@ -151,7 +148,8 @@ const manifestStreamToBuilder = ( serializedStreamToIndex: Record, firstStreamUrlBase: string, firstStreamAuthenticator?: HttpRequesterAuthenticator, - metadata?: DeclarativeComponentSchemaMetadata + metadata?: DeclarativeComponentSchemaMetadata, + spec?: Spec ): BuilderStream => { assertType(stream.retriever, "SimpleRetriever", stream.name); const retriever = stream.retriever; @@ -211,7 +209,8 @@ const manifestStreamToBuilder = ( manifestIncrementalSyncToBuilder, "incrementalSync", stream.name, - metadata + metadata, + spec ), parentStreams, parameterizedRequests, @@ -245,22 +244,21 @@ function requesterToRequestBody(requester: HttpRequester): BuilderRequestBody { if (requester.request_body_data && typeof requester.request_body_data === "object") { return { type: "form_list", values: Object.entries(requester.request_body_data) }; } - if (requester.request_body_data && typeof requester.request_body_data === "string") { + if (requester.request_body_data && isString(requester.request_body_data)) { return { type: "string_freeform", value: requester.request_body_data }; } if (!requester.request_body_json) { return { type: "json_list", values: [] }; } - const allStringValues = Object.values(requester.request_body_json).every((value) => typeof value === "string"); + const allStringValues = Object.values(requester.request_body_json).every((value) => isString(value)); if (allStringValues) { return { type: "json_list", values: Object.entries(requester.request_body_json) }; } return { type: "json_freeform", - value: - typeof requester.request_body_json === "string" - ? requester.request_body_json - : formatJson(requester.request_body_json), + value: isString(requester.request_body_json) + ? requester.request_body_json + : formatJson(requester.request_body_json), }; } @@ -303,16 +301,15 @@ function manifestPartitionRouterToBuilder( parameterizedRequests: [ { ...partitionRouter, - values: - typeof partitionRouter.values === "string" - ? 
{ - value: partitionRouter.values, - type: "variable" as const, - } - : { - value: partitionRouter.values, - type: "list" as const, - }, + values: isString(partitionRouter.values) + ? { + value: partitionRouter.values, + type: "variable" as const, + } + : { + value: partitionRouter.values, + type: "list" as const, + }, }, ], }; @@ -450,25 +447,26 @@ export function manifestTransformationsToBuilder( } function getFormat( - format: DatetimeBasedCursorStartDatetime | DatetimeBasedCursorEndDatetime, + manifestCursorDatetime: DatetimeBasedCursorStartDatetime | DatetimeBasedCursorEndDatetime, manifestIncrementalSync: DeclarativeStreamIncrementalSync ) { - if (typeof format === "string" || !format.datetime_format) { + if (isString(manifestCursorDatetime) || !manifestCursorDatetime.datetime_format) { return manifestIncrementalSync.datetime_format; } - return format.datetime_format; + return manifestCursorDatetime.datetime_format; } function isFormatSupported( - format: DatetimeBasedCursorStartDatetime | DatetimeBasedCursorEndDatetime, + manifestCursorDatetime: DatetimeBasedCursorStartDatetime | DatetimeBasedCursorEndDatetime, manifestIncrementalSync: DeclarativeStreamIncrementalSync ) { - return getFormat(format, manifestIncrementalSync) === INCREMENTAL_SYNC_USER_INPUT_DATE_FORMAT; + return getFormat(manifestCursorDatetime, manifestIncrementalSync) === INCREMENTAL_SYNC_USER_INPUT_DATE_FORMAT; } export function manifestIncrementalSyncToBuilder( manifestIncrementalSync: DeclarativeStreamIncrementalSync | undefined, - streamName?: string + streamName?: string, + spec?: Spec ): BuilderStream["incrementalSync"] | undefined { if (!manifestIncrementalSync) { return undefined; @@ -500,30 +498,44 @@ export function manifestIncrementalSyncToBuilder( ...regularFields } = manifestIncrementalSync; + if ( + (manifestStartDateTime && + typeof manifestStartDateTime !== "string" && + (manifestStartDateTime.max_datetime || manifestStartDateTime.min_datetime)) || + (manifestEndDateTime && + typeof manifestEndDateTime !== "string" && + (manifestEndDateTime.max_datetime || manifestEndDateTime.min_datetime)) + ) { + throw new ManifestCompatibilityError( + streamName, + "DatetimeBasedCursor max_datetime and min_datetime are not supported" + ); + } + let start_datetime: BuilderIncrementalSync["start_datetime"] = { type: "custom", - value: typeof manifestStartDateTime === "string" ? manifestStartDateTime : manifestStartDateTime.datetime, + value: isString(manifestStartDateTime) ? manifestStartDateTime : manifestStartDateTime.datetime, format: getFormat(manifestStartDateTime, manifestIncrementalSync), }; let end_datetime: BuilderIncrementalSync["end_datetime"] = { type: "custom", - value: typeof manifestEndDateTime === "string" ? manifestEndDateTime : manifestEndDateTime?.datetime || "", + value: isString(manifestEndDateTime) ? manifestEndDateTime : manifestEndDateTime?.datetime || "", format: manifestEndDateTime ? 
getFormat(manifestEndDateTime, manifestIncrementalSync) : undefined, }; - if ( - start_datetime.value === "{{ config['start_date'] }}" && - isFormatSupported(manifestStartDateTime, manifestIncrementalSync) - ) { - start_datetime = { type: "user_input" }; + const startDateSpecKey = tryExtractAndValidateIncrementalKey( + ["start_datetime"], + start_datetime.value, + spec, + streamName + ); + if (startDateSpecKey && isFormatSupported(manifestStartDateTime, manifestIncrementalSync)) { + start_datetime = { type: "user_input", value: interpolateConfigKey(startDateSpecKey) }; } - if ( - end_datetime.value === "{{ config['end_date'] }}" && - manifestEndDateTime && - isFormatSupported(manifestEndDateTime, manifestIncrementalSync) - ) { - end_datetime = { type: "user_input" }; + const endDateSpecKey = tryExtractAndValidateIncrementalKey(["end_datetime"], end_datetime.value, spec, streamName); + if (manifestEndDateTime && endDateSpecKey && isFormatSupported(manifestEndDateTime, manifestIncrementalSync)) { + end_datetime = { type: "user_input", value: interpolateConfigKey(endDateSpecKey) }; } else if ( !manifestEndDateTime || end_datetime.value === `{{ now_utc().strftime('${INCREMENTAL_SYNC_USER_INPUT_DATE_FORMAT}') }}` @@ -652,7 +664,7 @@ function removeTrailingSlashes(path: string) { return path.replace(/\/+$/, ""); } -type SupportedAuthenticators = +type SupportedAuthenticator = | ApiKeyAuthenticator | BasicHttpAuthenticator | BearerAuthenticator @@ -660,7 +672,7 @@ type SupportedAuthenticators = | NoAuth | SessionTokenAuthenticator; -function isSupportedAuthenticator(authenticator: HttpRequesterAuthenticator): authenticator is SupportedAuthenticators { +function isSupportedAuthenticator(authenticator: HttpRequesterAuthenticator): authenticator is SupportedAuthenticator { const supportedAuthTypes: string[] = [ NO_AUTH, API_KEY_AUTHENTICATOR, @@ -673,154 +685,190 @@ function isSupportedAuthenticator(authenticator: HttpRequesterAuthenticator): au } function manifestAuthenticatorToBuilder( - manifestAuthenticator: HttpRequesterAuthenticator | undefined, + authenticator: HttpRequesterAuthenticator | undefined, + spec: Spec | undefined, streamName?: string ): BuilderFormAuthenticator { - let builderAuthenticator: BuilderFormAuthenticator; - if (manifestAuthenticator === undefined) { - builderAuthenticator = { - type: "NoAuth", + if (authenticator === undefined) { + return { + type: NO_AUTH, }; - } else if (manifestAuthenticator.type === undefined) { + } else if (authenticator.type === undefined) { throw new ManifestCompatibilityError(streamName, "Authenticator has no type"); - } else if (!isSupportedAuthenticator(manifestAuthenticator)) { - throw new ManifestCompatibilityError(streamName, `Unsupported authenticator type: ${manifestAuthenticator.type}`); - } else if (manifestAuthenticator.type === "ApiKeyAuthenticator") { - builderAuthenticator = { - ...manifestAuthenticator, - inject_into: manifestAuthenticator.inject_into ?? { - type: "RequestOption", - field_name: manifestAuthenticator.header || "", - inject_into: "header", - }, - }; - } else if (manifestAuthenticator.type === "OAuthAuthenticator") { - if ( - Object.values(manifestAuthenticator.refresh_request_body ?? 
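The rewritten start/end datetime handling above no longer hard-codes `{{ config['start_date'] }}`: it extracts whichever config key the manifest interpolates, validates it against the spec via `tryExtractAndValidateIncrementalKey`, and stores the normalized reference on the `user_input` field. A sketch with that validation step stubbed out (the local `interpolate` stands in for `interpolateConfigKey`, defined later in this diff; the production code additionally requires `isFormatSupported` to pass):

```typescript
type StartDatetimeField =
  | { type: "user_input"; value: string }
  | { type: "custom"; value: string; format?: string };

// Stand-in for interpolateConfigKey, defined later in this diff.
const interpolate = (key: string) => `{{ config["${key}"] }}`;

function toStartDatetime(
  manifestValue: string,
  validatedSpecKey: string | undefined // what tryExtractAndValidateIncrementalKey returns
): StartDatetimeField {
  return validatedSpecKey
    ? { type: "user_input", value: interpolate(validatedSpecKey) }
    : { type: "custom", value: manifestValue };
}

// toStartDatetime("{{ config['start_date'] }}", "start_date")
//   -> { type: "user_input", value: '{{ config["start_date"] }}' }
```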
{}).filter((value) => typeof value !== "string") - .length > 0 - ) { - throw new ManifestCompatibilityError( - streamName, - "OAuthAuthenticator contains a refresh_request_body with non-string values" - ); + } else if (!isSupportedAuthenticator(authenticator)) { + throw new ManifestCompatibilityError(streamName, `Unsupported authenticator type: ${authenticator.type}`); + } + + switch (authenticator.type) { + case NO_AUTH: { + return { + type: NO_AUTH, + }; + } + + case API_KEY_AUTHENTICATOR: { + return { + ...authenticator, + inject_into: authenticator.inject_into ?? { + type: "RequestOption", + field_name: authenticator.header || "", + inject_into: "header", + }, + api_token: interpolateConfigKey(extractAndValidateAuthKey(["api_token"], authenticator, spec, streamName)), + }; + } + + case BEARER_AUTHENTICATOR: { + return { + ...authenticator, + api_token: interpolateConfigKey(extractAndValidateAuthKey(["api_token"], authenticator, spec, streamName)), + }; + } + + case BASIC_AUTHENTICATOR: { + return { + ...authenticator, + username: interpolateConfigKey(extractAndValidateAuthKey(["username"], authenticator, spec, streamName)), + password: interpolateConfigKey(extractAndValidateAuthKey(["password"], authenticator, spec, streamName)), + }; } - const refreshTokenUpdater = manifestAuthenticator.refresh_token_updater; - if (refreshTokenUpdater) { - if (!isEqual(refreshTokenUpdater?.access_token_config_path, [OAUTH_ACCESS_TOKEN_INPUT])) { + case OAUTH_AUTHENTICATOR: { + if ( + Object.values(authenticator.refresh_request_body ?? {}).filter((value) => typeof value !== "string").length > 0 + ) { throw new ManifestCompatibilityError( streamName, - `OAuthAuthenticator access token config path needs to be [${OAUTH_ACCESS_TOKEN_INPUT}]` + "OAuthAuthenticator contains a refresh_request_body with non-string values" ); } - if (!isEqual(refreshTokenUpdater?.token_expiry_date_config_path, [OAUTH_TOKEN_EXPIRY_DATE_INPUT])) { + if ( + authenticator.grant_type && + authenticator.grant_type !== "refresh_token" && + authenticator.grant_type !== "client_credentials" + ) { throw new ManifestCompatibilityError( streamName, - `OAuthAuthenticator token expiry date config path needs to be [${OAUTH_TOKEN_EXPIRY_DATE_INPUT}]` + "OAuthAuthenticator sets custom grant_type, but it must be one of 'refresh_token' or 'client_credentials'" ); } + + let builderAuthenticator: BuilderFormAuthenticator = { + ...authenticator, + refresh_request_body: Object.entries(authenticator.refresh_request_body ?? {}), + grant_type: authenticator.grant_type ?? 
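The ApiKey/Bearer/Basic cases above all follow the same extract-validate-reinterpolate pattern: read the interpolated config key off the authenticator, check it against the spec, and write back a normalized `{{ config["key"] }}` reference. A simplified sketch with the spec check omitted and a reduced regex (the production patterns, shown later in this diff, also accept double quotes, extra whitespace, and `config.key` dot syntax):

```typescript
// Reduced pattern; see the bracket/dot regexes later in this diff for the
// forms the real converter accepts.
const configKeyPattern = /^{{ config\['(\w+)'\] }}$/;

function normalizeConfigReference(manifestValue: string): string {
  const match = configKeyPattern.exec(manifestValue);
  if (match === null) {
    throw new Error(`${manifestValue} must be of the form {{ config["key"] }}`);
  }
  // Re-interpolate so the builder form always stores the double-quoted form.
  return `{{ config["${match[1]}"] }}`;
}

// normalizeConfigReference("{{ config['api_token'] }}") === '{{ config["api_token"] }}'
```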
"refresh_token", + refresh_token_updater: undefined, + client_id: interpolateConfigKey(extractAndValidateAuthKey(["client_id"], authenticator, spec, streamName)), + client_secret: interpolateConfigKey( + extractAndValidateAuthKey(["client_secret"], authenticator, spec, streamName) + ), + }; + + if (!authenticator.grant_type || authenticator.grant_type === "refresh_token") { + const refreshTokenSpecKey = extractAndValidateAuthKey(["refresh_token"], authenticator, spec, streamName); + builderAuthenticator = { + ...builderAuthenticator, + refresh_token: interpolateConfigKey(refreshTokenSpecKey), + }; + + if (authenticator.refresh_token_updater) { + if (!isEqual(authenticator.refresh_token_updater?.refresh_token_config_path, [refreshTokenSpecKey])) { + throw new ManifestCompatibilityError( + streamName, + "OAuthAuthenticator refresh_token_config_path needs to match the config path used for refresh_token" + ); + } + const { + access_token_config_path, + token_expiry_date_config_path, + refresh_token_config_path, + ...refresh_token_updater + } = authenticator.refresh_token_updater; + builderAuthenticator = { + ...builderAuthenticator, + refresh_token_updater: { + ...refresh_token_updater, + access_token: interpolateConfigKey( + extractAndValidateAuthKey( + ["refresh_token_updater", "access_token_config_path"], + authenticator, + spec, + streamName + ) + ), + token_expiry_date: interpolateConfigKey( + extractAndValidateAuthKey( + ["refresh_token_updater", "token_expiry_date_config_path"], + authenticator, + spec, + streamName + ) + ), + }, + }; + } + } + + return builderAuthenticator; + } + + case SESSION_TOKEN_AUTHENTICATOR: { + const manifestLoginRequester = authenticator.login_requester; if ( - !isEqual(refreshTokenUpdater?.refresh_token_config_path, [ - extractInterpolatedConfigKey(manifestAuthenticator.refresh_token), - ]) + manifestLoginRequester.authenticator && + manifestLoginRequester.authenticator?.type !== NO_AUTH && + manifestLoginRequester.authenticator?.type !== API_KEY_AUTHENTICATOR && + manifestLoginRequester.authenticator?.type !== BEARER_AUTHENTICATOR && + manifestLoginRequester.authenticator?.type !== BASIC_AUTHENTICATOR ) { throw new ManifestCompatibilityError( streamName, - "OAuthAuthenticator refresh_token_config_path needs to match the config value used for refresh_token" + `SessionTokenAuthenticator login_requester.authenticator must have one of the following types: ${NO_AUTH}, ${API_KEY_AUTHENTICATOR}, ${BEARER_AUTHENTICATOR}, ${BASIC_AUTHENTICATOR}` ); } - } - if ( - manifestAuthenticator.grant_type && - manifestAuthenticator.grant_type !== "refresh_token" && - manifestAuthenticator.grant_type !== "client_credentials" - ) { - throw new ManifestCompatibilityError(streamName, "OAuthAuthenticator sets custom grant_type"); - } - - builderAuthenticator = { - ...manifestAuthenticator, - refresh_request_body: Object.entries(manifestAuthenticator.refresh_request_body ?? {}), - grant_type: manifestAuthenticator.grant_type ?? 
"refresh_token", - }; - } else if (manifestAuthenticator.type === "SessionTokenAuthenticator") { - const manifestLoginRequester = manifestAuthenticator.login_requester; - if ( - manifestLoginRequester.authenticator && - manifestLoginRequester.authenticator?.type !== NO_AUTH && - manifestLoginRequester.authenticator?.type !== API_KEY_AUTHENTICATOR && - manifestLoginRequester.authenticator?.type !== BEARER_AUTHENTICATOR && - manifestLoginRequester.authenticator?.type !== BASIC_AUTHENTICATOR - ) { - throw new ManifestCompatibilityError( - streamName, - `SessionTokenAuthenticator login_requester.authenticator must have one of the following types: ${NO_AUTH}, ${API_KEY_AUTHENTICATOR}, ${BEARER_AUTHENTICATOR}, ${BASIC_AUTHENTICATOR}` + const builderLoginRequesterAuthenticator = manifestAuthenticatorToBuilder( + manifestLoginRequester.authenticator, + spec, + streamName ); - } - builderAuthenticator = { - ...manifestAuthenticator, - login_requester: { - url: `${removeTrailingSlashes(manifestLoginRequester.url_base)}/${removeLeadingSlashes( - manifestLoginRequester.path - )}`, - authenticator: manifestLoginRequester.authenticator ?? { type: NO_AUTH }, - httpMethod: manifestLoginRequester.http_method === "GET" ? "GET" : "POST", - requestOptions: { - requestParameters: Object.entries(manifestLoginRequester.request_parameters ?? {}), - requestHeaders: Object.entries(manifestLoginRequester.request_headers ?? {}), - requestBody: requesterToRequestBody(manifestLoginRequester), - }, - errorHandler: manifestErrorHandlerToBuilder(manifestLoginRequester.error_handler), - }, - }; - } else { - builderAuthenticator = manifestAuthenticator; - } - // verify that all auth keys which require a user input have a {{config[]}} value - - const inferredInputs = authTypeToKeyToInferredInput(builderAuthenticator); - const userInputAuthKeys = Object.keys(inferredInputs); - - for (const userInputAuthKey of userInputAuthKeys) { - if ( - !inferredInputs[userInputAuthKey].as_config_path && - !isInterpolatedConfigKey(getInferredAuthValue(builderAuthenticator, userInputAuthKey)) - ) { - throw new ManifestCompatibilityError( - undefined, - `Authenticator's ${userInputAuthKey} value must be of the form {{ config['key'] }}` - ); + return { + ...authenticator, + login_requester: { + url: `${removeTrailingSlashes(manifestLoginRequester.url_base)}/${removeLeadingSlashes( + manifestLoginRequester.path + )}`, + authenticator: builderLoginRequesterAuthenticator as + | ApiKeyAuthenticator + | BearerAuthenticator + | BasicHttpAuthenticator, + httpMethod: manifestLoginRequester.http_method === "GET" ? "GET" : "POST", + requestOptions: { + requestParameters: Object.entries(manifestLoginRequester.request_parameters ?? {}), + requestHeaders: Object.entries(manifestLoginRequester.request_headers ?? 
{}), + requestBody: requesterToRequestBody(manifestLoginRequester), + }, + errorHandler: manifestErrorHandlerToBuilder(manifestLoginRequester.error_handler), + }, + }; } } - - return builderAuthenticator; } -function manifestSpecAndAuthToBuilder( +function manifestSpecToBuilderInputs( manifestSpec: Spec | undefined, - manifestAuthenticator: HttpRequesterAuthenticator | undefined, - streams: BuilderStream[] | undefined + authenticator: BuilderFormAuthenticator, + streams: BuilderStream[] ) { - const result: { - inputs: BuilderFormValues["inputs"]; - inferredInputOverrides: BuilderFormValues["inferredInputOverrides"]; - auth: BuilderFormAuthenticator; - inputOrder: string[]; - } = { - inputs: [], - inferredInputOverrides: {}, - auth: manifestAuthenticatorToBuilder(manifestAuthenticator), - inputOrder: [], - }; - if (manifestSpec === undefined) { - return result; + return []; } + const lockedInputKeys = Object.keys(getKeyToDesiredLockedInput(authenticator, streams)); + const required = manifestSpec.connection_specification.required as string[] | undefined; - Object.entries(manifestSpec.connection_specification.properties as Record) + return Object.entries(manifestSpec.connection_specification.properties as Record) .sort(([_keyA, valueA], [_keyB, valueB]) => { if (valueA.order !== undefined && valueB.order !== undefined) { return valueA.order - valueB.order; @@ -833,37 +881,14 @@ function manifestSpecAndAuthToBuilder( } return 0; }) - .forEach(([specKey, specDefinition]) => { - const matchingInferredInput = getMatchingInferredInput(result.auth, streams, specKey); - if (matchingInferredInput) { - result.inferredInputOverrides[matchingInferredInput.key] = specDefinition; - } else { - result.inputs.push({ - key: specKey, - definition: specDefinition, - required: required?.includes(specKey) || false, - }); - } - if (specDefinition.order !== undefined) { - result.inputOrder.push(specKey); - } + .map(([specKey, specDefinition]) => { + return { + key: specKey, + definition: specDefinition, + required: required?.includes(specKey) || false, + isLocked: lockedInputKeys.includes(specKey), + }; }); - - return result; -} - -function getMatchingInferredInput( - auth: BuilderFormAuthenticator, - streams: BuilderStream[] | undefined, - specKey: string -) { - if (streams && specKey === "start_date" && hasIncrementalSyncUserInput(streams, "start_datetime")) { - return incrementalSyncInferredInputs.start_date; - } - if (streams && specKey === "end_date" && hasIncrementalSyncUserInput(streams, "end_datetime")) { - return incrementalSyncInferredInputs.end_date; - } - return Object.values(authTypeToKeyToInferredInput(auth)).find((input) => input.key === specKey); } function assertType( @@ -895,21 +920,133 @@ export function isManifestCompatibilityError(error: { __type?: string }): error function convertOrDumpAsString( manifestValue: ManifestInput, - convertFn: (manifestValue: ManifestInput, streamName?: string) => BuilderOutput | undefined, + convertFn: (manifestValue: ManifestInput, streamName?: string, spec?: Spec) => BuilderOutput | undefined, componentName: YamlSupportedComponentName, streamName?: string | undefined, - metadata?: DeclarativeComponentSchemaMetadata + metadata?: DeclarativeComponentSchemaMetadata, + spec?: Spec ): BuilderOutput | YamlString | undefined { if (streamName && metadata?.yamlComponents?.streams?.[streamName]?.includes(componentName)) { return dump(manifestValue); } try { - return convertFn(manifestValue, streamName); + return convertFn(manifestValue, streamName, spec); } catch (e) { - 
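`removeTrailingSlashes` is defined earlier in this file; its mirror image `removeLeadingSlashes` is called but its body is not shown in this diff, so the implementation below is an assumption. Together they make the login requester URL join insensitive to stray slashes:

```typescript
const removeTrailingSlashes = (s: string) => s.replace(/\/+$/, "");
// Assumed mirror of the helper above; only its call site appears in this diff.
const removeLeadingSlashes = (s: string) => s.replace(/^\/+/, "");

function joinLoginUrl(urlBase: string, path: string): string {
  // Always produces exactly one separator between base and path.
  return `${removeTrailingSlashes(urlBase)}/${removeLeadingSlashes(path)}`;
}

// joinLoginUrl("https://api.example.com/", "/v1/login") === "https://api.example.com/v1/login"
```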
if (e instanceof ManifestCompatibilityError) { + if (isManifestCompatibilityError(e)) { return dump(manifestValue); } throw e; } } + +const extractAndValidateAuthKey = ( + path: string[], + authenticator: SupportedAuthenticator, + manifestSpec: Spec | undefined, + streamName?: string +) => { + return extractAndValidateSpecKey( + path, + get(authenticator, path), + get(LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[authenticator.type], path), + authenticator.type, + manifestSpec, + streamName + ); +}; + +const tryExtractAndValidateIncrementalKey = ( + path: string[], + value: string, + manifestSpec: Spec | undefined, + streamName?: string +) => { + try { + return extractAndValidateSpecKey( + path, + value, + get(LOCKED_INPUT_BY_INCREMENTAL_FIELD_NAME, path), + DatetimeBasedCursorType.DatetimeBasedCursor, + manifestSpec, + streamName + ); + } catch (e) { + if (isManifestCompatibilityError(e)) { + // if the manifest value doesn't point to the expected input in the spec, just treat it as custom + return undefined; + } + throw e; + } +}; + +const extractAndValidateSpecKey = ( + path: string[], + value: string | string[] | undefined, + lockedInput: BuilderFormInput, + componentName: string, + spec: Spec | undefined, + streamName?: string +): string => { + const manifestPath = `${componentName}.${path.join(".")}`; + + let specKey: string | undefined = undefined; + if (isArray(value)) { + if (value.length < 1) { + throw new ManifestCompatibilityError( + streamName, + `${manifestPath} has an empty path, but a non-empty path is required.` + ); + } + if (value.length > 1) { + throw new ManifestCompatibilityError( + streamName, + `${manifestPath} points to a nested config path, but only top-level config fields are supported.` + ); + } + [specKey] = value; + } + if (isString(value)) { + if (!isInterpolatedConfigKey(value)) { + throw new ManifestCompatibilityError(streamName, `${manifestPath} must be of the form {{ config["key"] }}`); + } + specKey = extractInterpolatedConfigKey(value); + } + if (!specKey) { + throw new ManifestCompatibilityError(streamName, `${manifestPath} must point to a config field`); + } + + const specDefinition = specKey ? 
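The `catch` block above switches from `instanceof` to the `isManifestCompatibilityError` type guard, but the fallback strategy is unchanged: a compatibility failure dumps the raw manifest node to YAML instead of aborting the whole import. A generic sketch of that pattern, using `instanceof` for brevity and assuming `dump` is js-yaml's (matching the `dump(manifestValue)` call above):

```typescript
import { dump } from "js-yaml";

class CompatibilityError extends Error {}

function convertOrDump<ManifestValue, BuilderValue>(
  value: ManifestValue,
  convert: (value: ManifestValue) => BuilderValue
): BuilderValue | string {
  try {
    return convert(value);
  } catch (e) {
    if (e instanceof CompatibilityError) {
      // Preserve the component as a YAML string for the manual editor view.
      return dump(value);
    }
    throw e; // anything else is a genuine bug and should surface
  }
}
```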
spec?.connection_specification?.properties?.[specKey] : undefined; + if (!specDefinition) { + throw new ManifestCompatibilityError( + streamName, + `${manifestPath} references spec key "${specKey}", which must appear in the spec` + ); + } + if (lockedInput.required && !spec?.connection_specification?.required?.includes(specKey)) { + throw new ManifestCompatibilityError( + streamName, + `${manifestPath} references spec key "${specKey}", which must be required in the spec` + ); + } + if (specDefinition.type !== "string") { + throw new ManifestCompatibilityError( + streamName, + `${manifestPath} references spec key "${specKey}", which must be of type string` + ); + } + if (lockedInput.definition.airbyte_secret && !specDefinition.airbyte_secret) { + throw new ManifestCompatibilityError( + streamName, + `${manifestPath} references spec key "${specKey}", which must have airbyte_secret set to true` + ); + } + if (lockedInput.definition.pattern && specDefinition.pattern !== lockedInput.definition.pattern) { + throw new ManifestCompatibilityError( + streamName, + `${manifestPath} references spec key "${specKey}", which must have pattern "${lockedInput.definition.pattern}"` + ); + } + + return specKey; +}; diff --git a/airbyte-webapp/src/components/connectorBuilder/types.ts b/airbyte-webapp/src/components/connectorBuilder/types.ts index e05a3c2a4b7..a15a005aa4a 100644 --- a/airbyte-webapp/src/components/connectorBuilder/types.ts +++ b/airbyte-webapp/src/components/connectorBuilder/types.ts @@ -50,8 +50,8 @@ import { RequestOptionInjectInto, NoAuthType, HttpRequester, + OAuthAuthenticatorRefreshTokenUpdater, } from "core/api/types/ConnectorManifest"; -import { naturalComparator } from "core/utils/objects"; import { CDK_VERSION } from "./cdk"; import { formatJson } from "./utils"; @@ -72,7 +72,7 @@ export interface BuilderFormInput { key: string; required: boolean; definition: AirbyteJSONSchema; - as_config_path?: boolean; + isLocked?: boolean; } type BuilderHttpMethod = "GET" | "POST"; @@ -93,16 +93,27 @@ export type BuilderSessionTokenAuthenticator = Omit & { - refresh_request_body: Array<[string, string]>; - }) + | BuilderFormOAuthAuthenticator | ApiKeyAuthenticator | BearerAuthenticator | BasicHttpAuthenticator - | BuilderSessionTokenAuthenticator -) & { type: string }; + | BuilderSessionTokenAuthenticator; + +export type BuilderFormOAuthAuthenticator = Omit< + OAuthAuthenticator, + "refresh_request_body" | "refresh_token_updater" +> & { + refresh_request_body: Array<[string, string]>; + refresh_token_updater?: Omit< + OAuthAuthenticatorRefreshTokenUpdater, + "access_token_config_path" | "token_expiry_date_config_path" | "refresh_token_config_path" + > & { + access_token: string; + token_expiry_date: string; + }; +}; export interface BuilderFormValues { global: { @@ -110,8 +121,6 @@ export interface BuilderFormValues { authenticator: BuilderFormAuthenticator; }; inputs: BuilderFormInput[]; - inferredInputOverrides: Record>; - inputOrder: string[]; streams: BuilderStream[]; checkStreams: string[]; version: string; @@ -173,12 +182,14 @@ export interface BuilderIncrementalSync end_datetime: | { type: "user_input"; + value: string; } | { type: "now" } | { type: "custom"; value: string; format?: string }; start_datetime: | { type: "user_input"; + value: string; } | { type: "custom"; value: string; format?: string }; slicer?: { @@ -318,8 +329,6 @@ export const DEFAULT_BUILDER_FORM_VALUES: BuilderFormValues = { authenticator: { type: "NoAuth" }, }, inputs: [], - inferredInputOverrides: {}, - inputOrder: 
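A condensed restatement of the `extractAndValidateSpecKey` rules above, assuming a pared-down spec shape: the referenced key must exist in the spec, be a string, and be at least as strict (required, secret, pattern) as the locked input it backs. The real code also reports the manifest path and stream name in each error.

```typescript
interface SpecFieldDefinition {
  type?: string;
  airbyte_secret?: boolean;
  pattern?: string;
}

interface MinimalSpec {
  required?: string[];
  properties: Record<string, SpecFieldDefinition>;
}

function validateSpecKey(
  specKey: string,
  spec: MinimalSpec,
  locked: { required: boolean; definition: SpecFieldDefinition }
): void {
  const definition = spec.properties[specKey];
  if (!definition) {
    throw new Error(`"${specKey}" must appear in the spec`);
  }
  if (locked.required && !spec.required?.includes(specKey)) {
    throw new Error(`"${specKey}" must be required in the spec`);
  }
  if (definition.type !== "string") {
    throw new Error(`"${specKey}" must be of type string`);
  }
  if (locked.definition.airbyte_secret && !definition.airbyte_secret) {
    throw new Error(`"${specKey}" must have airbyte_secret set to true`);
  }
  if (locked.definition.pattern && definition.pattern !== locked.definition.pattern) {
    throw new Error(`"${specKey}" must have pattern "${locked.definition.pattern}"`);
  }
}
```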
[], streams: [], checkStreams: [], version: CDK_VERSION, @@ -373,167 +382,9 @@ export const CURSOR_PAGINATION: CursorPaginationType = "CursorPagination"; export const OFFSET_INCREMENT: OffsetIncrementType = "OffsetIncrement"; export const PAGE_INCREMENT: PageIncrementType = "PageIncrement"; -export const incrementalSyncInferredInputs: Record<"start_date" | "end_date", BuilderFormInput> = { - start_date: { - key: "start_date", - required: true, - definition: { - type: "string", - title: "Start date", - format: "date-time", - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - }, - }, - end_date: { - key: "end_date", - required: true, - definition: { - type: "string", - title: "End date", - format: "date-time", - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - }, - }, -}; - -export const DEFAULT_INFERRED_INPUT_ORDER = [ - "api_key", - "username", - "password", - "client_id", - "client_secret", - "client_refresh_token", -]; - -export const authTypeToKeyToInferredInput = ( - authenticator: BuilderFormAuthenticator | { type: BuilderFormAuthenticator["type"] } -): Record => { - switch (authenticator.type) { - case "NoAuth": - return {}; - case API_KEY_AUTHENTICATOR: - return { - api_token: { - key: "api_key", - required: true, - definition: { - type: "string", - title: "API Key", - airbyte_secret: true, - }, - }, - }; - case BEARER_AUTHENTICATOR: - return { - api_token: { - key: "api_key", - required: true, - definition: { - type: "string", - title: "API Key", - airbyte_secret: true, - }, - }, - }; - case BASIC_AUTHENTICATOR: - return { - username: { - key: "username", - required: true, - definition: { - type: "string", - title: "Username", - }, - }, - password: { - key: "password", - required: false, - definition: { - type: "string", - title: "Password", - always_show: true, - airbyte_secret: true, - }, - }, - }; - case OAUTH_AUTHENTICATOR: - const baseInputs: Record = { - client_id: { - key: "client_id", - required: true, - definition: { - type: "string", - title: "Client ID", - airbyte_secret: true, - }, - }, - client_secret: { - key: "client_secret", - required: true, - definition: { - type: "string", - title: "Client secret", - airbyte_secret: true, - }, - }, - }; - if (!("grant_type" in authenticator) || authenticator.grant_type === "refresh_token") { - baseInputs.refresh_token = { - key: "client_refresh_token", - required: true, - definition: { - type: "string", - title: "Refresh token", - airbyte_secret: true, - }, - }; - if ("refresh_token_updater" in authenticator && authenticator.refresh_token_updater) { - baseInputs.oauth_access_token = { - key: "oauth_access_token", - required: false, - definition: { - type: "string", - title: "Access token", - airbyte_secret: true, - description: - "The current access token. This field might be overridden by the connector based on the token refresh endpoint response.", - }, - as_config_path: true, - }; - baseInputs.oauth_token_expiry_date = { - key: "oauth_token_expiry_date", - required: false, - definition: { - type: "string", - title: "Token expiry date", - format: "date-time", - description: - "The date the current access token expires in. 
This field might be overridden by the connector based on the token refresh endpoint response.", - }, - as_config_path: true, - }; - } - } - return baseInputs; - case SESSION_TOKEN_AUTHENTICATOR: - if ("login_requester" in authenticator && "type" in authenticator.login_requester.authenticator) { - return authTypeToKeyToInferredInput(authenticator.login_requester.authenticator); - } - return {}; - } -}; - export const OAUTH_ACCESS_TOKEN_INPUT = "oauth_access_token"; export const OAUTH_TOKEN_EXPIRY_DATE_INPUT = "oauth_token_expiry_date"; -export const inferredAuthValues = (type: BuilderFormAuthenticator["type"]): Record => { - return Object.fromEntries( - Object.entries(authTypeToKeyToInferredInput({ type })).map(([authKey, inferredInput]) => { - return [authKey, interpolateConfigKey(inferredInput.key)]; - }) - ); -}; - export function hasIncrementalSyncUserInput( streams: BuilderFormValues["streams"], key: "start_datetime" | "end_datetime" @@ -546,63 +397,24 @@ export function hasIncrementalSyncUserInput( ); } -export const getInferredAuthValue = (authenticator: BuilderFormAuthenticator, authKey: string) => { - if (authenticator.type === "SessionTokenAuthenticator") { - return Reflect.get(authenticator.login_requester.authenticator, authKey); - } - return Reflect.get(authenticator, authKey); -}; - -export function getInferredInputList( - authenticator: BuilderFormAuthenticator, - inferredInputOverrides: BuilderFormValues["inferredInputOverrides"], - startDateInput: boolean, - endDateInput: boolean -): BuilderFormInput[] { - const authKeyToInferredInput = authTypeToKeyToInferredInput(authenticator); - const authKeys = Object.keys(authKeyToInferredInput); - const inputs = authKeys.flatMap((authKey) => { - if ( - authKeyToInferredInput[authKey].as_config_path || - extractInterpolatedConfigKey(getInferredAuthValue(authenticator, authKey)) === authKeyToInferredInput[authKey].key - ) { - return [authKeyToInferredInput[authKey]]; - } - return []; - }); - - if (startDateInput) { - inputs.push(incrementalSyncInferredInputs.start_date); - } - - if (endDateInput) { - inputs.push(incrementalSyncInferredInputs.end_date); - } - - return inputs.map((input) => - inferredInputOverrides[input.key] - ? { - ...input, - definition: { ...input.definition, ...inferredInputOverrides[input.key] }, - } - : input - ); +export function interpolateConfigKey(key: string): string; +export function interpolateConfigKey(key: string | undefined): string | undefined; +export function interpolateConfigKey(key: string | undefined): string | undefined { + return key ? 
`{{ config["${key}"] }}` : undefined; } -const interpolateConfigKey = (key: string): string => { - return `{{ config['${key}'] }}`; -}; - -const interpolatedConfigValueRegex = /^{{config(\.(.+)|\[('|"+)(.+)('|"+)\])}}$/; +const interpolatedConfigValueRegexBracket = /^\s*{{\s*config\[('|")+(\S+)('|")+\]\s*}}\s*$/; +const interpolatedConfigValueRegexDot = /^\s*{{\s*config\.(\S+)\s*}}\s*$/; export function isInterpolatedConfigKey(str: string | undefined): boolean { if (str === undefined) { return false; } - const noWhitespaceString = str.replace(/\s/g, ""); - return interpolatedConfigValueRegex.test(noWhitespaceString); + return interpolatedConfigValueRegexBracket.test(str) || interpolatedConfigValueRegexDot.test(str); } +export function extractInterpolatedConfigKey(str: string): string; +export function extractInterpolatedConfigKey(str: string | undefined): string | undefined; export function extractInterpolatedConfigKey(str: string | undefined): string | undefined { /** * This methods does not work for nested configs like `config["credentials"]["client_secret"]` as the interpolated config key would be @@ -611,14 +423,15 @@ export function extractInterpolatedConfigKey(str: string | undefined): string | if (str === undefined) { return undefined; } - const noWhitespaceString = str.replace(/\s/g, ""); - const regexResult = interpolatedConfigValueRegex.exec(noWhitespaceString); - if (regexResult === null) { - return undefined; - } else if (regexResult.length > 2) { - return regexResult[4]; + const regexBracketResult = interpolatedConfigValueRegexBracket.exec(str); + if (regexBracketResult === null) { + const regexDotResult = interpolatedConfigValueRegexDot.exec(str); + if (regexDotResult === null) { + return undefined; + } + return regexDotResult[1]; } - return regexResult[2]; + return regexBracketResult[2]; } const INTERPOLATION_PATTERN = /^\{\{.+\}\}$/; @@ -962,7 +775,7 @@ export const streamSchema = yup.object().shape({ ), start_datetime: yup.object().shape({ value: yup.mixed().when("type", { - is: (val: string) => val === "custom", + is: (val: string) => val === "custom" || val === "user_input", then: yup.string().required(REQUIRED_ERROR), otherwise: strip, }), @@ -970,7 +783,7 @@ export const streamSchema = yup.object().shape({ end_datetime: schemaIfRangeFilter( yup.object().shape({ value: yup.mixed().when("type", { - is: (val: string) => val === "custom", + is: (val: string) => val === "custom" || val === "user_input", then: yup.string().required(REQUIRED_ERROR), otherwise: strip, }), @@ -1043,6 +856,8 @@ function builderAuthenticatorToManifest( return undefined; } if (globalSettings.authenticator.type === "OAuthAuthenticator") { + const { access_token, token_expiry_date, ...refresh_token_updater } = + globalSettings.authenticator.refresh_token_updater ?? {}; return { ...globalSettings.authenticator, refresh_token: @@ -1050,9 +865,19 @@ function builderAuthenticatorToManifest( ? undefined : globalSettings.authenticator.refresh_token, refresh_token_updater: - globalSettings.authenticator.grant_type === "client_credentials" + globalSettings.authenticator.grant_type === "client_credentials" || + !globalSettings.authenticator.refresh_token_updater ? 
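The single whitespace-stripping regex is split above into bracket and dot variants; both are copied below verbatim. Extraction is the inverse of `interpolateConfigKey` for top-level keys, and nested paths such as `config["credentials"]["client_secret"]` remain unsupported, as the comment in the original file notes:

```typescript
// Both patterns are taken from the diff; ('|")+ tolerates repeated quote
// characters, mirroring the original combined regex.
const bracketPattern = /^\s*{{\s*config\[('|")+(\S+)('|")+\]\s*}}\s*$/;
const dotPattern = /^\s*{{\s*config\.(\S+)\s*}}\s*$/;

function extractConfigKey(value: string): string | undefined {
  const bracketMatch = bracketPattern.exec(value);
  if (bracketMatch !== null) {
    return bracketMatch[2];
  }
  return dotPattern.exec(value)?.[1];
}

// extractConfigKey(`{{ config["api_token"] }}`) === "api_token"
// extractConfigKey("{{ config.api_token }}") === "api_token"
// extractConfigKey("{{ now_utc() }}") === undefined
```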
undefined - : globalSettings.authenticator.refresh_token_updater, + : { + ...refresh_token_updater, + access_token_config_path: [ + extractInterpolatedConfigKey(globalSettings.authenticator.refresh_token_updater.access_token), + ], + token_expiry_date_config_path: [ + extractInterpolatedConfigKey(globalSettings.authenticator.refresh_token_updater.token_expiry_date), + ], + refresh_token_config_path: [extractInterpolatedConfigKey(globalSettings.authenticator.refresh_token!)], + }, refresh_request_body: Object.fromEntries(globalSettings.authenticator.refresh_request_body), }; } @@ -1060,6 +885,20 @@ function builderAuthenticatorToManifest( return { ...globalSettings.authenticator, header: undefined, + api_token: globalSettings.authenticator.api_token, + }; + } + if (globalSettings.authenticator.type === "BearerAuthenticator") { + return { + ...globalSettings.authenticator, + api_token: globalSettings.authenticator.api_token, + }; + } + if (globalSettings.authenticator.type === "BasicHttpAuthenticator") { + return { + ...globalSettings.authenticator, + username: globalSettings.authenticator.username, + password: globalSettings.authenticator.password, }; } if (globalSettings.authenticator.type === "SessionTokenAuthenticator") { @@ -1098,10 +937,17 @@ function pathToSafeJinjaAccess(path: string[]): string { function builderPaginationStrategyToManifest( strategy: BuilderPaginator["strategy"] ): DefaultPaginator["pagination_strategy"] { - if (strategy.type === "OffsetIncrement" || strategy.type === "PageIncrement") { - return strategy; + const correctedStrategy = { + ...strategy, + // must manually convert page_size to a number if it exists, because RHF watch() treats all numeric values as strings + page_size: strategy.page_size ? Number(strategy.page_size) : undefined, + }; + + if (correctedStrategy.type === "OffsetIncrement" || correctedStrategy.type === "PageIncrement") { + return correctedStrategy; } - const { cursor, ...rest } = strategy; + + const { cursor, ...rest } = correctedStrategy; return { ...rest, @@ -1161,7 +1007,7 @@ export function builderIncrementalSyncToManifest( } = formValues; const startDatetime = { type: "MinMaxDatetime" as const, - datetime: start_datetime.type === "custom" ? start_datetime.value : `{{ config['start_date'] }}`, + datetime: start_datetime.value, datetime_format: start_datetime.type === "custom" ? start_datetime.format : INCREMENTAL_SYNC_USER_INPUT_DATE_FORMAT, }; const manifestIncrementalSync = { @@ -1180,11 +1026,9 @@ export function builderIncrementalSyncToManifest( end_datetime: { type: "MinMaxDatetime", datetime: - end_datetime.type === "custom" - ? end_datetime.value - : end_datetime.type === "now" + end_datetime.type === "now" ? `{{ now_utc().strftime('${INCREMENTAL_SYNC_USER_INPUT_DATE_FORMAT}') }}` - : `{{ config['end_date'] }}`, + : end_datetime.value, datetime_format: end_datetime.type === "custom" ? 
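The `page_size` coercion above exists because react-hook-form's `watch()` reports numeric fields as strings. The same coercion as a standalone helper, preserving the diff's truthiness check:

```typescript
function coercePageSize<T extends { page_size?: number | string }>(strategy: T) {
  return {
    ...strategy,
    // watch() yields numeric inputs as strings; the truthiness check from the
    // diff is kept, so an empty string (and 0) normalize to undefined.
    page_size: strategy.page_size ? Number(strategy.page_size) : undefined,
  };
}

// coercePageSize({ type: "OffsetIncrement", page_size: "50" }).page_size === 50
```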
end_datetime.format : INCREMENTAL_SYNC_USER_INPUT_DATE_FORMAT, }, step: slicer?.step, @@ -1382,49 +1226,6 @@ function builderStreamToDeclarativeSteam( return merge({}, declarativeStream, stream.unsupportedFields); } -export const orderInputs = ( - inputs: BuilderFormInput[], - inferredInputs: BuilderFormInput[], - storedInputOrder: string[] -) => { - const keyToStoredOrder = storedInputOrder.reduce((map, key, index) => map.set(key, index), new Map()); - - return inferredInputs - .map((input) => { - return { input, isInferred: true, id: input.key }; - }) - .concat( - inputs.map((input) => { - return { input, isInferred: false, id: input.key }; - }) - ) - .sort((inputA, inputB) => { - const storedIndexA = keyToStoredOrder.get(inputA.id); - const storedIndexB = keyToStoredOrder.get(inputB.id); - - if (storedIndexA !== undefined && storedIndexB !== undefined) { - return storedIndexA - storedIndexB; - } - if (storedIndexA !== undefined && storedIndexB === undefined) { - return inputB.isInferred ? 1 : -1; - } - if (storedIndexA === undefined && storedIndexB !== undefined) { - return inputA.isInferred ? -1 : 1; - } - // both indexes are undefined - if (inputA.isInferred && inputB.isInferred) { - return DEFAULT_INFERRED_INPUT_ORDER.indexOf(inputA.id) - DEFAULT_INFERRED_INPUT_ORDER.indexOf(inputB.id); - } - if (inputA.isInferred && !inputB.isInferred) { - return -1; - } - if (!inputA.isInferred && inputB.isInferred) { - return 1; - } - return naturalComparator(inputA.id, inputB.id); - }); -}; - export const builderFormValuesToMetadata = (values: BuilderFormValues): BuilderMetadata => { const componentNameIfString = (componentName: YamlSupportedComponentName, value: unknown) => isYamlString(value) ? [componentName] : []; @@ -1454,35 +1255,25 @@ export const builderFormValuesToMetadata = (values: BuilderFormValues): BuilderM }; }; -export const convertToManifest = (values: BuilderFormValues): ConnectorManifest => { - const manifestStreams: DeclarativeStream[] = values.streams.map((stream) => - builderStreamToDeclarativeSteam(values, stream, []) - ); - - const orderedInputs = orderInputs( - values.inputs, - getInferredInputList( - values.global.authenticator, - values.inferredInputOverrides, - hasIncrementalSyncUserInput(values.streams, "start_datetime"), - hasIncrementalSyncUserInput(values.streams, "end_datetime") - ), - values.inputOrder - ); - const allInputs = orderedInputs.map((orderedInput) => orderedInput.input); - +export const builderInputsToSpec = (inputs: BuilderFormInput[]): Spec => { const specSchema: JSONSchema7 = { $schema: "http://json-schema.org/draft-07/schema#", type: "object", - required: allInputs.filter((input) => input.required).map((input) => input.key), - properties: Object.fromEntries(allInputs.map((input, index) => [input.key, { ...input.definition, order: index }])), + required: inputs.filter((input) => input.required).map((input) => input.key), + properties: Object.fromEntries(inputs.map((input, index) => [input.key, { ...input.definition, order: index }])), additionalProperties: true, }; - const spec: Spec = { + return { connection_specification: specSchema, type: "Spec", }; +}; + +export const convertToManifest = (values: BuilderFormValues): ConnectorManifest => { + const manifestStreams: DeclarativeStream[] = values.streams.map((stream) => + builderStreamToDeclarativeSteam(values, stream, []) + ); const streamNames = values.streams.map((s) => s.name); const validCheckStreamNames = (values.checkStreams ?? 
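`builderInputsToSpec` above replaces the deleted `orderInputs`/`inputOrder` machinery: ordering is now persisted directly in the generated spec, because each input's array index becomes its `order` in the JSON schema. A sketch with the `Spec`/`JSONSchema7` types reduced to plain objects:

```typescript
interface BuilderInputLike {
  key: string;
  required: boolean;
  definition: Record<string, unknown>;
}

function inputsToConnectionSpecification(inputs: BuilderInputLike[]) {
  return {
    $schema: "http://json-schema.org/draft-07/schema#",
    type: "object",
    // Required keys and field order both derive from the inputs list itself.
    required: inputs.filter((input) => input.required).map((input) => input.key),
    properties: Object.fromEntries(
      inputs.map((input, index) => [input.key, { ...input.definition, order: index }])
    ),
    additionalProperties: true,
  };
}
```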
[]).filter((checkStream) => streamNames.includes(checkStream)); @@ -1519,7 +1310,7 @@ export const convertToManifest = (values: BuilderFormValues): ConnectorManifest }, streams: streamRefs, schemas: streamNameToSchema, - spec, + spec: builderInputsToSpec(values.inputs), metadata: builderFormValuesToMetadata(values), }; }; diff --git a/airbyte-webapp/src/components/connectorBuilder/useInferredInputs.ts b/airbyte-webapp/src/components/connectorBuilder/useInferredInputs.ts deleted file mode 100644 index 6926596c6bd..00000000000 --- a/airbyte-webapp/src/components/connectorBuilder/useInferredInputs.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { useContext } from "react"; - -import { ConnectorBuilderMainRHFContext } from "services/connectorBuilder/ConnectorBuilderStateService"; - -import { getInferredInputList, hasIncrementalSyncUserInput } from "./types"; - -export const useInferredInputs = () => { - const { watch } = useContext(ConnectorBuilderMainRHFContext) || {}; - if (!watch) { - throw new Error("rhf context not available"); - } - const authenticator = watch("formValues.global.authenticator"); - const inferredInputOverrides = watch("formValues.inferredInputOverrides"); - const streams = watch("formValues.streams"); - const startDateInput = hasIncrementalSyncUserInput(streams, "start_datetime"); - const endDateInput = hasIncrementalSyncUserInput(streams, "end_datetime"); - return getInferredInputList(authenticator, inferredInputOverrides, startDateInput, endDateInput); -}; diff --git a/airbyte-webapp/src/components/connectorBuilder/useLockedInputs.ts b/airbyte-webapp/src/components/connectorBuilder/useLockedInputs.ts new file mode 100644 index 00000000000..99c0f424ec9 --- /dev/null +++ b/airbyte-webapp/src/components/connectorBuilder/useLockedInputs.ts @@ -0,0 +1,299 @@ +import { useEffect } from "react"; +import { useFormContext } from "react-hook-form"; + +import { + API_KEY_AUTHENTICATOR, + BASIC_AUTHENTICATOR, + BEARER_AUTHENTICATOR, + BuilderFormAuthenticator, + BuilderFormInput, + BuilderStream, + NO_AUTH, + OAUTH_AUTHENTICATOR, + SESSION_TOKEN_AUTHENTICATOR, + extractInterpolatedConfigKey, + isYamlString, + useBuilderWatch, +} from "./types"; + +export const useUpdateLockedInputs = () => { + const formValues = useBuilderWatch("formValues"); + const { setValue } = useFormContext(); + + useEffect(() => { + const keyToDesiredLockedInput = getKeyToDesiredLockedInput(formValues.global.authenticator, formValues.streams); + + const existingLockedInputKeys = formValues.inputs.filter((input) => input.isLocked).map((input) => input.key); + const lockedInputKeysToCreate = Object.keys(keyToDesiredLockedInput).filter( + (key) => !existingLockedInputKeys.includes(key) + ); + const lockedInputKeysToDelete = existingLockedInputKeys.filter((key) => !keyToDesiredLockedInput[key]); + if (lockedInputKeysToCreate.length === 0 && lockedInputKeysToDelete.length === 0) { + return; + } + + const updatedInputs = formValues.inputs.filter((input) => !lockedInputKeysToDelete.includes(input.key)); + lockedInputKeysToCreate.forEach((key) => { + updatedInputs.push({ + ...keyToDesiredLockedInput[key], + key, + isLocked: true, + }); + }); + setValue("formValues.inputs", updatedInputs); + }, [formValues.global.authenticator, formValues.inputs, formValues.streams, setValue]); +}; + +export const useGetUniqueKey = () => { + const builderInputs = useBuilderWatch("formValues.inputs"); + const builderStreams = useBuilderWatch("formValues.streams"); + + // If reuseIncrementalField is set, find the first stream which has the 
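The effect in `useUpdateLockedInputs` above boils down to a set difference between the locked inputs that already exist and the ones the authenticator and incremental syncs currently demand; it bails out early when both differences are empty, presumably so `setValue` does not retrigger the effect through its own dependency on `formValues.inputs`. The same logic as a pure function, with the definitions of the created inputs omitted:

```typescript
interface InputLike {
  key: string;
  isLocked?: boolean;
}

// Returns the updated inputs list, or undefined when nothing changed.
function reconcileLockedInputs(
  inputs: InputLike[],
  desiredLockedKeys: Set<string>
): InputLike[] | undefined {
  const existingLockedKeys = inputs.filter((input) => input.isLocked).map((input) => input.key);
  const keysToCreate = Array.from(desiredLockedKeys).filter((key) => !existingLockedKeys.includes(key));
  const keysToDelete = existingLockedKeys.filter((key) => !desiredLockedKeys.has(key));
  if (keysToCreate.length === 0 && keysToDelete.length === 0) {
    return undefined;
  }
  return [
    ...inputs.filter((input) => !keysToDelete.includes(input.key)),
    ...keysToCreate.map((key) => ({ key, isLocked: true })),
  ];
}
```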
corresponding incremental field + // set to user input and return its key. Otherwise, return a unique version of the desired key. + return (desiredKey: string, reuseIncrementalField?: "start_datetime" | "end_datetime") => { + if (reuseIncrementalField) { + let existingKey: string | undefined = undefined; + builderStreams.some((stream) => { + if (stream.incrementalSync && !isYamlString(stream.incrementalSync)) { + const incrementalDatetime = stream.incrementalSync[reuseIncrementalField]; + if (incrementalDatetime.type === "user_input") { + existingKey = extractInterpolatedConfigKey(incrementalDatetime.value); + return true; + } + } + return false; + }); + if (existingKey) { + return existingKey; + } + } + + const existingKeys = builderInputs.map((input) => input.key); + let key = desiredKey; + let i = 2; + while (existingKeys.includes(key)) { + key = `${desiredKey}_${i}`; + i++; + } + return key; + }; +}; + +export function getKeyToDesiredLockedInput( + authenticator: BuilderFormAuthenticator, + streams: BuilderStream[] +): Record { + const authKeyToDesiredInput = getAuthKeyToDesiredLockedInput(authenticator); + + const incrementalStartDateKeys = new Set(); + const incrementalEndDateKeys = new Set(); + streams.forEach((stream) => { + if (stream.incrementalSync && !isYamlString(stream.incrementalSync)) { + const startDatetime = stream.incrementalSync.start_datetime; + if (startDatetime.type === "user_input") { + incrementalStartDateKeys.add(extractInterpolatedConfigKey(startDatetime.value)); + } + + const endDatetime = stream.incrementalSync.end_datetime; + if (endDatetime.type === "user_input") { + incrementalEndDateKeys.add(extractInterpolatedConfigKey(endDatetime.value)); + } + } + }); + + const incrementalKeyToDesiredInput: Record = { + ...Array.from(incrementalStartDateKeys).reduce( + (acc, key) => ({ + ...acc, + [key]: LOCKED_INPUT_BY_INCREMENTAL_FIELD_NAME.start_datetime, + }), + {} + ), + ...Array.from(incrementalEndDateKeys).reduce( + (acc, key) => ({ + ...acc, + [key]: LOCKED_INPUT_BY_INCREMENTAL_FIELD_NAME.end_datetime, + }), + {} + ), + }; + + return { + ...authKeyToDesiredInput, + ...incrementalKeyToDesiredInput, + }; +} + +function getAuthKeyToDesiredLockedInput(authenticator: BuilderFormAuthenticator): Record { + switch (authenticator.type) { + case API_KEY_AUTHENTICATOR: + case BEARER_AUTHENTICATOR: + const apiTokenKey = extractInterpolatedConfigKey(authenticator.api_token); + return { + ...(apiTokenKey && { [apiTokenKey]: LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[authenticator.type].api_token }), + }; + + case BASIC_AUTHENTICATOR: + const usernameKey = extractInterpolatedConfigKey(authenticator.username); + const passwordKey = extractInterpolatedConfigKey(authenticator.password); + return { + [usernameKey]: LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[BASIC_AUTHENTICATOR].username, + ...(passwordKey && { + [passwordKey]: LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[BASIC_AUTHENTICATOR].password, + }), + }; + + case OAUTH_AUTHENTICATOR: + const clientIdKey = extractInterpolatedConfigKey(authenticator.client_id); + const clientSecretKey = extractInterpolatedConfigKey(authenticator.client_secret); + const refreshTokenKey = extractInterpolatedConfigKey(authenticator.refresh_token); + const accessTokenKey = extractInterpolatedConfigKey(authenticator.refresh_token_updater?.access_token); + const tokenExpiryDateKey = extractInterpolatedConfigKey(authenticator.refresh_token_updater?.token_expiry_date); + return { + [clientIdKey]: 
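The tail of `useGetUniqueKey` above resolves key collisions with numeric suffixes. Extracted as a standalone helper:

```typescript
function uniqueKey(desiredKey: string, existingKeys: string[]): string {
  let key = desiredKey;
  let i = 2;
  while (existingKeys.includes(key)) {
    key = `${desiredKey}_${i}`;
    i++;
  }
  return key;
}

// uniqueKey("api_key", ["api_key", "api_key_2"]) === "api_key_3"
```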
LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].client_id, + [clientSecretKey]: LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].client_secret, + ...(refreshTokenKey && { + [refreshTokenKey]: LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].refresh_token, + }), + ...(accessTokenKey && { + [accessTokenKey]: + LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].refresh_token_updater.access_token_config_path, + }), + ...(tokenExpiryDateKey && { + [tokenExpiryDateKey]: + LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].refresh_token_updater + .token_expiry_date_config_path, + }), + }; + + case SESSION_TOKEN_AUTHENTICATOR: + const loginRequesterAuthenticator = authenticator.login_requester.authenticator; + return loginRequesterAuthenticator ? getAuthKeyToDesiredLockedInput(loginRequesterAuthenticator) : {}; + + default: + return {}; + } +} + +export const LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE = { + [NO_AUTH]: {}, + [API_KEY_AUTHENTICATOR]: { + api_token: { + key: "api_key", + required: true, + definition: { + type: "string" as const, + title: "API Key", + airbyte_secret: true, + }, + }, + }, + [BEARER_AUTHENTICATOR]: { + api_token: { + key: "api_key", + required: true, + definition: { + type: "string" as const, + title: "API Key", + airbyte_secret: true, + }, + }, + }, + [BASIC_AUTHENTICATOR]: { + username: { + key: "username", + required: true, + definition: { + type: "string" as const, + title: "Username", + }, + }, + password: { + key: "password", + required: false, + definition: { + type: "string" as const, + title: "Password", + always_show: true, + airbyte_secret: true, + }, + }, + }, + [OAUTH_AUTHENTICATOR]: { + client_id: { + key: "client_id", + required: true, + definition: { + type: "string" as const, + title: "Client ID", + airbyte_secret: true, + }, + }, + client_secret: { + key: "client_secret", + required: true, + definition: { + type: "string" as const, + title: "Client secret", + airbyte_secret: true, + }, + }, + refresh_token: { + key: "client_refresh_token", + required: true, + definition: { + type: "string" as const, + title: "Refresh token", + airbyte_secret: true, + }, + }, + refresh_token_updater: { + access_token_config_path: { + key: "oauth_access_token", + required: false, + definition: { + type: "string" as const, + title: "Access token", + airbyte_secret: true, + description: + "The current access token. This field might be overridden by the connector based on the token refresh endpoint response.", + }, + }, + token_expiry_date_config_path: { + key: "oauth_token_expiry_date", + required: false, + definition: { + type: "string" as const, + title: "Token expiry date", + format: "date-time", + description: + "The date the current access token expires in. 
This field might be overridden by the connector based on the token refresh endpoint response.", + }, + }, + }, + }, + [SESSION_TOKEN_AUTHENTICATOR]: {}, +}; + +export const LOCKED_INPUT_BY_INCREMENTAL_FIELD_NAME: Record = { + start_datetime: { + key: "start_date", + required: true, + definition: { + type: "string", + title: "Start date", + format: "date-time", + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", + }, + }, + end_datetime: { + key: "end_date", + required: true, + definition: { + type: "string", + title: "End date", + format: "date-time", + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", + }, + }, +}; diff --git a/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.test.ts b/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.test.ts index 1c4bd13b201..a10a01f042f 100644 --- a/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.test.ts +++ b/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.test.ts @@ -6,6 +6,7 @@ import { DeclarativeStreamIncrementalSync, HttpRequesterErrorHandler, SimpleRetrieverPaginator, + Spec, } from "core/api/types/ConnectorManifest"; import { removeEmptyProperties } from "core/utils/form"; @@ -28,6 +29,68 @@ const baseManifest: ConnectorManifest = { streams: [], }; +const apiAuthRetriever = { + retriever: { + type: "SimpleRetriever", + requester: { + authenticator: { + type: "ApiKeyAuthenticator", + api_token: "{{ config['api_token'] }}", + header: "API_KEY", + }, + }, + }, +}; + +const apiTokenSpec: Spec = { + type: "Spec", + connection_specification: { + type: "object", + required: ["api_token"], + properties: { + api_token: { + type: "string", + title: "API Token", + airbyte_secret: true, + }, + }, + }, +}; + +const oauthSpec: Spec = { + type: "Spec", + connection_specification: { + type: "object", + required: ["client_id", "client_secret", "client_refresh_token"], + properties: { + client_id: { + type: "string", + title: "Client ID", + airbyte_secret: true, + }, + client_secret: { + type: "string", + title: "Client Secret", + airbyte_secret: true, + }, + client_refresh_token: { + type: "string", + title: "Client Refresh Token", + airbyte_secret: true, + }, + oauth_access_token: { + type: "string", + title: "Access Token", + airbyte_secret: true, + }, + oauth_token_expiry_date: { + type: "string", + title: "Token Expiry Date", + }, + }, + }, +}; + const stream1: DeclarativeStream = { type: "DeclarativeStream", name: "stream1", @@ -135,7 +198,7 @@ describe("Conversion throws error when", () => { }; return convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); }; - await expect(convert).rejects.toThrow("api_token value must be of the form {{ config["); + await expect(convert).rejects.toThrow('ApiKeyAuthenticator.api_token must be of the form {{ config["key"] }}'); }); it("manifest has an authenticator with a interpolated secret key of type config.", async () => { @@ -154,12 +217,13 @@ describe("Conversion throws error when", () => { }, }), ], + spec: apiTokenSpec, }; const formValues = await convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); if (formValues.global.authenticator.type !== "ApiKeyAuthenticator") { throw new Error("Has to be ApiKeyAuthenticator"); } - expect(formValues.global.authenticator.api_token).toEqual("{{ config.api_token }}"); + expect(formValues.global.authenticator.api_token).toEqual('{{ config["api_token"] }}'); }); it("manifest has an authenticator with a 
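The `pattern` carried by these start/end date inputs only admits UTC ISO-8601 timestamps with a trailing `Z`, which is the shape `INCREMENTAL_SYNC_USER_INPUT_DATE_FORMAT` produces. A quick check:

```typescript
const dateTimePattern = /^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$/;

dateTimePattern.test("2024-01-01T00:00:00Z"); // true
dateTimePattern.test("2024-01-01"); // false: date only, no time component
dateTimePattern.test("2024-01-01T00:00:00+02:00"); // false: offsets are rejected
```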
interpolated secret key of type config['config key']", async () => { @@ -178,48 +242,108 @@ describe("Conversion throws error when", () => { }, }), ], + spec: apiTokenSpec, }; const formValues = await convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); if (formValues.global.authenticator.type !== "ApiKeyAuthenticator") { throw new Error("Has to be ApiKeyAuthenticator"); } - expect(formValues.global.authenticator.api_token).toEqual("{{ config['api_token'] }}"); + expect(formValues.global.authenticator.api_token).toEqual('{{ config["api_token"] }}'); }); - it("manifest has an OAuthAuthenticator with a refresh_request_body containing non-string values", async () => { - const convert = () => { + it("manifest has an authenticator with an interpolated key that doesn't match any spec key", async () => { + const convert = async () => { const manifest: ConnectorManifest = { ...baseManifest, - streams: [ - merge({}, stream1, { - retriever: { - requester: { - authenticator: { - type: "OAuthAuthenticator", - client_id: "{{ config['client_id'] }}", - client_secret: "{{ config['client_secret'] }}", - refresh_token: "{{ config['client_refresh_token'] }}", - refresh_request_body: { - key1: "val1", - key2: { - a: 1, - b: 2, - }, - }, - token_refresh_endpoint: "https://api.com/refresh_token", - grant_type: "client_credentials", - }, + streams: [merge({}, stream1, apiAuthRetriever)], + }; + return convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); + }; + await expect(convert).rejects.toThrow( + 'ApiKeyAuthenticator.api_token references spec key "api_token", which must appear in the spec' + ); + }); + + it("manifest has an authenticator with a required interpolated key that is not required in the spec", async () => { + const convert = async () => { + const manifest: ConnectorManifest = { + ...baseManifest, + streams: [merge({}, stream1, apiAuthRetriever)], + spec: { + type: "Spec", + connection_specification: { + type: "object", + properties: { + api_token: { + type: "string", + title: "API Token", + airbyte_secret: true, }, }, - }), - ], + }, + }, }; return convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); }; - await expect(convert).rejects.toThrow("OAuthAuthenticator contains a refresh_request_body with non-string values"); + await expect(convert).rejects.toThrow( + 'ApiKeyAuthenticator.api_token references spec key "api_token", which must be required in the spec' + ); }); - it("manifest has an OAuthAuthenticator with non-standard access token or token expiry date config path", async () => { + it("manifest has an authenticator with an interpolated key that is not type string in the spec", async () => { + const convert = async () => { + const manifest: ConnectorManifest = { + ...baseManifest, + streams: [merge({}, stream1, apiAuthRetriever)], + spec: { + type: "Spec", + connection_specification: { + type: "object", + required: ["api_token"], + properties: { + api_token: { + type: "integer", + title: "API Token", + airbyte_secret: true, + }, + }, + }, + }, + }; + return convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); + }; + await expect(convert).rejects.toThrow( + 'ApiKeyAuthenticator.api_token references spec key "api_token", which must be of type string' + ); + }); + + it("manifest has an authenticator with an interpolated secret key that is not secret in the spec", async () => { + const convert = async () => { + const manifest: ConnectorManifest = { + ...baseManifest, + streams: [merge({}, stream1, 
apiAuthRetriever)], + spec: { + type: "Spec", + connection_specification: { + type: "object", + required: ["api_token"], + properties: { + api_token: { + type: "string", + title: "API Token", + }, + }, + }, + }, + }; + return convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); + }; + await expect(convert).rejects.toThrow( + 'ApiKeyAuthenticator.api_token references spec key "api_token", which must have airbyte_secret set to true' + ); + }); + + it("manifest has an OAuthAuthenticator with a refresh_request_body containing non-string values", async () => { const convert = () => { const manifest: ConnectorManifest = { ...baseManifest, @@ -232,13 +356,15 @@ describe("Conversion throws error when", () => { client_id: "{{ config['client_id'] }}", client_secret: "{{ config['client_secret'] }}", refresh_token: "{{ config['client_refresh_token'] }}", + refresh_request_body: { + key1: "val1", + key2: { + a: 1, + b: 2, + }, + }, token_refresh_endpoint: "https://api.com/refresh_token", grant_type: "client_credentials", - refresh_token_updater: { - access_token_config_path: ["credentials", "access_token"], - refresh_token_config_path: ["client_refresh_token"], - token_expiry_date_config_path: ["oauth_token_expiry_date"], - }, }, }, }, @@ -247,9 +373,7 @@ describe("Conversion throws error when", () => { }; return convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); }; - await expect(convert).rejects.toThrow( - "OAuthAuthenticator access token config path needs to be [oauth_access_token]" - ); + await expect(convert).rejects.toThrow("OAuthAuthenticator contains a refresh_request_body with non-string values"); }); it("manifest has a SessionTokenAuthenticator with an unsupported login_requester authenticator type", async () => { @@ -356,11 +480,11 @@ describe("Conversion successfully results in", () => { }, }; const formValues = await convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); - expect(formValues.inferredInputOverrides).toEqual({}); expect(formValues.inputs).toEqual([ { key: "api_key", required: true, + isLocked: false, definition: manifest.spec?.connection_specification.properties.api_key, }, ]); @@ -389,12 +513,13 @@ describe("Conversion successfully results in", () => { { key: "api_key", required: false, + isLocked: false, definition: manifest.spec?.connection_specification.properties.api_key, }, ]); }); - it("spec properties converted to input overrides on matching auth keys", async () => { + it("spec properties converted to locked inputs on matching auth keys", async () => { const manifest: ConnectorManifest = { ...baseManifest, streams: [ @@ -432,15 +557,19 @@ describe("Conversion successfully results in", () => { }; const formValues = await convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); expect(formValues.inputs).toEqual([ + { + key: "api_key", + required: true, + isLocked: true, + definition: manifest.spec?.connection_specification.properties.api_key, + }, { key: "numeric_key", required: false, + isLocked: false, definition: manifest.spec?.connection_specification.properties.numeric_key, }, ]); - expect(formValues.inferredInputOverrides).toEqual({ - api_key: manifest.spec?.connection_specification.properties.api_key, - }); }); it("request options converted to key-value list", async () => { @@ -717,13 +846,14 @@ describe("Conversion successfully results in", () => { }, }), ], + spec: oauthSpec, }; const formValues = await convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); 
expect(formValues.global.authenticator).toEqual({ type: "OAuthAuthenticator", - client_id: "{{ config['client_id'] }}", - client_secret: "{{ config['client_secret'] }}", - refresh_token: "{{ config['client_refresh_token'] }}", + client_id: '{{ config["client_id"] }}', + client_secret: '{{ config["client_secret"] }}', + refresh_token: '{{ config["client_refresh_token"] }}', refresh_request_body: [ ["key1", "val1"], ["key2", "val2"], @@ -758,21 +888,21 @@ describe("Conversion successfully results in", () => { }, }), ], + spec: oauthSpec, }; const formValues = await convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); expect(formValues.global.authenticator).toEqual({ type: "OAuthAuthenticator", - client_id: "{{ config['client_id'] }}", - client_secret: "{{ config['client_secret'] }}", - refresh_token: "{{ config['client_refresh_token'] }}", + client_id: '{{ config["client_id"] }}', + client_secret: '{{ config["client_secret"] }}', + refresh_token: '{{ config["client_refresh_token"] }}', refresh_request_body: [], token_refresh_endpoint: "https://api.com/refresh_token", grant_type: "refresh_token", refresh_token_updater: { refresh_token_name: "refresh_token", - access_token_config_path: ["oauth_access_token"], - refresh_token_config_path: ["client_refresh_token"], - token_expiry_date_config_path: ["oauth_token_expiry_date"], + access_token: '{{ config["oauth_access_token"] }}', + token_expiry_date: '{{ config["oauth_token_expiry_date"] }}', }, }); }); diff --git a/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.ts b/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.ts index 47ebf6c991e..ed1c34db933 100644 --- a/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.ts +++ b/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.ts @@ -1,4 +1,4 @@ -import { useBuilderResolveManifestQuery } from "core/api"; +import { HttpError, useBuilderResolveManifestQuery } from "core/api"; import { ResolveManifest } from "core/api/types/ConnectorBuilderClient"; import { ConnectorManifest } from "core/api/types/ConnectorManifest"; @@ -20,7 +20,7 @@ export const convertToBuilderFormValues = async ( try { resolveResult = await resolve(manifest, projectId); } catch (e) { - let errorMessage = e.message; + let errorMessage = e instanceof HttpError ? 
e.response.message : e.message; if (errorMessage[0] === '"') { errorMessage = errorMessage.substring(1, errorMessage.length); } diff --git a/airbyte-webapp/src/components/ui/Button/Button.module.scss b/airbyte-webapp/src/components/ui/Button/Button.module.scss index 3c33fe36721..03dc88f8e28 100644 --- a/airbyte-webapp/src/components/ui/Button/Button.module.scss +++ b/airbyte-webapp/src/components/ui/Button/Button.module.scss @@ -37,7 +37,7 @@ &:disabled:not(.isLoading), &.disabled:not(.isLoading) { - opacity: 0.25; + opacity: 0.5; } .buttonIcon { @@ -262,5 +262,6 @@ border: none; text-decoration: underline; box-shadow: none; + height: unset; } } diff --git a/airbyte-webapp/src/components/ui/Card/Card.module.scss b/airbyte-webapp/src/components/ui/Card/Card.module.scss index 6a356d37c0d..970cd679ce4 100644 --- a/airbyte-webapp/src/components/ui/Card/Card.module.scss +++ b/airbyte-webapp/src/components/ui/Card/Card.module.scss @@ -8,12 +8,15 @@ $default-padding: variables.$spacing-xl; .cardHeader { display: flex; justify-content: space-between; - align-items: center; color: colors.$dark-blue; padding: $default-padding $default-padding 0 $default-padding; border-top-left-radius: $default-border-radius; border-top-right-radius: $default-border-radius; + .helpText { + color: colors.$grey-400; + } + &.withBorderBottom { border-bottom: colors.$grey-100 variables.$border-thin solid; padding-bottom: $default-padding; @@ -37,10 +40,6 @@ $default-padding: variables.$spacing-xl; background: colors.$foreground; border-radius: variables.$border-radius-lg; - .infoTooltip { - color: colors.$foreground; - } - &:has(.cardBody:empty) .cardHeader { // apply the bottom border to the header only if it has a rendered sibling (the card content) // checking for `children` in tsx isn't enough as the passed child can return null @@ -75,3 +74,7 @@ $default-padding: variables.$spacing-xl; .noPadding { padding: 0; } + +.infoTooltip { + color: colors.$foreground; +} diff --git a/airbyte-webapp/src/components/ui/Card/Card.stories.tsx b/airbyte-webapp/src/components/ui/Card/Card.stories.tsx index aa72598afed..f2299570c65 100644 --- a/airbyte-webapp/src/components/ui/Card/Card.stories.tsx +++ b/airbyte-webapp/src/components/ui/Card/Card.stories.tsx @@ -65,3 +65,17 @@ CollapsibleWithPreviewInfo.args = {
    ), }; + +export const CardWithHelpText = Template.bind({}); +CardWithHelpText.args = { + title: "Title", + children: "Card content here", + helpText: "This is helpful text", +}; + +export const CardWithHelpDescription = Template.bind({}); +CardWithHelpDescription.args = { + title: "Title", + children: "Card content here", + description: "This is descriptive text", +}; diff --git a/airbyte-webapp/src/components/ui/Card/Card.tsx b/airbyte-webapp/src/components/ui/Card/Card.tsx index 2f9392188ac..5f7c137e185 100644 --- a/airbyte-webapp/src/components/ui/Card/Card.tsx +++ b/airbyte-webapp/src/components/ui/Card/Card.tsx @@ -16,6 +16,7 @@ interface CardProps { * The title of the card */ title?: string; + helpText?: string; description?: React.ReactNode; /** * override card container styles @@ -46,6 +47,7 @@ interface CardProps { export const Card: React.FC> = ({ children, title, + helpText, description, className, bodyClassName, @@ -61,7 +63,9 @@ export const Card: React.FC> = ({ const [isCollapsed, toggleIsCollapsed] = useToggle(defaultCollapsedState); const headerTitle = ( -
    > = ({ )} )} -
    + {helpText && ( + + {helpText} + + )} +
    ); return ( diff --git a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss index d1459bacd05..c82f709d1bd 100644 --- a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss +++ b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss @@ -75,7 +75,10 @@ $icon-width: 18px; .body { width: 100%; - padding-left: calc($icon-width + variables.$spacing-sm); + + &:not(&--noPadding) { + padding-left: calc($icon-width + variables.$spacing-sm); + } > div:last-child { margin-bottom: 0; diff --git a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx index 62837845cc9..fca9d24ee00 100644 --- a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx +++ b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx @@ -18,6 +18,7 @@ interface CollapsibleProps { hideWhenEmpty?: boolean; "data-testid"?: string; initiallyOpen?: boolean; + noBodyPadding?: boolean; onClick?: (newOpenState: boolean) => void; } @@ -31,6 +32,7 @@ export const Collapsible: React.FC> = children, "data-testid": dataTestId, initiallyOpen = false, + noBodyPadding = false, onClick, }) => { const childrenCount = React.Children.count(children); @@ -70,7 +72,10 @@ export const Collapsible: React.FC> = {showErrorIndicator && } - + {children} diff --git a/airbyte-webapp/src/components/ui/CopyButton/CopyButton.module.scss b/airbyte-webapp/src/components/ui/CopyButton/CopyButton.module.scss index e9f8c5120e0..6393937c7d0 100644 --- a/airbyte-webapp/src/components/ui/CopyButton/CopyButton.module.scss +++ b/airbyte-webapp/src/components/ui/CopyButton/CopyButton.module.scss @@ -3,18 +3,15 @@ .button { padding: variables.$spacing-lg; -} - -.iconContainer { - position: absolute; + position: relative; } .success { position: absolute; width: 13px; height: 13px; - top: -20px; - right: -15px; + top: -6px; + right: -6px; background-color: colors.$foreground; border-radius: 50%; } diff --git a/airbyte-webapp/src/components/ui/CopyButton/CopyButton.tsx b/airbyte-webapp/src/components/ui/CopyButton/CopyButton.tsx index 51a3d2cb0ce..84251915974 100644 --- a/airbyte-webapp/src/components/ui/CopyButton/CopyButton.tsx +++ b/airbyte-webapp/src/components/ui/CopyButton/CopyButton.tsx @@ -8,7 +8,7 @@ import { Icon } from "../Icon"; interface CopyButtonProps { className?: string; - content: string; + content: string | (() => string); title?: string; } @@ -28,7 +28,9 @@ export const CopyButton: React.FC> = ({ clearTimeout(timeoutRef.current); } - navigator.clipboard.writeText(content).then(() => { + const text = typeof content === "string" ? content : content(); + + navigator.clipboard.writeText(text).then(() => { setCopied(true); timeoutRef.current = setTimeout(() => setCopied(false), 2500); }); @@ -40,14 +42,11 @@ export const CopyButton: React.FC> = ({ className={classNames(className, styles.button)} variant="secondary" title={title || formatMessage({ id: "copyButton.title" })} - icon="copy" onClick={handleClick} + icon={children ? "copy" : undefined} > - {copied && ( -
    - -
    - )} + {copied && } + {children ? undefined : } {children} ); diff --git a/airbyte-webapp/src/components/ui/Heading/Heading.tsx b/airbyte-webapp/src/components/ui/Heading/Heading.tsx index 3eab36b3862..66418528802 100644 --- a/airbyte-webapp/src/components/ui/Heading/Heading.tsx +++ b/airbyte-webapp/src/components/ui/Heading/Heading.tsx @@ -1,5 +1,5 @@ import classNames from "classnames"; -import React from "react"; +import React, { HTMLAttributes } from "react"; import styles from "./Heading.module.scss"; @@ -7,14 +7,14 @@ type HeadingSize = "sm" | "md" | "lg" | "xl"; type HeadingColor = "darkBlue" | "blue"; type HeadingElementType = "h1" | "h2" | "h3" | "h4" | "h5" | "h6"; -interface HeadingProps { +type HeadingProps = HTMLAttributes & { className?: string; centered?: boolean; as: HeadingElementType; size?: HeadingSize; color?: HeadingColor; inverseColor?: boolean; -} +}; const sizes: Record = { sm: styles.sm, diff --git a/airbyte-webapp/src/components/ui/Link/Link.tsx b/airbyte-webapp/src/components/ui/Link/Link.tsx index acec03a48cb..ce077141f10 100644 --- a/airbyte-webapp/src/components/ui/Link/Link.tsx +++ b/airbyte-webapp/src/components/ui/Link/Link.tsx @@ -9,6 +9,7 @@ export interface LinkProps { opensInNewTab?: boolean; variant?: "default" | "primary"; onClick?: ComponentProps["onClick"]; + title?: string; } interface InternalLinkProps extends LinkProps { diff --git a/airbyte-webapp/src/components/ui/Message/Message.module.scss b/airbyte-webapp/src/components/ui/Message/Message.module.scss index f6d028a63c8..003b6c132a3 100644 --- a/airbyte-webapp/src/components/ui/Message/Message.module.scss +++ b/airbyte-webapp/src/components/ui/Message/Message.module.scss @@ -23,11 +23,6 @@ $message-icon-size: 22px; } .messageContainer { - display: flex; - flex-direction: row; - align-items: flex-start; - gap: variables.$spacing-xs; - box-sizing: border-box; padding: variables.$spacing-sm; border-radius: variables.$border-radius-md; @@ -37,21 +32,18 @@ $message-icon-size: 22px; @include type("error", colors.$red-300, colors.$red-50); } -.messageContainerWithChildren { - border-radius: variables.$border-radius-md variables.$border-radius-md 0 0; -} - @mixin children-type($name, $color, $background) { @include type($name, $color, $background); &.#{$name} { color: colors.$dark-blue-900; - border: 1px solid $background; + border: variables.$spacing-sm solid $background; + padding: variables.$spacing-sm; background: colors.$foreground; } } .childrenContainer { - border-radius: 0 0 variables.$border-radius-md variables.$border-radius-md; + border-radius: variables.$border-radius-md; font-size: variables.$font-size-lg; @include children-type("info", colors.$blue-400, colors.$blue-50); @@ -62,9 +54,6 @@ $message-icon-size: 22px; .iconContainer { padding: 4px; - display: flex; - align-items: center; - justify-content: center; } .messageIcon { @@ -94,8 +83,7 @@ $message-icon-size: 22px; text-align: left; } -.closeButton { - svg { - color: colors.$dark-blue-900; - } +.alignRightColumn { + align-self: stretch; // flex equivalent of `height: 100%` + max-height: calc(32px + 9px); // 32px for the button's height, allow up to 9px "padding" on the top } diff --git a/airbyte-webapp/src/components/ui/Message/Message.stories.tsx b/airbyte-webapp/src/components/ui/Message/Message.stories.tsx index 621cdfbdbfd..d02ecfc04f6 100644 --- a/airbyte-webapp/src/components/ui/Message/Message.stories.tsx +++ b/airbyte-webapp/src/components/ui/Message/Message.stories.tsx @@ -98,3 +98,18 @@ WithChildren.args = { ), }; + 
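Since `content` on `CopyButton` can now be a thunk (`string | (() => string)`), callers can defer building large clipboard payloads until the button is actually clicked; the `ErrorDetails` view further down appears to use this with its `getErrorDetails` callback. A minimal usage sketch, assuming the webapp's usual module aliases; `buildDiagnostics` is an illustrative helper, not part of the change:

```tsx
import React from "react";

import { CopyButton } from "components/ui/CopyButton";

// Illustrative: a payload that is expensive to build, so it should only be
// serialized once the user actually clicks the button.
const buildDiagnostics = (): string => JSON.stringify({ url: window.location.href }, null, 2);

export const DiagnosticsCopyButton: React.FC = () => (
  // Passing the function itself (not its result) defers serialization to click time.
  <CopyButton content={buildDiagnostics} title="Copy diagnostics" />
);
```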
+export const WithExpandableChildren = Template.bind({}); +WithExpandableChildren.args = { + text: "This is an error with more details, but you have to expand to see them.", + secondaryText: "This is a secondary text", + type: "error", + children: ( + + Learn More + Stacktrace + Logs + + ), + isExpandable: true, +}; diff --git a/airbyte-webapp/src/components/ui/Message/Message.tsx b/airbyte-webapp/src/components/ui/Message/Message.tsx index ba1cfb4a349..1daef281998 100644 --- a/airbyte-webapp/src/components/ui/Message/Message.tsx +++ b/airbyte-webapp/src/components/ui/Message/Message.tsx @@ -1,11 +1,12 @@ import classNames from "classnames"; -import React from "react"; +import React, { useState } from "react"; import { Icon, IconType } from "components/ui/Icon"; import { Text } from "components/ui/Text"; import styles from "./Message.module.scss"; import { Button, ButtonProps } from "../Button"; +import { FlexContainer } from "../Flex"; export type MessageType = "warning" | "success" | "error" | "info"; @@ -23,6 +24,7 @@ export interface MessageProps { hideIcon?: boolean; iconOverride?: keyof typeof ICON_MAPPING; textClassName?: string; + isExpandable?: boolean; } const ICON_MAPPING: Readonly> = { @@ -65,14 +67,18 @@ export const Message: React.FC> = ({ children, iconOverride, textClassName, + isExpandable = false, }) => { + const [isExpanded, setIsExpanded] = useState(false); + + const handleToggleExpand = () => { + setIsExpanded((isExpanded) => !isExpanded); + }; + + const isRenderingChildren = children && (!isExpandable || isExpanded); + const mainMessage = ( -
    + <> {!hideIcon && (
    @@ -86,31 +92,44 @@ export const Message: React.FC> = ({ )}
    - {onAction && ( - + {(onAction || isExpandable || onClose) && ( + + {onAction && ( + + )} + {isExpandable && ( + + )} + {onClose && ( +
    + ); - if (!children) { - return mainMessage; - } - return ( -
    - {mainMessage} -
    {children}
    -
    + + + {mainMessage} + + {isRenderingChildren && ( +
    {children}
    + )} +
    ); }; diff --git a/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.module.scss b/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.module.scss new file mode 100644 index 00000000000..3b226d01ebf --- /dev/null +++ b/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.module.scss @@ -0,0 +1,90 @@ +@use "scss/colors"; +@use "scss/variables"; + +$border-radius: 999px; +$button-width: 100px; +$button-height: 24px; +$knob-width: calc($button-height - variables.$border-thin * 2); + +@keyframes candystripe { + to { + background-position: 60px 0; + } +} + +.button { + display: flex; + flex-direction: column; + align-items: flex-start; + padding: 0; + width: $button-width; + height: $button-height; + border-radius: $border-radius; + border: variables.$border-thin solid colors.$grey-200; + background-color: colors.$grey-100; + overflow: hidden; + position: relative; + cursor: pointer; + + &.checked { + background-color: colors.$blue; + align-items: flex-end; + } + + &:disabled { + opacity: 0.7; + cursor: not-allowed; + } + + .stripe { + height: 100%; + width: 100%; + position: absolute; + top: 0; + left: 0; + + &.loading { + background-image: linear-gradient(-65deg, + transparent 25%, + colors.$blue-200 25%, + colors.$blue-200 50%, + transparent 50%, + transparent 75%, + colors.$blue-200 75%, + colors.$blue-200 100% + ); + background-size: 60px 80px; + background-repeat: repeat-x; + animation: candystripe 1s linear infinite; + + &.reverse { + animation-direction: reverse; + } + } + } + + .text { + text-transform: uppercase; + position: absolute; + text-align: center; + top: 50%; + left: 50%; + transform: translate(-35%, -50%); + font-size: variables.$font-size-sm; + font-weight: 500; + color: colors.$grey-500; + + &.checkedText { + color: colors.$white; + transform: translate(-70%, -50%); + } + } + + .knob { + border-radius: $border-radius; + width: $knob-width; + background: colors.$white; + flex: 1; + z-index: 1; + } +} \ No newline at end of file diff --git a/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.stories.tsx b/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.stories.tsx new file mode 100644 index 00000000000..648f4900618 --- /dev/null +++ b/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.stories.tsx @@ -0,0 +1,32 @@ +import { action } from "@storybook/addon-actions"; +import { Meta, StoryFn } from "@storybook/react"; +import { useState } from "react"; + +import { SwitchNext, SwitchNextProps } from "./SwitchNext"; + +export default { + title: "Ui/SwitchNext", + component: SwitchNext, + argTypes: { + checked: { control: "boolean" }, + }, +} as Meta; + +const SwitchNextWithState: StoryFn = ({ checked: initial = false, ...props }: SwitchNextProps) => { + const [checked, setChecked] = useState(initial); + const [loading, setLoading] = useState(false); + + const handleChange = (checked: boolean) => { + action("Switch toggled")(checked); + setLoading(true); + setTimeout(() => { + setChecked(checked); + setLoading(false); + }, 1500); + }; + + return ; +}; + +export const Primary = SwitchNextWithState.bind({}); +Primary.args = {}; diff --git a/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.tsx b/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.tsx new file mode 100644 index 00000000000..9720eb4d2e1 --- /dev/null +++ b/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.tsx @@ -0,0 +1,65 @@ +import { Switch } from "@headlessui/react"; +import classNames from "classnames"; +import { motion } from "framer-motion"; +import React from "react"; 
+import { useIntl } from "react-intl"; + +import styles from "./SwitchNext.module.scss"; +import { Text } from "../Text"; + +export interface SwitchNextProps { + checked: boolean; + disabled?: boolean; + loading?: boolean; + onChange: (checked: boolean) => void; + name?: string; + checkedText?: string; + uncheckedText?: string; + className?: string; + testId?: string; +} + +export const SwitchNext: React.FC = (props) => { + const { formatMessage } = useIntl(); + + const { + name, + checked, + disabled, + loading, + onChange, + checkedText = formatMessage({ id: "ui.switch.enabled" }), + uncheckedText = formatMessage({ id: "ui.switch.disabled" }), + testId, + className, + } = props; + + return ( + + + + + {checked ? checkedText : uncheckedText} + + + ); +}; diff --git a/airbyte-webapp/src/components/ui/SwitchNext/index.tsx b/airbyte-webapp/src/components/ui/SwitchNext/index.tsx new file mode 100644 index 00000000000..091cf7825c4 --- /dev/null +++ b/airbyte-webapp/src/components/ui/SwitchNext/index.tsx @@ -0,0 +1 @@ +export { SwitchNext } from "./SwitchNext"; diff --git a/airbyte-webapp/src/components/ui/Text/Text.module.scss b/airbyte-webapp/src/components/ui/Text/Text.module.scss index b02dad5f70c..1ce407ee5f8 100644 --- a/airbyte-webapp/src/components/ui/Text/Text.module.scss +++ b/airbyte-webapp/src/components/ui/Text/Text.module.scss @@ -85,6 +85,14 @@ color: colors.$red-200; } +.red400 { + color: colors.$red-400; +} + +.yellow600 { + color: colors.$yellow-600; +} + .bold, .text > strong { font-weight: 600; diff --git a/airbyte-webapp/src/components/ui/Text/Text.tsx b/airbyte-webapp/src/components/ui/Text/Text.tsx index 1e3606eda42..fc3a20efe56 100644 --- a/airbyte-webapp/src/components/ui/Text/Text.tsx +++ b/airbyte-webapp/src/components/ui/Text/Text.tsx @@ -12,10 +12,12 @@ type TextColor = | "green600" | "red" | "red200" + | "red400" | "grey600" | "grey400" | "grey500" - | "blue"; + | "blue" + | "yellow600"; type TextElementType = "p" | "span" | "div"; type TextHTMLElement = HTMLParagraphElement | HTMLSpanElement | HTMLDivElement; @@ -50,10 +52,12 @@ const colors: Record = { grey300: styles.grey300, red: styles.red, red200: styles.red200, + red400: styles.red400, blue: styles.blue, grey400: styles.grey400, grey500: styles.grey500, grey600: styles.grey600, + yellow600: styles.yellow600, }; const textAlignments: Record = { diff --git a/airbyte-webapp/src/core/api/QueryProvider.tsx b/airbyte-webapp/src/core/api/QueryProvider.tsx index 0d733779173..f2fedecb4ea 100644 --- a/airbyte-webapp/src/core/api/QueryProvider.tsx +++ b/airbyte-webapp/src/core/api/QueryProvider.tsx @@ -2,14 +2,31 @@ import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; import { ReactQueryDevtools } from "@tanstack/react-query-devtools"; import React from "react"; +import { HttpError } from "./errors"; import styles from "./QueryProvider.module.scss"; +const RETRY_COUNT = 3; + const queryClient = new QueryClient({ defaultOptions: { queries: { refetchOnWindowFocus: false, refetchOnReconnect: false, - retry: 0, + retry: (failureCount, error) => { + if ( + failureCount < RETRY_COUNT && + error instanceof HttpError && + (error.status === 502 || error.status === 503) + ) { + console.log( + `🔁 Retrying request to ${error.request.url} due to temporarily unavailable server (HTTP ${ + error.status + }). 
Retry ${failureCount + 1}/${RETRY_COUNT}` ); return true; } return false; }, }, }, });
diff --git a/airbyte-webapp/src/core/api/apiCall.ts b/airbyte-webapp/src/core/api/apiCall.ts index 3210bd7c76a..1da8245a1ec 100644 --- a/airbyte-webapp/src/core/api/apiCall.ts +++ b/airbyte-webapp/src/core/api/apiCall.ts @@ -1,7 +1,6 @@ import { trackError } from "core/utils/datadog"; -import { shortUuid } from "core/utils/uuid"; -import { CommonRequestError } from "./errors/CommonRequestError"; +import { HttpError } from "./errors/HttpError"; export interface ApiCallOptions { getAccessToken: () => Promise; @@ -29,10 +28,11 @@ function getRequestBody(data: U) { } export const fetchApiCall = async ( - { url, method, params, data, headers, responseType }: RequestOptions, + request: RequestOptions, options: ApiCallOptions, apiUrl: string ): Promise => { + const { url, method, params, data, headers, responseType } = request; // Remove the `v1/` at the end of the apiUrl for now, during the transition period // to get rid of it from all environment variables. const requestUrl = `${apiUrl.replace(/\/v1\/?$/, "")}${url.startsWith("/") ? "" : "/"}${url}`; @@ -58,11 +58,16 @@ export const fetchApiCall = async ( signal: options.signal, }); - return parseResponse(response, requestUrl, responseType); + return parseResponse(response, request, requestUrl, responseType); }; /** Parses response from server */ -async function parseResponse(response: Response, requestUrl: string, responseType?: "blob"): Promise { +async function parseResponse( + response: Response, + request: RequestOptions, + requestUrl: string, + responseType?: "blob" +): Promise { if (response.status === 204) { return {} as T; } @@ -79,34 +84,18 @@ async function parseResponse(response: Response, requestUrl: string, response : response.json(); } - if (response.headers.get("content-type") === "application/json") { - throw new CommonRequestError(response, await response.json()); - } - - let responseText: string | undefined; - - // Try to load the response body as text, since it wasn't JSON + let responsePayload: unknown; try { - responseText = await response.text(); + responsePayload = + response.headers.get("content-type") === "application/json" ? await response.json() : await response.text(); } catch (e) { - responseText = ""; + responsePayload = ""; } - const requestId = shortUuid(); - - const error = new CommonRequestError(response, { - message: `${ - response.status === 502 || response.status === 503 ? "Server temporarily unavailable" : "Unknown error" - } (http.${response.status}.${requestId})`, - }); - - trackError(error, { - httpStatus: response.status, - httpUrl: requestUrl, - httpBody: responseText, - requestId, - }); - console.error(`${requestUrl}: ${responseText} (http.${response.status}.${requestId})`); - + // Create an HttpError for the request/response. Replace the request url with the full url we called. + const error = new HttpError({ ...request, url: requestUrl }, response.status, responsePayload); + // Track HttpErrors here (instead of in the error boundary), so we report all of them, + // even the ones that will be handled by our application via e.g. a toast notification. 
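+ // Spreading the error attaches its enumerable public fields (request, status,
+ // response, requestId) to the tracking context sent to Datadog.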
+ trackError(error, { ...error }); throw error; }
diff --git a/airbyte-webapp/src/core/api/errors/CommonRequestError.ts b/airbyte-webapp/src/core/api/errors/CommonRequestError.ts deleted file mode 100644 index 83ffbbcbdae..00000000000 --- a/airbyte-webapp/src/core/api/errors/CommonRequestError.ts +++ /dev/null @@ -1,38 +0,0 @@ -type ErrorDetails = ErrorWithMessage | ErrorWithDetail; -interface ErrorWithMessage { - // Why `string | undefined` instead of an optional field? We may not always be able to - // dynamically find our intended message, but we should still statically verify that - // we're at least *attempting* to set either a message or a detail object by requiring - // one of the property names to exist. - message: string | undefined; - detail?: unknown; -} -interface ErrorWithDetail { - detail: unknown; - message?: string; -} - -export class CommonRequestError extends Error { - __type = "common.error"; - // TODO: Add better error hierarchy - _status?: number; - payload?: ErrorPayload; - - constructor( - protected response: Response | undefined, - payload?: ErrorPayload - ) { - super(response?.statusText); - this.response = response; - this.message = payload?.message ?? JSON.stringify(payload?.detail) ?? "common.error"; - this.payload = payload; - } - - get status() { - return this._status || this.response?.status; - } -} - -export function isCommonRequestError(error: { __type?: string }): error is CommonRequestError { - return error.__type === "common.error"; -}
diff --git a/airbyte-webapp/src/core/api/errors/HttpError.ts b/airbyte-webapp/src/core/api/errors/HttpError.ts new file mode 100644 index 00000000000..ed918e4d49f --- /dev/null +++ b/airbyte-webapp/src/core/api/errors/HttpError.ts @@ -0,0 +1,62 @@ +import { shortUuid } from "core/utils/uuid"; + +// Need to explicitly import from the file instead of core/errors to avoid circular dependencies +import { I18nError } from "../../errors/I18nError"; +import { RequestOptions } from "../apiCall"; + +const defaultHttpMessage = (status: number) => { + switch (status) { + case 400: + return "errors.http.badRequest"; + case 401: + return "errors.http.unauthorized"; + case 403: + return "errors.http.forbidden"; + case 404: + return "errors.http.notFound"; + case 410: + return "errors.http.gone"; + case 418: + return "errors.http.teapot"; + case 500: + return "errors.http.internalServerError"; + case 502: + return "errors.http.badGateway"; + case 503: + return "errors.http.serviceUnavailable"; + default: + return "errors.http.default"; + } +}; + +/** + * HttpError represents a non-okay (i.e. 4xx/5xx) response from the server to an API call. + * It will contain information about the request, the HTTP status code, as well as the response payload. + */ +export class HttpError extends I18nError { + /** + * A uniquely generated request ID for this error that will also be present on the + * datadog error tracing of this error and can be used to find it. + */ + public readonly requestId = shortUuid(); + constructor( + /** + * Information about the request that was made. + */ + public readonly request: RequestOptions, + /** + * HTTP status code of the response. + */ + public readonly status: number, + /** + * The response payload from the server. This could be parsed JSON if the server + * returned it, or just a plain string e.g. in case of returned HTML. + * The generic type of this class can be used to type this parameter for cases where + * the payload type is known. 
+ */ + public readonly response: PayloadType + ) { + super(defaultHttpMessage(status), { status }); + this.name = "HttpError"; + } +} diff --git a/airbyte-webapp/src/core/api/errors/index.ts b/airbyte-webapp/src/core/api/errors/index.ts index b7693522111..6615ef7441e 100644 --- a/airbyte-webapp/src/core/api/errors/index.ts +++ b/airbyte-webapp/src/core/api/errors/index.ts @@ -1,2 +1,2 @@ -export * from "./CommonRequestError"; export * from "./ErrorWithJobInfo"; +export * from "./HttpError"; diff --git a/airbyte-webapp/src/core/api/hooks/applications.ts b/airbyte-webapp/src/core/api/hooks/applications.ts index c2195f291e2..a1fae53544f 100644 --- a/airbyte-webapp/src/core/api/hooks/applications.ts +++ b/airbyte-webapp/src/core/api/hooks/applications.ts @@ -43,8 +43,8 @@ export const useCreateApplication = () => { }, onError: () => { registerNotification({ - id: "settings.accessManagement.permissionCreate.error", - text: formatMessage({ id: "settings.accessManagement.permissionCreate.error" }), + id: "settings.application.create.error", + text: formatMessage({ id: "settings.application.create.error" }), type: "error", }); }, diff --git a/airbyte-webapp/src/core/api/hooks/connections.tsx b/airbyte-webapp/src/core/api/hooks/connections.tsx index 5b938166897..75842a6283e 100644 --- a/airbyte-webapp/src/core/api/hooks/connections.tsx +++ b/airbyte-webapp/src/core/api/hooks/connections.tsx @@ -21,6 +21,7 @@ import { getConnectionUptimeHistory, getState, getStateType, + refreshConnectionStream, resetConnection, resetConnectionStream, syncConnection, @@ -195,6 +196,20 @@ export const useResetConnectionStream = (connectionId: string) => { }); }; +export const useRefreshConnectionStreams = (connectionId: string) => { + const queryClient = useQueryClient(); + const requestOptions = useRequestOptions(); + const setConnectionRunState = useSetConnectionRunState(); + + return useMutation(async (streams?: ConnectionStream[]) => { + await refreshConnectionStream({ connectionId, streams }, requestOptions); + setConnectionRunState(connectionId, true); + queryClient.setQueriesData(jobsKeys.useListJobsForConnectionStatus(connectionId), (prevJobList) => + prependArtificialJobToStatus({ status: JobStatus.running, configType: "refresh" }, prevJobList) + ); + }); +}; + export const useGetConnectionQuery = () => { const requestOptions = useRequestOptions(); return useMutation((request: WebBackendConnectionRequestBody) => webBackendGetConnection(request, requestOptions)) @@ -362,7 +377,9 @@ export const useRemoveConnectionsFromList = (): ((connectionIds: string[]) => vo }; export const getConnectionListQueryKey = (connectorIds?: string[]) => { - return connectionsKeys.lists(connectorIds); + return !connectorIds?.length + ? 
[...connectionsKeys.lists(connectorIds), "empty"] + : connectionsKeys.lists(connectorIds); }; export const useConnectionListQuery = ( diff --git a/airbyte-webapp/src/core/api/hooks/connectorBuilderApi.ts b/airbyte-webapp/src/core/api/hooks/connectorBuilderApi.ts index df59792fc0d..5c6d15bff6d 100644 --- a/airbyte-webapp/src/core/api/hooks/connectorBuilderApi.ts +++ b/airbyte-webapp/src/core/api/hooks/connectorBuilderApi.ts @@ -3,7 +3,7 @@ import { useQuery } from "@tanstack/react-query"; import { DEFAULT_JSON_MANIFEST_VALUES, ManifestValuePerComponentPerStream } from "components/connectorBuilder/types"; import { useCurrentWorkspaceId } from "area/workspace/utils"; -import { CommonRequestError } from "core/api/errors"; +import { HttpError } from "core/api"; import { readStream, resolveManifest } from "../generated/ConnectorBuilderClient"; import { KnownExceptionInfo } from "../generated/ConnectorBuilderClient.schemas"; @@ -54,7 +54,7 @@ export const useBuilderResolvedManifest = ( ) => { const requestOptions = useRequestOptions(); - return useQuery>( + return useQuery>( manifestValuePerComponentPerStream === undefined ? connectorBuilderKeys.resolveYaml(params.manifest) : connectorBuilderKeys.resolveUi(manifestValuePerComponentPerStream), diff --git a/airbyte-webapp/src/core/api/hooks/connectorBuilderProject.ts b/airbyte-webapp/src/core/api/hooks/connectorBuilderProject.ts index 5161c3c1288..85b16047d4c 100644 --- a/airbyte-webapp/src/core/api/hooks/connectorBuilderProject.ts +++ b/airbyte-webapp/src/core/api/hooks/connectorBuilderProject.ts @@ -54,6 +54,7 @@ export interface BuilderProject { export interface BuilderProjectWithManifest { name: string; manifest?: DeclarativeComponentSchema; + yamlManifest?: string; } export const useListBuilderProjects = () => { @@ -216,9 +217,9 @@ export const useUpdateBuilderProject = (projectId: string) => { const workspaceId = useCurrentWorkspaceId(); return useMutation( - ({ name, manifest }) => + ({ name, manifest, yamlManifest }) => updateConnectorBuilderProject( - { workspaceId, builderProjectId: projectId, builderProject: { name, draftManifest: manifest } }, + { workspaceId, builderProjectId: projectId, builderProject: { name, draftManifest: manifest, yamlManifest } }, requestOptions ), { diff --git a/airbyte-webapp/src/core/api/hooks/permissions.ts b/airbyte-webapp/src/core/api/hooks/permissions.ts index 4ae846b7e05..828fef06b93 100644 --- a/airbyte-webapp/src/core/api/hooks/permissions.ts +++ b/airbyte-webapp/src/core/api/hooks/permissions.ts @@ -41,19 +41,17 @@ export const useUpdatePermissions = () => { const { formatMessage } = useIntl(); return useMutation( - (permission: PermissionUpdate): Promise => { + (permission: PermissionUpdate) => { return updatePermission(permission, requestOptions); }, { - onSuccess: (data: PermissionRead) => { + onSuccess: () => { registerNotification({ id: "settings.accessManagement.permissionUpdate.success", text: formatMessage({ id: "settings.accessManagement.permissionUpdate.success" }), type: "success", }); - if (data.organizationId) { - queryClient.invalidateQueries(organizationKeys.listUsers(data.organizationId)); - } + queryClient.invalidateQueries(organizationKeys.allListUsers); queryClient.invalidateQueries(workspaceKeys.allListAccessUsers); }, onError: () => { @@ -82,7 +80,7 @@ export const useCreatePermission = () => { onSuccess: (data: PermissionRead) => { registerNotification({ id: "settings.accessManagement.permissionCreate.success", - text: formatMessage({ id: 
"settings.accessManagement.permissionCreate.success" }), + text: formatMessage({ id: "userInvitations.create.success.directlyAdded" }), type: "success", }); if (data.organizationId) { @@ -93,7 +91,7 @@ export const useCreatePermission = () => { onError: () => { registerNotification({ id: "settings.accessManagement.permissionCreate.error", - text: formatMessage({ id: "settings.accessManagement.permissionCreate.error" }), + text: formatMessage({ id: "userInvitations.create.error" }), type: "error", }); }, diff --git a/airbyte-webapp/src/core/api/hooks/userInvitations.tsx b/airbyte-webapp/src/core/api/hooks/userInvitations.tsx index ca78728aef5..0d362304f40 100644 --- a/airbyte-webapp/src/core/api/hooks/userInvitations.tsx +++ b/airbyte-webapp/src/core/api/hooks/userInvitations.tsx @@ -79,16 +79,24 @@ export const useCreateUserInvitation = () => { return useMutation(async (invitationCreate: UserInvitationCreateRequestBody) => createUserInvitation(invitationCreate, requestOptions) .then((response) => { + if (response.directlyAdded === true) { + registerNotification({ + type: "success", + text: formatMessage({ id: "userInvitations.create.success.directlyAdded" }), + id: "userInvitations.create.success.directlyAdded", + }); + queryClient.invalidateQueries(workspaceKeys.allListAccessUsers); + + return response; + } registerNotification({ type: "success", text: formatMessage({ id: "userInvitations.create.success" }), id: "userInvitations.create.success", }); const keyScope = invitationCreate.scopeType === "workspace" ? SCOPE_WORKSPACE : SCOPE_ORGANIZATION; - - // this endpoint will direct add users who are already within the org, so we want to invalidate both the invitations and the members lists - queryClient.invalidateQueries(workspaceKeys.allListAccessUsers); queryClient.invalidateQueries([keyScope, "userInvitations"]); + return response; }) .catch((err) => { @@ -127,7 +135,7 @@ export const useCancelUserInvitation = () => { return useMutation(async (inviteCodeRequestBody: InviteCodeRequestBody) => cancelUserInvitation(inviteCodeRequestBody, requestOptions) - .then((res) => { + .then((response) => { registerNotification({ type: "success", text: formatMessage({ id: "userInvitations.cancel.success" }), @@ -135,9 +143,10 @@ export const useCancelUserInvitation = () => { }); queryClient.invalidateQueries([ - res.scopeType === "organization" ? SCOPE_ORGANIZATION : SCOPE_WORKSPACE, + response.scopeType === "organization" ? SCOPE_ORGANIZATION : SCOPE_WORKSPACE, "userInvitations", ]); + return response; }) .catch(() => { registerNotification({ diff --git a/airbyte-webapp/src/core/errors/I18nError.ts b/airbyte-webapp/src/core/errors/I18nError.ts new file mode 100644 index 00000000000..21725bf6ec3 --- /dev/null +++ b/airbyte-webapp/src/core/errors/I18nError.ts @@ -0,0 +1,23 @@ +import type React from "react"; +import type { useIntl } from "react-intl"; + +type FormatMessageFn = ReturnType["formatMessage"]; + +/** + * An error that can be thrown or extended to have an i18n message been rendered in the error view. + * By default the error view will show the error message as is. For I18nError, the error view will + * translate the message/i18n key with the specified i18nParams via react-intl. 
+ */ +export class I18nError extends Error { + constructor( + public readonly i18nKey: string, + public readonly i18nParams?: Parameters[1] + ) { + super(i18nKey); + this.name = "I18nError"; + } + + translate(formatMessage: ReturnType["formatMessage"]): React.ReactNode { + return formatMessage({ id: this.i18nKey }, this.i18nParams); + } +} diff --git a/airbyte-webapp/src/components/common/ApiErrorBoundary/ApiErrorBoundary.test.tsx b/airbyte-webapp/src/core/errors/components/DefaultErrorBoundary.test.tsx similarity index 78% rename from airbyte-webapp/src/components/common/ApiErrorBoundary/ApiErrorBoundary.test.tsx rename to airbyte-webapp/src/core/errors/components/DefaultErrorBoundary.test.tsx index 3af1d0bf0a9..fd1cc9e2d3f 100644 --- a/airbyte-webapp/src/components/common/ApiErrorBoundary/ApiErrorBoundary.test.tsx +++ b/airbyte-webapp/src/core/errors/components/DefaultErrorBoundary.test.tsx @@ -5,7 +5,7 @@ import { mocked, render } from "test-utils"; import { trackError } from "core/utils/datadog"; import { AppMonitoringServiceProvider } from "hooks/services/AppMonitoringService"; -import { ApiErrorBoundary } from "./ApiErrorBoundary"; +import { DefaultErrorBoundary } from "./DefaultErrorBoundary"; const mockError = new Error("oh no!"); @@ -17,7 +17,7 @@ const ChildThatThrowsError = () => { throw mockError; }; -describe(`${ApiErrorBoundary.name}`, () => { +describe(`${DefaultErrorBoundary.name}`, () => { let originalConsoleDebug: typeof console.debug; let originalConsoleError: typeof console.error; @@ -36,9 +36,9 @@ describe(`${ApiErrorBoundary.name}`, () => { it("should render children when no error is thrown", async () => { await render( - +

    test

    -
    +
);
@@ -48,13 +48,13 @@ describe(`${ApiErrorBoundary.name}`, () => { it("should render error view when an error is thrown", async () => { await render( - + - + ); - expect(screen.getByTestId("errorView")).toBeInTheDocument(); + expect(screen.getByTestId("errorDetails")).toBeInTheDocument(); }); it("should log the error when it throws", async () => { @@ -62,12 +62,12 @@ describe(`${ApiErrorBoundary.name}`, () => { mockTrackError.mockClear(); await render( - + - + ); expect(mockTrackError).toHaveBeenCalledTimes(1); - expect(mockTrackError).toHaveBeenCalledWith(mockError, expect.anything()); + expect(mockTrackError).toHaveBeenCalledWith(mockError); }); });
diff --git a/airbyte-webapp/src/core/errors/components/DefaultErrorBoundary.tsx b/airbyte-webapp/src/core/errors/components/DefaultErrorBoundary.tsx new file mode 100644 index 00000000000..f1d8a4d214a --- /dev/null +++ b/airbyte-webapp/src/core/errors/components/DefaultErrorBoundary.tsx @@ -0,0 +1,71 @@ +import React from "react"; +import { NavigateFunction, useNavigate } from "react-router-dom"; +import { useLocation } from "react-use"; +import { LocationSensorState } from "react-use/lib/useLocation"; + +import { HttpError } from "core/api"; +import { trackError } from "core/utils/datadog"; +import { TrackErrorFn } from "hooks/services/AppMonitoringService"; + +import { ErrorDetails } from "./ErrorDetails"; + +interface ErrorBoundaryState { + error?: Error; + message?: string; +} + +interface ErrorBoundaryHookProps { + location: LocationSensorState; + navigate: NavigateFunction; + trackError: TrackErrorFn; +} + +class WrappedError extends Error { + constructor(public readonly cause: unknown) { + const message = typeof cause === "string" ? cause : "Non Error object thrown"; + super(message); + } +} + +class ErrorBoundaryComponent extends React.Component< + React.PropsWithChildren, + ErrorBoundaryState > { + state: ErrorBoundaryState = {}; + + static getDerivedStateFromError(error: unknown): ErrorBoundaryState { + return { error: error instanceof Error ? error : new WrappedError(error) }; + } + + override componentDidUpdate(prevProps: ErrorBoundaryHookProps) { + // Clear out the error in case the user navigates to another part of the app + if (this.props.location !== prevProps.location) { + this.setState({ error: undefined }); + } + } + + override componentDidCatch(error: Error) { + if (!(error instanceof HttpError)) { + // Only track non-HttpErrors here, since we already track HttpErrors in the apiCall + // method, so that we also catch them in case they aren't handled by an error boundary + // but, e.g., just result in a toast notification + this.props.trackError(error); + } + } + + override render(): React.ReactNode { + const { error } = this.state; + return error ? 
: this.props.children; + } +} + +export const DefaultErrorBoundary: React.FC = ({ children }) => { + const location = useLocation(); + const navigate = useNavigate(); + + return ( + + {children} + + ); +}; diff --git a/airbyte-webapp/src/core/errors/components/ErrorDetails.module.scss b/airbyte-webapp/src/core/errors/components/ErrorDetails.module.scss new file mode 100644 index 00000000000..ad35233b509 --- /dev/null +++ b/airbyte-webapp/src/core/errors/components/ErrorDetails.module.scss @@ -0,0 +1,33 @@ +@use "scss/variables"; + +.error { + flex: 1; + height: 100%; + display: flex; + justify-content: center; + flex-direction: column; + align-items: center; +} + +.error__octavia { + image-rendering: pixelated; + width: 29px; + height: fit-content; + flex-shrink: 0; +} + +.error__card { + width: 100%; + max-width: variables.$width-modal-md; + overflow: auto; +} + +.error__cardBody { + display: flex; + flex-direction: column; + gap: variables.$spacing-md; +} + +.error__collapsible { + margin-bottom: 0 !important; +} diff --git a/airbyte-webapp/src/core/errors/components/ErrorDetails.tsx b/airbyte-webapp/src/core/errors/components/ErrorDetails.tsx new file mode 100644 index 00000000000..89557615c1c --- /dev/null +++ b/airbyte-webapp/src/core/errors/components/ErrorDetails.tsx @@ -0,0 +1,104 @@ +import { useCallback, useMemo } from "react"; +import { FormattedMessage, useIntl } from "react-intl"; + +import Logs from "components/Logs"; +import { Box } from "components/ui/Box"; +import { Button } from "components/ui/Button"; +import { Card } from "components/ui/Card"; +import { Collapsible } from "components/ui/Collapsible"; +import { CopyButton } from "components/ui/CopyButton"; +import { FlexContainer, FlexItem } from "components/ui/Flex"; +import { Separator } from "components/ui/Separator"; +import { Text } from "components/ui/Text"; + +import { useGetAllExperiments } from "hooks/services/Experiment"; + +import styles from "./ErrorDetails.module.scss"; +import octavia from "./pixel-octavia.png"; +import { I18nError } from "../I18nError"; + +type FullStoryGlobal = (method: "getSession", options: { format: "url.now" }) => string; + +interface ErrorDetailsProps { + error: Error; +} + +const jsonReplacer = (_: string, value: unknown) => (typeof value === "function" ? `[Function ${value.name}]` : value); + +export const ErrorDetails: React.FC = ({ error }) => { + const { formatMessage } = useIntl(); + const getAllExperiments = useGetAllExperiments(); + const getErrorDetails = useCallback( + () => + JSON.stringify( + { + url: window.location.href, + airbyteVersion: process.env.REACT_APP_VERSION, + errorType: error.name, + errorConstructor: error.constructor.name, + error, + stacktrace: error.stack, + userAgent: navigator.userAgent, + // If fullstory is loaded add the current session recording link + fullStory: (window as { FS?: FullStoryGlobal }).FS?.("getSession", { format: "url.now" }), + featureFlags: getAllExperiments(), + }, + jsonReplacer, + 2 + ), + [error, getAllExperiments] + ); + + const details = useMemo(() => [`// ${error.name}`, ...JSON.stringify(error, jsonReplacer, 2).split("\n")], [error]); + + return ( +
    + + + + + + + + + + + {error instanceof I18nError ? error.translate(formatMessage) : error.message} + + + + + + + + {error.stack && ( + + + + )} + + + + + + + + + +
    + ); +}; diff --git a/airbyte-webapp/src/core/errors/components/index.ts b/airbyte-webapp/src/core/errors/components/index.ts new file mode 100644 index 00000000000..c0b3e8e4c4b --- /dev/null +++ b/airbyte-webapp/src/core/errors/components/index.ts @@ -0,0 +1 @@ +export { DefaultErrorBoundary } from "./DefaultErrorBoundary"; diff --git a/airbyte-webapp/src/core/errors/components/pixel-octavia.png b/airbyte-webapp/src/core/errors/components/pixel-octavia.png new file mode 100644 index 00000000000..7b17f8509c3 Binary files /dev/null and b/airbyte-webapp/src/core/errors/components/pixel-octavia.png differ diff --git a/airbyte-webapp/src/core/errors/index.ts b/airbyte-webapp/src/core/errors/index.ts new file mode 100644 index 00000000000..b80d763f671 --- /dev/null +++ b/airbyte-webapp/src/core/errors/index.ts @@ -0,0 +1,2 @@ +export { DefaultErrorBoundary } from "./components"; +export { I18nError } from "./I18nError"; diff --git a/airbyte-webapp/src/core/form/FormBuildError.ts b/airbyte-webapp/src/core/form/FormBuildError.ts deleted file mode 100644 index 0816e3f0c5d..00000000000 --- a/airbyte-webapp/src/core/form/FormBuildError.ts +++ /dev/null @@ -1,14 +0,0 @@ -export class FormBuildError extends Error { - __type = "form.build"; - - constructor( - public message: string, - public connectorDefinitionId?: string - ) { - super(message); - } -} - -export function isFormBuildError(error: { __type?: string }): error is FormBuildError { - return error.__type === "form.build"; -} diff --git a/airbyte-webapp/src/core/form/FormBuildError.tsx b/airbyte-webapp/src/core/form/FormBuildError.tsx new file mode 100644 index 00000000000..55bed5c887d --- /dev/null +++ b/airbyte-webapp/src/core/form/FormBuildError.tsx @@ -0,0 +1,24 @@ +import { ExternalLink } from "components/ui/Link"; + +import { I18nError } from "core/errors"; +import { links } from "core/utils/links"; + +export class FormBuildError extends I18nError { + constructor( + public message: string, + public connectorDefinitionId?: string + ) { + super(message, { + docLink: (node: React.ReactNode) => ( + + {node} + + ), + }); + this.name = "FormBuildError"; + } +} + +export function isFormBuildError(error: unknown): error is FormBuildError { + return error instanceof FormBuildError; +} diff --git a/airbyte-webapp/src/core/services/auth/AuthContext.ts b/airbyte-webapp/src/core/services/auth/AuthContext.ts index 2cf457058ad..90d41141b4b 100644 --- a/airbyte-webapp/src/core/services/auth/AuthContext.ts +++ b/airbyte-webapp/src/core/services/auth/AuthContext.ts @@ -40,7 +40,9 @@ export interface AuthContextApi { inited: boolean; emailVerified: boolean; loggedOut: boolean; + /** @deprecated use `provider` instead */ providers: string[] | null; + provider: string | null; getAccessToken?: () => Promise; hasPasswordLogin?: () => boolean; login?: AuthLogin; diff --git a/airbyte-webapp/src/core/services/auth/CommunityAuthService.tsx b/airbyte-webapp/src/core/services/auth/CommunityAuthService.tsx index 15fcc2bc02c..0a8a158c9a5 100644 --- a/airbyte-webapp/src/core/services/auth/CommunityAuthService.tsx +++ b/airbyte-webapp/src/core/services/auth/CommunityAuthService.tsx @@ -15,6 +15,7 @@ export const CommunityAuthService: React.FC> = ({ chi inited: true, emailVerified: false, providers: [], + provider: null, loggedOut: false, }} > diff --git a/airbyte-webapp/src/core/services/auth/EnterpriseAuthService.tsx b/airbyte-webapp/src/core/services/auth/EnterpriseAuthService.tsx index 631eb4889b8..87c7b9f7e76 100644 --- 
a/airbyte-webapp/src/core/services/auth/EnterpriseAuthService.tsx +++ b/airbyte-webapp/src/core/services/auth/EnterpriseAuthService.tsx @@ -128,6 +128,7 @@ const AuthServiceProvider: React.FC> = ({ children }) inited, emailVerified: false, providers: [], + provider: null, loggedOut: false, logout: keycloakAuth.signoutRedirect, getAccessToken, diff --git a/airbyte-webapp/src/core/services/features/constants.ts b/airbyte-webapp/src/core/services/features/constants.ts index f4917e0ed15..128770f9ae3 100644 --- a/airbyte-webapp/src/core/services/features/constants.ts +++ b/airbyte-webapp/src/core/services/features/constants.ts @@ -13,7 +13,9 @@ export const defaultEnterpriseFeatures = [ FeatureItem.AllowAllRBACRoles, FeatureItem.APITokenManagement, FeatureItem.ConnectionHistoryGraphs, + FeatureItem.DisplayOrganizationUsers, FeatureItem.EnterpriseBranding, + FeatureItem.IndicateGuestUsers, FeatureItem.MultiWorkspaceUI, FeatureItem.RBAC, ]; diff --git a/airbyte-webapp/src/core/services/features/types.tsx b/airbyte-webapp/src/core/services/features/types.tsx index 068f92b4c6f..44235c6ed20 100644 --- a/airbyte-webapp/src/core/services/features/types.tsx +++ b/airbyte-webapp/src/core/services/features/types.tsx @@ -17,9 +17,11 @@ export enum FeatureItem { Billing = "BILLING", ConnectionHistoryGraphs = "CONNECTION_HISTORY_GRAPHS", ConnectorBreakingChangeDeadlines = "CONNECTOR_BREAKING_CHANGE_DEADLINES", + DisplayOrganizationUsers = "DISPLAY_ORGANIZATION_USERS", EmailNotifications = "EMAIL_NOTIFICATIONS", EnterpriseBranding = "ENTERPRISE_BRANDING", ExternalInvitations = "EXTERNAL_INVITATIONS", + IndicateGuestUsers = "INDICATE_GUEST_USERS", KeycloakAuthentication = "KEYCLOAK_AUTHENTICATION", MultiWorkspaceUI = "MULTI_WORKSPACE_UI", RBAC = "RBAC", diff --git a/airbyte-webapp/src/core/utils/dataPrivacy.ts b/airbyte-webapp/src/core/utils/dataPrivacy.ts index 53f7424bbab..d6642db28b1 100644 --- a/airbyte-webapp/src/core/utils/dataPrivacy.ts +++ b/airbyte-webapp/src/core/utils/dataPrivacy.ts @@ -67,15 +67,15 @@ export const loadOsano = (): void => { ); document.head.appendChild(style); - // Create and append the script tag to load osano + // Create and append the script tag to load osano const script = document.createElement("script"); script.src = `https://cmp.osano.com/${process.env.REACT_APP_OSANO}/osano.js`; script.addEventListener("load", () => { window.Osano?.cm.addEventListener("osano-cm-script-blocked", (item) => { - console.debug(`Script blocked by Osano: ${item}`); + console.debug(`🛡️ [Osano] Script blocked: ${item}`); }); window.Osano?.cm.addEventListener("osano-cm-cookie-blocked", (item) => { - console.debug(`Cookie blocked by Osano: ${item}`); + console.debug(`️🛡️ [Osano] Cookie blocked: ${item}`); }); }); document.head.appendChild(script); diff --git a/airbyte-webapp/src/core/utils/errorStatusMessage.tsx b/airbyte-webapp/src/core/utils/errorStatusMessage.tsx index bb6cc8682be..ddd850577d9 100644 --- a/airbyte-webapp/src/core/utils/errorStatusMessage.tsx +++ b/airbyte-webapp/src/core/utils/errorStatusMessage.tsx @@ -1,5 +1,9 @@ +import type { useIntl } from "react-intl"; + import { FormattedMessage } from "react-intl"; +import { FailureOrigin, FailureReason } from "core/api/types/AirbyteClient"; + export class FormError extends Error { status?: number; } @@ -19,3 +23,39 @@ export const generateMessageFromError = (error: FormError): JSX.Element | string ); }; + +interface FailureUiDetails { + type: "error" | "warning"; + typeLabel: string; + origin: FailureReason["failureOrigin"]; + message: 
string; + secondaryMessage?: string; +} +export const failureUiDetailsFromReason = < + T extends FailureReason | undefined | null, + RetVal = T extends FailureReason ? FailureUiDetails : null, +>( + reason: T, + formatMessage: ReturnType["formatMessage"] +): RetVal => { + if (!reason) { + return null as RetVal; + } + + const isConfigError = reason.failureType === "config_error"; + const isSourceError = reason.failureOrigin === FailureOrigin.source; + const isDestinationError = reason.failureOrigin === FailureOrigin.destination; + + const origin = reason.failureOrigin; + const type = isConfigError && (isSourceError || isDestinationError) ? "error" : "warning"; + const typeLabel = formatMessage( + { id: type === "error" ? "failureMessage.type.error" : "failureMessage.type.warning" }, + { origin } + ); + const message = reason.externalMessage ?? formatMessage({ id: "errorView.unknown" }); + const secondaryMessage = + type === "error" && reason.externalMessage !== reason.internalMessage ? undefined : reason.internalMessage; + + const result: FailureUiDetails = { type, typeLabel, origin, message, secondaryMessage }; + return result as RetVal; +}; diff --git a/airbyte-webapp/src/core/utils/links.ts b/airbyte-webapp/src/core/utils/links.ts index d3d87c15b53..4713a55b533 100644 --- a/airbyte-webapp/src/core/utils/links.ts +++ b/airbyte-webapp/src/core/utils/links.ts @@ -52,6 +52,7 @@ export const links = { usingCustomConnectors: `${BASE_DOCS_LINK}/operator-guides/using-custom-connectors/`, gettingSupport: `${BASE_DOCS_LINK}/community/getting-support`, autoRechargeEnrollment: `${BASE_DOCS_LINK}/cloud/managing-airbyte-cloud/manage-credits#automatic-reload-of-credits-beta`, + connectorSpecificationDocs: `${BASE_DOCS_LINK}/connector-development/connector-specification-reference/#airbyte-modifications-to-jsonschema`, } as const; export type OutboundLinks = typeof links; diff --git a/airbyte-webapp/src/hooks/services/ConnectionEdit/ConnectionEditService.tsx b/airbyte-webapp/src/hooks/services/ConnectionEdit/ConnectionEditService.tsx index 845ab2a529a..3e419bdf6d2 100644 --- a/airbyte-webapp/src/hooks/services/ConnectionEdit/ConnectionEditService.tsx +++ b/airbyte-webapp/src/hooks/services/ConnectionEdit/ConnectionEditService.tsx @@ -13,6 +13,7 @@ import { } from "core/api/types/AirbyteClient"; import { useIntent } from "core/utils/rbac"; +import { useAnalyticsTrackFunctions } from "./useAnalyticsTrackFunctions"; import { ConnectionFormServiceProvider } from "../ConnectionForm/ConnectionFormService"; import { useNotificationService } from "../Notification"; @@ -33,6 +34,7 @@ interface ConnectionEditHook { schemaRefreshing: boolean; schemaHasBeenRefreshed: boolean; updateConnection: (connectionUpdates: WebBackendConnectionUpdate) => Promise; + updateConnectionStatus: (status: ConnectionStatus) => Promise; refreshSchema: () => Promise; discardRefreshedSchema: () => void; } @@ -41,6 +43,7 @@ const getConnectionCatalog = (connection: WebBackendConnectionRead): ConnectionC pick(connection, ["syncCatalog", "catalogId"]); const useConnectionEdit = ({ connectionId }: ConnectionEditProps): ConnectionEditHook => { + const { trackConnectionStatusUpdate } = useAnalyticsTrackFunctions(); const { formatMessage } = useIntl(); const { registerNotification, unregisterNotificationById } = useNotificationService(); const getConnectionQuery = useGetConnectionQuery(); @@ -59,6 +62,18 @@ const useConnectionEdit = ({ connectionId }: ConnectionEditProps): ConnectionEdi const { mutateAsync: updateConnectionAction, isLoading: 
connectionUpdating } = useUpdateConnection(); + const updateConnectionStatus = useCallback( + async (status: ConnectionStatus) => { + const updatedConnection = await updateConnectionAction({ + connectionId, + status, + }); + setConnection(updatedConnection); + trackConnectionStatusUpdate(updatedConnection); + }, + [connectionId, updateConnectionAction, trackConnectionStatusUpdate] + ); + const updateConnection = useCallback( async (connectionUpdates: WebBackendConnectionUpdate) => { const updatedConnection = await updateConnectionAction(connectionUpdates); @@ -146,6 +161,7 @@ const useConnectionEdit = ({ connectionId }: ConnectionEditProps): ConnectionEdi schemaRefreshing, schemaHasBeenRefreshed, updateConnection, + updateConnectionStatus, refreshSchema, discardRefreshedSchema, }; diff --git a/airbyte-webapp/src/hooks/services/ConnectionEdit/useAnalyticsTrackFunctions.tsx b/airbyte-webapp/src/hooks/services/ConnectionEdit/useAnalyticsTrackFunctions.tsx new file mode 100644 index 00000000000..7e32a9135f0 --- /dev/null +++ b/airbyte-webapp/src/hooks/services/ConnectionEdit/useAnalyticsTrackFunctions.tsx @@ -0,0 +1,22 @@ +// eslint-disable-next-line check-file/filename-blocklist +import { ConnectionStatus, WebBackendConnectionRead } from "core/api/types/AirbyteClient"; +import { Action, getFrequencyFromScheduleData, Namespace, useAnalyticsService } from "core/services/analytics"; + +export const useAnalyticsTrackFunctions = () => { + const analyticsService = useAnalyticsService(); + + const trackConnectionStatusUpdate = (updatedConnection: WebBackendConnectionRead) => { + const trackableAction = updatedConnection.status === ConnectionStatus.active ? Action.REENABLE : Action.DISABLE; + + analyticsService.track(Namespace.CONNECTION, trackableAction, { + actionDescription: `${trackableAction} connection`, + connector_source: updatedConnection.source?.sourceName, + connector_source_definition_id: updatedConnection.source?.sourceDefinitionId, + connector_destination: updatedConnection.destination?.destinationName, + connector_destination_definition_id: updatedConnection.destination?.destinationDefinitionId, + frequency: getFrequencyFromScheduleData(updatedConnection.scheduleData), + }); + }; + + return { trackConnectionStatusUpdate }; +}; diff --git a/airbyte-webapp/src/hooks/services/Experiment/ExperimentService.test.tsx b/airbyte-webapp/src/hooks/services/Experiment/ExperimentService.test.tsx index 21bc5f022d4..70b6e391ff2 100644 --- a/airbyte-webapp/src/hooks/services/Experiment/ExperimentService.test.tsx +++ b/airbyte-webapp/src/hooks/services/Experiment/ExperimentService.test.tsx @@ -30,6 +30,7 @@ describe("ExperimentService", () => { removeContext, getExperiment, getExperimentChanges$: () => EMPTY, + getAllExperiments: () => ({}), }} > {children} @@ -48,6 +49,7 @@ describe("ExperimentService", () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any getExperiment: () => undefined as any, getExperimentChanges$: () => EMPTY, + getAllExperiments: () => ({}), }} > {children} @@ -72,6 +74,7 @@ describe("ExperimentService", () => { getExperiment, // eslint-disable-next-line @typescript-eslint/no-explicit-any getExperimentChanges$: () => subject.asObservable() as any, + getAllExperiments: () => ({}), }} > {children} diff --git a/airbyte-webapp/src/hooks/services/Experiment/ExperimentService.tsx b/airbyte-webapp/src/hooks/services/Experiment/ExperimentService.tsx index 60a027747d0..d1ccc6619a7 100644 --- a/airbyte-webapp/src/hooks/services/Experiment/ExperimentService.tsx +++ 
diff --git a/airbyte-webapp/src/hooks/services/Experiment/ExperimentService.tsx b/airbyte-webapp/src/hooks/services/Experiment/ExperimentService.tsx index 60a027747d0..d1ccc6619a7 100644 --- a/airbyte-webapp/src/hooks/services/Experiment/ExperimentService.tsx +++ b/airbyte-webapp/src/hooks/services/Experiment/ExperimentService.tsx @@ -1,6 +1,6 @@ import type { Experiments } from "./experiments"; -import { createContext, useContext, useEffect, useMemo } from "react"; +import { createContext, useCallback, useContext, useEffect, useMemo } from "react"; import { useObservable } from "react-use"; import { EMPTY, Observable } from "rxjs"; @@ -31,6 +31,7 @@ export interface ExperimentService { removeContext: (kind: Exclude<ContextKind, "user">) => void; getExperiment<K extends keyof Experiments>(key: K, defaultValue: Experiments[K]): Experiments[K]; getExperimentChanges$<K extends keyof Experiments>(key: K): Observable<Experiments[K]>; + getAllExperiments(): Partial<Experiments>; } const debugContext = isDevelopment() ? (msg: string) => console.debug(`%c${msg}`, "color: SlateBlue") : () => undefined; @@ -88,4 +89,9 @@ const isCypress = window.hasOwnProperty("Cypress"); export const useExperiment = !isCypress && process.env.NODE_ENV === "development" ? useExperimentWithOverwrites : useExperimentHook; +export const useGetAllExperiments = () => { + const experimentService = useContext(experimentContext); + return useCallback(() => experimentService?.getAllExperiments() ?? {}, [experimentService]); +}; + export const ExperimentProvider = experimentContext.Provider; diff --git a/airbyte-webapp/src/hooks/services/Experiment/experiments.ts b/airbyte-webapp/src/hooks/services/Experiment/experiments.ts index c315c339727..2549d1e57d0 100644 --- a/airbyte-webapp/src/hooks/services/Experiment/experiments.ts +++ b/airbyte-webapp/src/hooks/services/Experiment/experiments.ts @@ -7,6 +7,7 @@ */ export interface Experiments { + "authPage.keycloak": boolean; "authPage.rightSideUrl": string | undefined; "authPage.signup.hideCompanyName": boolean; "authPage.signup.hideName": boolean; @@ -26,7 +27,6 @@ "connector.suggestedDestinationConnectors": string; "platform.auto-backfill-on-new-columns": boolean; "settings.breakingChangeNotifications": boolean; - "settings.invitationSystemv2": boolean; "settings.token-management-ui": boolean; "settings.showAdvancedSettings": boolean; "upcomingFeaturesPage.url": string; diff --git a/airbyte-webapp/src/hooks/services/Experiment/index.ts b/airbyte-webapp/src/hooks/services/Experiment/index.ts index 3accba23cca..8f78311552f 100644 --- a/airbyte-webapp/src/hooks/services/Experiment/index.ts +++ b/airbyte-webapp/src/hooks/services/Experiment/index.ts @@ -1,2 +1,2 @@ -export { useExperiment, ExperimentProvider, useExperimentContext } from "./ExperimentService"; +export { useExperiment, ExperimentProvider, useExperimentContext, useGetAllExperiments } from "./ExperimentService"; export type { ExperimentService, ContextKind } from "./ExperimentService"; diff --git a/airbyte-webapp/src/hooks/services/useConnectorAuth.tsx b/airbyte-webapp/src/hooks/services/useConnectorAuth.tsx index 6ff3d93e878..5bbcf78e4c0 100644 --- a/airbyte-webapp/src/hooks/services/useConnectorAuth.tsx +++ b/airbyte-webapp/src/hooks/services/useConnectorAuth.tsx @@ -4,7 +4,7 @@ import { FormattedMessage, useIntl } from "react-intl"; import { useAsyncFn, useEffectOnce, useEvent, useUnmount } from "react-use"; import { v4 as uuid } from "uuid"; -import { useCompleteOAuth, useConsentUrls, isCommonRequestError } from "core/api"; +import { HttpError, useCompleteOAuth, useConsentUrls } from "core/api"; import { CompleteOAuthResponse, CompleteOAuthResponseAuthPayload, @@ -131,7 +131,7 @@ return { consentUrl: response.consentUrl, payload }; } catch (e) { // If this API returns a 404 the OAuth credentials have not been
added to the database. - if (isCommonRequestError(e) && e.status === 404) { + if (e instanceof HttpError && e.status === 404) { if (process.env.NODE_ENV === "development") { notificationService.registerNotification({ id: "oauthConnector.credentialsMissing", diff --git a/airbyte-webapp/src/locales/en.json b/airbyte-webapp/src/locales/en.json index a8e40fa5d34..86092ee0419 100644 --- a/airbyte-webapp/src/locales/en.json +++ b/airbyte-webapp/src/locales/en.json @@ -148,6 +148,7 @@ "form.resetData": "Reset your data", "form.resetData.description": "Resetting your data will delete all the data for this connection in your destination and the next sync will start from scratch.", "form.resetData.successfulStart": "Your reset has started.", + "form.clearData.successfulStart": "Data clearing started.", "form.resetDataText": "Resetting your data will delete all the data for this connection in your destination and the next sync will start from scratch. Are you sure you want to do this?", "form.dockerError": "Could not find docker image", "form.edit": "Edit", @@ -210,7 +211,6 @@ "connectionForm.questionnaire.incrementOrRefresh.refresh.title": "Append Full Snapshots", "connectionForm.questionnaire.incrementOrRefresh.refresh.subtitle": "Append a full copy of your source data every sync.", "connectionForm.questionnaire.incrementOrRefresh.refresh.warning": "This option potentially increases costs and sync times", - "connectionForm.questionnaire.result": "We've chosen a sync mode for your streams.", "connectionForm.namespaceDefinition.title": "Destination Namespace", "connectionForm.namespaceDefinition.subtitle": "Where data will be synced in the destination", "connectionForm.namespaceDefinition.subtitleNext": "The location where the replicated data will be stored in the destination", @@ -227,6 +227,7 @@ "connectionForm.selectStreams": "Select streams", "connectionForm.selectStreams.readonly": "Selected streams", "connectionForm.selectSyncMode": "Select sync mode", + "connectionForm.selectSyncModeDescription": "Tell us how you want your data moved and we'll select the right sync mode for your streams.", "connectionForm.destinationNew": "Set up a new destination", "connectionForm.destinationNewDescription": "Configure a new destination from Airbyte's catalog of available connectors", "connectionForm.sourceFormat": "Mirror source structure", @@ -341,9 +342,9 @@ "connectorForm.revocation.succeeded": "Your {connector} integration has been disconnected.", "connectorForm.reauthenticate": "Re-authenticate", "connectorForm.expandForm": "Expand this form to continue setting up your connector", - "connectorForm.error.oneOfWithNonObjects": "Spec uses oneOf without using object types for all conditions", - "connectorForm.error.oneOfWithoutConst": "Spec uses oneOf without a shared const property", - "connectorForm.error.topLevelNonObject": "Top level configuration has to be an object", + "connectorForm.error.oneOfWithNonObjects": "Spec uses oneOf without using object types for all conditions. Make sure your connectors are up to date. See connector docs for more information.", + "connectorForm.error.oneOfWithoutConst": "Spec uses oneOf without a shared const property. Make sure your connectors are up to date. See connector docs for more information.", + "connectorForm.error.topLevelNonObject": "Top level configuration has to be an object. Make sure your connectors are up to date. 
See connector docs for more information.", "connectorForm.allowlistIp.message": "Please allow inbound traffic from the following Airbyte IPs in your firewall whether connecting directly or via SSH Tunnel (more info):", "connectorForm.allowlistIp.addressesLabel": "Airbyte IP addresses", @@ -469,6 +470,7 @@ "jobs.failure.originLabel": "Failure origin:", "jobs.failure.typeLabel": "Failure type:", "jobs.failure.seeMore": "See more", + "jobs.failure.seeLess": "See less", "jobs.failure.copyText": "Copy text", "jobs.failure.copyText.success": "Text copied to clipboard", "jobs.noMetadataAvailable": "No job metadata available", @@ -547,8 +549,12 @@ "connection.onboarding.demoInstance": "or play around in our demo instance.", "connection.resetModalTitle": "Stream configuration changed", "connection.streamResetHint": "Due to changes in the stream configuration, we recommend a data reset. A reset will delete data in the destination of the affected streams and then re-sync that data. Skipping the reset is discouraged and might lead to unexpected behavior.", + "connection.clearDataHint": "Due to changes in the stream configuration, we recommend clearing the data from your destination. Clearing data will delete data in the destination of the affected streams.", + "connection.clearDataHint.emphasized": "You will need to trigger a re-sync after this operation to bring your data up to date. Skipping these steps is discouraged and might lead to unexpected behavior.", "connection.saveWithReset": "Reset affected streams (recommended)", "connection.saveWithFullReset": "Reset all streams (recommended)", + "connection.saveWithDataClear": "Clear data from affected streams (recommended)", + "connection.saveWithFullDataClear": "Clear all data (recommended)", "connection.save": "Save connection", "connection.title": "Connection", "connection.fromTo": "{source} → {destination}", @@ -612,6 +618,7 @@ "connection.refreshSchema": "Refresh schema", "connection.replication": "Replication", "connection.schema": "Schema", + "connection.syncStatusCard.title": "Sync Status", "connection.streams": "Streams", "connection.transfer": "Transfer", "connection.linkCopied": "Link copied!", @@ -666,9 +673,10 @@ "connection.stream.status.error": "Error", "connection.stream.status.late": "Late", "connection.stream.status.pending": "Pending", - "connection.stream.status.title": "Enabled streams", + "connection.stream.status.title": "Active Streams", "connection.stream.status.seeLogs": "See logs", - "connection.stream.status.gotoSettings": "Go to settings", + "connection.stream.status.checkSourceSettings": "Check source", + "connection.stream.status.checkDestinationSettings": "Check destination", "connection.stream.status.genericError": "There was an error with your {syncType}.
See logs for details.", "connection.stream.status.table.status": "Status", "connection.stream.status.table.streamName": "Stream name", @@ -678,6 +686,7 @@ "connection.stream.status.table.emptyTable.callToAction": "Re-enable", "connection.stream.actions.resetThisStream": "Reset this stream", "connection.stream.actions.clearData": "Clear data", + "connection.actions.clearData": "Clearing your data will delete all data in your destination.", "connection.stream.actions.clearData.confirm.title": "Are you sure you want to clear data from the {streamName} stream?", "connection.actions.clearData.confirm.title": "Are you sure you want to clear data from this connection?", "connection.actions.clearData.confirm.text": "Clearing data for this connection will delete all data in your destination for this connection.", @@ -923,11 +932,9 @@ "settings.accessManagement.removePermissions": "Are you sure you want to remove {user} from {resource}?", "settings.accessManagement.youHint": "You", "settings.accessManagement.members": "Members", - "settings.accessManagement.permissionCreate.success": "User added successfully!", - "settings.accessManagement.permissionCreate.error": "There was an error adding this user.", - "settings.accessManagement.permissionUpdate.success": "Permission updated successfully!", + "settings.accessManagement.permissionUpdate.success": "Permission updated successfully", "settings.accessManagement.permissionUpdate.error": "There was an error updating this permission.", - "settings.accessManagement.permissionDelete.success": "User removed successfully!", + "settings.accessManagement.permissionDelete.success": "User removed successfully", "settings.accessManagement.permissionDelete.error": "There was an error removing this user.", "settings.accessManagement.guestUser": "This user is not a member of the organization and is a guest of this workspace.", "settings.accessManagement.cannotDemoteOrgAdmin": "You cannot demote an organization admin within a workspace.", @@ -1055,12 +1062,20 @@ "credits.noBillingAccount": "Your account is excluded from billing requirements and credits are not required.", "docs.notFoundError": "We were not able to receive docs. Please click the link above to open docs on our website", - "errorView.notFound": "Resource not found.", - "errorView.notAuthorized": "You don’t have permission to access this page.", + "errors.title": "Sorry, something went wrong.", + "errors.reload": "Reload", + "errors.copyDetails": "Copy details", + "errors.http.badRequest": "There was an error in the request sent to the API. If possible, change your inputs and try again. (HTTP 400)", + "errors.http.unauthorized": "It seems you're not authorized. Please try logging in again. (HTTP 401)", + "errors.http.forbidden": "You don't have the right permissions to take this action. (HTTP 403)", + "errors.http.notFound": "We can't seem to find what you're looking for. (HTTP 404)", + "errors.http.gone": "It seems what you're looking for no longer exists. (HTTP 410)", + "errors.http.teapot": "I'm a teapot 🫖 (HTTP 418)", + "errors.http.internalServerError": "An unexpected error occurred. Please report this if the issue persists. (HTTP 500)", + "errors.http.badGateway": "Airbyte is temporarily unavailable. Please try again. (HTTP 502)", + "errors.http.serviceUnavailable": "Airbyte is temporarily unavailable. Please try again. (HTTP 503)", + "errors.http.default": "An unknown error occurred. (HTTP {status})", "errorView.title": "Oops! 
Something went wrong…", - "errorView.docLink": "Check out the documentation", - "errorView.upgradeConnectors": "Make sure your connectors are up to date", - "errorView.retry": "Retry", "errorView.unknown": "Unknown", "errorView.unknownError": "Unknown error occurred", @@ -1084,6 +1099,8 @@ "ui.loading": "Loading …", "ui.markdown.copyCode": "Copy code", "ui.markdown.copied": "Copied", + "ui.switch.enabled": "Enabled", + "ui.switch.disabled": "Disabled", "airbyte.datatype.string": "String", "airbyte.datatype.binary_data": "Binary Data", @@ -1191,12 +1208,12 @@ "connectorBuilder.inputModal.enum": "Allowed values", "connectorBuilder.inputModal.enumTooltip": "The user will only be able to choose from one of these values. If none are provided the user will be able to enter any value", "connectorBuilder.inputModal.unsupportedInput": "Detailed configuration for this property type is disabled, switch to YAML view to edit", - "connectorBuilder.inputModal.inferredInputMessage": "Detailed configuration for this user input is disabled as it is tied to the configuration", + "connectorBuilder.inputModal.lockedInput": "Some configuration options are not shown, since this input is auto-generated and linked to the current configuration of your connector.", "connectorBuilder.key": "Key", "connectorBuilder.value": "Value", "connectorBuilder.addKeyValue": "Add", "connectorBuilder.saveInputsForm": "Save", - "connectorBuilder.inputsFormWarning": "Testing values are not saved with the connector when publishing or releasing. They are required in order to test your streams, and will be asked to the end user in order to setup this connector", + "connectorBuilder.inputsFormMessage": "Testing values are not saved with the connector when publishing or releasing. They are required in order to test your streams, and the end user will be asked to provide them when setting up this connector.", "connectorBuilder.inputsError": "User inputs form could not be rendered: {error}. Make sure the spec in the YAML conforms to the specified standard.", "connectorBuilder.inputsErrorDocumentation": "Check out the documentation", "connectorBuilder.goToYaml": "Switch to YAML view", @@ -1484,7 +1501,6 @@ "jobHistory.logs.logDownloadPending": "Downloading logs for job {jobId}…", "jobHistory.logs.logDownloadFailed": "Failed to download logs for job {jobId}.", "jobHistory.logs.searchPlaceholder": "Search logs", - "jobHistory.logs.failureReason": "Failure reason: {reason}", "jobHistory.logs.logOrigin.all": "All logs", "jobHistory.logs.logOrigin.source": "source", "jobHistory.logs.logOrigin.destination": "destination", @@ -1594,6 +1610,7 @@ "confirmResetPassword.link.invalid": "The password reset link is invalid. Please double check the reset email.", "confirmResetPassword.password.weak": "Your password does not meet the minimum length", + "connection.header.frequency.tooltip": "Click to edit in Settings", "connection.dbtCloudJobs.cardTitle": "Transformations", "connection.dbtCloudJobs.addJob": "Add transformation", "connection.dbtCloudJobs.dbtError": "There was an error communicating with dbt Cloud: {displayMessage}", @@ -1723,6 +1740,7 @@ "userInvitations.create.modal.addNew": "Add new member", "userInvitations.create.modal.search": "Type to add a new member", "userInvitations.create.success": "Invitation created successfully", + "userInvitations.create.success.directlyAdded": "User added successfully", "userInvitations.create.error": "There was an error inviting this user.
Please try again.", "userInvitations.create.error.duplicate": "There is already a pending invitation for this email.", "userInvitations.create.modal.emptyList": "No matching users found", @@ -1738,5 +1756,9 @@ "userInvitations.cancel.success": "User invitation successfully cancelled", "userInvitations.cancel.error": "There was an error cancelling this invitation. Please try again.", "userInvitations.cancel.confirm.text": "Are you sure you want to cancel this invitation for {user} to {resource}?", - "userInvitations.cancel.confirm.title": "Cancel invitation" + "userInvitations.cancel.confirm.title": "Cancel invitation", + + "failureMessage.type.error": "Failure in {origin}", + "failureMessage.type.warning": "Warning from {origin}", + "failureMessage.label": "{type} {message}" } diff --git a/airbyte-webapp/src/packages/cloud/App.tsx b/airbyte-webapp/src/packages/cloud/App.tsx index 6bbbdbc38fb..d6be7011032 100644 --- a/airbyte-webapp/src/packages/cloud/App.tsx +++ b/airbyte-webapp/src/packages/cloud/App.tsx @@ -2,12 +2,12 @@ import React, { Suspense } from "react"; import { HelmetProvider } from "react-helmet-async"; import { createBrowserRouter, RouterProvider } from "react-router-dom"; -import { ApiErrorBoundary } from "components/common/ApiErrorBoundary"; import { DeployPreviewMessage } from "components/DeployPreviewMessage"; import { DevToolsToggle } from "components/DevToolsToggle"; import LoadingPage from "components/LoadingPage"; import { QueryProvider } from "core/api"; +import { DefaultErrorBoundary } from "core/errors"; import { AnalyticsProvider } from "core/services/analytics"; import { defaultCloudFeatures, FeatureService } from "core/services/features"; import { I18nProvider } from "core/services/i18n"; @@ -51,7 +51,7 @@ const App: React.FC = () => { }> - + @@ -60,7 +60,7 @@ const App: React.FC = () => { - + diff --git a/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx b/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx index 1e6cf10551b..6d049ec4561 100644 --- a/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx +++ b/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx @@ -2,12 +2,12 @@ import React, { PropsWithChildren, Suspense, useMemo } from "react"; import { createSearchParams, Navigate, Route, Routes, useLocation } from "react-router-dom"; import { useEffectOnce } from "react-use"; -import { ApiErrorBoundary } from "components/common/ApiErrorBoundary"; import LoadingPage from "components/LoadingPage"; import { useCurrentWorkspaceId } from "area/workspace/utils"; import { useCurrentOrganizationInfo, useCurrentWorkspace, useInvalidateAllWorkspaceScopeOnChange } from "core/api"; import { usePrefetchCloudWorkspaceData } from "core/api/cloud"; +import { DefaultErrorBoundary } from "core/errors"; import { useAnalyticsIdentifyUser, useAnalyticsRegisterValues } from "core/services/analytics/useAnalyticsService"; import { useAuthService } from "core/services/auth"; import { FeatureItem, useFeature } from "core/services/features"; @@ -87,7 +87,7 @@ const MainRoutes: React.FC = () => { const supportsDataResidency = useFeature(FeatureItem.AllowChangeDataGeographies); return ( - + } /> @@ -134,7 +134,7 @@ const MainRoutes: React.FC = () => { } /> } /> - + ); }; @@ -172,7 +172,7 @@ const CloudWorkspaceDataPrefetcher: React.FC> = ({ ch }; export const Routing: React.FC = () => { - const { user, inited, providers, loggedOut, requirePasswordReset } = useAuthService(); + const { user, inited, providers, provider, loggedOut, requirePasswordReset } = useAuthService(); const workspaceId = 
useCurrentWorkspaceId(); const { pathname: originalPathname, search, hash } = useLocation(); @@ -203,9 +203,15 @@ export const Routing: React.FC = () => { const userTraits = useMemo( () => user - ? { providers, email: user.email, isCorporate: isCorporateEmail(user.email), currentWorkspaceId: workspaceId } + ? { + providers, + provider, + email: user.email, + isCorporate: isCorporateEmail(user.email), + currentWorkspaceId: workspaceId, + } : {}, - [providers, user, workspaceId] + [provider, providers, user, workspaceId] ); useEffectOnce(() => { diff --git a/airbyte-webapp/src/packages/cloud/services/auth/CloudAuthService.tsx b/airbyte-webapp/src/packages/cloud/services/auth/CloudAuthService.tsx index a917af07521..6668a01c878 100644 --- a/airbyte-webapp/src/packages/cloud/services/auth/CloudAuthService.tsx +++ b/airbyte-webapp/src/packages/cloud/services/auth/CloudAuthService.tsx @@ -181,6 +181,7 @@ export const CloudAuthService: React.FC = ({ children }) => { }, hasPasswordLogin: () => !!firebaseUser.providerData.filter(({ providerId }) => providerId === "password"), providers: firebaseUser.providerData.map(({ providerId }) => providerId), + provider: null, sendEmailVerification: async () => { if (!firebaseUser) { console.error("sendEmailVerifiedLink should be used within auth flow"); @@ -202,9 +203,24 @@ export const CloudAuthService: React.FC = ({ children }) => { emailVerified: keycloakAuth.keycloakUser?.profile.email_verified ?? false, email: keycloakAuth.keycloakUser?.profile.email ?? null, getAccessToken: () => Promise.resolve(keycloakAuth.accessTokenRef?.current), + updateName: async (name: string) => { + const user = keycloakAuth.airbyteUser; + if (!user) { + throw new Error("Cannot change name, airbyteUser is null"); + } + await updateAirbyteUser({ + userUpdate: { userId: user.userId, name }, + getAccessToken: async () => keycloakAuth.accessTokenRef?.current ?? "", + }).then(() => { + keycloakAuth.updateAirbyteUser({ ...user, name }); + }); + }, logout, loggedOut: false, providers: null, + provider: keycloakAuth.isSso + ? "sso" + : (keycloakAuth.keycloakUser?.profile.identity_provider as string | undefined) ?? "none", }; } // The context value for an unauthenticated user @@ -216,6 +232,7 @@ export const CloudAuthService: React.FC = ({ children }) => { emailVerified: false, loggedOut: true, providers: null, + provider: null, login: async ({ email, password }: { email: string; password: string }) => { await signInWithEmailAndPassword(firebaseAuth, email, password) .then(() => { diff --git a/airbyte-webapp/src/packages/cloud/services/auth/KeycloakService/KeycloakService.tsx b/airbyte-webapp/src/packages/cloud/services/auth/KeycloakService/KeycloakService.tsx index 58c29d1ce0c..881ca836b72 100644 --- a/airbyte-webapp/src/packages/cloud/services/auth/KeycloakService/KeycloakService.tsx +++ b/airbyte-webapp/src/packages/cloud/services/auth/KeycloakService/KeycloakService.tsx @@ -1,3 +1,5 @@ +import { useQueryClient } from "@tanstack/react-query"; +import { BroadcastChannel } from "broadcast-channel"; import Keycloak from "keycloak-js"; import isEqual from "lodash/isEqual"; import { User, WebStorageStateStore, UserManager } from "oidc-client-ts"; @@ -30,6 +32,7 @@ export type KeycloakServiceContext = { changeRealmAndRedirectToSignin: (realm: string) => Promise; // The access token is stored in a ref so we don't cause a re-render each time it changes. Instead, we can use the current ref value when we call the API. 
accessTokenRef: MutableRefObject<string | null>; + updateAirbyteUser: (airbyteUser: UserRead) => void; redirectToSignInWithGoogle: () => Promise<void>; redirectToSignInWithGithub: () => Promise<void>; redirectToSignInWithPassword: () => Promise<void>; @@ -54,6 +57,7 @@ interface KeycloakAuthState { error: Error | null; didInitialize: boolean; isAuthenticated: boolean; + isSso: boolean | null; } const keycloakAuthStateInitialState: KeycloakAuthState = { @@ -62,6 +66,7 @@ error: null, didInitialize: false, isAuthenticated: false, + isSso: null, }; type KeycloakAuthStateAction = @@ -76,8 +81,14 @@ | { type: "error"; error: Error; + } + | { + type: "userUpdated"; + airbyteUser: UserRead; }; +type BroadcastEvent = Extract<KeycloakAuthStateAction, { type: "userLoaded" | "userUnloaded" }>; + const keycloakAuthStateReducer = (state: KeycloakAuthState, action: KeycloakAuthStateAction): KeycloakAuthState => { switch (action.type) { case "userLoaded": @@ -87,8 +98,15 @@ airbyteUser: action.airbyteUser, isAuthenticated: true, didInitialize: true, + // We are using an SSO login if we're not in the AIRBYTE_CLOUD_REALM, i.e. the issuer URL does not end with that realm + isSso: !action.keycloakUser.profile.iss.endsWith(AIRBYTE_CLOUD_REALM), error: null, }; + case "userUpdated": + return { + ...state, + airbyteUser: action.airbyteUser, + }; case "userUnloaded": return { ...state, @@ -96,6 +114,7 @@ airbyteUser: null, isAuthenticated: false, didInitialize: true, + isSso: null, error: null, }; case "error": @@ -107,8 +126,11 @@ } }; +const broadcastChannel = new BroadcastChannel<BroadcastEvent>("keycloak-state-sync"); + export const KeycloakService: React.FC = ({ children }) => { const userSigninInitialized = useRef(false); + const queryClient = useQueryClient(); const [userManager] = useState(initializeUserManager); const [authState, dispatch] = useReducer(keycloakAuthStateReducer, keycloakAuthStateInitialState); const { mutateAsync: getAirbyteUser } = useGetOrCreateUser(); @@ -116,6 +138,23 @@ // Allows us to get the access token as a callback, instead of re-rendering every time a new access token arrives const keycloakAccessTokenRef = useRef<string | null>(null); + useEffect(() => { + broadcastChannel.onmessage = (event) => { + if (event.type === "userUnloaded") { + console.debug("🔑 Received userUnloaded event from other tab."); + dispatch({ type: "userUnloaded" }); + // Need to clear all queries from cache.
In the tab that triggered the logout this is + // handled inside CloudAuthService.logout + queryClient.removeQueries(); + } else if (event.type === "userLoaded") { + console.debug("🔑 Received userLoaded event from other tab."); + keycloakAccessTokenRef.current = event.keycloakUser.access_token; + dispatch({ type: "userLoaded", keycloakUser: event.keycloakUser, airbyteUser: event.airbyteUser }); + } + }; + }, [queryClient]); + // Initialization of the current user useEffect(() => { if (!userManager || userSigninInitialized.current) { @@ -168,12 +207,16 @@ export const KeycloakService: React.FC = ({ children }) => { // Only if actual user values (not just access_token) have changed, do we need to update the state and cause a re-render if (!usersAreSame({ keycloakUser, airbyteUser }, authState)) { dispatch({ type: "userLoaded", keycloakUser, airbyteUser }); + // Notify other tabs that this tab got a new user loaded (usually meaning this tab signed in) + broadcastChannel.postMessage({ type: "userLoaded", keycloakUser, airbyteUser }); } }; userManager.events.addUserLoaded(handleUserLoaded); const handleUserUnloaded = () => { dispatch({ type: "userUnloaded" }); + // Notify other open tabs that the user got unloaded (i.e. this tab signed out) + broadcastChannel.postMessage({ type: "userUnloaded" }); }; userManager.events.addUserUnloaded(handleUserUnloaded); @@ -237,12 +280,17 @@ export const KeycloakService: React.FC = ({ children }) => { keycloak.register({ redirectUri: createRedirectUri(AIRBYTE_CLOUD_REALM) }); }, []); + const updateAirbyteUser = useCallback((airbyteUser: UserRead) => { + dispatch({ type: "userUpdated", airbyteUser }); + }, []); + const contextValue = useMemo(() => { const value = { ...authState, userManager, signin: () => userManager.signinRedirect(), signout: () => userManager.signoutRedirect({ post_logout_redirect_uri: window.location.origin }), + updateAirbyteUser, isAuthenticated: authState.isAuthenticated, changeRealmAndRedirectToSignin, accessTokenRef: keycloakAccessTokenRef, @@ -255,6 +303,7 @@ export const KeycloakService: React.FC = ({ children }) => { }, [ authState, userManager, + updateAirbyteUser, changeRealmAndRedirectToSignin, redirectToSignInWithGoogle, redirectToSignInWithGithub, diff --git a/airbyte-webapp/src/packages/cloud/services/thirdParty/launchdarkly/LDExperimentService.tsx b/airbyte-webapp/src/packages/cloud/services/thirdParty/launchdarkly/LDExperimentService.tsx index 7a2ac7cf94c..826cfe55cc9 100644 --- a/airbyte-webapp/src/packages/cloud/services/thirdParty/launchdarkly/LDExperimentService.tsx +++ b/airbyte-webapp/src/packages/cloud/services/thirdParty/launchdarkly/LDExperimentService.tsx @@ -215,6 +215,10 @@ const LDInitializationWrapper: React.FC { + // Return all feature flags from the LD client + return ldClient.current?.allFlags() ?? 
{}; + }, }), [addContext, removeContext] ); diff --git a/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx b/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx index f1a9a694a3c..97d0beb16f0 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx @@ -18,6 +18,7 @@ import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { useAuthService } from "core/services/auth"; import { useLocalStorage } from "core/utils/useLocalStorage"; import { useAppMonitoringService } from "hooks/services/AppMonitoringService"; +import { useExperiment } from "hooks/services/Experiment"; import { useNotificationService } from "hooks/services/Notification"; import { CloudRoutes } from "packages/cloud/cloudRoutePaths"; import { LoginFormErrorCodes } from "packages/cloud/services/auth/types"; @@ -54,7 +55,9 @@ export const LoginPage: React.FC = () => { const { registerNotification } = useNotificationService(); const { trackError } = useAppMonitoringService(); const [searchParams] = useSearchParams(); - const [keycloakAuthEnabled] = useLocalStorage("airbyte_keycloak-auth-ui", false); + const [keycloakAuthEnabledLocalStorage] = useLocalStorage("airbyte_keycloak-auth-ui", true); + const keycloakAuthEnabledExperiment = useExperiment("authPage.keycloak", true); + const keycloakAuthEnabled = keycloakAuthEnabledExperiment || keycloakAuthEnabledLocalStorage; const loginRedirectString = searchParams.get("loginRedirect"); const isAcceptingInvitation = loginRedirectString?.includes("accept-invite"); diff --git a/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.test.tsx b/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.test.tsx index c053fc9d00a..b1059f82966 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.test.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.test.tsx @@ -8,8 +8,14 @@ import { OAuthLogin } from "./OAuthLogin"; const mockLoginWithOAuth = jest.fn(); +const mockRedirectToSignInWithGithub = jest.fn().mockReturnValue(Promise.resolve()); +const mockRedirectToSignInWithGoogle = jest.fn().mockReturnValue(Promise.resolve()); + jest.mock("packages/cloud/services/auth/KeycloakService", () => ({ - useKeycloakService: () => ({ redirectToSignInWithGithub: jest.fn(), redirectToSignInWithGoogle: jest.fn() }), + useKeycloakService: () => ({ + redirectToSignInWithGithub: mockRedirectToSignInWithGithub, + redirectToSignInWithGoogle: mockRedirectToSignInWithGoogle, + }), })); describe("OAuthLogin", () => { @@ -22,7 +28,7 @@ describe("OAuthLogin", () => { wrapper: TestWrapper, }); await userEvents.click(getByTestId("googleOauthLogin")); - expect(mockLoginWithOAuth).toHaveBeenCalledWith("google"); + expect(mockRedirectToSignInWithGoogle).toHaveBeenCalled(); }); it("should call auth service for GitHub", async () => { @@ -30,6 +36,6 @@ describe("OAuthLogin", () => { wrapper: TestWrapper, }); await userEvents.click(getByTestId("githubOauthLogin")); - expect(mockLoginWithOAuth).toHaveBeenCalledWith("github"); + expect(mockRedirectToSignInWithGithub).toHaveBeenCalled(); }); }); diff --git a/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.tsx b/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.tsx index 2cb48497aa6..4999c145bad 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.tsx +++ 
b/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.tsx @@ -14,6 +14,7 @@ import { Spinner } from "components/ui/Spinner"; import { AuthOAuthLogin, OAuthProviders } from "core/services/auth"; import { useLocalStorage } from "core/utils/useLocalStorage"; +import { useExperiment } from "hooks/services/Experiment"; import { CloudRoutes } from "packages/cloud/cloudRoutePaths"; import { useKeycloakService } from "packages/cloud/services/auth/KeycloakService"; @@ -75,7 +76,9 @@ export const OAuthLogin: React.FC = ({ loginWithOAuth, type }) const [searchParams] = useSearchParams(); const loginRedirect = searchParams.get("loginRedirect"); const navigate = useNavigate(); - const [keycloakAuthEnabled] = useLocalStorage("airbyte_keycloak-auth-ui", false); + const [keycloakAuthEnabledLocalStorage] = useLocalStorage("airbyte_keycloak-auth-ui", true); + const keycloakAuthEnabledExperiment = useExperiment("authPage.keycloak", true); + const keycloakAuthEnabled = keycloakAuthEnabledExperiment || keycloakAuthEnabledLocalStorage; const { redirectToSignInWithGithub, redirectToSignInWithGoogle, diff --git a/airbyte-webapp/src/packages/cloud/views/auth/SignupPage/SignupPage.tsx b/airbyte-webapp/src/packages/cloud/views/auth/SignupPage/SignupPage.tsx index ca0919704fc..2300d49285c 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/SignupPage/SignupPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/SignupPage/SignupPage.tsx @@ -11,6 +11,7 @@ import { Icon } from "components/ui/Icon"; import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { useAuthService } from "core/services/auth"; +import { useExperiment } from "hooks/services/Experiment"; import { SignupForm } from "./components/SignupForm"; import styles from "./SignupPage.module.scss"; @@ -32,7 +33,9 @@ const Detail: React.FC> = ({ children }) => { }; const SignupPage: React.FC = () => { - const [keycloakAuthEnabled] = useLocalStorage("airbyte_keycloak-auth-ui", false); + const [keycloakAuthEnabledLocalStorage] = useLocalStorage("airbyte_keycloak-auth-ui", true); + const keycloakAuthEnabledExperiment = useExperiment("authPage.keycloak", true); + const keycloakAuthEnabled = keycloakAuthEnabledExperiment || keycloakAuthEnabledLocalStorage; const { loginWithOAuth, signUp } = useAuthService(); useTrackPage(PageTrackingCodes.SIGNUP); diff --git a/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudMainView.tsx b/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudMainView.tsx index f174d47b853..7aa480f4205 100644 --- a/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudMainView.tsx +++ b/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudMainView.tsx @@ -7,9 +7,7 @@ import { FlexContainer } from "components/ui/Flex"; import { useCurrentWorkspace } from "core/api"; import { useGetCloudWorkspaceAsync, useListCloudWorkspacesInfinite } from "core/api/cloud"; -import { useAppMonitoringService } from "hooks/services/AppMonitoringService"; -import { ResourceNotFoundErrorBoundary } from "views/common/ResourceNotFoundErrorBoundary"; -import { StartOverErrorView } from "views/common/StartOverErrorView"; +import { DefaultErrorBoundary } from "core/errors"; import { SideBar } from "views/layout/SideBar/SideBar"; import { CloudHelpDropdown } from "./CloudHelpDropdown"; @@ -20,17 +18,15 @@ const CloudMainView: React.FC = (props) => { const workspace = useCurrentWorkspace(); const cloudWorkspace = useGetCloudWorkspaceAsync(workspace.workspaceId); - const { 
trackError } = useAppMonitoringService(); - return ( {cloudWorkspace && } } />
- } trackError={trackError}> + }>{props.children ?? } - +
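A short aside on the error-boundary migration above: the `errors.http.*` strings added to en.json earlier in this diff suggest a single status-to-message lookup. The sketch below is a hypothetical illustration of that mapping, assuming react-intl and an `HttpError`-like object carrying a numeric `status`; `HttpErrorLike` and `useHttpErrorMessage` are invented names for this sketch, not code from this diff.

```ts
// Illustrative sketch only; not the DefaultErrorBoundary implementation.
// Assumes react-intl and message ids matching the errors.http.* keys above.
import { useIntl } from "react-intl";

// Assumed minimal shape; the real HttpError in core/api carries more fields.
interface HttpErrorLike {
  status: number;
}

// Status codes with a dedicated message id in en.json (see the locale hunk above).
const HTTP_MESSAGE_IDS: Record<number, string> = {
  400: "errors.http.badRequest",
  401: "errors.http.unauthorized",
  403: "errors.http.forbidden",
  404: "errors.http.notFound",
  410: "errors.http.gone",
  418: "errors.http.teapot",
  500: "errors.http.internalServerError",
  502: "errors.http.badGateway",
  503: "errors.http.serviceUnavailable",
};

export const useHttpErrorMessage = (error: HttpErrorLike): string => {
  const { formatMessage } = useIntl();
  const id = HTTP_MESSAGE_IDS[error.status] ?? "errors.http.default";
  // Only the fallback message interpolates {status}; extra values are ignored otherwise.
  return formatMessage({ id }, { status: error.status });
};
```

Keeping the mapping in one table means a newly handled status code needs only one locale key and one entry here.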
    diff --git a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersHint/InviteUsersHint.tsx b/airbyte-webapp/src/packages/cloud/views/users/InviteUsersHint/InviteUsersHint.tsx index 761d3a6c8c3..866d02673ae 100644 --- a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersHint/InviteUsersHint.tsx +++ b/airbyte-webapp/src/packages/cloud/views/users/InviteUsersHint/InviteUsersHint.tsx @@ -8,12 +8,10 @@ import { Text } from "components/ui/Text"; import { useCurrentWorkspace } from "core/api"; import { FeatureItem, useFeature } from "core/services/features"; import { useIntent } from "core/utils/rbac"; -import { useExperiment } from "hooks/services/Experiment"; import { useModalService } from "hooks/services/Modal"; import styles from "./InviteUsersHint.module.scss"; import { AddUserModal } from "../../workspaces/WorkspaceSettingsView/components/AddUserModal"; -import { InviteUsersModal } from "../InviteUsersModal"; export interface InviteUsersHintProps { connectorType: "source" | "destination"; @@ -26,7 +24,6 @@ export const InviteUsersHint: React.FC = ({ connectorType const { workspaceId } = useCurrentWorkspace(); const canInviteUsers = useIntent("UpdateWorkspacePermissions", { workspaceId }); const { openModal } = useModalService(); - const inviteSystemv2 = useExperiment("settings.invitationSystemv2", false); if (!inviteUsersHintVisible || !canInviteUsers) { return null; @@ -35,12 +32,7 @@ export const InviteUsersHint: React.FC = ({ connectorType const onOpenInviteUsersModal = () => openModal({ title: formatMessage({ id: "userInvitations.create.modal.title" }, { workspace: workspace.name }), - content: ({ onComplete, onCancel }) => - inviteSystemv2 ? ( - - ) : ( - - ), + content: ({ onComplete }) => , size: "md", }); diff --git a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/EmailFormControlList.tsx b/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/EmailFormControlList.tsx deleted file mode 100644 index b1bf0d00ae0..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/EmailFormControlList.tsx +++ /dev/null @@ -1,47 +0,0 @@ -import React from "react"; -import { useFieldArray, useFormState } from "react-hook-form"; -import { FormattedMessage } from "react-intl"; - -import { FormControl } from "components/forms"; -import { Button } from "components/ui/Button"; -import { FlexContainer, FlexItem } from "components/ui/Flex"; - -import { InviteUsersFormValues } from "./InviteUsersModal"; - -export const EmailFormControlList: React.FC = () => { - const { isValid, isDirty } = useFormState(); - const { fields, remove, append } = useFieldArray({ - name: "users", - }); - - const appendNewRow = () => - append({ - email: "", - role: "admin", // the only role we currently have - }); - - return ( - <> - {fields.map((field, index) => ( - - - - - - - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/InviteUsersModal.tsx b/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/InviteUsersModal.tsx deleted file mode 100644 index 9d2ed870bc0..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/InviteUsersModal.tsx +++ /dev/null @@ -1,105 +0,0 @@ -import React from "react"; -import { useIntl } from "react-intl"; -import * as yup from "yup"; -import { SchemaOf } from "yup"; - -import { Form } from "components/forms"; -import { ModalFormSubmissionButtons } from "components/forms/ModalFormSubmissionButtons"; -import { FlexContainer } from "components/ui/Flex"; -import { 
ModalBody, ModalFooter } from "components/ui/Modal"; - -import { useUserHook } from "core/api/cloud"; -import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; -import { trackError } from "core/utils/datadog"; -import { useNotificationService } from "hooks/services/Notification"; -import { useCurrentWorkspace } from "hooks/services/useWorkspace"; - -import { EmailFormControlList } from "./EmailFormControlList"; - -export interface InviteUsersFormValues { - users: Array<{ - role: string; - email: string; - }>; -} - -const requestConnectorValidationSchema: SchemaOf = yup.object({ - users: yup.array().of( - yup.object().shape({ - role: yup.string().required("form.empty.error"), - email: yup.string().required("form.empty.error").email("form.email.error"), - }) - ), -}); -/** - * - * @deprecated This component is deprecated and should not be used in new code. - * @see AddUserModal - */ -export const InviteUsersModal: React.FC<{ - invitedFrom: "source" | "destination" | "user.settings"; - onSubmit: () => void; - onCancel: () => void; -}> = ({ invitedFrom, onSubmit, onCancel }) => { - const { formatMessage } = useIntl(); - const { workspaceId } = useCurrentWorkspace(); - const { inviteUserLogic } = useUserHook(); - const { mutateAsync: invite } = inviteUserLogic; - - const { registerNotification } = useNotificationService(); - const analyticsService = useAnalyticsService(); - - const onSubmitBtnClick = async (values: InviteUsersFormValues) => { - await invite({ users: values.users, workspaceId }); - - analyticsService.track(Namespace.USER, Action.INVITE, { - invited_from: invitedFrom, - }); - }; - - const onSuccess = () => { - registerNotification({ - id: "invite-users-success", - text: formatMessage({ id: "inviteUsers.invitationsSentSuccess" }), - type: "success", - }); - onSubmit(); - }; - - const onError = (e: Error, { users }: InviteUsersFormValues) => { - trackError(e, { users }); - registerNotification({ - id: "invite-users-error", - text: formatMessage({ id: "inviteUsers.invitationsSentError" }), - type: "error", - }); - }; - - const formDefaultValues = { - users: [ - { - email: "", - role: "admin", // the only role we have for now - }, - ], - }; - - return ( - - schema={requestConnectorValidationSchema} - defaultValues={formDefaultValues} - onSubmit={onSubmitBtnClick} - onSuccess={onSuccess} - onError={onError} - > - - - - - - - - - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/index.tsx b/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/index.tsx deleted file mode 100644 index 1e5b24a28a9..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/index.tsx +++ /dev/null @@ -1 +0,0 @@ -export { InviteUsersModal } from "./InviteUsersModal"; diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/FirebaseInviteUserButton.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/FirebaseInviteUserButton.tsx deleted file mode 100644 index 251b13fabc0..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/FirebaseInviteUserButton.tsx +++ /dev/null @@ -1,39 +0,0 @@ -import { FormattedMessage, useIntl } from "react-intl"; - -import { Button } from "components/ui/Button"; - -import { useCurrentWorkspace } from "core/api"; -import { useIntent } from "core/utils/rbac"; -import { useModalService } from "hooks/services/Modal"; -import { InviteUsersModal } from 
"packages/cloud/views/users/InviteUsersModal"; - -/** - * - * @deprecated This component is deprecated and should not be used in new code. It is a part of our legacy invitation system. - */ -export const FirebaseInviteUserButton: React.FC = () => { - const { openModal } = useModalService(); - const { formatMessage } = useIntl(); - const { workspaceId } = useCurrentWorkspace(); - const canUpdateWorkspacePermissions = useIntent("UpdateWorkspacePermissions", { workspaceId }); - - const onOpenInviteUsersModal = () => - openModal({ - title: formatMessage({ id: "modals.addUser.title" }), - content: ({ onComplete, onCancel }) => ( - - ), - size: "md", - }); - - return ( - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.test.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.test.tsx index f6bbb920186..fbc63ff348c 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.test.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.test.tsx @@ -5,7 +5,7 @@ import { useListPermissions } from "core/api"; import { useListCloudWorkspacesInfinite } from "core/api/cloud"; import { OrganizationRead } from "core/api/types/AirbyteClient"; -import { CloudWorkspacesPage } from "./CloudWorkspacesPage"; +import { CloudWorkspacesPageInner } from "./CloudWorkspacesPage"; jest.mock("core/services/auth", () => ({ useAuthService: () => ({}), @@ -47,7 +47,7 @@ describe("CloudWorkspacesPage", () => { { permissionType: "organization_member", userId: "123", permissionId: "123", organizationId: "321" }, ], }); - const wrapper = await render(); + const wrapper = await render(); expect(wrapper.queryByTestId("noWorkspacePermissionsBanner")).toBeInTheDocument(); expect(wrapper.getByTestId("noWorkspacePermissionsBanner")).toHaveTextContent("321@example.com"); }); @@ -58,7 +58,7 @@ describe("CloudWorkspacesPage", () => { { permissionType: "organization_member", userId: "123", permissionId: "123", organizationId: "456" }, ], }); - const wrapper = await render(); + const wrapper = await render(); expect(wrapper.queryByTestId("noWorkspacePermissionsBanner")).toBeInTheDocument(); expect(wrapper.getByTestId("noWorkspacePermissionsBanner")).toHaveTextContent("321@example.com"); }); @@ -76,7 +76,7 @@ describe("CloudWorkspacesPage", () => { { permissionType: "instance_admin", userId: "123", permissionId: "2" }, ], }); - const wrapper = await render(); + const wrapper = await render(); expect(wrapper.queryByTestId("noWorkspacePermissionsBanner")).not.toBeInTheDocument(); }); it("should not show if you see any workspaces (e.g. 
as an instance admin)", async () => { @@ -92,7 +92,7 @@ describe("CloudWorkspacesPage", () => { { permissionType: "organization_member", userId: "123", permissionId: "123", organizationId: "321" }, ], }); - const wrapper = await render(); + const wrapper = await render(); expect(wrapper.queryByTestId("noWorkspacePermissionsBanner")).not.toBeInTheDocument(); }); @@ -103,7 +103,7 @@ describe("CloudWorkspacesPage", () => { { permissionType: "organization_editor", userId: "123", permissionId: "2", organizationId: "456" }, ], }); - const wrapper = await render(); + const wrapper = await render(); expect(wrapper.queryByTestId("noWorkspacePermissionsBanner")).not.toBeInTheDocument(); }); }); diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.tsx index e6f4a1026bf..f2827d41f8b 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.tsx @@ -2,6 +2,7 @@ import { useMutation } from "@tanstack/react-query"; import React, { useDeferredValue, useState } from "react"; import { FormattedMessage } from "react-intl"; +import { HeadTitle } from "components/common/HeadTitle"; import AirbyteLogo from "components/illustrations/airbyte-logo.svg?react"; import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; @@ -22,7 +23,7 @@ import { WORKSPACE_LIST_LENGTH } from "pages/workspaces/WorkspacesPage"; import { CloudWorkspacesCreateControl } from "./CloudWorkspacesCreateControl"; import styles from "./CloudWorkspacesPage.module.scss"; -export const CloudWorkspacesPage: React.FC = () => { +export const CloudWorkspacesPageInner: React.FC = () => { const { isLoading: isLogoutLoading, mutateAsync: handleLogout } = useMutation(() => logout?.() ?? 
Promise.resolve()); useTrackPage(PageTrackingCodes.WORKSPACES); const [searchValue, setSearchValue] = useState(""); @@ -107,3 +108,12 @@ export const CloudWorkspacesPage: React.FC = () => { ); }; + +export const CloudWorkspacesPage = () => { + return ( + <> + + + + ); +}; diff --git a/airbyte-webapp/src/pages/SettingsPage/GeneralOrganizationSettingsPage.tsx b/airbyte-webapp/src/pages/SettingsPage/GeneralOrganizationSettingsPage.tsx index fc1996ec9fe..43e0d3bfec8 100644 --- a/airbyte-webapp/src/pages/SettingsPage/GeneralOrganizationSettingsPage.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/GeneralOrganizationSettingsPage.tsx @@ -14,6 +14,7 @@ import { UpdateOrganizationSettingsForm } from "./UpdateOrganizationSettingsForm export const GeneralOrganizationSettingsPage: React.FC = () => { useTrackPage(PageTrackingCodes.SETTINGS_ORGANIZATION); const isAccessManagementEnabled = useFeature(FeatureItem.RBAC); + const displayOrganizationUsers = useFeature(FeatureItem.DisplayOrganizationUsers); return ( @@ -22,7 +23,7 @@ export const GeneralOrganizationSettingsPage: React.FC = () => { - {isAccessManagementEnabled && ( + {isAccessManagementEnabled && displayOrganizationUsers && ( <> diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceAccessManagementSection.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceAccessManagementSection.tsx index 7ebe302cb97..83f1ac2242e 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceAccessManagementSection.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceAccessManagementSection.tsx @@ -9,19 +9,11 @@ import { Heading } from "components/ui/Heading"; import { SearchInput } from "components/ui/SearchInput"; import { Text } from "components/ui/Text"; -import { - useCurrentOrganizationInfo, - useCurrentWorkspace, - useListUserInvitations, - useListWorkspaceAccessUsers, -} from "core/api"; +import { useCurrentWorkspace, useListUserInvitations, useListWorkspaceAccessUsers } from "core/api"; import { useIntent } from "core/utils/rbac"; -import { useExperiment } from "hooks/services/Experiment"; import { useModalService } from "hooks/services/Modal"; import { AddUserModal } from "packages/cloud/views/workspaces/WorkspaceSettingsView/components/AddUserModal"; -import { FirebaseInviteUserButton } from "packages/cloud/views/workspaces/WorkspaceSettingsView/components/FirebaseInviteUserButton"; -import { AddUserControl } from "./components/AddUserControl"; import { UnifiedWorkspaceUserModel, unifyWorkspaceUserData } from "./components/useGetAccessManagementData"; import styles from "./WorkspaceAccessManagementSection.module.scss"; import { WorkspaceUsersTable } from "./WorkspaceUsersTable"; @@ -30,8 +22,6 @@ const SEARCH_PARAM = "search"; const WorkspaceAccessManagementSection: React.FC = () => { const workspace = useCurrentWorkspace(); - const organization = useCurrentOrganizationInfo(); - const canViewOrgMembers = useIntent("ListOrganizationMembers", { organizationId: organization?.organizationId }); const canUpdateWorkspacePermissions = useIntent("UpdateWorkspacePermissions", { workspaceId: workspace.workspaceId }); const { openModal } = useModalService(); @@ -49,10 +39,6 @@ const WorkspaceAccessManagementSection: React.FC = () => { const debouncedUserFilter = useDeferredValue(userFilter); const { formatMessage } = useIntl(); - const showAddUserButton = organization?.sso && canUpdateWorkspacePermissions && canViewOrgMembers; - const 
showFirebaseInviteButton = !organization?.sso && canUpdateWorkspacePermissions; - const invitationSystemv2 = useExperiment("settings.invitationSystemv2", false); - const onOpenInviteUsersModal = () => openModal({ title: formatMessage({ id: "userInvitations.create.modal.title" }, { workspace: workspace.name }), @@ -87,16 +73,9 @@ const WorkspaceAccessManagementSection: React.FC = () => { setUserFilter(e.target.value)} /> - {!invitationSystemv2 ? ( - <> - {showFirebaseInviteButton && } - {showAddUserButton && } - - ) : ( - - )} + {filteredWorkspaceUsers && filteredWorkspaceUsers.length > 0 ? ( diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/AddUserControl.module.scss b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/AddUserControl.module.scss deleted file mode 100644 index e2871173b51..00000000000 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/AddUserControl.module.scss +++ /dev/null @@ -1,20 +0,0 @@ -@use "scss/variables"; -@use "scss/colors"; - -.addUserControl__dropdown { - padding-bottom: 0; - width: variables.$width-wide-menu; - background-color: colors.$foreground; -} - -.addUserControl__dropdownMenu { - display: block; // default is `flex` which shrinks the options to fit the box, instead overflowing into scroll - overflow: auto; - max-height: variables.$height-long-listbox-options-list; -} - -.addUserControl__buttonName { - overflow: hidden; - text-overflow: ellipsis; - white-space: nowrap; -} diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/AddUserControl.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/AddUserControl.tsx deleted file mode 100644 index fc498ea4d32..00000000000 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/AddUserControl.tsx +++ /dev/null @@ -1,135 +0,0 @@ -import { useState } from "react"; -import { FormattedMessage } from "react-intl"; -import * as yup from "yup"; - -import { Form, FormControl } from "components/forms"; -import { FormSubmissionButtons } from "components/forms/FormSubmissionButtons"; -import { Button } from "components/ui/Button"; -import { FlexContainer } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; -import { ListBoxControlButtonProps } from "components/ui/ListBox"; -import { Text } from "components/ui/Text"; - -import { - useCreatePermission, - useCurrentWorkspace, - useListUsersInOrganization, - useListWorkspaceAccessUsers, -} from "core/api"; -import { OrganizationUserRead, PermissionCreate, PermissionType } from "core/api/types/AirbyteClient"; -import { useIntent } from "core/utils/rbac"; - -import styles from "./AddUserControl.module.scss"; - -/** - * The name of this component is based on what a user sees... not so much what it does. - * This button will NOT create a user, it will create a permission for an existing organization member to access a given workspace. 
- */ - -const createPermissionControlSchema = yup.object().shape({ - userId: yup.string().required(), - permissionType: yup.mixed().oneOf(Object.values(PermissionType)).required(), - workspaceId: yup.string(), - permissionId: yup.string().strip(), // this property is defined on the type solely for migration purposes - organizationId: yup.string().strip(), // we do not have a mechanism for creating an organization permission with this control as of yet -}); - -const AddUserForm: React.FC<{ - usersToAdd: OrganizationUserRead[]; - workspaceId: string; - setIsEditMode: (mode: boolean) => void; -}> = ({ usersToAdd, workspaceId, setIsEditMode }) => { - const { mutateAsync: createPermission } = useCreatePermission(); - const canUpdateWorkspacePermissions = useIntent("UpdateWorkspacePermissions", { workspaceId }); - - const onSubmitClick = async (values: PermissionCreate) => { - await createPermission(values).then(() => setIsEditMode(false)); - }; - - const AddUserListBoxControl = ({ selectedOption }: ListBoxControlButtonProps) => { - const value = selectedOption?.value; - const userToAdd = usersToAdd.find((user) => user.userId === value); - const nameToDisplay = userToAdd?.name ? userToAdd.name : userToAdd?.email; - - if (!userToAdd) { - return null; - } - - return ( - <> - - {nameToDisplay} - - - - ); - }; - - return ( - - schema={createPermissionControlSchema} - defaultValues={{ - userId: usersToAdd[0].userId, - permissionType: PermissionType.workspace_admin, - workspaceId, - }} - onSubmit={onSubmitClick} - disabled={!canUpdateWorkspacePermissions} - > - - - containerControlClassName={styles.addUserControl__dropdown} - optionsMenuClassName={styles.addUserControl__dropdownMenu} - controlButton={AddUserListBoxControl} - name="userId" - fieldType="dropdown" - options={usersToAdd.map((user) => { - return { - value: user.userId, - label: ( - - - {user.name ? user.name : user.email} - - - {user.email} - - - ), - }; - })} - /> - setIsEditMode(false)} - allowNonDirtyCancel - allowNonDirtySubmit - /> - - - ); -}; -export const AddUserControl: React.FC = () => { - const [isEditMode, setIsEditMode] = useState(false); - const workspace = useCurrentWorkspace(); - - const workspaceAccessUsers = useListWorkspaceAccessUsers(workspace.workspaceId); - const { users } = useListUsersInOrganization(workspace.organizationId); - - const usersToAdd = users.filter( - (organizationUser) => - !workspaceAccessUsers.usersWithAccess.find((workspaceUser) => workspaceUser.userId === organizationUser.userId) - ); - - if (!usersToAdd || usersToAdd.length === 0) { - return null; - } - - return !isEditMode ? 
( - - ) : ( - - ); -}; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementCell.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementCell.tsx index 84d3e997540..28871057744 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementCell.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementCell.tsx @@ -8,6 +8,7 @@ import { Tooltip } from "components/ui/Tooltip"; import { useCurrentOrganizationInfo, useCurrentWorkspace } from "core/api"; import { useCurrentUser } from "core/services/auth"; +import { FeatureItem, useFeature } from "core/services/features"; import { useIntent } from "core/utils/rbac"; import { GuestBadge } from "./GuestBadge"; @@ -49,6 +50,7 @@ export const RoleManagementCell: React.FC = ({ user, re const canListOrganizationUsers = useIntent("ListOrganizationMembers", { organizationId: organizationInfo?.organizationId, }); + const indicateGuestUsers = useFeature(FeatureItem.IndicateGuestUsers); const cannotDemoteUser = resourceType === "workspace" && orgPermissionType === "organization_admin"; const shouldHidePopover = cannotDemoteUser || !canEditPermissions || user.id === currentUser.userId; @@ -77,7 +79,7 @@ export const RoleManagementCell: React.FC = ({ user, re )} - {canListOrganizationUsers && organizationInfo?.organizationId && ( + {canListOrganizationUsers && organizationInfo?.organizationId && indicateGuestUsers && ( )} {user.invitationStatus === "pending" && ( diff --git a/airbyte-webapp/src/pages/connections/ConnectionJobHistoryPage/ConnectionJobHistoryPage.tsx b/airbyte-webapp/src/pages/connections/ConnectionJobHistoryPage/ConnectionJobHistoryPage.tsx index 43e1d372def..b5056ff7b31 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionJobHistoryPage/ConnectionJobHistoryPage.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionJobHistoryPage/ConnectionJobHistoryPage.tsx @@ -32,6 +32,7 @@ import { useAnalyticsService, } from "core/services/analytics"; import { useConnectionEditService } from "hooks/services/ConnectionEdit/ConnectionEditService"; +import { useExperiment } from "hooks/services/Experiment"; import styles from "./ConnectionJobHistoryPage.module.scss"; import JobsList from "./JobsList"; @@ -49,6 +50,7 @@ interface JobHistoryFilterValues { } export const ConnectionJobHistoryPage: React.FC = () => { + const isSimplifiedCreation = useExperiment("connection.simplifiedCreation", false); const { connection } = useConnectionEditService(); useTrackPage(PageTrackingCodes.CONNECTIONS_ITEM_STATUS); const [filterValues, setFilterValue, setFilters] = useFilters({ @@ -127,7 +129,9 @@ export const ConnectionJobHistoryPage: React.FC = () => { - } /> + {!isSimplifiedCreation && ( + } /> + )} { () => location.pathname.includes(`/${ConnectionRoutePaths.Replication}`), [location.pathname] ); - const { trackError } = useAppMonitoringService(); useTrackPage(PageTrackingCodes.CONNECTIONS_ITEM); return ( - } trackError={trackError}> + } pageTitle={} @@ -63,7 +60,7 @@ export const ConnectionPage: React.FC = () => { - + ); }; diff --git a/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionPageHeader.tsx b/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionPageHeader.tsx index e21c7d33454..71298239762 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionPageHeader.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionPageHeader.tsx @@ -2,6 +2,7 @@ import 
{ useMemo } from "react"; import { FormattedMessage, useIntl } from "react-intl"; import { useParams } from "react-router-dom"; +import { ConnectionSyncContextProvider } from "components/connection/ConnectionSync/ConnectionSyncContext"; import { ChangesStatusIcon } from "components/EntityTable/components/ChangesStatusIcon"; import { FlexContainer } from "components/ui/Flex"; import { PageHeaderWithNavigation } from "components/ui/PageHeader"; @@ -12,6 +13,7 @@ import { useExperiment } from "hooks/services/Experiment"; import { RoutePaths, ConnectionRoutePaths } from "pages/routePaths"; import { ConnectionTitleBlock } from "./ConnectionTitleBlock"; +import { ConnectionTitleBlockNext } from "./ConnectionTitleBlockNext"; export const ConnectionPageHeader = () => { const params = useParams<{ workspaceId: string; connectionId: string; "*": ConnectionRoutePaths }>(); @@ -73,7 +75,13 @@ export const ConnectionPageHeader = () => { return ( - + {isSimplifiedCreation ? ( + + + + ) : ( + + )} {tabsData.map((tabItem) => ( = ({ name, icon, id, supportLevel, custom, type, version }) => { + const params = useParams<{ workspaceId: string; connectionId: string; "*": ConnectionRoutePaths }>(); + const basePath = `/${RoutePaths.Workspaces}/${params.workspaceId}`; + const connectorTypePath = type === "source" ? RoutePaths.Source : RoutePaths.Destination; + const [connectionDetails] = useLocalStorage("airbyte_connection-additional-details", false); + + return ( + + + + + {name} + {connectionDetails && <> (v{version})} + + + + + ); +}; + +export const ConnectionTitleBlockNext = () => { + const { connection } = useConnectionEditService(); + const { name, source, destination, status: connectionStatus } = connection; + const { isRunning, status } = useConnectionStatus(connection.connectionId); + const { sourceDefinition, sourceDefinitionVersion, destDefinition, destDefinitionVersion } = + useConnectionFormService(); + + return ( + <> + {connectionStatus === ConnectionStatus.deprecated && ( + } /> + )} + + + + + + {name} + + + + + + + + + + + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ResetWarningModal.tsx b/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ResetWarningModal.tsx index 95d8a41ce33..c3496ba784a 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ResetWarningModal.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ResetWarningModal.tsx @@ -2,11 +2,13 @@ import { useState } from "react"; import { FormattedMessage, useIntl } from "react-intl"; import { LabeledSwitch } from "components"; +import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; import { ModalBody, ModalFooter } from "components/ui/Modal"; import { Text } from "components/ui/Text"; import { ConnectionStateType } from "core/api/types/AirbyteClient"; +import { useExperiment } from "hooks/services/Experiment"; interface ResetWarningModalProps { onComplete: (withReset: boolean) => void; @@ -18,20 +20,35 @@ export const ResetWarningModal: React.FC = ({ onCancel, const { formatMessage } = useIntl(); const [withReset, setWithReset] = useState(true); const requireFullReset = stateType === ConnectionStateType.legacy; + const sayClearInsteadOfReset = useExperiment("connection.clearNotReset", false); + const checkboxLabel = sayClearInsteadOfReset + ? requireFullReset + ? "connection.saveWithFullDataClear" + : "connection.saveWithDataClear" + : requireFullReset + ? 
"connection.saveWithFullReset" + : "connection.saveWithReset"; return ( <> - + + {sayClearInsteadOfReset && ( + + + + + + )}

    setWithReset(ev.target.checked)} label={formatMessage({ - id: requireFullReset ? "connection.saveWithFullReset" : "connection.saveWithReset", + id: checkboxLabel, })} checkbox data-testid="resetModal-reset-checkbox" diff --git a/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/__snapshots__/ConnectionReplicationPage.test.tsx.snap b/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/__snapshots__/ConnectionReplicationPage.test.tsx.snap index 62de775d9ea..7556895cc7e 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/__snapshots__/ConnectionReplicationPage.test.tsx.snap +++ b/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/__snapshots__/ConnectionReplicationPage.test.tsx.snap @@ -765,18 +765,22 @@ exports[`ConnectionReplicationPage should show an error if there is a schemaErro >

    -
    -
    - +
    +
    +
    - Sorry. Something went wrong... - + + Sorry. Something went wrong... + +
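The ResetWarningModal hunk above folds two booleans (the `connection.clearNotReset` experiment and the legacy-state check) into a single message id. A minimal sketch of that selection as a standalone helper, assuming only the four message ids visible in the diff (the helper and argument names are hypothetical):

```typescript
// Hypothetical extraction of the checkbox-label selection in ResetWarningModal.
interface LabelArgs {
  sayClearInsteadOfReset: boolean; // value of the "connection.clearNotReset" experiment
  requireFullReset: boolean; // true when stateType === ConnectionStateType.legacy
}

export const checkboxLabelId = ({ sayClearInsteadOfReset, requireFullReset }: LabelArgs): string => {
  if (sayClearInsteadOfReset) {
    return requireFullReset ? "connection.saveWithFullDataClear" : "connection.saveWithDataClear";
  }
  return requireFullReset ? "connection.saveWithFullReset" : "connection.saveWithReset";
};

// checkboxLabelId({ sayClearInsteadOfReset: true, requireFullReset: false })
// => "connection.saveWithDataClear"
```

Keeping the branch on the experiment flag on the outside mirrors the diff: the wording experiment picks the vocabulary ("clear" vs. "reset"), and the state type then picks the full/partial variant.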
    diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusCard.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusCard.tsx index 00acff7e9c6..4e401521d6f 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusCard.tsx +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusCard.tsx @@ -33,7 +33,9 @@ export const ConnectionStatusCard: React.FC = () => { } />
    - + + + {showHistoricalOverview && } ); diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.module.scss b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.module.scss index cf024d44608..4136eeb0669 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.module.scss +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.module.scss @@ -1,3 +1,5 @@ +@use "scss/mixins"; + .error { flex: 1; } @@ -5,3 +7,16 @@ .breakingChangeButton { align-self: center; } + +.internalErrorMessage { + font-family: monospace; + max-height: calc( + 100vh - var(--message-children-top-distance, 336px) - var(--message-children-bottom-distance, 25px) + ); // 336 offset + 25 for bottom padding + + overflow-y: auto; +} + +.buttonLikeLink { + @include mixins.link-text; +} diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.tsx index 8c2615cd254..81ad599c2f9 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.tsx +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.tsx @@ -3,15 +3,19 @@ import { useIntl } from "react-intl"; import { useNavigate } from "react-router-dom"; import { useConnectionStatus } from "components/connection/ConnectionStatus/useConnectionStatus"; -import { Box } from "components/ui/Box"; -import { FlexContainer } from "components/ui/Flex"; +import { CopyButton } from "components/ui/CopyButton"; +import { FlexContainer, FlexItem } from "components/ui/Flex"; +import { Icon } from "components/ui/Icon"; +import { Link } from "components/ui/Link"; import { Message, MessageProps, MessageType, isHigherSeverity, MESSAGE_SEVERITY_LEVELS } from "components/ui/Message"; +import { Text } from "components/ui/Text"; import { useCurrentWorkspaceId } from "area/workspace/utils"; import { useDestinationDefinitionVersion, useSourceDefinitionVersion } from "core/api"; -import { ActorDefinitionVersionRead, FailureOrigin, FailureType } from "core/api/types/AirbyteClient"; +import { ActorDefinitionVersionRead, FailureOrigin } from "core/api/types/AirbyteClient"; import { shouldDisplayBreakingChangeBanner, getHumanReadableUpgradeDeadline } from "core/domain/connector"; import { FeatureItem, useFeature } from "core/services/features"; +import { failureUiDetailsFromReason } from "core/utils/errorStatusMessage"; import { useSchemaChanges } from "hooks/connection/useSchemaChanges"; import { useConnectionEditService } from "hooks/services/ConnectionEdit/ConnectionEditService"; import { ConnectionRoutePaths, RoutePaths } from "pages/routePaths"; @@ -74,27 +78,70 @@ export const ConnectionStatusMessages: React.FC = () => { // If we have an error message and no breaking schema changes, show the error message if (failureReason && !hasBreakingSchemaChange) { - const isConfigError = failureReason.failureType === FailureType.config_error; - const isSourceError = failureReason.failureOrigin === FailureOrigin.source; - const isDestinationError = failureReason.failureOrigin === FailureOrigin.destination; + const failureUiDetails = failureUiDetailsFromReason(failureReason, formatMessage); + + const isError = failureUiDetails.type === "error"; + if (isError) { + const isSourceError = failureUiDetails.origin === FailureOrigin.source; - if (isConfigError && (isSourceError || isDestinationError)) { const 
targetRoute = isSourceError ? RoutePaths.Source : RoutePaths.Destination; const targetRouteId = isSourceError ? connection.sourceId : connection.destinationId; const configError = { - text: failureReason.externalMessage, + text: formatMessage( + { id: "failureMessage.label" }, + { + type: ( + + {failureUiDetails.typeLabel}: + + ), + message: failureUiDetails.message, + } + ), onAction: () => navigate(`/${RoutePaths.Workspaces}/${workspaceId}/${targetRoute}/${targetRouteId}`), - actionBtnText: formatMessage({ id: "connection.stream.status.gotoSettings" }), - type: "warning", + actionBtnText: formatMessage({ + id: isSourceError + ? "connection.stream.status.checkSourceSettings" + : "connection.stream.status.checkDestinationSettings", + }), + type: "error", } as const; errorMessages.push(configError); } else { + const hasInternalErrorMessage = !!failureUiDetails.secondaryMessage; const goToLogError = { - text: failureReason.externalMessage, - onAction: () => navigate(`../${ConnectionRoutePaths.JobHistory}#${lastSyncJobId}::${lastSyncAttemptNumber}`), - actionBtnText: formatMessage({ id: "connection.stream.status.seeLogs" }), + text: formatMessage( + { id: "failureMessage.label" }, + { + type: ( + + {failureUiDetails.typeLabel}: + + ), + message: failureUiDetails.message, + } + ), type: "warning", + children: hasInternalErrorMessage && ( + + + {failureUiDetails.secondaryMessage} + + + + + + + + + ), + childrenClassName: styles.internalErrorMessage, + isExpandable: hasInternalErrorMessage, } as const; errorMessages.push(goToLogError); } @@ -222,18 +269,16 @@ export const ConnectionStatusMessages: React.FC = () => { if (errorMessagesToDisplay.length > 0) { return ( - - - {errorMessagesToDisplay.map((message, index) => ( - - ))} - - + + {errorMessagesToDisplay.map((message, index) => ( + + ))} + ); } diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionSyncStatusCard.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionSyncStatusCard.tsx new file mode 100644 index 00000000000..dfed09ed298 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionSyncStatusCard.tsx @@ -0,0 +1,24 @@ +import { useIntl } from "react-intl"; + +import { Card } from "components/ui/Card"; + +import { HistoricalOverview } from "area/connection/components"; +import { FeatureItem, useFeature } from "core/services/features"; +import { useExperiment } from "hooks/services/Experiment"; + +export const ConnectionSyncStatusCard: React.FC = () => { + const { formatMessage } = useIntl(); + const showHistoricalOverviewFeature = useFeature(FeatureItem.ConnectionHistoryGraphs); + const showHistoricalOverviewExperiment = useExperiment("connection.streamCentricUI.historicalOverview", false); + const showHistoricalOverview = showHistoricalOverviewFeature && showHistoricalOverviewExperiment; + + if (!showHistoricalOverview) { + return null; + } + + return ( + + + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.tsx index edef23770d5..fbac916abca 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.tsx +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.tsx @@ -1,16 +1,28 @@ import { ConnectionSyncContextProvider } from "components/connection/ConnectionSync/ConnectionSyncContext"; import { FlexContainer } from "components/ui/Flex"; +import { useExperiment } from "hooks/services/Experiment"; + import 
{ ConnectionStatusCard } from "./ConnectionStatusCard"; +import { ConnectionStatusMessages } from "./ConnectionStatusMessages"; +import { ConnectionSyncStatusCard } from "./ConnectionSyncStatusCard"; import { StreamsList } from "./StreamsList"; import { StreamsListContextProvider } from "./StreamsListContext"; export const StreamStatusPage = () => { + const isSimplifiedCreation = useExperiment("connection.simplifiedCreation", false); return ( - + {isSimplifiedCreation ? ( + <> + + + + ) : ( + + )} diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.tsx index f557b4df9b5..04420032561 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.tsx +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.tsx @@ -5,6 +5,8 @@ import React, { useMemo } from "react"; import { FormattedMessage } from "react-intl"; import { useToggle } from "react-use"; +import { useConnectionStatus } from "components/connection/ConnectionStatus/useConnectionStatus"; +import { ConnectionStatusIndicatorStatus } from "components/connection/ConnectionStatusIndicator"; import { StreamStatusIndicator } from "components/connection/StreamStatusIndicator"; import { Box } from "components/ui/Box"; import { Card } from "components/ui/Card"; @@ -16,6 +18,7 @@ import { Text } from "components/ui/Text"; import { ConnectionStatus } from "core/api/types/AirbyteClient"; import { useConnectionEditService } from "hooks/services/ConnectionEdit/ConnectionEditService"; +import { useExperiment } from "hooks/services/Experiment"; import { StreamActionsMenu } from "./StreamActionsMenu"; import { StreamSearchFiltering } from "./StreamSearchFiltering"; @@ -48,6 +51,7 @@ const LastSync: React.FC<{ transitionedAt: number | undefined; showRelativeTime: }; export const StreamsList = () => { + const isSimplifiedCreation = useExperiment("connection.simplifiedCreation", false); const [showRelativeTime, setShowRelativeTime] = useToggle(true); const { filteredStreams } = useStreamsListContext(); @@ -102,6 +106,7 @@ export const StreamsList = () => { ); const { connection } = useConnectionEditService(); + const { status, nextSync } = useConnectionStatus(connection.connectionId); const showTable = connection.status !== ConnectionStatus.inactive; @@ -109,9 +114,35 @@ export const StreamsList = () => { - - - + {isSimplifiedCreation ?
( + + + + + + + {status === ConnectionStatusIndicatorStatus.OnTime && nextSync && ( + + )} + {(status === ConnectionStatusIndicatorStatus.Late || + status === ConnectionStatusIndicatorStatus.OnTrack) && + nextSync && ( + + )} + + + + ) : ( + + + + )} diff --git a/airbyte-webapp/src/pages/destination/DestinationItemPage/DestinationItemPage.tsx b/airbyte-webapp/src/pages/destination/DestinationItemPage/DestinationItemPage.tsx index b3e99f5eda5..7d79d22bc0f 100644 --- a/airbyte-webapp/src/pages/destination/DestinationItemPage/DestinationItemPage.tsx +++ b/airbyte-webapp/src/pages/destination/DestinationItemPage/DestinationItemPage.tsx @@ -3,7 +3,6 @@ import { useIntl } from "react-intl"; import { Outlet, useParams } from "react-router-dom"; import { LoadingPage } from "components"; -import { ApiErrorBoundary } from "components/common/ApiErrorBoundary"; import { HeadTitle } from "components/common/HeadTitle"; import { ConnectorNavigationTabs } from "components/connector/ConnectorNavigationTabs"; import { ConnectorTitleBlock } from "components/connector/ConnectorTitleBlock"; @@ -12,11 +11,9 @@ import { PageHeaderWithNavigation } from "components/ui/PageHeader"; import { useGetDestinationFromParams } from "area/connector/utils"; import { useDestinationDefinitionVersion, useDestinationDefinition } from "core/api"; +import { DefaultErrorBoundary } from "core/errors"; import { useTrackPage, PageTrackingCodes } from "core/services/analytics"; -import { useAppMonitoringService } from "hooks/services/AppMonitoringService"; import { RoutePaths } from "pages/routePaths"; -import { ResourceNotFoundErrorBoundary } from "views/common/ResourceNotFoundErrorBoundary"; -import { StartOverErrorView } from "views/common/StartOverErrorView"; import { ConnectorDocumentationWrapper } from "views/Connector/ConnectorDocumentationLayout"; export const DestinationItemPage: React.FC = () => { @@ -27,8 +24,6 @@ export const DestinationItemPage: React.FC = () => { const actorDefinitionVersion = useDestinationDefinitionVersion(destination.destinationId); const { formatMessage } = useIntl(); - const { trackError } = useAppMonitoringService(); - const breadcrumbBasePath = `/${RoutePaths.Workspaces}/${params.workspaceId}/${RoutePaths.Destination}`; const breadcrumbsData = [ @@ -40,7 +35,7 @@ export const DestinationItemPage: React.FC = () => { ]; return ( - } trackError={trackError}> + @@ -52,11 +47,11 @@ export const DestinationItemPage: React.FC = () => { }> - + - + - + ); }; diff --git a/airbyte-webapp/src/pages/routes.tsx b/airbyte-webapp/src/pages/routes.tsx index 81f5537e481..c581f55b30f 100644 --- a/airbyte-webapp/src/pages/routes.tsx +++ b/airbyte-webapp/src/pages/routes.tsx @@ -2,13 +2,12 @@ import React, { useMemo } from "react"; import { Navigate, Route, Routes, useLocation, useSearchParams } from "react-router-dom"; import { useEffectOnce } from "react-use"; -import { ApiErrorBoundary } from "components/common/ApiErrorBoundary"; - import { useGetInstanceConfiguration, useInvalidateAllWorkspaceScopeOnChange, useListWorkspacesInfinite, } from "core/api"; +import { DefaultErrorBoundary } from "core/errors"; import { useAnalyticsIdentifyUser, useAnalyticsRegisterValues } from "core/services/analytics"; import { useAuthService } from "core/services/auth"; import { FeatureItem, useFeature } from "core/services/features"; @@ -72,7 +71,7 @@ const MainViewRoutes: React.FC = () => { return ( - + } /> @@ -119,7 +118,7 @@ const MainViewRoutes: React.FC = () => { } /> - + ); }; diff --git 
a/airbyte-webapp/src/pages/source/SourceItemPage/SourceItemPage.tsx b/airbyte-webapp/src/pages/source/SourceItemPage/SourceItemPage.tsx index f920e443449..79c352b6bd3 100644 --- a/airbyte-webapp/src/pages/source/SourceItemPage/SourceItemPage.tsx +++ b/airbyte-webapp/src/pages/source/SourceItemPage/SourceItemPage.tsx @@ -2,7 +2,6 @@ import React, { Suspense } from "react"; import { useIntl } from "react-intl"; import { Outlet, useParams } from "react-router-dom"; -import { ApiErrorBoundary } from "components/common/ApiErrorBoundary"; import { HeadTitle } from "components/common/HeadTitle"; import { ConnectorNavigationTabs } from "components/connector/ConnectorNavigationTabs"; import { ConnectorTitleBlock } from "components/connector/ConnectorTitleBlock"; @@ -12,11 +11,9 @@ import { PageHeaderWithNavigation } from "components/ui/PageHeader"; import { useGetSourceFromParams } from "area/connector/utils"; import { useSourceDefinitionVersion, useSourceDefinition } from "core/api"; +import { DefaultErrorBoundary } from "core/errors"; import { useTrackPage, PageTrackingCodes } from "core/services/analytics"; -import { useAppMonitoringService } from "hooks/services/AppMonitoringService"; import { RoutePaths } from "pages/routePaths"; -import { ResourceNotFoundErrorBoundary } from "views/common/ResourceNotFoundErrorBoundary"; -import { StartOverErrorView } from "views/common/StartOverErrorView"; import { ConnectorDocumentationWrapper } from "views/Connector/ConnectorDocumentationLayout"; export const SourceItemPage: React.FC = () => { @@ -37,10 +34,8 @@ export const SourceItemPage: React.FC = () => { { label: source.name }, ]; - const { trackError } = useAppMonitoringService(); - return ( - } trackError={trackError}> + @@ -52,11 +47,11 @@ export const SourceItemPage: React.FC = () => { }> - + - + - + ); }; diff --git a/airbyte-webapp/src/scss/_mixins.scss b/airbyte-webapp/src/scss/_mixins.scss index 390a66c46a7..896eb6aac60 100644 --- a/airbyte-webapp/src/scss/_mixins.scss +++ b/airbyte-webapp/src/scss/_mixins.scss @@ -1,3 +1,4 @@ +@use "./colors"; @use "./variables"; @mixin overflow-ellipsis { @@ -67,3 +68,44 @@ $stripes-width: 83px; white-space: nowrap; width: 1px; } + +// need something to look like a button but don't want to @forward the button module, this is it! 
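+// e.g. a link can opt in with `.buttonLikeLink { @include mixins.link-text; }` (see ConnectionStatusMessages.module.scss above)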
+@mixin base-button { // base "button" transition: 0.2s ease-in; display: inline-flex; align-items: center; justify-content: center; text-decoration: none; border-radius: variables.$border-radius-sm; font-weight: 600; cursor: pointer; // sizeXS height: variables.$button-height-xs; font-size: variables.$font-size-sm; line-height: 15px; padding: 10px; } // looks like a button[variant=link], like if you need a <Link> to look like a button +@mixin link-text { @include base-button; // secondary color: colors.$grey-400; border: 1px solid colors.$grey-300; &:hover { border-color: colors.$grey-400; color: colors.$grey-500; } &:active { border-color: colors.$grey-500; color: colors.$grey-500; } // custom background (secondary button has no background) background-color: colors.$foreground; } diff --git a/airbyte-webapp/src/scss/_variables.scss b/airbyte-webapp/src/scss/_variables.scss index 701a7482dbb..e23bfab098c 100644 --- a/airbyte-webapp/src/scss/_variables.scss +++ b/airbyte-webapp/src/scss/_variables.scss @@ -24,6 +24,7 @@ $box-shadow-popup: var(--box-shadow-popup); $box-shadow-sidebar: var(--box-shadow-sidebar); $box-shadow-inset: var(--box-shadow-inset); $box-shadow-menu: var(--box-shadow-menu); +$box-shadow-highlight: 0 0 47px -5px; $spacing-xs: 3px; $spacing-sm: 5px; diff --git a/airbyte-webapp/src/scss/connection/_stream-status-colors.scss b/airbyte-webapp/src/scss/connection/_stream-status-colors.scss index b9a005beeb7..506a62bae5d 100644 --- a/airbyte-webapp/src/scss/connection/_stream-status-colors.scss +++ b/airbyte-webapp/src/scss/connection/_stream-status-colors.scss @@ -9,12 +9,12 @@ $up-to-date: colors.$green; $cancelled: colors.$grey-400; $action-required-light: colors.$dark-blue-40; -$disabled-light: colors.$grey-40; +$disabled-light: colors.$grey-100; $error-light: colors.$red-40; $late-light: colors.$blue-40; -$pending-light: colors.$grey-40; +$pending-light: colors.$grey-100; $up-to-date-light: colors.$green-40; -$cancelled-light: colors.$grey-40; +$cancelled-light: colors.$grey-100; $by-stream-status: "actionRequired" $action-required $action-required-light, "disabled" $disabled $disabled-light, "error" $error $error-light, "late" $late $late-light, "pending" $pending $pending-light, diff --git a/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx b/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx index 2259a6d4a12..c65c89f34d8 100644 --- a/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx +++ b/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx @@ -19,6 +19,7 @@ import { getManifestValuePerComponentPerStream, useBuilderWatch, } from "components/connectorBuilder/types"; +import { useUpdateLockedInputs } from "components/connectorBuilder/useLockedInputs"; import { formatJson } from "components/connectorBuilder/utils"; import { useCurrentWorkspaceId } from "area/workspace/utils"; @@ -26,7 +27,7 @@ import { BuilderProject, BuilderProjectPublishBody, BuilderProjectWithManifest, - CommonRequestError, + HttpError, NewVersionBody, useBuilderProject, useBuilderProjectReadStream, @@ -68,6 +69,13 @@ export type SavingState = "loading" | "invalid" | "saved" | "error" | "readonly" export type ConnectorBuilderPermission = "write" | "readOnly" | "adminReadOnly"; +export type TestingValuesUpdate = UseMutateAsyncFunction< + ConnectorBuilderProjectTestingValues, + Error, + Omit, + unknown +>; + interface
FormStateContext { jsonManifest: DeclarativeComponentSchema; yamlEditorIsMounted: boolean; @@ -82,7 +90,7 @@ interface FormStateContext { formValuesValid: boolean; resolvedManifest: ConnectorManifest; resolveErrorMessage: string | undefined; - resolveError: CommonRequestError | null; + resolveError: HttpError | null; isResolving: boolean; streamNames: string[]; setDisplayedVersion: (value: number | undefined, manifest: DeclarativeComponentSchema) => void; @@ -94,12 +102,7 @@ interface FormStateContext { releaseNewVersion: (options: NewVersionBody) => Promise; toggleUI: (newMode: BuilderState["mode"]) => Promise; setFormValuesValid: (value: boolean) => void; - updateTestingValues: UseMutateAsyncFunction< - ConnectorBuilderProjectTestingValues, - Error, - Omit, - unknown - >; + updateTestingValues: TestingValuesUpdate; } interface TestReadLimits { @@ -258,8 +261,8 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< ); const unknownErrorMessage = formatMessage({ id: "connectorBuilder.unknownError" }); const resolveErrorMessage = isResolveError - ? resolveError instanceof Error - ? resolveError.message || unknownErrorMessage + ? resolveError instanceof HttpError + ? resolveError.response?.message || unknownErrorMessage : unknownErrorMessage : undefined; @@ -457,7 +460,11 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< if (modeRef.current === "ui" && !formAndResolveValid) { return; } - const newProject: BuilderProjectWithManifest = { name, manifest: jsonManifest }; + const newProject: BuilderProjectWithManifest = { + name, + manifest: jsonManifest, + yamlManifest: convertJsonToYaml(jsonManifest), + }; await updateProject(newProject); setPersistedState(newProject); }, [permission, name, formAndResolveValid, jsonManifest, updateProject]); @@ -486,6 +493,8 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< useUpdateTestingValuesOnSpecChange(jsonManifest.spec, updateTestingValues); + useUpdateLockedInputs(); + const ctx: FormStateContext = { jsonManifest, yamlEditorIsMounted, diff --git a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/DocumentationPanel.tsx b/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/DocumentationPanel.tsx index 50e0f75872b..92f38c86644 100644 --- a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/DocumentationPanel.tsx +++ b/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/DocumentationPanel.tsx @@ -48,7 +48,7 @@ const ImgRelativePathReplacer: React.FC< if (src === undefined || actorType === undefined) { newSrc = src; - } else if (src.startsWith("../")) { + } else if (src.startsWith("../") || src.startsWith("./")) { if (isDev) { newSrc = actorType === "source" ? 
path.join(LOCAL_DOCS_SOURCES_PATH, src) : path.join(LOCAL_DOCS_DESTINATIONS_PATH, src); @@ -73,7 +73,7 @@ const LinkRelativePathReplacer: React.FC< {children} ); - } else if (href && href.startsWith("../")) { + } else if (href && (href.startsWith("../") || href.startsWith("./"))) { const docPath = href.replace(/\.md$/, ""); const url = actorType === "source" diff --git a/airbyte-webapp/src/views/common/ErrorOccurredView/ErrorOccurredView.tsx b/airbyte-webapp/src/views/common/ErrorOccurredView/ErrorOccurredView.tsx index 3437cb0f43c..4d29629b8ea 100644 --- a/airbyte-webapp/src/views/common/ErrorOccurredView/ErrorOccurredView.tsx +++ b/airbyte-webapp/src/views/common/ErrorOccurredView/ErrorOccurredView.tsx @@ -9,20 +9,14 @@ import styles from "./ErrorOccurredView.module.scss"; interface ErrorOccurredViewProps { message: React.ReactNode; - /** - * URL to relevant documentation for the error if available - */ - docLink?: string; ctaButtonText?: React.ReactNode; onCtaButtonClick?: React.MouseEventHandler; } -export const ErrorOccurredView: React.FC = ({ - message, - onCtaButtonClick, - ctaButtonText, - docLink, -}) => { +/** + * @deprecated Replaced by `ErrorDetails` component. Will be removed once the speakeasy portal forward has been removed. + */ +export const ErrorOccurredView: React.FC = ({ message, onCtaButtonClick, ctaButtonText }) => { return (
    @@ -31,13 +25,6 @@ export const ErrorOccurredView: React.FC = ({

    {message}

    - {docLink && ( -

    - - - -

    - )} {onCtaButtonClick && ctaButtonText && (